def __init__(self, info: dict):
    self.info = attrdict.AttrMap(info)
    # The actual payload lives under the key named by the attachment type,
    # e.g. info["doc"] for a document attachment.
    data = attrdict.AttrMap(self.info[self.info.type])
    self.full_id = f"{data.owner_id}_{data.id}"
    self.as_attach = f"doc{self.full_id}"
    # access_key is part of the typed payload, not the top-level event dict.
    if "access_key" in data:
        self.as_attach += f"_{data.access_key}"
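# Usage sketch for the constructor above. The enclosing class name `Document`
# and the field values are assumptions for illustration, not the real API:
#
# doc = Document({"type": "doc", "doc": {"owner_id": 1, "id": 22, "access_key": "k3y"}})
# doc.as_attach  # -> "doc1_22_k3y", the attachment string VK expects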
def test_bot_run(run_thread=True):
    # Bot's source
    from tests.test_bot import src

    config = attrdict.AttrMap(
        toml.load(str(pathlib.Path() / "tests" / "test_bot" / "config.toml")))
    settings = dict(
        token=config.api.token,
        group_id=config.api.group_id,
        version=config.api.version,
        owner=config.api.owner,
        wait=config.longpoll.wait,
        debug=True,
        config=config)
    reactions = []
    signals = []
    for var in src.__dict__.values():
        if isinstance(var, vq.Reaction):
            reactions.append(var)
        elif isinstance(var, vq.Signal):
            signals.append(var)
    reactions = vq.ReactionsList(reactions)
    signals = vq.SignalsList(signals)
    bot = vq.Bot(reactions=reactions, signals=signals, **settings)
    if run_thread:
        # `main_user` (defined elsewhere in the tests) simulates a user
        # interacting with the bot while it runs.
        thread = threading.Thread(target=main_user)
        thread.start()
    bot.run()
def context(mocker, lambda_context):
    print('setup moto fixture')
    context = attrdict.AttrMap()
    # Uncomment for a sanity check - this should work if unpatching is working properly
    # requests.get('https://www.google.com')
    orig_env = os.environ.copy()
    os.environ['NAMESPACE'] = 'test'
    os.environ['CONFIG'] = 'config-bucket'
    context.os = {'environ': os.environ}
    context.mock_lambda_context = lambda_context
    # Uncomment to use patcher-style mocking
    # context.sts_mock = moto.mock_sts()
    # context.sns_mock = moto.mock_sns()
    # context.sts_mock.start()
    # context.sns_mock.start()
    yield context
    mocker.stopall()
    # Uncomment to use patcher-style mocking
    # context.sts_mock.stop()
    # context.sns_mock.stop()
    # Uncomment for a sanity check - this should work if unpatching is working properly
    # requests.get('https://www.google.com')
    os.environ = orig_env
    print('teardown moto fixture')
def context(mocker):
    context = attrdict.AttrMap()
    orig_env = os.environ.copy()
    context.os = {'environ': os.environ}
    context.prefix = app.__name__
    context.mock_cfnresponse_send = mocker.patch(
        f'{context.prefix}.cfnresponse.send', autospec=True)
    context.mock_requests_post = mocker.patch(
        f'{context.prefix}.requests.post', autospec=True)
    context.mock_cfn = mocker.patch(f'{context.prefix}.cfn', autospec=True)
    yield context
    os.environ = orig_env
    mocker.stopall()
def context(mocker):
    context = attrdict.AttrMap()
    orig_env = os.environ.copy()
    context.os = {'environ': os.environ}
    context.prefix = app.__name__
    context.mock_cfnresponse_send = mocker.patch(
        f'{context.prefix}.cfnresponse.send', autospec=True)
    context.mock_ct = mocker.patch(f'{context.prefix}.ct', autospec=True)
    context.mock_cur = mocker.patch(f'{context.prefix}.cur', autospec=True)
    context.mock_orgs = mocker.patch(f'{context.prefix}.orgs', autospec=True)
    context.mock_s3 = mocker.patch(f'{context.prefix}.s3', autospec=True)
    yield context
    os.environ = orig_env
    mocker.stopall()
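# A minimal sketch of a test consuming the fixture above; the handler name
# and event shape are assumptions for illustration, not the project's API.
def test_create_sends_response(context):
    event = {"RequestType": "Create", "ResourceProperties": {}}
    app.handler(event, None)  # hypothetical Lambda entry point
    context.mock_cfnresponse_send.assert_called_once()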
async def __anext__(self) -> attrdict.AttrMap:
    await self._get_info()
    data = dict(act="a_check", wait=self.wait, **self.info)  # required
    async with aiohttp.ClientSession() as session:
        async with session.post(url=self.info.server, data=data,
                                ssl=ssl.SSLContext()) as response:
            response = await response.json()
    response = attrdict.AttrMap(response)
    if "failed" in response:
        await self._resolve_faileds(response)
        return []
    else:
        return response.updates
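# A minimal sketch of what `_resolve_faileds` typically does, based on VK's
# documented long-poll "failed" codes; the real method may differ.
async def _resolve_faileds(self, response):
    if response.failed == 1:
        self.info.update(ts=response.ts)  # history outdated: take the fresh ts
    elif response.failed in (2, 3):
        await self._get_info()  # key (and possibly ts) expired: refetch them
    else:
        raise ValueError(f"Unexpected longpoll error: failed={response.failed}")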
async def get_info(self, *fields, name_case: str = "nom") -> User:
    """
    Fetches all the needed information about the user
    via the `users.get` method.
    """
    # TODO: Name cases
    self.info = await current.api.users.get(
        user_ids=[self._user_id],
        fields=",".join(fields),
        name_case=name_case,
    )
    self.info = attrdict.AttrMap(self.info[0])
    # Quick access
    self.fn = self.info.first_name
    self.ln = self.info.last_name
    self.id = self.info.id
    return self
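# Example usage (a sketch; the `user` instance and the surrounding API session
# are assumed to be set up elsewhere):
#
# user = await user.get_info("photo_200", "city", name_case="gen")
# print(user.fn, user.ln)  # first/last name, here in the genitive case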
async def __anext__(self) -> attrdict.AttrMap:
    data = dict(act="a_check", wait=self.wait, **self.info)  # required
    # async with self.session:
    async with self._session.post(url=self.url, data=data,
                                  ssl=ssl.SSLContext()) as response:
        response = await response.json()
    response = attrdict.AttrMap(response)
    if "failed" in response:
        # Failure responses (failed=2/3) may carry no ts, so resolve first.
        await self._resolve_faileds(response)
        return []
    self.info.update(ts=response.ts)
    return response.updates
def __init__(self, info: dict):
    self.info = attrdict.AttrMap(info)
    self.full_id = f"{self.info.owner_id}_{self.info.id}"
    self.as_attach = f"photo{self.full_id}"
    if "access_key" in self.info:
        self.as_attach += f"_{self.info.access_key}"
def main(**kwargs):
    if shutil.which('terraform') is None:
        raise click.ClickException("cannot find 'terraform' executable on PATH")

    opts = attrdict.AttrMap(kwargs)
    state_dir = pathlib.Path(opts.state or '.terraform-anygen')
    terraform_dir = state_dir / 'terraform'
    out_dir = state_dir / 'out'

    def rmtree_out_dir():
        if out_dir.exists():
            shutil.rmtree(str(out_dir))

    if not state_dir.exists():
        terraform_dir.mkdir(parents=True)

    debug_dir = state_dir / 'debug'
    if debug_dir.exists():
        shutil.rmtree(str(debug_dir))
    debug_dir.mkdir()

    terraform = python_terraform.Terraform(
        working_dir=terraform_dir,
        targets=['module.body.' + i for i in opts.target])

    if not opts.destroy:
        rmtree_out_dir()

    model_dir = pathlib.Path(opts.model or '.')
    path = [model_dir]
    age = anygen.AnygenEngine()
    ag = age.create(
        path=path,
        classes=[
            'terraform' + i for i in (
                ['.' + j for j in opts.classes.split(',')]
                if opts.classes else [''])
        ])
    ag_result = _debugdump.wrap(ag.produce, debug_dir / 'terraform')(*opts["def"])

    tf_data = attrdict.AttrDict()
    tf_data += ag_result.get("terraform", {})

    if opts.destroy:
        tf_data = {"provider": tf_data.get("provider", [])}
    else:
        data_external = {}
        path_str = os.pathsep.join(str(i.resolve()) for i in path)
        for k, v in ag_result.get("anygen", {}).items():
            query = {
                "path": path_str,
                "classes": v["classes"],
                "debug_dump": str(debug_dir.joinpath('anygen.' + k).absolute())
            }
            for k2, v2 in v.get("args", {}).items():
                query["arg_" + k2] = '${jsonencode("%s")}' % v2.replace(
                    '\\', '\\\\').replace('"', '\\"')
            data_external[k] = {
                "program": [sys.executable, "-m", "terraform_anygen._gen"],
                "query": query
            }
        if data_external:
            tf_data += {"data": {"external": data_external}}
        output = ag_result.get("output", None)
        if output:
            tf_data += {
                "output": dict((k, dict(value=v)) for k, v in output.items())
            }

    main_tf_data = attrdict.AttrDict()
    main_tf_data += {"module": {"body": {"source": "./body"}}}
    tf_backend = ag_result.get("backend", None)
    if tf_backend is not None:
        main_tf_data += {"terraform": {"backend": tf_backend}}

    dump_json(terraform_dir.joinpath("main.tf.json"), main_tf_data)
    body_module_dir = terraform_dir / "body"
    body_module_dir.mkdir(exist_ok=True)
    dump_json(body_module_dir.joinpath("main.tf.json"), tf_data)

    checked_tf(
        terraform.init(capture_output=False,
                       force_copy=opts.force_backend_copy),
        'init')

    if opts.destroy:
        checked_tf(
            terraform.destroy(capture_output=False,
                              force=opts.yes,
                              parallelism=opts.jobs,
                              no_color=python_terraform.IsNotFlagged,
                              refresh=opts.refresh),
            'destroy')
        rmtree_out_dir()
    else:
        checked_tf(
            terraform.apply(capture_output=False,
                            skip_plan=opts.yes,
                            parallelism=opts.jobs,
                            refresh=opts.refresh,
                            no_color=python_terraform.IsNotFlagged),
            'apply')

        on_success_classes = ag_result.get("on_success", {}).get("classes", [])
        if on_success_classes:
            tf_result = terraform.cmd('state pull', capture_output=True)
            checked_captured_tf(tf_result, 'state pull')
            tfstate = json.loads(tf_result[1])

            expected_outfiles = set()

            def jinjafilter_outfile(name):
                expected_outfiles.add(name)
                return out_dir.joinpath(name).resolve()

            ag_success = age.create(
                path=path,
                classes=on_success_classes,
                extras=dict(jinjafilter=dict(outfile=jinjafilter_outfile)))

            for i in tfstate["modules"]:
                if i["path"] == ["root", "body"]:
                    body_module_outputs = i["outputs"]
                    break
            else:
                raise click.ClickException(
                    "failed to find 'body' module in terraform state")
            body_module_outputs = dict(
                (k, v["value"]) for k, v in body_module_outputs.items())

            ag_success_result = _debugdump.wrap(
                ag_success.produce, debug_dir / 'on_success')(
                    outputs=body_module_outputs)

            out_dir.mkdir()
            for k, v in ag_success_result.get("files", {}).items():
                if isinstance(v, str):
                    v = dict(content=v)
                expected_outfiles.discard(k)
                rel_path = pathlib.Path(k)
                if (rel_path.is_absolute()
                        or '..' in rel_path.parts
                        or not rel_path.parts):
                    raise click.ClickException(
                        "output file path cannot be absolute, contain '..', "
                        "or be empty: {}".format(rel_path))
                file_path = out_dir / rel_path
                file_path.parent.mkdir(parents=True, exist_ok=True)
                file_path.write_text(v["content"])
                file_mode = v.get("chmod", None)
                if file_mode is not None:
                    file_path.chmod(file_mode)

            if expected_outfiles:
                raise click.ClickException(
                    "the following files have been referenced via 'outfile' "
                    "but not produced: " + ', '.join(sorted(expected_outfiles)))

            text = ag_success_result.get("text", None)
            if text:
                if not ag_success_result.get("plaintext", False):
                    text = ansimarkup.parse(text)
                click.echo(text)
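# `dump_json`, `checked_tf`, and `checked_captured_tf` are helpers defined
# elsewhere in the module. A minimal sketch of what `dump_json` plausibly does
# (the assumed signature is `(path, data)`; the real helper may differ):
import json

def dump_json(path, data):
    # Terraform picks up *.tf.json files alongside *.tf, so plain JSON
    # serialization of the generated config is enough.
    path.write_text(json.dumps(data, indent=2, sort_keys=True))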
def main(**kwargs): """ Options --only, --skip take space separated lists and can be supplied multiple times with lists being accumulated. Note that it is necessary to use quoting or backslash to include space as a part of an option value. Names specified in --only and --skip are expanded using Bash-style brace expansion [1] and are treated as Unix shell-style wildcards [2]. Note that if any of these features are used, it is necessary to use quoting. [1] https://github.com/trendels/braceexpand [2] https://docs.python.org/3.7/library/fnmatch.html Options --tags/--skip-tags accept boolean expressions, with "|" meaning OR, "&" meaning AND, and "!" meaning NOT, "0" and "1" meaning true and false constants. Brackets are supported. Tags containing spaces can be specified in double quotes. Entire expression has to be quoted to avoid shell from interfering. Multiple --tag/--skip-tags are joined with OR. Tags currently defined in the model are used for existing resources, as opposed to those which where defined when those resources where previously brought up. Both direct and indirect dependencies and dependents are considered in --only, --tags, --skip, --skip-tags. Options --only and --tags also select dependencies of these resources for bringing up, and dependents of these resources for bringing down or forgetting. Options --skip and --skip-tags guarantee that resources selected by them are not going to be processed and will also skip bringing up of dependent resources and bringing down or forgetting of dependencies of these resources. """ opts = attrdict.AttrMap(kwargs) notification_cb = None try: opts.only = names_to_re(join_split(opts.only)) opts.skip = names_to_re(join_split(opts.skip)) opts.tags = make_tags_matcher(opts.tags) opts.skip_tags = make_tags_matcher(opts.skip_tags) model, notification_cb = load_model(opts.file, opts.workspace) if opts.help_modes: help_modes_and_exit(model) mode_values = process_modes(model.modes, opts.mode) validate_and_finalize_model(model) with model.state as state: notification_cb('lock') if opts.edit: edited_state_str = json.dumps(state, indent=4, sort_keys=True) while True: edited_state_str = click.edit(text=edited_state_str, extension='.json') if edited_state_str is None: click.confirm("File was not saved. Continue execution with unedited state?", abort=True) else: try: edited_state = json.loads(edited_state_str) except json.JSONDecodeError as e: click.confirm("Error parsing json: {}. 
Continue editing?".format(e), abort=True) continue state.clear() state.update(edited_state) state.write() break if state: state_workspace = state.get("workspace", "") if opts.workspace != state_workspace: raise click.ClickException( "Workspace stored in state (\"{}\") " "does not match workspace selected via --workspace (\"{}\")".format( state_workspace, opts.workspace ) ) else: state["workspace"] = opts.workspace report_sets = [] existing_resources = state.setdefault('resources', {}) existing_resources, alias_renames = process_aliases(existing_resources, model.aliases) current_tags = {} for k, v in existing_resources.items(): current_tags[k] = set(v.get('tags', [])) for k, v in model.resources.items(): current_tags[k] = v.data.tags report_sets.append(( "Existing resources (clean)", sorted(k for k, v in existing_resources.items() if not v['dirty']) )) report_sets.append(( "Existing resources (dirty)", sorted(k for k, v in existing_resources.items() if v['dirty']) )) report_sets.append(( "Resources defined in the model", sorted(model.resources) )) report_sets.append(( "New resources", sorted(set(model.resources) - set(existing_resources)) )) for k, v in sorted(inverse_setdict(current_tags).items()): report_sets.append(( "Resources tagged as '{}'".format(k), sorted(v) )) existing_dependencies = dict((k, set(v['deps'])) for k, v in existing_resources.items()) def is_included(res_name): return not( opts.only and not opts.only.match(res_name) or opts.tags and not opts.tags(current_tags[res_name]) ) def is_excluded(res_name): return ( opts.skip and opts.skip.match(res_name) or opts.skip_tags and opts.skip_tags(current_tags[res_name]) ) def is_included_and_not_excluded(res_name): return is_included(res_name) and not is_excluded(res_name) resources_to_down = set(existing_resources) if opts.down: resources_to_up = [] else: resources_to_up = model.resource_order resources_to_down -= set(model.resources) down_action_str = "forget" if opts.forget else "bring down" report_sets.append((f"All resources to {down_action_str}", sorted(resources_to_down))) report_sets.append(("All resources to bring up", sorted(resources_to_up))) report_sets.append(( f"Explicitly selected resources to {down_action_str}", sorted(filter(is_included_and_not_excluded, resources_to_down)) )) report_sets.append(( "Explicitly selected resources to bring up", sorted(filter(is_included_and_not_excluded, resources_to_up)) )) resources_to_down = set(filter(is_included, resources_to_down)) resources_to_down = with_all_dependents(resources_to_down, existing_dependencies) resources_to_down -= with_all_dependencies( set(filter(is_excluded, resources_to_down)), existing_dependencies ) resources_to_down = list(reversed(toposort_dependencies(resources_to_down, existing_dependencies))) resources_to_up = set(filter(is_included, resources_to_up)) resources_to_up = with_all_dependencies(resources_to_up, model.dependencies) resources_to_up -= with_all_dependents( set(filter(is_excluded, resources_to_up)), model.dependencies ) resources_to_up = toposort_dependencies(resources_to_up, model.dependencies) if opts.forget: report_sets.append((f"Will forget", sorted(resources_to_down))) else: report_sets.append((f"Will bring down (in this order)", resources_to_down)) report_sets.append(("Will bring up (in this order)", resources_to_up)) what_padding = max((len(what) for what, which in report_sets if which), default=0) for what, which in report_sets: if which: click.echo("{}{:<{}} : {}".format( click.style(what, underline=True), '', what_padding - 
len(what), ", ".join(which) )) if alias_renames: click.echo("The following resources are renamed: {}".format(", ".join( "{}->{}".format(i, j) for i, j in alias_renames ))) if opts.graph: make_graph( model, current_tags, existing_resources, existing_dependencies, resources_to_up, resources_to_down ).view() if not resources_to_down and not resources_to_up: click.echo("Nothing to do!") if not opts.pretend: if not opts.yes and (resources_to_down or resources_to_up): click.confirm("Proceed?", abort=True) if existing_resources is not state['resources']: state['resources'] = existing_resources state.write() success = False work_dir = pathlib.Path(tempfile.mkdtemp(prefix="cnstlltn.")) messages = [] notification_cb('start') try: if opts.forget: for res_name in resources_to_down: del state['resources'][res_name] state.write() click.echo("Forgotten the following resources: {}".format( ", ".join(f"'{i}'" for i in sorted(resources_to_down)) )) else: for res_i, res_name in enumerate(resources_to_down): res_dir = work_dir / "down-{:04}-{}".format(res_i, res_name) notification_cb('resource-down-start', res_name) down_resource( debug=opts.debug, step=opts.step, messages=messages, res_dir=res_dir, res_name=res_name, state=state, mode_values=mode_values ) notification_cb('resource-down-done', res_name) resources_vars = {} for res_i, res_name in enumerate(resources_to_up): resource = model.resources[res_name] res_dir = work_dir / "up-{:04}-{}".format(res_i, res_name) notification_cb('resource-up-start', res_name) up_resource( debug=opts.debug, step=opts.step, full=opts.full, messages=messages, model=model, res_dir=res_dir, resource=resource, resources_vars=resources_vars, state=state, ignore_identity_change=opts.ignore_identity_change, ignore_checkpoints=opts.ignore_checkpoints, ignore_precheck=opts.ignore_precheck, mode_values=mode_values ) notification_cb('resource-up-done', res_name) finally: if opts.debug and not success or opts.keep_work: click.echo("keeping working directory: {}".format(work_dir)) else: shutil.rmtree(work_dir) for message in messages: click.echo(ansimarkup.parse(message.rstrip())) if opts.mementos: write_mementos(opts.mementos, state) notification_cb('success') except Exception as e: if notification_cb: if isinstance(e, click.exceptions.Abort): notification_cb('abort') else: notification_cb('fail') if not opts.debug and not isinstance(e, (click.exceptions.ClickException, click.exceptions.Abort)): click.secho("error: {}".format(e), err=True, fg='red') sys.exit(1) else: raise
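# A minimal sketch of how the --only/--skip name matching described in the
# docstring could be implemented; `names_to_re` here is an illustrative
# stand-in for the real helper, which may differ.
import re
import fnmatch
from braceexpand import braceexpand

def names_to_re(names):
    patterns = []
    for name in names:
        for expanded in braceexpand(name):  # "web-{1,2}" -> "web-1", "web-2"
            patterns.append(fnmatch.translate(expanded))  # wildcards -> regex
    return re.compile("|".join(patterns)) if patterns else None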
def run(reload, once_time, debug):
    """
    Runs the bot.
    """
    click.clear()
    if reload:
        args = sys.argv[1:]
        if "-r" in args:
            args.remove("-r")
        if "--reload" in args:
            args.remove("--reload")
        args.append("--once-time")
        # I tried to do something to stop the output flood.
        # The best solution I could come up with is calling
        # click.clear() before the process starts.
        #
        # print("> All prints you see will be changed to logger later.")
        # prev_out = None
        # proc = subprocess.run(["bot", *args], stderr=subprocess.STDOUT)
        # prev_out = proc.stderr
        # while True:
        #     print("Run")
        #     proc = subprocess.run(["bot", *args], stderr=subprocess.STDOUT)
        #     if prev_out == proc.stderr and prev_out is not None:
        #         proc = subprocess.run(["bot", *args], capture_output=True)
        #     else:
        #         proc = subprocess.run(["bot", *args], stderr=subprocess.STDOUT)
        #
        #     prev_out = proc.stderr
        #     print("Reload...")
        while True:
            click.secho("Listen", fg="green")
            subprocess.run(["bot", *args])
            click.secho("Found some changes in bot's code. Reload...",
                        fg="yellow")
    elif once_time:
        # Your bot project path
        sys.path.append(os.getcwd())

        class AllEventsHandler(PatternMatchingEventHandler):
            def on_any_event(self, event):
                self.bot.reaload_now = True

        event_handler = AllEventsHandler(
            ignore_patterns=["__pycache__", "*.pyc"],
            ignore_directories=True)

        # Bot's source
        import src

        config = attrdict.AttrMap(toml.load("config.toml"))
        URL = (config.api.URL if "URL" in config.api
               else "https://api.vk.com/method/")
        settings = dict(
            token=config.api.token,
            group_id=config.api.group_id,
            version=config.api.version,
            owner=config.api.owner,
            wait=config.longpoll.wait,
            debug=debug,
            URL=URL,
            config=config,
        )
        reactions = []
        signals = []
        for var in src.__dict__.values():
            if isinstance(var, vq.Reaction):
                reactions.append(var)
            elif isinstance(var, vq.Signal):
                signals.append(var)
        reactions = vq.ReactionsList(reactions)
        signals = vq.SignalsList(signals)
        bot = vq.Bot(reactions=reactions, signals=signals, **settings)
        AllEventsHandler.bot = bot
        observer = Observer()
        observer.schedule(event_handler, ".", recursive=True)
        observer.start()
        bot.run()
        observer.stop()
        observer.join()
    else:
        # Your bot project path
        sys.path.append(os.getcwd())
        # Bot's source
        import src

        config = attrdict.AttrMap(toml.load("config.toml"))
        # All of these constructs are terribly hacky.
        # A global overhaul is coming in 1.0.
        URL = (config.api.URL if "URL" in config.api
               else "https://api.vk.com/method/")
        settings = dict(
            token=config.api.token,
            group_id=config.api.group_id,
            version=config.api.version,
            owner=config.api.owner,
            wait=config.longpoll.wait,
            URL=URL,
            debug=debug,
            config=config,
        )
        reactions = []
        signals = []
        for var in src.__dict__.values():
            if isinstance(var, vq.Reaction):
                reactions.append(var)
            elif isinstance(var, vq.Signal):
                signals.append(var)
        reactions = vq.ReactionsList(reactions)
        signals = vq.SignalsList(signals)
        bot = vq.Bot(reactions=reactions, signals=signals, **settings)
        bot.run()
def context(table_with_records):
    context = attrdict.AttrMap()
    context.table = table_with_records
    yield context