def format_help(help):
    """Return the CLI help text with command names colourized and usage
    examples injected above the command list."""
    # Colour each command mention. Order matters: ' install' is replaced
    # after ' uninstall' so the longer name is matched first.
    colorized_commands = (
        (' check', crayons.green(' check')),
        (' uninstall', crayons.yellow(' uninstall', bold=True)),
        (' install', crayons.yellow(' install', bold=True)),
        (' lock', crayons.red(' lock', bold=True)),
        (' run', crayons.blue(' run')),
        (' shell', crayons.blue(' shell', bold=True)),
        (' update', crayons.yellow(' update')),
    )
    for plain, colored in colorized_commands:
        help = help.replace(plain, str(colored))

    additional_help = """
Usage Examples:
   Create a new project using Python 3:
   $ {0}

   Install all dependencies for a project (including dev):
   $ {1}

   Create a lockfile:
   $ {2}

Commands:""".format(
        crayons.red('pipenv --three'),
        crayons.red('pipenv install --dev'),
        crayons.red('pipenv lock')
    )

    # Splice the examples in just before the command list.
    return help.replace('Commands:', additional_help)
def do_install_dependencies(dev=False, only=False, bare=False, requirements=False, allow_global=False):
    """Install the project's dependencies from the Pipfile / Pipfile.lock.

    dev: also install [dev-packages]. only: skip the default section and
    force an in-memory lock. bare: suppress informational output.
    requirements: print the generated requirements file and exit instead
    of installing. allow_global: install with the global pip.
    """
    # --requirements implies quiet operation.
    if requirements:
        bare = True

    # Load the Pipfile.
    parsed_pipfile = pipfile.load(project.pipfile_location)

    # Prefer the lockfile when it exists, unless --only forces a fresh lock.
    if only or not project.lockfile_exists:
        if not bare:
            click.echo(crayons.yellow('Installing dependencies from Pipfile...'))
        lockfile = json.loads(parsed_pipfile.lock())
    else:
        if not bare:
            click.echo(crayons.yellow('Installing dependencies from Pipfile.lock...'))
        with open(project.lockfile_location) as f:
            lockfile = json.load(f)

    # Default deps always apply (unless --only); dev deps are additive.
    deps = {} if only else lockfile['default']
    if dev:
        deps.update(lockfile['develop'])

    # Render the deps as a pip-style requirements file on disk.
    deps_path = convert_deps_to_pip(deps)

    # --requirements: dump the generated file and stop.
    if requirements:
        with open(deps_path) as f:
            click.echo(f.read())
        sys.exit(0)

    # pip install:
    with spinner():
        c = pip_install(r=deps_path, allow_global=allow_global)

    if c.return_code != 0:
        click.echo(crayons.red('An error occured while installing!'))
        click.echo(crayons.blue(format_pip_error(c.err)))
        sys.exit(c.return_code)

    if not bare:
        click.echo(crayons.blue(format_pip_output(c.out, r=deps_path)))

    # Clean up the temp requirements file.
    if requirements:
        os.remove(deps_path)
def do_create_virtualenv(three=None, python=None):
    """Create the project's virtualenv, via virtualenv or pew."""
    click.echo(crayons.yellow('Creating a virtualenv for this project...'), err=True)

    if PIPENV_VENV_IN_PROJECT:
        # The user wants the virtualenv inside the project directory.
        command = [
            'virtualenv',
            project.virtualenv_location,
            '--prompt=({0})'.format(project.name),
        ]
    else:
        # Default: let pew manage the environment.
        command = ['pew', 'new', project.name, '-d']

    # Work out which interpreter (if any) to hand to the creator.
    if python:
        click.echo('{0} {1} {2}'.format(
            crayons.yellow('Using'),
            crayons.red(python),
            crayons.yellow('to create virtualenv...'),
        ))
    elif three is False:
        python = 'python2'
    elif three is True:
        python = 'python3'

    if python:
        command = command + ['-p', python]

    # Actually create the virtualenv.
    with spinner():
        c = delegator.run(command, block=False)
    click.echo(crayons.blue(c.out), err=True)

    # Say where the virtualenv is.
    do_where(virtualenv=True, bare=False)
def do_purge(bare=False, downloads=False, allow_global=False):
    """Uninstall every package in the environment.

    bare: suppress informational output. downloads: only clear the
    project's download directory. allow_global: operate on the global pip
    instead of the virtualenv's.
    """
    if downloads:
        if not bare:
            click.echo(crayons.yellow('Clearing out downloads directory...'))
        shutil.rmtree(project.download_location)
        return

    freeze = delegator.run('{0} freeze'.format(which_pip(allow_global=allow_global))).out
    installed = freeze.split()

    # Keep pip's own tooling out of the uninstall list.
    # BUG FIX: the original deleted items from `installed` while iterating
    # over it with enumerate(), which skips the element that follows each
    # deletion; filtering into a new list is correct and safe.
    protected = ('setuptools', 'pip', 'wheel', 'six', 'packaging', 'pyparsing', 'appdirs')
    installed = [package for package in installed if not package.startswith(protected)]

    if not bare:
        click.echo('Found {0} installed package(s), purging...'.format(len(installed)))

    command = '{0} uninstall {1} -y'.format(
        which_pip(allow_global=allow_global), ' '.join(installed))
    c = delegator.run(command)

    if not bare:
        click.echo(crayons.blue(c.out))
        click.echo(crayons.yellow('Environment now purged and fresh!'))
def install(package_name=False, more_packages=False, dev=False, three=False, python=False, system=False, lock=False):
    """Install one or more packages into the project's virtualenv.

    With no package given, installs everything from the Pipfile. Each
    installed package is recorded in the Pipfile ([dev-packages] when
    dev=True); when lock=True the lockfile is regenerated afterwards.
    """
    # Ensure that virtualenv is available.
    ensure_project(three=three, python=python)

    # Allow more than one package to be provided.
    package_names = (package_name,) + more_packages

    # Install all dependencies, if none was provided.
    if package_name is False:
        click.echo(crayons.yellow('No package provided, installing all dependencies.'), err=True)
        do_init(dev=dev, allow_global=system)
        sys.exit(0)

    for package_name in package_names:
        click.echo('Installing {0}...'.format(crayons.green(package_name)))

        # pip install:
        with spinner():
            c = pip_install(package_name, allow_global=system)
        click.echo(crayons.blue(format_pip_output(c.out)))

        # Ensure that the package was successfully installed.
        # BUG FIX: the original used `assert c.return_code == 0`, which is
        # stripped under `python -O`, silently disabling error detection.
        if c.return_code != 0:
            click.echo('{0} An error occurred while installing {1}!'.format(
                crayons.red('Error: '), crayons.green(package_name)))
            click.echo(crayons.blue(format_pip_error(c.err)))
            sys.exit(1)

        section = '[dev-packages]' if dev else '[packages]'
        click.echo('Adding {0} to Pipfile\'s {1}...'.format(
            crayons.green(package_name), crayons.red(section)))

        # Add the package to the Pipfile.
        project.add_package_to_pipfile(package_name, dev)

        # Ego boost.
        easter_egg(package_name)

    if lock:
        do_lock()
def init_spin(self):
    """Animate the spinner on stdout until `self.stop_running` is set."""
    while not self.stop_running.is_set():
        frame = next(self.spinner_cycle)
        # Python 2 needs the frame encoded before crayons colourizes it.
        if sys.version_info[0] == 2:
            frame = str(crayons.blue(frame.encode('utf-8')))
        sys.stdout.write(frame)
        sys.stdout.flush()
        time.sleep(0.07)
        # Backspace so the next frame overwrites this one.
        sys.stdout.write('\b')
def ensure_latest_pip():
    """Upgrade pip when the installed version is out of date."""
    # Ensure that pip is installed; the no-op install also reports (on
    # stderr) whether a newer release exists.
    result = delegator.run('{0} install pip'.format(which_pip()))

    # pip's "you are using pip X, however Y is available" notice is the
    # out-of-date signal.
    if 'however' not in result.err:
        return

    click.echo(crayons.yellow('Pip is out of date... updating to latest.'))
    result = delegator.run('{0} install pip --upgrade'.format(which_pip()), block=False)
    click.echo(crayons.blue(result.out))
def uninstall(package_name=False, more_packages=False, three=None, python=False, system=False, lock=False, dev=False, all=False):
    """Remove packages from the virtualenv and (usually) the Pipfile."""
    # Ensure that virtualenv is available.
    ensure_project(three=three, python=python)

    package_names = (package_name,) + more_packages
    pipfile_remove = True

    # --all: wipe the whole environment instead of named packages.
    if all is True:
        if not dev:
            click.echo(crayons.yellow('Un-installing all packages from virtualenv...'))
            do_purge(allow_global=system)
            sys.exit(0)

    # --dev: target everything listed under [dev-packages].
    if dev:
        if 'dev-packages' in project.parsed_pipfile:
            click.echo(crayons.yellow('Un-installing {0}...'.format(crayons.red('[dev-packages]'))))
            package_names = project.parsed_pipfile['dev-packages']
            # Section-wide removal keeps the Pipfile entries intact.
            pipfile_remove = False
        else:
            click.echo(crayons.yellow('No {0} to uninstall.'.format(crayons.red('[dev-packages]'))))
            sys.exit(0)

    if package_name is False and not dev:
        click.echo(crayons.red('No package provided!'))
        sys.exit(1)

    for package_name in package_names:
        click.echo('Un-installing {0}...'.format(crayons.green(package_name)))
        result = delegator.run('{0} uninstall {1} -y'.format(
            which_pip(allow_global=system), package_name))
        click.echo(crayons.blue(result.out))

        if pipfile_remove:
            section = '[dev-packages]' if dev else '[packages]'
            click.echo('Removing {0} from Pipfile\'s {1}...'.format(
                crayons.green(package_name), crayons.red(section)))
            project.remove_package_from_pipfile(package_name, dev)

    if lock:
        do_lock()
def do_download_dependencies(dev=False, only=False, bare=False):
    """Download (without installing) the project's dependencies.

    Returns a dict mapping downloaded filename -> package name.
    """
    # Load and lock the Pipfile in memory.
    parsed_pipfile = pipfile.load(project.pipfile_location)
    if not bare:
        click.echo(crayons.yellow('Downloading dependencies from Pipfile...'))
    lockfile = json.loads(parsed_pipfile.lock())

    # Default deps always apply (unless --only); dev deps are additive.
    deps = {} if only else lockfile['default']
    if dev:
        deps.update(lockfile['develop'])

    # Convert the deps to pip-compatible arguments.
    deps = convert_deps_to_pip(deps, r=False)

    # Download each dependency, recording which file each package produced.
    name_map = {}
    for package_name in deps:
        if not bare:
            click.echo('Downloading {0}...'.format(crayons.green(package_name)))
        result = pip_download(package_name)
        if not bare:
            click.echo(crayons.blue(result.out))
        name_map.update(parse_install_output(result.out))
    return name_map
# local files.
# Make the repo helpers importable regardless of the working directory.
here = Path(__file__).parent.absolute()
repository_root = (here / ".." / "..").resolve()
sys.path.insert(0, str(repository_root))
sys.path.insert(0, str(here))

from fmc_requests import fmc_authenticate, fmc_post, fmc_get  # noqa

# Authenticate with FMC
fmc_authenticate()

# Create an Access Policy
print(blue("\n==> Creating a new Access Policy on FMC"))
access_policy = {
    "type": "AccessPolicy",
    "name": config['FMC']['POLICY_NAME'],
    "description": "DevNet Race policy",
    "defaultAction": {"action": "BLOCK"},
}

# Check whether a policy with the configured name already exists.
created_policy = ''
policy_exist = ''
policycheck = fmc_get("policy/accesspolicies")
for item in policycheck['items']:
    if item['name'] == config['FMC']['POLICY_NAME']:
        policy_exist = item['name']
        policy_id = item['id']

# BUG FIX: the original line used `==` (a discarded comparison) instead of
# `=`, so `created_policy` was never actually assigned.
created_policy = fmc_get("policy/accesspolicies")['items']
def actually_resolve_reps(deps, index_lookup, markers_lookup, project, sources, verbose, clear, pre):
    """Resolve `deps` (pip requirement lines) into a full dependency tree.

    Populates `index_lookup` (name -> index source name) and
    `markers_lookup` (name -> marker string) in place, and returns the set
    of requirements produced by pip-tools' Resolver. Raises RuntimeError
    when resolution fails.
    """

    class PipCommand(pip.basecommand.Command):
        """Needed for pip-tools."""
        name = 'PipCommand'

    constraints = []
    for dep in deps:
        # parse_requirements() wants a file, so each line is written to a
        # temp file first.
        # NOTE(review): mkstemp's file descriptor is never closed — this
        # leaks one fd per dependency.
        t = tempfile.mkstemp(prefix='pipenv-', suffix='-requirement.txt')[1]
        with open(t, 'w') as f:
            f.write(dep)
        if dep.startswith('-e '):
            # Editable requirement: construct it directly from the path/URL.
            constraint = pip.req.InstallRequirement.from_editable(dep[len('-e '):])
        else:
            constraint = [c for c in pip.req.parse_requirements(t, session=pip._vendor.requests)][0]
        # extra_constraints = []

        # Remember which index (-i URL) this dependency was pinned to.
        if ' -i ' in dep:
            index_lookup[constraint.name] = project.get_source(url=dep.split(' -i ')[1]).get('name')
        # Stash environment markers, normalized to single quotes.
        if constraint.markers:
            markers_lookup[constraint.name] = str(constraint.markers).replace('"', "'")
        constraints.append(constraint)

    pip_command = get_pip_command()
    pip_args = []
    if sources:
        pip_args = prepare_pip_source_args(sources, pip_args)
    if verbose:
        print('Using pip: {0}'.format(' '.join(pip_args)))
    pip_options, _ = pip_command.parse_args(pip_args)
    session = pip_command._build_session(pip_options)
    pypi = PyPIRepository(pip_options=pip_options, session=session)
    if verbose:
        logging.log.verbose = True

    resolved_tree = set()
    resolver = Resolver(constraints=constraints, repository=pypi, clear_caches=clear, prereleases=pre)
    # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
    try:
        resolved_tree.update(resolver.resolve(max_rounds=PIPENV_MAX_ROUNDS))
    except (NoCandidateFound, DistributionNotFound, HTTPError) as e:
        click.echo(
            '{0}: Your dependencies could not be resolved. You likely have a mismatch in your sub-dependencies.\n '
            'You can use {1} to bypass this mechanism, then run {2} to inspect the situation.'
            ''.format(
                crayons.red('Warning', bold=True),
                crayons.red('$ pipenv install --skip-lock'),
                crayons.red('$ pipenv graph')
            ),
            err=True)
        click.echo(crayons.blue(e))
        if 'no version found at all' in str(e):
            click.echo(crayons.blue('Please check your version specifier and version number. See PEP440 for more information.'))
        raise RuntimeError
    return resolved_tree
    return observables  # tail of extract_observables(); its def is above this chunk


# If this script is the "main" script, run...
if __name__ == "__main__":
    # Get the list of events from AMP
    amp_events = get_amp_events()

    # Extract the Malware event observables
    amp_observables = extract_observables(amp_events)

    # Save the MAC addresses of the endpoints where malware executed to a JSON
    # file. In the ISE Mission we will read this file and quarantine these
    # endpoints.
    mac_addresses_path = repository_root / "mission-data/mac-addresses.json"
    print(blue(f"\n==> Saving MAC addresses to: {mac_addresses_path}"))
    with open(mac_addresses_path, "w") as file:
        mac_addresses = [o["mac_address"] for o in amp_observables]
        json.dump(mac_addresses, file, indent=2)

    # Save the malware SHA256 hashes to a JSON file. We will use these in the
    # ThreatGrid Mission.
    sha256_list_path = repository_root / "mission-data/sha256-list.json"
    print(blue(f"\n==> Saving SHA256 hashes to: {sha256_list_path}"))
    with open(sha256_list_path, "w") as file:
        sha256_hashes = [o["sha256"] for o in amp_observables]
        json.dump(sha256_hashes, file, indent=2)

    print(green("AMP Workflow Step Completed!!!"))
def console_error(bot_info, content):
    """Print `bot_info` in blue followed by `content` in red."""
    message = crayons.blue(bot_info) + crayons.red(f" {content}")
    print(message)
default = default_cosmetics()


async def reset(client):
    """Restore this client's outfit, banner, backpack and pickaxe to defaults."""
    await client.party.me.set_outfit(default.default_skin)
    await client.party.me.set_banner(season_level=123)
    await client.party.me.set_backpack(default.defaultbackpack)
    await client.party.me.set_pickaxe(default.default_pickaxe)


# One bot client per configured account.
to_run = [MyClient(account["email"], account["password"]) for account in accounts]

# Startup banner.
print(crayons.green("\n\n██╗ ██╗ ██████╗ ██████╗ ████████╗"))
print(crayons.green("╚██╗██╔╝ ██╔══██╗██╔═══██╗╚══██╔══╝"))
print(crayons.green(" ╚███╔╝█████╗██████╔╝██║ ██║ ██║ "))
print(crayons.green(" ██╔██╗╚════╝██╔══██╗██║ ██║ ██║ "))
print(crayons.green("██╔╝ ██╗ ██████╔╝╚██████╔╝ ██║ "))
print(crayons.green("╚═╝ ╚═╝ ╚═════╝ ╚═════╝ ╚═╝ \n"))
print(crayons.red("--------------------------------"))
print(crayons.blue(
    f'Fortnite Python bot made by brain and TJ. Version: {version}'))
print(crayons.blue('Join the discord: https://discord.gg/JwUgaua'))
print(crayons.red("--------------------------------"))

# Run every configured bot client concurrently.
loop.run_until_complete(fortnitepy.start_multiple(to_run))
def actually_resolve_deps(
    deps,
    index_lookup,
    markers_lookup,
    project,
    sources,
    clear,
    pre,
    req_dir=None,
):
    """Resolve `deps` (requirement lines) with pip-tools and collect hashes.

    Populates `index_lookup` and `markers_lookup` in place. Returns a tuple
    (resolved_tree, hashes, markers_lookup, resolver). Raises RuntimeError
    on resolution failure. `req_dir` holds the temp constraints file and is
    cleaned up here only if it was created here.
    """
    from .patched.notpip._internal import basecommand
    from .patched.notpip._internal.req import parse_requirements
    from .patched.notpip._internal.exceptions import DistributionNotFound
    from .patched.notpip._vendor.requests.exceptions import HTTPError
    from pipenv.patched.piptools.resolver import Resolver
    from pipenv.patched.piptools.repositories.pypi import PyPIRepository
    from pipenv.patched.piptools.scripts.compile import get_pip_command
    from pipenv.patched.piptools import logging as piptools_logging
    from pipenv.patched.piptools.exceptions import NoCandidateFound
    from .vendor.requirementslib import Requirement
    from ._compat import TemporaryDirectory, NamedTemporaryFile

    class PipCommand(basecommand.Command):
        """Needed for pip-tools."""
        name = "PipCommand"

    constraints = []
    cleanup_req_dir = False
    if not req_dir:
        req_dir = TemporaryDirectory(suffix="-requirements", prefix="pipenv-")
        cleanup_req_dir = True
    for dep in deps:
        if not dep:
            continue
        url = None
        # Split off a per-dependency index URL (-i ...), if present.
        if " -i " in dep:
            dep, url = dep.split(" -i ")
        req = Requirement.from_line(dep)
        # extra_constraints = []
        if url:
            index_lookup[req.name] = project.get_source(url=url).get("name")
        # strip the marker and re-add it later after resolution
        # but we will need a fallback in case resolution fails
        # eg pypiwin32
        if req.markers:
            markers_lookup[req.name] = req.markers.replace('"', "'")
        constraints.append(req.constraint_line)

    pip_command = get_pip_command()
    constraints_file = None
    pip_args = []
    if sources:
        pip_args = prepare_pip_source_args(sources, pip_args)
    if environments.is_verbose():
        print("Using pip: {0}".format(" ".join(pip_args)))
    # Write all constraints (and index/source flags) into one temp file that
    # pip's parse_requirements() can read.
    with NamedTemporaryFile(
        mode="w",
        prefix="pipenv-",
        suffix="-constraints.txt",
        dir=req_dir.name,
        delete=False,
    ) as f:
        if sources:
            requirementstxt_sources = " ".join(pip_args) if pip_args else ""
            requirementstxt_sources = requirementstxt_sources.replace(
                " --", "\n--")
            f.write(u"{0}\n".format(requirementstxt_sources))
        f.write(u"\n".join([_constraint for _constraint in constraints]))
        constraints_file = f.name
    pip_options, _ = pip_command.parser.parse_args(pip_args)
    pip_options.cache_dir = PIPENV_CACHE_DIR
    session = pip_command._build_session(pip_options)
    pypi = PyPIRepository(pip_options=pip_options, use_json=False, session=session)
    constraints = parse_requirements(constraints_file, finder=pypi.finder, session=pypi.session, options=pip_options)
    constraints = [c for c in constraints]
    if environments.is_verbose():
        logging.log.verbose = True
        piptools_logging.log.verbose = True
    resolved_tree = set()
    resolver = Resolver(constraints=constraints, repository=pypi, clear_caches=clear, prereleases=pre)
    # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
    hashes = None
    try:
        results = resolver.resolve(max_rounds=PIPENV_MAX_ROUNDS)
        hashes = resolver.resolve_hashes(results)
        resolved_tree.update(results)
    except (NoCandidateFound, DistributionNotFound, HTTPError) as e:
        click_echo(
            "{0}: Your dependencies could not be resolved. You likely have a "
            "mismatch in your sub-dependencies.\n "
            "First try clearing your dependency cache with {1}, then try the original command again.\n "
            "Alternatively, you can use {2} to bypass this mechanism, then run "
            "{3} to inspect the situation.\n "
            "Hint: try {4} if it is a pre-release dependency."
            "".format(
                crayons.red("Warning", bold=True),
                crayons.red("$ pipenv lock --clear"),
                crayons.red("$ pipenv install --skip-lock"),
                crayons.red("$ pipenv graph"),
                crayons.red("$ pipenv lock --pre"),
            ),
            err=True,
        )
        click_echo(crayons.blue(str(e)), err=True)
        if "no version found at all" in str(e):
            click_echo(
                crayons.blue(
                    "Please check your version specifier and version number. See PEP440 for more information."
                ))
        if cleanup_req_dir:
            req_dir.cleanup()
        raise RuntimeError
    if cleanup_req_dir:
        req_dir.cleanup()
    return (resolved_tree, hashes, markers_lookup, resolver)
def actually_resolve_deps(
    deps,
    index_lookup,
    markers_lookup,
    project,
    sources,
    verbose,
    clear,
    pre,
    req_dir=None
):
    """Resolve `deps` (requirement lines) into a dependency tree via pip-tools.

    Populates `index_lookup` and `markers_lookup` in place. Returns
    (resolved_tree, resolver). Raises RuntimeError on resolution failure.
    `req_dir` holds the temp constraints file; it is cleaned up here only
    when created here.
    """
    from .patched.notpip._internal import basecommand
    from .patched.notpip._internal.req import parse_requirements
    from .patched.notpip._internal.exceptions import DistributionNotFound
    from .patched.notpip._vendor.requests.exceptions import HTTPError
    from pipenv.patched.piptools.resolver import Resolver
    from pipenv.patched.piptools.repositories.pypi import PyPIRepository
    from pipenv.patched.piptools.scripts.compile import get_pip_command
    from pipenv.patched.piptools import logging as piptools_logging
    from pipenv.patched.piptools.exceptions import NoCandidateFound
    from ._compat import TemporaryDirectory, NamedTemporaryFile

    class PipCommand(basecommand.Command):
        """Needed for pip-tools."""
        name = 'PipCommand'

    constraints = []
    cleanup_req_dir = False
    if not req_dir:
        req_dir = TemporaryDirectory(suffix='-requirements', prefix='pipenv-')
        cleanup_req_dir = True
    for dep in deps:
        if not dep:
            continue
        url = None
        # Split off a per-dependency index URL (-i ...), if present.
        if ' -i ' in dep:
            dep, url = dep.split(' -i ')
        req = Requirement.from_line(dep)
        # req.as_line() is theoretically the same as dep, but is guaranteed
        # to be normalized. This is safer than passing in dep.
        # TODO: Stop passing dep lines around; just use requirement objects.
        constraints.append(req.as_line())
        # extra_constraints = []
        if url:
            index_lookup[req.name] = project.get_source(url=url).get('name')
        if req.markers:
            markers_lookup[req.name] = req.markers.replace('"', "'")

    constraints_file = None
    pip_command = get_pip_command()
    pip_args = []
    if sources:
        pip_args = prepare_pip_source_args(sources, pip_args)
    # Write all constraints (plus index/source flags) into one temp file
    # for pip's parse_requirements().
    with NamedTemporaryFile(mode='w', prefix='pipenv-',
                            suffix='-constraints.txt', dir=req_dir.name,
                            delete=False) as f:
        if sources:
            requirementstxt_sources = ' '.join(pip_args).replace(' --', '\n--')
            f.write(u'{0}\n'.format(requirementstxt_sources))
        f.write(u'\n'.join([_constraint for _constraint in constraints]))
        constraints_file = f.name
    if verbose:
        print('Using pip: {0}'.format(' '.join(pip_args)))
    # BUG FIX: list.extend() returns None; the original wrote
    # `pip_args = pip_args.extend([...])`, which replaced pip_args with
    # None before handing it to parse_args().
    pip_args.extend(['--cache-dir', PIPENV_CACHE_DIR])
    pip_options, _ = pip_command.parse_args(pip_args)
    session = pip_command._build_session(pip_options)
    pypi = PyPIRepository(
        pip_options=pip_options, use_json=True, session=session
    )
    if verbose:
        logging.log.verbose = True
        piptools_logging.log.verbose = True
    resolved_tree = set()
    resolver = Resolver(
        constraints=parse_requirements(
            constraints_file,
            finder=pypi.finder,
            session=pypi.session,
            options=pip_options,
        ),
        repository=pypi,
        clear_caches=clear,
        prereleases=pre,
    )
    # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
    try:
        resolved_tree.update(resolver.resolve(max_rounds=PIPENV_MAX_ROUNDS))
    except (NoCandidateFound, DistributionNotFound, HTTPError) as e:
        click_echo(
            '{0}: Your dependencies could not be resolved. You likely have a '
            'mismatch in your sub-dependencies.\n '
            'You can use {1} to bypass this mechanism, then run {2} to inspect '
            'the situation.\n '
            'Hint: try {3} if it is a pre-release dependency.'
            ''.format(
                crayons.red('Warning', bold=True),
                crayons.red('$ pipenv install --skip-lock'),
                crayons.red('$ pipenv graph'),
                crayons.red('$ pipenv lock --pre'),
            ),
            err=True,
        )
        click_echo(crayons.blue(str(e)), err=True)
        if 'no version found at all' in str(e):
            click_echo(
                crayons.blue(
                    'Please check your version specifier and version number. See PEP440 for more information.'
                )
            )
        if cleanup_req_dir:
            req_dir.cleanup()
        raise RuntimeError
    if cleanup_req_dir:
        req_dir.cleanup()
    return resolved_tree, resolver
def toolboxprint(
    text,
    green_words=None,
    yellow_words=None,
    red_words=None,
    cyan_words=None,
    magenta_words=None,
    bold_words=None,
):
    """Prints a string with fancy colourization for common items like numbers.

    Dollar amounts are green, plain numbers cyan, hex values magenta,
    percentages cyan, emails blue; each optional word list forces that
    colour on matching whitespace-delimited words.
    """
    s = []  # NOTE(review): unused; kept for byte-compatibility.
    text = str(text)
    ts = text  # ts accumulates the colourized output via str.replace().
    numbers = get_numbers(text)
    emails = get_email_addresses(text)
    # Normalize the optional word lists to empty lists.
    gw = green_words if green_words is not None else []
    yw = yellow_words if yellow_words is not None else []
    rw = red_words if red_words is not None else []
    cw = cyan_words if cyan_words is not None else []
    mw = magenta_words if magenta_words is not None else []
    bw = bold_words if bold_words is not None else []
    text = str(text)
    words = text.split()
    n = len(words)
    next_is_currency = False
    replace_dollars = False
    for i, t in enumerate(words):
        if t[0] == "$" and len(t) > 1:
            # Attached currency like "$100": whole token goes bold green.
            ts = ts.replace(t, str(crayons.green(str(t), bold=True)))
        elif (i + 1) < n and t == "$":
            # Detached "$" followed by a number: colour the number on its
            # iteration and fix up the "$ " spacing after the loop.
            if words[i + 1] in numbers:
                replace_dollars = True
                next_is_currency = True
        elif t in numbers:
            if next_is_currency:
                ts = ts.replace(t, str(crayons.green(str(t), bold=True)))
                next_is_currency = False
            else:
                # guard against replacing substrings of numbers by checking for
                # delimiting whitespace
                if i == 0:
                    ts = ts.replace(
                        "%s " % (t),
                        "%s " % (str(crayons.cyan(str(t), bold=True))))
                elif i == n - 1:
                    ts = ts.replace(
                        " %s" % (t),
                        " %s" % (str(crayons.cyan(str(t), bold=True))))
                else:
                    s1 = " %s " % (t)
                    s2 = " %s " % (str(crayons.cyan(str(t), bold=True)))
                    ts = ts.replace(s1, s2)
        elif t.startswith("0x"):
            # Hex literals.
            ts = ts.replace(t, str(crayons.magenta(str(t), bold=True)))
        elif "%" in t:
            # Percentages.
            ts = ts.replace(t, str(crayons.cyan(str(t), bold=True)))
        elif t in emails:
            ts = ts.replace(t, str(crayons.blue(str(t), bold=True)))
        elif t in gw:
            ts = ts.replace(t, str(crayons.green(str(t), bold=False)))
        elif t in yw:
            ts = ts.replace(t, str(crayons.yellow(str(t), bold=False)))
        elif t in rw:
            ts = ts.replace(t, str(crayons.red(str(t), bold=False)))
        elif t in cw:
            ts = ts.replace(t, str(crayons.cyan(str(t), bold=False)))
        elif t in mw:
            ts = ts.replace(t, str(crayons.magenta(str(t), bold=False)))
        elif t in bw:
            ts = ts.replace(t, str(crayons.white(str(t), bold=True)))
    if replace_dollars:
        # NOTE(review): this replaces "$ " with a coloured "$" and drops the
        # space, joining "$" to the following number — presumably intentional
        # ("$ 100" -> coloured "$100"); confirm against callers.
        ds = str(crayons.green(str("$"), bold=True))
        ts = ts.replace("$ ", ds)
    print(ts)
#!/usr/bin/env python3
"""Small demo of the crayons package: colours, disabling, and bold text."""
import crayons

# Print 'red string' in red.
print(crayons.red('red string'))

# Red, white and blue text.
print(f"{crayons.red('red')} white {crayons.blue('blue')}")

# Disable the crayons package; the same line now prints without colour.
crayons.disable()
print(f"{crayons.red('red')} white {crayons.blue('blue')}")

# Re-enable colour output; this line prints in colour again.
crayons.DISABLE_COLOR = False
print(f"{crayons.red('red')} white {crayons.blue('blue')}")

# Bold variants of each colour.
print(crayons.red('red string', bold=True))
print(crayons.yellow('yellow string', bold=True))
print(crayons.magenta('magenta string', bold=True))
print(crayons.white('white string', bold=True))
def publish(project_file: str, packages_path: str, keep_environment: bool = False,
            bundles: list = None, show_result: bool = False, rebuild: bool = False,
            env_name: str = None, force: bool = False):
    """Compile a buildroot package and submit it as a release process.

    Reads the JSON project file, tars the buildroot package found under
    `packages_path`, POSTs the release process and the package to the
    configured environment, and verifies the upload checksum. Raises
    Exception on any validation, auth, or HTTP failure.
    """
    project_file = os.path.realpath(project_file)
    packages_path = os.path.realpath(packages_path) if packages_path else None
    env = context.get_env(env_name)
    if not env.token:
        raise Exception('You must authenticate first')
    if not os.path.isfile(project_file):
        raise Exception(
            'File {0} not found or not a valid file'.format(project_file))
    # Default the package search path to the project file's directory.
    if packages_path:
        if not os.path.isdir(packages_path):
            raise Exception(
                'Packages path {0} not found'.format(packages_path))
    else:
        packages_path = os.path.dirname(project_file)
    project = os.path.split(project_file)[1]
    # Interactive confirmation unless --force.
    if not force and not click.confirm(
            'Project {0} will be compiled and published in [{1}], '
            'do you confirm?'.format(crayons.green(project), crayons.green(env.name))):
        raise Exception('Cancelled')
    process = read_json_file(project_file)
    # setting up process data
    if keep_environment:
        process['keep_environment'] = True
    process['rebuild'] = rebuild
    buildroot_pkg = process.pop('buildroot_pkg', None)
    mk_filename = '{0}.mk'.format(buildroot_pkg)
    if not buildroot_pkg:
        raise Exception('Missing buildroot_pkg in project file')
    # Restrict to the requested bundles, if any were named.
    if bundles:
        process['bundles'] = [
            b for b in process['bundles'] if b['name'] in bundles
        ]
        if not process['bundles']:
            raise Exception(
                'Could not match any bundle from: {0}'.format(bundles))
    # find buildroot_pkg under packages_path
    mk_path = next(
        (i[0] for i in os.walk(packages_path) if mk_filename in i[2]), None)
    if not mk_path:
        raise Exception(
            'Could not find buildroot mk file for package {0} in {1}'.format(
                buildroot_pkg, packages_path))
    basename = os.path.basename(mk_path)
    if basename != buildroot_pkg:
        raise Exception(
            'The package folder containing the .mk file has to be named {0}'.
            format(buildroot_pkg))
    pkg_path = os.path.dirname(mk_path)
    work_dir = tempfile.mkdtemp()
    try:
        # Tar the package directory (following symlinks: 'h').
        package = '{0}.tar.gz'.format(buildroot_pkg)
        source_path = os.path.join(work_dir, package)
        try:
            subprocess.check_output(
                ['tar', 'zhcf', source_path, buildroot_pkg],
                stderr=subprocess.STDOUT,
                cwd=os.path.join(pkg_path))
        except subprocess.CalledProcessError as ex:
            raise Exception(ex.output.decode())
        click.echo(
            'Submitting release process for project {0} using file {1}'.format(
                project_file, package))
        click.echo('URL: {0}'.format(env.bundle_url))
        # Create the release process server-side.
        headers = {'Authorization': 'MOD {0}'.format(env.token)}
        result = requests.post('{0}/'.format(env.bundle_url),
                               json=process,
                               headers=headers)
        if result.status_code == 401:
            raise Exception(
                'Invalid token - please authenticate (see \'modcli auth\')')
        elif result.status_code != 200:
            raise Exception('Error: {0}'.format(result.text))
        release_process = result.json()
        click.echo('Release process created: {0}'.format(
            release_process['id']))
        # Upload the tarball to the URL the server handed back.
        click.echo('Uploading buildroot package {0} ...'.format(package))
        with open(source_path, 'rb') as fh:
            data = fh.read()
        headers = {'Content-Type': 'application/octet-stream'}
        result = requests.post(release_process['source-href'],
                               data=data,
                               headers=headers)
        if result.status_code == 401:
            raise Exception(
                'Invalid token - please authenticate (see \'modcli auth\')')
        elif result.status_code != 201:
            raise Exception('Error: {0}'.format(result.text))
        # Server echoes the MD5 of what it received (JSON-quoted).
        checksum = result.text.lstrip('"').rstrip('"')
        result_checksum = md5(data).hexdigest()
        if checksum == result_checksum:
            click.echo('Checksum match ok!')
        else:
            raise Exception('Checksum mismatch: {0} <> {1}'.format(
                checksum, result_checksum))
    finally:
        # Always remove the temp work dir, even on failure.
        click.echo('Cleaning up...')
        shutil.rmtree(work_dir, ignore_errors=True)
    # Only reached on success, so release_process is bound here.
    release_process_url = release_process['href']
    click.echo(
        crayons.blue(
            'Process url: {0}?pretty=true'.format(release_process_url)))
    click.echo(crayons.green('Done'))
    if show_result:
        click.echo('Retrieving release process from {0} ...'.format(
            release_process_url))
        release_process_full = requests.get(
            '{0}?pretty=true'.format(release_process_url)).text
        click.echo(
            crayons.blue(
                '================ Release Process {0} ================'.format(
                    release_process['id'])))
        click.echo(release_process_full)
        click.echo(
            crayons.blue(
                '================ End Release Process ================'))
def print_game(self, player, game_map):
    """Render the bordered map, player marker and HUD to the terminal.

    player: provides get_position() -> (x, y) and `inventory`.
    game_map: indexable as game_map[row][col], tiles exposing
    seen_status() / has_item() / get_color() / get_icon() / print_tile().
    """
    border = u"\u25A0"
    b = player.get_position()
    map_size = game_map.get_size()

    # Center the (rows+2) x (cols+2) framed map in the terminal.
    spacer_lines = (self.height - (map_size[0] + 2)) // 2
    spacer_columns = (self.width - ((map_size[1] + 2) * 2)) // 2

    # Colour dispatch for tile icons; unknown colours fall back to green,
    # matching the original final else-branch.
    color_fns = {
        'red': crayons.red,
        'green': crayons.green,
        'yellow': crayons.yellow,
        'blue': crayons.blue,
        'black': crayons.black,
        'magenta': crayons.magenta,
        'cyan': crayons.cyan,
        'white': crayons.white,
    }

    # Before Spacer
    self.__spacer(spacer_lines)

    # Top border (with leading spacer for centering).
    print(' ' * spacer_columns, end='')
    for _ in range(map_size[1] + 2):
        print(crayons.blue(border), end=' ')
    print()

    for j in range(map_size[0]):
        # Spacer for centering, then the left border.
        print(' ' * spacer_columns, end='')
        print(crayons.blue(border), end=' ')

        for k in range(map_size[1]):
            if k == b[0] and j == b[1]:
                # Player marker.
                print(crayons.red("P"), end=' ')
            elif game_map[j][k].seen_status():
                if game_map[j][k].has_item('jewels'):
                    print(crayons.cyan("J"), end=' ')
                else:
                    # BUG FIX: the original coloured the tile object and then
                    # called .get_icon() on the resulting ColoredString, e.g.
                    # crayons.green(game_map[j][k]).get_icon(). Colour the
                    # icon itself instead (as the red branch already did).
                    colorize = color_fns.get(game_map[j][k].get_color(), crayons.green)
                    print(colorize(game_map[j][k].get_icon()), end=' ')
            else:
                # Unseen tiles: solid block (plain 'X' on Windows consoles).
                if system() == "Windows":
                    print('X', end=' ')
                else:
                    print(u"\u25A0", end=' ')

        # Right border closes each row.
        print(crayons.blue(border))

    # Bottom border (with leading spacer for centering).
    print(' ' * spacer_columns, end='')
    for _ in range(map_size[1] + 2):
        print(crayons.blue(border), end=' ')
    print()
    print()
    print("(Press K) Keybindings!".center(self.width - 1))
    self.__spacer(spacer_lines - 2)
    # Current tile description + player stats.
    print(game_map[b[1]][b[0]].print_tile(player.inventory).center(
        self.width))
    self.print_stats(player)
def resolve_deps(deps, which, which_pip, project, sources=None, verbose=False, python=False, clear=False, pre=False):
    """Given a list of dependencies, return a resolved list of dependencies,
    using pip-tools -- and their hashes, using the warehouse API / pip.

    deps: list of requirement strings (may include '-e ' editables and
        ' -i ' per-requirement index URLs).
    which / which_pip: callables that locate executables in the virtualenv.
    project: the active pipenv Project (used to map index URLs to names).
    Returns a list of dicts: {'name', 'version', 'hashes'[, 'index', 'markers']}.
    """
    index_lookup = {}
    markers_lookup = {}
    python_path = which('python')
    # Temporarily point resolution at the project's interpreter version.
    with HackedPythonVersion(python_version=python, python_path=python_path):

        class PipCommand(pip.basecommand.Command):
            """Needed for pip-tools."""
            name = 'PipCommand'

        constraints = []
        for dep in deps:
            # Each requirement is written to its own temp file so pip's
            # parse_requirements can consume it.
            t = tempfile.mkstemp(prefix='pipenv-', suffix='-requirement.txt')[1]
            with open(t, 'w') as f:
                f.write(dep)
            if dep.startswith('-e '):
                constraint = pip.req.InstallRequirement.from_editable(
                    dep[len('-e '):])
            else:
                constraint = [
                    c for c in pip.req.parse_requirements(
                        t, session=pip._vendor.requests)
                ][0]
            # extra_constraints = []
            # Remember which named source a ' -i <url>' requirement came from.
            if ' -i ' in dep:
                index_lookup[constraint.name] = project.get_source(
                    url=dep.split(' -i ')[1]).get('name')
            # Normalise marker quoting so it round-trips through TOML.
            if constraint.markers:
                markers_lookup[constraint.name] = str(
                    constraint.markers).replace('"', "'")
            constraints.append(constraint)
        pip_command = get_pip_command()
        pip_args = []
        if sources:
            pip_args = prepare_pip_source_args(sources, pip_args)
        if verbose:
            print('Using pip: {0}'.format(' '.join(pip_args)))
        pip_options, _ = pip_command.parse_args(pip_args)
        session = pip_command._build_session(pip_options)
        pypi = PyPIRepository(pip_options=pip_options, session=session)
        if verbose:
            logging.log.verbose = True
        results = []
        resolved_tree = set()
        resolver = Resolver(constraints=constraints, repository=pypi,
                            clear_caches=clear, prereleases=pre)
        # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
        try:
            resolved_tree.update(
                resolver.resolve(max_rounds=PIPENV_MAX_ROUNDS))
        except (NoCandidateFound, DistributionNotFound, HTTPError) as e:
            click.echo(
                '{0}: Your dependencies could not be resolved. You likely have a mismatch in your sub-dependencies.\n '
                'You can use {1} to bypass this mechanism, then run {2} to inspect the situation.'
                ''.format(crayons.red('Warning', bold=True),
                          crayons.red('$ pipenv install --skip-lock'),
                          crayons.red('$ pipenv graph')),
                err=True)
            click.echo(crayons.blue(e))
            sys.exit(1)
        for result in resolved_tree:
            # Editable requirements have no pinned version/hash to record.
            if not result.editable:
                name = pep423_name(result.name)
                version = clean_pkg_version(result.specifier)
                index = index_lookup.get(result.name)
                if not markers_lookup.get(result.name):
                    # Drop auto-generated 'extra' markers; keep real ones.
                    markers = str(
                        result.markers) if result.markers and 'extra' not in str(
                            result.markers) else None
                else:
                    markers = markers_lookup.get(result.name)
                collected_hashes = []
                # Only trust the warehouse API when PyPI itself is a source.
                if 'python.org' in '|'.join([source['url'] for source in sources]):
                    try:
                        # Grab the hashes from the new warehouse API.
                        r = requests.get(
                            'https://pypi.org/pypi/{0}/json'.format(name))
                        api_releases = r.json()['releases']
                        cleaned_releases = {}
                        for api_version, api_info in api_releases.items():
                            cleaned_releases[clean_pkg_version(
                                api_version)] = api_info
                        for release in cleaned_releases[version]:
                            collected_hashes.append(release['digests']['sha256'])
                        collected_hashes = [
                            'sha256:' + s for s in collected_hashes
                        ]
                        # Collect un-collectable hashes.
                        if not collected_hashes:
                            # Fall back to pip-tools' own hash resolution.
                            collected_hashes = list(
                                list(resolver.resolve_hashes([result
                                                              ]).items())[0][1])
                    except (ValueError, KeyError):
                        # Best-effort: missing/odd API data just means no hashes.
                        pass
                d = {'name': name, 'version': version, 'hashes': collected_hashes}
                if index:
                    d.update({'index': index})
                if markers:
                    d.update({'markers': markers.replace('"', "'")})
                results.append(d)
    return results
headers=headers) if (response.status_code == 204): print( "Done!..Applied Quarantine policy to the rogue endpoint...MAC: {0} Threat is now contained...." .format(items)) else: print("An error has ocurred with the following code %(error)s" % {'error': response.status_code}) if __name__ == "__main__": maclist_path = repository_root / "mission-data/mac-addresses.json" maclist = readmacaddr_file(maclist_path) #TODO Call the function for getting ANC policy and store it in the policylist variable env_lab.print_missing_mission_warn(env_lab.get_line()) policylist = MISSION #TODO call the function for applying policy to the endpoints env_lab.print_missing_mission_warn(env_lab.get_line()) # # Finally, post a message to the Webex Teams Room to brag!!! print(blue("\n==> Posting message to Webex Teams")) teams = webexteamssdk.WebexTeamsAPI(env_user.WEBEX_TEAMS_ACCESS_TOKEN) teams.messages.create( roomId=env_user.WEBEX_TEAMS_ROOM_ID, markdown=f"**ISE Mission completed!!!** \n\n" f"I have applied quarantine policy to the rogue endpoints! \n\n") print(green("ISE Mission Completed!!!"))
def blue(string, **kwargs):
    """Return *string* coloured blue via crayons.

    BUGFIX: the original accepted extra keyword arguments (e.g. ``bold=True``)
    but silently dropped them; they are now forwarded to ``crayons.blue``.
    """
    return crayons.blue(string, **kwargs)
f"https://{host}/api/v2/iocs/feeds/domains", params=query_parameters, ) response.raise_for_status() samples = response.json()["data"]["items"] print(green(f"Successfully retrieved data on " f"{len(samples)} malware samples")) return samples # If this script is the "main" script, run... if __name__ == "__main__": if len(sys.argv) == 2: _, query_domain = sys.argv else: print(f"{white('Usage:', bold=True)} {Path(__file__).name} DOMAIN") sys.exit(1) query_start = datetime.utcnow() - timedelta(days=TIME_PERIOD) query_end = datetime.utcnow() #TODO Call the function for query feed. malware_samples = malware_samples_path = here / f"{query_domain}-malware-samples-data.json" print(blue(f"\n==> Saving samples data to: {malware_samples_path}")) #TODO: Pass the proper path to the open function with open(malware_samples_path, "w") as file: json.dump(malware_samples, file, indent=2)
def console_success(bot_info, content):
    """Print *bot_info* in blue followed by *content* in green."""
    prefix = crayons.blue(bot_info)
    body = crayons.green(f" {content}")
    print(prefix + body)
def check():
    """Verify the environment against the Pipfile's PEP 508 requirements."""
    click.echo(crayons.yellow('Checking PEP 508 requirements...'))
    # The checker module may resolve to a compiled file; rstrip('cdo')
    # trims a trailing .pyc/.pyo suffix back to the .py source path.
    checker_script = pep508checker.__file__.rstrip('cdo')
    command = '{0} {1}'.format(which('python'), checker_script)
    # Run the PEP 508 checker inside the virtualenv and parse its JSON output.
    c = delegator.run(command)
    results = json.loads(c.out)
    # Load the pipfile and walk its declared requirements.
    p = pipfile.Pipfile.load(project.pipfile_location)
    for marker, specifier in p.data['_meta']['requires'].items():
        if marker not in results:
            continue
        try:
            assert results[marker] == specifier
        except AssertionError:
            # First mismatch aborts the check.
            click.echo('Specifier {0} does not match {1}.'.format(
                crayons.red(marker), crayons.blue(specifier)))
            sys.exit(1)
    click.echo('Passed!')
def uninstall(package_name=False, more_packages=False, three=None,
              python=False, system=False, lock=False, dev=False, all=False):
    """Un-install one or more packages and update the Pipfile accordingly.

    package_name / more_packages: packages named on the command line.
    three / python: interpreter selection, forwarded to ensure_project().
    system: operate on the global site-packages instead of the virtualenv.
    lock: re-generate the lockfile afterwards.
    dev: un-install everything listed in [dev-packages].
    all: purge every installed package from the environment.
    """
    # Ensure that virtualenv is available.
    ensure_project(three=three, python=python)
    package_names = (package_name, ) + more_packages
    # Whether to also drop the package(s) from the Pipfile afterwards.
    pipfile_remove = True
    # Un-install all dependencies, if --all was provided.
    # NOTE(review): with both --all and --dev, this falls through to the
    # [dev-packages] branch below instead of purging -- confirm intended.
    if all is True:
        if not dev:
            click.echo(
                crayons.yellow(
                    'Un-installing all packages from virtualenv...'))
            do_purge(allow_global=system)
            sys.exit(0)
    # Uninstall [dev-packages], if --dev was provided.
    if dev:
        if 'dev-packages' in project.parsed_pipfile:
            click.echo(
                crayons.yellow('Un-installing {0}...'.format(
                    crayons.red('[dev-packages]'))))
            # Target every dev package, but keep the Pipfile section intact.
            package_names = project.parsed_pipfile['dev-packages']
            pipfile_remove = False
        else:
            click.echo(
                crayons.yellow('No {0} to uninstall.'.format(
                    crayons.red('[dev-packages]'))))
            sys.exit(0)
    # Neither a package nor --dev given: nothing to do.
    if package_name is False and not dev:
        click.echo(crayons.red('No package provided!'))
        sys.exit(1)
    for package_name in package_names:
        click.echo('Un-installing {0}...'.format(crayons.green(package_name)))
        # Delegate the actual removal to pip.
        c = delegator.run('{0} uninstall {1} -y'.format(
            which_pip(allow_global=system), package_name))
        click.echo(crayons.blue(c.out))
        if pipfile_remove:
            if dev:
                click.echo('Removing {0} from Pipfile\'s {1}...'.format(
                    crayons.green(package_name),
                    crayons.red('[dev-packages]')))
            else:
                click.echo('Removing {0} from Pipfile\'s {1}...'.format(
                    crayons.green(package_name),
                    crayons.red('[packages]')))
            project.remove_package_from_pipfile(package_name, dev)
    if lock:
        do_lock()
# Project-local imports: exchange/prediction tree builders and the BFS search.
from process_db import generate_exchange_tree
from prediction import generate_predictions_tree
from search_problem import ArbitrageSearchProblem
from bfs import arbitrageBFS

if __name__ == '__main__':
    """Main CLI entrypoint"""
    # Parse the CLI options declared in the module docstring via docopt.
    arguments = docopt(__doc__, version='Alpha')
    pricef = arguments['FILE']  # path to the sqlite3 prices database
    verbose = arguments['--verbose']
    figures = arguments['--figures']
    # Welcome banner.
    print('\n{} {} {}\n'.format(
        crayons.blue('Welcome to yaba (Yet Another Bitcoin Arbitrage).'),
        crayons.green('CA$H and CO1NZ', bold=True),
        crayons.blue('will be flowing shortly.')))
    print(crayons.yellow('loading prices database {}...'.format(pricef)))
    conn = None
    try:
        conn = sqlite3.connect(pricef)
    except Exception as e:
        # Abort with a red error banner if the database cannot be opened.
        print(
            crayons.red(
                'failed to load sqlite3 database file {}!'.format(pricef)))
        print(crayons.red(e))
        print(crayons.red('exiting...'))
        sys.exit(1)
def print_blue(txt, bold=False):
    """Print *txt* in blue; embolden it when *bold* is True.

    BUGFIX: the original called ``crayons.blue(txt, bold)``, which bound
    ``bold`` positionally to crayons' ``always`` (force-colour) parameter,
    so the text was never actually bolded. Pass it by keyword instead.
    """
    print(crayons.blue(txt, bold=bold))
    return None
def install(package_name=False, more_packages=False, dev=False, three=False,
            python=False, system=False, lock=False):
    """Install packages into the virtualenv and record them in the Pipfile.

    With no package given, installs everything from the Pipfile (plus
    [dev-packages] when *dev* is set). *system* targets the global
    site-packages; *lock* re-generates the lockfile afterwards.
    """
    # Ensure that virtualenv is available.
    ensure_project(three=three, python=python)
    # Allow more than one package to be provided.
    package_names = (package_name, ) + more_packages
    # Install all dependencies, if none was provided.
    if package_name is False:
        click.echo(crayons.yellow(
            'No package provided, installing all dependencies.'), err=True)
        do_init(dev=dev, allow_global=system)
        sys.exit(0)
    for package_name in package_names:
        # Proper-case incoming package name (check against API).
        old_name = [k for k in convert_deps_from_pip(package_name).keys()][0]
        try:
            new_name = proper_case(old_name)
        except IOError as e:
            # BUGFIX: the original format string had two placeholders but was
            # given three arguments, so the package name was silently dropped
            # from the error message.
            click.echo('{0} {1} ({2})'.format(crayons.red('Error: '),
                                              e.args[0],
                                              crayons.green(package_name)))
            continue
        package_name = package_name.replace(old_name, new_name)
        click.echo('Installing {0}...'.format(crayons.green(package_name)))
        # pip install:
        with spinner():
            c = pip_install(package_name, allow_global=system)
        click.echo(crayons.blue(format_pip_output(c.out)))  # TODO: This
        # Ensure that package was successfully installed.
        try:
            assert c.return_code == 0
        except AssertionError:
            click.echo('{0} An error occurred while installing {1}!'.format(
                crayons.red('Error: '), crayons.green(package_name)))
            click.echo(crayons.blue(format_pip_error(c.err)))
            sys.exit(1)
        if dev:
            click.echo('Adding {0} to Pipfile\'s {1}...'.format(
                crayons.green(package_name), crayons.red('[dev-packages]')))
        else:
            click.echo('Adding {0} to Pipfile\'s {1}...'.format(
                crayons.green(package_name), crayons.red('[packages]')))
        # Add the package to the Pipfile.
        project.add_package_to_pipfile(package_name, dev)
        # Ego boost.
        easter_egg(package_name)
    if lock:
        do_lock()
def install(package_name=False, more_packages=False, dev=False, three=False,
            python=False, system=False, lock=False, no_hashes=False,
            ignore_hashes=False, ignore_pipfile=False):
    """Install packages and record them in the Pipfile (hash-aware variant).

    no_hashes / ignore_hashes: control hash generation and verification;
    ignore_pipfile: install from the lockfile even if the Pipfile changed.
    """
    # Ensure that virtualenv is available.
    ensure_project(three=three, python=python)
    # Allow more than one package to be provided.
    package_names = (package_name, ) + more_packages
    # Install all dependencies, if none was provided.
    if package_name is False:
        click.echo(crayons.yellow(
            'No package provided, installing all dependencies.'), err=True)
        do_init(dev=dev, allow_global=system, ignore_hashes=ignore_hashes,
                ignore_pipfile=ignore_pipfile)
        sys.exit(0)
    for package_name in package_names:
        click.echo('Installing {0}...'.format(crayons.green(package_name)))
        # pip install:
        with spinner():
            # Hash checking is always skipped for ad-hoc installs; hashes are
            # enforced at lock/sync time instead.
            c = pip_install(package_name, ignore_hashes=True,
                            allow_global=system)
        click.echo(crayons.blue(format_pip_output(c.out)))  # TODO: This
        # Ensure that package was successfully installed.
        try:
            assert c.return_code == 0
        except AssertionError:
            click.echo('{0} An error occurred while installing {1}!'.format(
                crayons.red('Error: '), crayons.green(package_name)))
            click.echo(crayons.blue(format_pip_error(c.err)))
            sys.exit(1)
        if dev:
            click.echo('Adding {0} to Pipfile\'s {1}...'.format(
                crayons.green(package_name), crayons.red('[dev-packages]')))
        else:
            click.echo('Adding {0} to Pipfile\'s {1}...'.format(
                crayons.green(package_name), crayons.red('[packages]')))
        # Add the package to the Pipfile.
        try:
            project.add_package_to_pipfile(package_name, dev)
        except ValueError as e:
            # The package installed but could not be recorded; surface the
            # error without aborting the remaining installs.
            click.echo('{0} {1}'.format(
                crayons.red('ERROR (PACKAGE NOT INSTALLED):'), e))
        # Ego boost.
        easter_egg(package_name)
    if lock:
        do_lock(no_hashes=no_hashes)
def actually_resolve_deps(
    deps,
    index_lookup,
    markers_lookup,
    project,
    sources,
    verbose,
    clear,
    pre,
    req_dir=None,
):
    """Resolve *deps* via pip-tools against the configured *sources*.

    Mutates index_lookup / markers_lookup in place as side tables and
    returns (resolved_tree, hashes, markers_lookup, resolver). Raises
    RuntimeError when resolution fails.
    """
    # Imports are local: the vendored/patched pip must not be imported at
    # module load time.
    from .vendor.packaging.markers import default_environment
    from .patched.notpip._internal import basecommand
    from .patched.notpip._internal.cmdoptions import no_binary, only_binary
    from .patched.notpip._internal.req import parse_requirements
    from .patched.notpip._internal.exceptions import DistributionNotFound
    from .patched.notpip._vendor.requests.exceptions import HTTPError
    from pipenv.patched.piptools.resolver import Resolver
    from pipenv.patched.piptools.repositories.pypi import PyPIRepository
    from pipenv.patched.piptools.scripts.compile import get_pip_command
    from pipenv.patched.piptools import logging as piptools_logging
    from pipenv.patched.piptools.exceptions import NoCandidateFound
    from .vendor.requirementslib import Requirement
    from ._compat import TemporaryDirectory, NamedTemporaryFile

    class PipCommand(basecommand.Command):
        """Needed for pip-tools."""
        name = "PipCommand"

    constraints = []
    # Only clean up the scratch directory if we created it ourselves.
    cleanup_req_dir = False
    if not req_dir:
        req_dir = TemporaryDirectory(suffix="-requirements", prefix="pipenv-")
        cleanup_req_dir = True
    for dep in deps:
        if not dep:
            continue
        url = None
        # Split off a per-requirement index URL, if present.
        if " -i " in dep:
            dep, url = dep.split(" -i ")
        req = Requirement.from_line(dep)
        # extra_constraints = []
        if url:
            index_lookup[req.name] = project.get_source(url=url).get("name")
        # strip the marker and re-add it later after resolution
        # but we will need a fallback in case resolution fails
        # eg pypiwin32
        if req.markers:
            markers_lookup[req.name] = req.markers.replace('"', "'")
        constraints.append(req.constraint_line)

    pip_command = get_pip_command()
    constraints_file = None
    pip_args = []
    if sources:
        pip_args = prepare_pip_source_args(sources, pip_args)
    if verbose:
        print("Using pip: {0}".format(" ".join(pip_args)))
    # Write the source flags plus all constraint lines to a temp
    # requirements-style file for pip's parser.
    with NamedTemporaryFile(
        mode="w",
        prefix="pipenv-",
        suffix="-constraints.txt",
        dir=req_dir.name,
        delete=False,
    ) as f:
        if sources:
            requirementstxt_sources = " ".join(pip_args) if pip_args else ""
            # One '--flag value' per line.
            requirementstxt_sources = requirementstxt_sources.replace(" --", "\n--")
            f.write(u"{0}\n".format(requirementstxt_sources))
        f.write(u"\n".join([_constraint for _constraint in constraints]))
        constraints_file = f.name
    pip_options, _ = pip_command.parser.parse_args(pip_args)
    pip_options.cache_dir = PIPENV_CACHE_DIR
    session = pip_command._build_session(pip_options)
    pypi = PyPIRepository(pip_options=pip_options, use_json=False, session=session)
    # Re-parse the constraints through pip so the finder/session apply.
    constraints = parse_requirements(
        constraints_file, finder=pypi.finder, session=pypi.session, options=pip_options
    )
    constraints = [c for c in constraints]
    if verbose:
        logging.log.verbose = True
        piptools_logging.log.verbose = True
    resolved_tree = set()
    resolver = Resolver(
        constraints=constraints, repository=pypi, clear_caches=clear, prereleases=pre
    )
    # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
    hashes = None
    try:
        results = resolver.resolve(max_rounds=PIPENV_MAX_ROUNDS)
        hashes = resolver.resolve_hashes(results)
        resolved_tree.update(results)
    except (NoCandidateFound, DistributionNotFound, HTTPError) as e:
        click_echo(
            "{0}: Your dependencies could not be resolved. You likely have a "
            "mismatch in your sub-dependencies.\n "
            "You can use {1} to bypass this mechanism, then run {2} to inspect "
            "the situation.\n "
            "Hint: try {3} if it is a pre-release dependency."
            "".format(
                crayons.red("Warning", bold=True),
                crayons.red("$ pipenv install --skip-lock"),
                crayons.red("$ pipenv graph"),
                crayons.red("$ pipenv lock --pre"),
            ),
            err=True,
        )
        click_echo(crayons.blue(str(e)), err=True)
        if "no version found at all" in str(e):
            click_echo(
                crayons.blue(
                    "Please check your version specifier and version number. See PEP440 for more information."
                )
            )
        # Clean up before signalling failure to the caller.
        if cleanup_req_dir:
            req_dir.cleanup()
        raise RuntimeError
    if cleanup_req_dir:
        req_dir.cleanup()
    return (resolved_tree, hashes, markers_lookup, resolver)
def do_install_dependencies(dev=False, only=False, bare=False,
                            requirements=False, allow_global=False,
                            ignore_hashes=False):
    """Execute the install functionality.

    dev: also install [dev-packages] (and their VCS deps).
    only: install only the dev set, skipping defaults (used by `lock`).
    bare: suppress informational output.
    requirements: print requirements.txt content to stdout and exit.
    allow_global: install into the system environment, not the virtualenv.
    ignore_hashes: strip hashes from generated requirements before install.
    """
    if requirements:
        bare = True
    # Load the lockfile if it exists, or if only is being used (e.g. lock is being used).
    if only or not project.lockfile_exists:
        if not bare:
            click.echo(
                crayons.yellow('Installing dependencies from Pipfile...'))
        lockfile = split_vcs(project._lockfile)
    else:
        if not bare:
            click.echo(
                crayons.yellow('Installing dependencies from Pipfile.lock...'))
        with open(project.lockfile_location) as f:
            lockfile = split_vcs(json.load(f))
    # Install default dependencies, always.
    deps = lockfile['default'] if not only else {}
    # VCS requirements are installed separately (no hash verification).
    vcs_deps = lockfile.get('default-vcs', {})
    # Add development deps if --dev was passed.
    if dev:
        deps.update(lockfile['develop'])
        vcs_deps.update(lockfile.get('develop-vcs', {}))
    if ignore_hashes:
        # Remove hashes from generated requirements.
        for k, v in deps.items():
            if 'hash' in v:
                del v['hash']
    # Convert the deps to pip-compatible arguments.
    hashed_deps_path = convert_deps_to_pip(deps)
    vcs_deps_path = convert_deps_to_pip(vcs_deps)
    # --requirements was passed.
    if requirements:
        with open(hashed_deps_path) as f:
            click.echo(f.read())
        with open(vcs_deps_path) as f:
            click.echo(f.read())
        sys.exit(0)
    # pip install:
    with spinner():
        c = pip_install(r=hashed_deps_path, ignore_hashes=ignore_hashes,
                        allow_global=allow_global)
    if c.return_code != 0:
        # NOTE(review): 'occured' is a typo ('occurred') in this user-facing
        # message; preserved here since this edit changes comments only.
        click.echo(crayons.red('An error occured while installing!'))
        click.echo(crayons.blue(format_pip_error(c.err)))
        click.echo(
            crayons.yellow(
                'You can supply the --ignore-hashes option to \'pipenv install\' to bypass this feature.'
            ))
        sys.exit(c.return_code)
    if not bare:
        click.echo(crayons.blue(format_pip_output(c.out, r=hashed_deps_path)))
    # Second pass: VCS requirements, always without hash checking.
    with spinner():
        c = pip_install(r=vcs_deps_path, ignore_hashes=True,
                        allow_global=allow_global)
    if c.return_code != 0:
        click.echo(crayons.red('An error occured while installing!'))
        click.echo(crayons.blue(format_pip_error(c.err)))
        click.echo(
            crayons.yellow(
                'You can supply the --ignore-hashes option to pip install to bypass this feature.'
            ))
        sys.exit(c.return_code)
    if not bare:
        click.echo(crayons.blue(format_pip_output(c.out, r=vcs_deps_path)))
    # Cleanup the temp requirements file.
    # NOTE(review): unreachable -- the --requirements branch above already
    # calls sys.exit(0) before reaching this point.
    if requirements:
        os.remove(hashed_deps_path)
        os.remove(vcs_deps_path)
f"Received formatted list of observables. Total observables: {len(ctr_observables)}" )) # MISSION14: Pass to the function properly formatted observables obtained in Step 7. # Hint: Check the function and put correct variable there too. env.print_missing_mission_warn( env.get_line()) # Delete this line when mission is complete. ctr_intel = ctr_enrich_observe(ctr_access_token, 'MISSION14') print(green(f"Received Sightings")) report_time = datetime.now().isoformat() ctr_report_path = here / f"ctr_report_{report_time}.json" print( blue( f"\n==> Found indicators and sightings. Saving info to: {ctr_report_path}" )) with open(ctr_report_path, "w") as file: json.dump(ctr_intel, file, indent=2) ctr_enrich_print_scr_report(ctr_intel) """ Step 8. Use response capabilities of AMP for Endpoints module in CTR to block this malicious file from execution on all Computers in our network. """ print(white("\nStep 8")) # MISSION15: assign function output to correct variable and pass it to function ctr_add_to_amp_scd to perform necessary action. # Hint: make sure to pass this variable to the function in validation section too! env.print_missing_mission_warn( env.get_line()) # Delete this line when mission is complete.
def ctr_enrich_print_scr_report(intel):
    """Pretty-print a CTR enrichment report, one section per module.

    *intel* is the list of per-module results from the CTR enrich/observe
    API; each entry carries 'module', 'module_type_id' and an optional
    'data' payload whose shape depends on the module type.
    """
    print(white("\n==> Here is what CTR has found:"))
    for module in intel:
        print(
            white(
                f"\n==> Module: {module['module']} : {module['module_type_id']}"
            ))
        if module["data"]:
            if module["module"] == "AMP EDR":
                # Indicators first, then the most recent sighting/targets.
                print(
                    blue(
                        f"\n ==> Count of Indicators: {module['data']['indicators']['count']} "
                    ))
                for indicator in module["data"]["indicators"]["docs"]:
                    print(
                        blue(
                            f" ==> {indicator['description']} : {indicator['tags']}"
                        ))
                print(
                    blue(
                        f"\n ==> Count of Sightings: {module['data']['sightings']['count']} "
                    ))
                # Only the first (most recent) sighting is summarised.
                sighting = module['data']['sightings']['docs'][0]
                print(
                    blue(
                        f" ==> Most recent sighting: {sighting['description']}"
                    ))
                if sighting["targets"]:
                    print(
                        blue(
                            f"\n ==> Targets found: {len(sighting['targets'])}"
                        ))
                    target = sighting["targets"][0]
                    print(
                        blue(
                            f" ==> Most recent target: {target['type']} observed: {target['observed_time']['start_time']}"
                        ))
                    for observable in target["observables"]:
                        print(
                            blue(
                                f" ==> Target {observable['type']} : {observable['value']}"
                            ))
                    print(blue(f" ==> Target OS: {target['os']}"))
            elif module["module"] == "AMP File Reputation":
                for key in module["data"].keys():
                    print(
                        blue(
                            f" ==> Count of {key}: {module['data'][key]['count']}"
                        ))
            elif module["module"] == "AMP Global Intelligence":
                for key in module["data"].keys():
                    print(
                        blue(
                            f" ==> Count of {key}: {module['data'][key]['count']}"
                        ))
        else:
            # BUGFIX: the original printed "==> DO DATA"; the intended
            # message for a module that returned nothing is "NO DATA".
            print(blue("\n==> NO DATA"))
def actually_resolve_reps(deps, index_lookup, markers_lookup, project,
                          sources, verbose, clear, pre):
    """Resolve *deps* with pip-tools (pip9 vendored pip).

    Mutates index_lookup / markers_lookup in place and returns
    (resolved_tree, resolver). Raises RuntimeError when resolution fails.
    """
    # Local imports keep the vendored pip9 off the module import path.
    from pip9 import basecommand, req
    from pip9._vendor import requests as pip_requests

    class PipCommand(basecommand.Command):
        """Needed for pip-tools."""
        name = 'PipCommand'

    constraints = []
    # Scratch dir for the per-requirement temp files written below.
    req_dir = tempfile.mkdtemp(prefix='pipenv-', suffix='-requirements')
    for dep in deps:
        if dep:
            if dep.startswith('-e '):
                constraint = req.InstallRequirement.from_editable(dep[len('-e '):])
            else:
                # Round-trip the requirement through a file so pip's
                # parse_requirements can consume it.
                fd, t = tempfile.mkstemp(prefix='pipenv-',
                                         suffix='-requirement.txt',
                                         dir=req_dir)
                with os.fdopen(fd, 'w') as f:
                    f.write(dep)
                constraint = [c for c in req.parse_requirements(t, session=pip_requests)][0]
            # extra_constraints = []
            # Remember which named source a ' -i <url>' requirement used.
            if ' -i ' in dep:
                index_lookup[constraint.name] = project.get_source(url=dep.split(' -i ')[1]).get('name')
            # Normalise marker quoting so it round-trips through TOML.
            if constraint.markers:
                markers_lookup[constraint.name] = str(constraint.markers).replace('"', "'")
            constraints.append(constraint)
    rmtree(req_dir)
    pip_command = get_pip_command()
    pip_args = []
    if sources:
        pip_args = prepare_pip_source_args(sources, pip_args)
    if verbose:
        print('Using pip: {0}'.format(' '.join(pip_args)))
    pip_options, _ = pip_command.parse_args(pip_args)
    session = pip_command._build_session(pip_options)
    pypi = PyPIRepository(pip_options=pip_options, use_json=True, session=session)
    if verbose:
        logging.log.verbose = True
        piptools_logging.log.verbose = True
    resolved_tree = set()
    resolver = Resolver(constraints=constraints, repository=pypi,
                        clear_caches=clear, prereleases=pre)
    # pre-resolve instead of iterating to avoid asking pypi for hashes of editable packages
    try:
        resolved_tree.update(resolver.resolve(max_rounds=PIPENV_MAX_ROUNDS))
    except (NoCandidateFound, DistributionNotFound, HTTPError) as e:
        click.echo(
            '{0}: Your dependencies could not be resolved. You likely have a mismatch in your sub-dependencies.\n '
            'You can use {1} to bypass this mechanism, then run {2} to inspect the situation.'
            ''.format(
                crayons.red('Warning', bold=True),
                crayons.red('$ pipenv install --skip-lock'),
                crayons.red('$ pipenv graph')
            ),
            err=True)
        click.echo(crayons.blue(str(e)), err=True)
        if 'no version found at all' in str(e):
            click.echo(crayons.blue('Please check your version specifier and version number. See PEP440 for more information.'))
        raise RuntimeError
    return resolved_tree, resolver
"""A program that calls functions from Python stdlib and 3rd-party libs. Since we've excluded events from files in installation path, we shouldn't receive any events from stdlib or 3rd-party libs. Neither will C calls since they are not catched by settrace (https://stackoverflow.com/q/16115027/2142577). """ from collections import Counter import crayons import cyberbrain cyberbrain.init() c = Counter() c["red"] += 1 c["blue"] += 1 c["red"] += 1 c.most_common(10) crayons.blue("blue") cyberbrain.register(c)
#!/usr/bin/env python3
"""Print the author's first and last name in magenta and blue."""
import crayons

# (colour function, text) pairs, printed in order.
for colorize, part in ((crayons.magenta, 'Wael'), (crayons.blue, 'ElJarrah')):
    print(colorize(part))
def check(three=None, python=False):
    """Check the environment against the Pipfile's PEP 508 requirements.

    Reports every mismatching marker, then exits non-zero if any failed.
    """
    # Ensure that virtualenv is available (no Pipfile validation needed).
    ensure_project(three=three, python=python, validate=False)
    click.echo(crayons.yellow('Checking PEP 508 requirements...'))
    # The checker module may resolve to a compiled file; rstrip('cdo')
    # trims a trailing .pyc/.pyo suffix back to the .py source path.
    checker_script = pep508checker.__file__.rstrip('cdo')
    command = '{0} {1}'.format(which('python'), checker_script)
    # Run the PEP 508 checker inside the virtualenv and parse its JSON output.
    checker_run = delegator.run(command)
    results = json.loads(checker_run.out)
    # Compare each requirement declared in the Pipfile's [requires] section.
    p = pipfile.Pipfile.load(project.pipfile_location)
    failed = False
    for marker, specifier in p.data['_meta']['requires'].items():
        if marker not in results:
            continue
        try:
            assert results[marker] == specifier
        except AssertionError:
            # Keep checking so every mismatch gets reported.
            failed = True
            click.echo('Specifier {0} does not match {1} ({2}).'.format(
                crayons.green(marker), crayons.blue(specifier),
                crayons.red(results[marker])))
    if failed:
        click.echo(crayons.red('Failed!'))
        sys.exit(1)
    else:
        click.echo(crayons.green('Passed!'))
# If this script is the "main" script, run... if __name__ == "__main__": #TODO: Use the right function to fill the amp_events variable with the AMP events env_lab.print_missing_mission_warn(env_lab.get_line()) #TODO: Use the right function to fill the amp_observables variable with extracted observables from the AMP events env_lab.print_missing_mission_warn(env_lab.get_line()) # Save the MAC addresses of the endpoints where malware executed to a JSON # file. In the ISE Mission we will read this file and quarantine these # endpoints. mac_addresses_path = repository_root / "mission-data/mac-addresses.json" print(blue(f"\n==> Saving MAC addresses to: {mac_addresses_path}")) with open(mac_addresses_path, "w") as file: mac_addresses = [o["mac_address"] for o in amp_observables] json.dump(mac_addresses, file, indent=2) # Save the malware SHA256 hashes to a JSON file. We will use these in the # ThreatGrid Mission. sha256_list_path = repository_root / "mission-data/sha256-list.json" print(blue(f"\n==> Saving SHA256 hashes to: {sha256_list_path}")) #TODO: open a file and write to it, similar to the code in lines 160-162. However, this time use the sha256_list_path, and instead of mac addresses. use the sha256 hashes. (Tip: try to print amp_observables, so that you know what to search for.) env_lab.print_missing_mission_warn(env_lab.get_line()) # Finally, post a message to the Webex Teams Room to brag!!! print(blue("\n==> Posting message to Webex Teams"))
def display_loop_prompt():
    """Show the coloured ':>' prompt and return the input as lowercase tokens."""
    prompt = crayons.cyan(":") + crayons.blue(">") + " "
    raw = input(prompt)
    return raw.lower().split()
except: print("Using Master") changed_files = check_output("git --no-pager diff --name-status HEAD", shell=True) for line in changed_files.decode('utf-8').rstrip().split('\n'): tool_failed = False change = line.split('\t')[0] if change not in ['M', 'A', 'C', 'R', 'X']: continue fs = line.split('\t')[1] if not fs.lower().endswith(".cwl"): continue print(crayons.blue(f"Testing CWL Validation: {fs} \n")) file_validation_status = check_call(f"cwltool --validate {fs}", shell=True) if file_validation_status != 0: print(f'Tool Failed Validation: {fs}') tool_failed = True if file_validation_status == 0: print(crayons.green(f"Tool Passed Validation: {fs}\n")) print(crayons.blue(f"Testing Repo Requirements\n")) with open(fs) as f: for index, line in enumerate(f.readlines()): line = line.rstrip() if index == 3: break if index == 0: