def execute(self, args):
    """Run the command: visit every git-tracked file and update its copyright line."""
    super().execute(args)
    self.git = Executable('git')
    binary_extensions = {'.mo', '.png', '.jpg'}
    for filename in self.get_all_filenames():
        extension = os.path.splitext(filename)[1]
        if extension in binary_extensions:
            # Binary artefacts have no meaningful first text line to rewrite.
            continue
        self.process_file(filename, args.copyright_holder_string)
    return 0
class SyncFiles(Command):
    """Uses the sitecopy program to upload / sync static files to a remote webserver."""
    keyword = 'syncfiles'

    def get_site_name(self):
        """Parse sitecopy.rc in the current directory and return the configured site name.

        :raises DomainException: if sitecopy.rc is absent or contains no site declaration.
        """
        if not os.path.isfile('sitecopy.rc'):
            raise DomainException(message='Could not find a sitecopy config file "sitecopy.rc" in the current directory')
        with open('sitecopy.rc') as config_file:
            for line in config_file:
                # Skip comment lines explicitly. The previous pattern's (?!#) lookahead
                # was a no-op: the preceding .* could backtrack past any '#', so
                # commented-out site declarations were still matched.
                if line.lstrip().startswith('#'):
                    continue
                match = re.match(r'.*site\s+(\S+)', line)
                if match:
                    return match.group(1)
        raise DomainException(message='Cannot find the site name in sitecopy.rc')

    def assemble(self):
        self.parser.add_argument('-f', '--fetch-first', action='store_true', dest='fetch_first',
                                 help='first fetch data from the site to ensure we are in sync')
        self.parser.add_argument('site_name', default=None, nargs='?',
                                 help='the name of the site as listed in sitecopy.rc')

    def execute(self, args):
        """Sync files to the remote site, first fetching remote state when requested or
        when no local sitecopy state exists yet."""
        super().execute(args)
        self.sitecopy = Executable('sitecopy')
        try:
            site_name = args.site_name or self.get_site_name()
            local_info_file = os.path.expanduser(os.path.join('~', '.sitecopy', site_name))
            if args.fetch_first or not os.path.exists(local_info_file):
                if os.path.exists(local_info_file):
                    # sitecopy refuses to --fetch over existing state; clear it first.
                    os.remove(local_info_file)
                self.sitecopy.check_call('-r sitecopy.rc --fetch'.split() + [site_name])
            self.sitecopy.check_call('-r sitecopy.rc --update'.split() + [site_name])
        except subprocess.CalledProcessError as ex:
            raise DomainException(message='Running "%s" failed with exit code %s' % (' '.join(ex.cmd), ex.returncode))
def execute(self, args):
    """Start ngrok: named tunnels when requested, otherwise a plain TCP tunnel
    to a vagrant machine, an ssh host, or a bare port."""
    super(NgrokStart, self).execute(args)
    env_with_path = dict(os.environ)
    env_with_path['PATH'] += '%s%s' % (os.pathsep, os.path.expanduser(args.path))
    ngrok = Executable('ngrok', verbose=True)
    region_option = '--region=%s' % args.region

    if args.named_tunnel:
        return ngrok.check_call(['start', region_option] + args.named_tunnel,
                                env=env_with_path)

    if args.vagrant:
        # Point the tunnel at the vagrant machine's ssh endpoint.
        vagrant_ssh_config = VagrantMachine(args.vagrant).get_ssh_config()
        hostname, port = vagrant_ssh_config['HostName'], vagrant_ssh_config['Port']
    elif args.ssh:
        hostname, port = args.ssh, args.port
    else:
        hostname, port = None, args.port

    hostname_port = ':'.join([part for part in (hostname, port) if part])
    return ngrok.check_call(['tcp', region_option, hostname_port], env=env_with_path)
class UpdateCopyright(Command):
    """Updates the dates found in copyright statements found on the first line of files based on git history."""
    keyword = 'update-copyright'

    def assemble(self):
        super().assemble()
        self.parser.add_argument(
            'copyright_holder_string', nargs='?', default='.*',
            help='requires this copyright holder to be present in a copyright line (by default any comment line with Copyright is matched)')

    def execute(self, args):
        """Rewrite the first-line copyright statement of every tracked text file."""
        super().execute(args)
        self.git = Executable('git')
        for filename in self.get_all_filenames():
            # Skip binary artefacts whose first "line" is not meaningful text.
            if os.path.splitext(filename)[1] not in ['.mo', '.png', '.jpg']:
                self.process_file(filename, args.copyright_holder_string)
        return 0

    def process_file(self, filename, copyright_holder_string):
        """Replace the years in a first-line copyright comment with dates derived from git log.

        Only touches the file when its first line is a #, /* or ' comment containing
        'Copyright' followed by years and the given holder pattern.
        """
        with open(filename, encoding='utf-8') as infile:
            first_line = infile.readline()
            match = re.match(
                r'^(?P<start>(#|/[*]|\')\s*Copyright\s*)(?P<years>[0-9, -]+)(?P<end>.*%s.*$)' % copyright_holder_string,
                first_line)
            if match:
                new_line = '%s%s %s\n' % (match.group('start'), self.get_date_string(filename), match.group('end'))
                # Write a sibling temp file, then atomically replace the original.
                out_filename = '%s.t' % filename
                with open(out_filename, 'w', encoding='utf-8') as outfile:
                    outfile.write(new_line)
                    shutil.copyfileobj(infile, outfile)
                shutil.move(out_filename, filename)

    def get_all_filenames(self):
        """Return all git-tracked filenames.

        Splits on newlines (not arbitrary whitespace) so filenames containing
        spaces are not broken into pieces.
        """
        return [filename.decode('utf-8')
                for filename in self.git.check_output(['ls-files']).splitlines()]

    def get_date_string(self, filename):
        """Return 'first-last' for histories spanning more than 3 years, else a
        comma-separated list of the years the file was touched."""
        years = sorted({
            date[:4].decode('utf-8')
            for date in self.git.check_output(
                'log --pretty=format:%ad --date=short --follow'.split() + [filename]).split()
        })
        if len(years) > 3:
            date_string = '%s-%s' % (years[0], years[-1])
        else:
            date_string = ', '.join(years)
        return date_string
def do_shell_expansions(self, directory, commandline):
    """Expand '$(cmd args...)' items in commandline by running them and splicing
    each line of their output into the returned command.

    :param directory: directory in which expansion commands are run.
    :param commandline: list of argument strings.
    :return: the commandline with every $(...) item replaced by its output lines.
    """
    replaced_command = []
    for i in commandline:
        if i.startswith('$(') and i.endswith(')'):
            # Split the whole embedded command. Previously only i[2] (a single
            # character) was used as the command name, which broke every command
            # longer than one letter.
            shellcommand, *shell_args = i[2:-1].split(' ')
            output = Executable(shellcommand).Popen(shell_args, cwd=directory,
                                                    stdout=subprocess.PIPE).communicate()[0]
            for line in output.splitlines():
                # NOTE(review): output is bytes unless Popen runs in text mode —
                # confirm downstream callers expect bytes items here.
                replaced_command.append(line)
        else:
            replaced_command.append(i)
    return replaced_command
def create_database(self, super_user_name=None):
    """Create the database via mysqladmin and, if a user is configured, grant it
    full privileges and flush the privilege tables."""
    login = self.login_args(login_username=super_user_name)
    Executable('mysqladmin').check_call(['create'] + login + [self.database_name])
    if self.user_name:
        grant_statement = 'grant all privileges on %s.* to %s;' % (self.database_name, self.user_name)
        Executable('mysql').check_call(login + ['-e', grant_statement])
        Executable('mysqladmin').check_call(['flush-privileges'] + login)
    return 0
def execute(self, args):
    """Upload/sync files to the configured site using sitecopy, fetching remote
    state first when requested or when no local state exists yet."""
    super().execute(args)
    self.sitecopy = Executable('sitecopy')
    try:
        site_name = args.site_name or self.get_site_name()
        stored_state = os.path.expanduser(os.path.join('~', '.sitecopy', site_name))
        needs_fetch = args.fetch_first or not os.path.exists(stored_state)
        if needs_fetch:
            if os.path.exists(stored_state):
                # sitecopy will not fetch over existing local state.
                os.remove(stored_state)
            self.sitecopy.check_call(['-r', 'sitecopy.rc', '--fetch', site_name])
        self.sitecopy.check_call(['-r', 'sitecopy.rc', '--update', site_name])
    except subprocess.CalledProcessError as ex:
        raise DomainException(message='Running "%s" failed with exit code %s' % (' '.join(ex.cmd), ex.returncode))
def restore_all_databases(self, filename):
    """Restore all databases by piping a gzipped SQL dump into psql.

    :param filename: path to a gzip file containing SQL (as produced by a dump-all).
    """
    with closing(gzip.open(filename, 'rb')) as zipped_file:
        proc = Executable('psql').Popen(['-d', 'template1'], stdin=subprocess.PIPE)
        for line in zipped_file:
            proc.stdin.write(line)
        # Close the pipe so psql sees EOF, and wait for it to finish. Previously the
        # pipe was left open and the child could still be running when we returned.
        proc.stdin.close()
        proc.wait()
    return 0
def model_examples(fixture):
    """Run the stand-alone tutorial model examples under nosetests."""
    # These examples are built to run outside of our infrastructure, hence have to be run like this:
    for example in ('modeltests1.py', 'modeltests2.py', 'modeltests3.py'):
        example_path = 'reahl/doc/examples/tutorial/%s' % example
        Executable('nosetests').check_call(['--first-package-wins', example_path])
def create_db_user(self, super_user_name=None, create_with_password=True):
    """Create the database user with createuser; the P flag makes createuser
    prompt for a password when create_with_password is True."""
    flags = '-DSRl' + ('P' if create_with_password else '')
    with as_domain_exception(ExecutableNotInstalledException):
        Executable('createuser').check_call(
            [flags] + self.login_args(login_username=super_user_name) + [self.user_name])
    return 0
def ssh_config_file(self):
    """Yield a named temp file holding `vagrant ssh-config` output for this machine.

    The file is created with delete=False so the subprocess can write to it;
    it is removed explicitly once the caller is done.
    """
    try:
        with tempfile.NamedTemporaryFile('w', delete=False) as config_file:
            Executable('vagrant').check_call(['ssh-config', self.machine_name],
                                             stdout=config_file)
            yield config_file
    finally:
        os.remove(config_file.name)
def drop_database(self, super_user_name=None, yes=False):
    """Drop the database with mysqladmin; yes=True adds -f to skip confirmation."""
    cmd_args = self.login_args(login_username=super_user_name)
    if yes:
        cmd_args = cmd_args + ['-f']
    Executable('mysqladmin').check_call(['drop'] + cmd_args + [self.database_name])
    return 0
def restore_all_databases(self, filename, super_user_name=None):
    """Restore all databases by piping a gzipped SQL dump into mysql.

    :param filename: path to a gzip file containing SQL.
    :param super_user_name: optional user to connect as.
    """
    with closing(gzip.open(filename, 'rb')) as zipped_file:
        proc = Executable('mysql').Popen(
            self.login_args(login_username=super_user_name), stdin=subprocess.PIPE)
        for line in zipped_file:
            proc.stdin.write(line)
        # Signal EOF and wait; previously the pipe stayed open and mysql could
        # still be consuming input after this method returned.
        proc.stdin.close()
        proc.wait()
    return 0
def drop_database(self, yes=False):
    """Drop the database with dropdb; interactive (-i) unless yes=True."""
    trailing = [self.database_name] if yes else ['-i', self.database_name]
    Executable('dropdb').check_call(self.login_args + trailing)
    return 0
def backup_database(self, directory):
    """Dump the database in custom format (pg_dump -Fc) to
    <directory>/<dbname>.psql.<Weekday>."""
    weekday = date.today().strftime('%A')
    full_path = os.path.join(directory, '%s.psql.%s' % (self.database_name, weekday))
    with io.open(full_path, 'w') as destination_file:
        dump_args = ['-Fc', '-o'] + self.login_args + [self.database_name]
        Executable('pg_dump').check_call(dump_args, stdout=destination_file)
    return 0
def create_database(self, super_user_name=None):
    """Create a unicode database from template0, owned by the configured user if any."""
    cmd_args = ['-Eunicode'] + self.login_args(login_username=super_user_name) + ['-T', 'template0']
    if self.user_name:
        cmd_args += ['-O', self.user_name]
    cmd_args.append(self.database_name)
    with as_domain_exception(ExecutableNotInstalledException):
        Executable('createdb').check_call(cmd_args)
    return 0
def restore_all_databases(self, filename, super_user_name=None):
    """Restore all databases by piping a gzipped SQL dump into psql (connected to
    the 'postgres' maintenance database).

    :param filename: path to a gzip file containing SQL.
    :param super_user_name: optional user to connect as.
    """
    with closing(gzip.open(filename, 'rb')) as zipped_file:
        with as_domain_exception(ExecutableNotInstalledException):
            proc = Executable('psql').Popen(
                self.login_args(login_username=super_user_name) + ['-d', 'postgres'],
                stdin=subprocess.PIPE)
        for line in zipped_file:
            proc.stdin.write(line)
        # Close the pipe so psql sees EOF, and wait for it to exit; without this the
        # restore could still be running when this method returned.
        proc.stdin.close()
        proc.wait()
    return 0
def execute(self, args):
    """Run the development web server, optionally under a restarting supervisor.

    Returns 0; Ctrl+C (Ctrl+Break on Windows) shuts the server down.
    """
    project = Project.from_file(self.workspace, self.workspace.startup_directory)
    with project.paths_set():
        try:
            if args.restart:
                ServerSupervisor(sys.argv[1:] + ['--dont-restart'],
                                 args.max_seconds_between_restarts,
                                 ['.'] + args.monitored_directories).run()
            else:
                config_directory = args.config_directory
                six.print_('\nUsing config from %s\n' % config_directory, flush=True)

                try:
                    reahl_server = ReahlWebServer.fromConfigDirectory(config_directory, args.port)
                except pkg_resources.DistributionNotFound as ex:
                    terminate_keys = 'Ctrl+Break' if platform.system() == 'Windows' else 'Ctrl+C'
                    six.print_('\nPress %s to terminate\n\n' % terminate_keys, flush=True)
                    raise CouldNotConfigureServer(ex)

                reahl_server.start(connect=True)
                # BUG FIX: flush=True used to be embedded inside the message string
                # itself ('... (config=%s, flush=True)'); it is meant to be a keyword
                # argument to print.
                six.print_('\n\nServing http on port %s, https on port %s (config=%s)' %
                           (args.port, args.port + 363, config_directory), flush=True)
                terminate_keys = 'Ctrl+Break' if platform.system() == 'Windows' else 'Ctrl+C'
                six.print_('\nPress %s to terminate\n\n' % terminate_keys, flush=True)
                notify = Executable('notify-send')
                try:
                    # Best-effort desktop notification; ignore any failure
                    # (e.g. notify-send not installed).
                    notify.call(['Reahl', 'Server restarted'])
                except Exception:
                    pass
                reahl_server.wait_for_server_to_complete()
        except KeyboardInterrupt:
            six.print_('\nShutting down', flush=True)
        except CouldNotConfigureServer as ex:
            six.print_(ex, flush=True)
    return 0
def backup_database(self, directory, super_user_name=None):
    """Dump the database in custom format (pg_dump -Fc) to
    <directory>/<dbname>.psql.<Weekday>."""
    weekday = date.today().strftime('%A')
    full_path = os.path.join(directory, '%s.psql.%s' % (self.database_name, weekday))
    with io.open(full_path, 'w') as destination_file:
        dump_args = ['-Fc', '-o'] + self.login_args(login_username=super_user_name) + [self.database_name]
        with as_domain_exception(ExecutableNotInstalledException):
            Executable('pg_dump').check_call(dump_args, stdout=destination_file)
    return 0
def drop_database(self, super_user_name=None, yes=False):
    """Drop the database (dropdb --if-exists); interactive (-i) unless yes=True."""
    cmd_args = self.login_args(login_username=super_user_name) + ['--if-exists']
    if yes:
        cmd_args.append(self.database_name)
    else:
        cmd_args += ['-i', self.database_name]
    with as_domain_exception(ExecutableNotInstalledException):
        Executable('dropdb').check_call(cmd_args)
    return 0
def new_git_directory(self, initialised=True):
    """Create a temp directory; run a quiet `git init` in it unless initialised=False."""
    git_directory = temp_dir()
    if initialised:
        # Silence git's output; we only care about the resulting repository.
        with open(os.devnull, 'w') as devnull:
            Executable('git').check_call(['init'], cwd=git_directory.name,
                                         stdout=devnull, stderr=devnull)
    return git_directory
def function(self, project, args):
    """Run args.shell_commandline in the project directory, optionally inside a
    generated setup.py context."""
    @contextmanager
    def no_setup_py():
        # Placeholder context used when no setup.py generation is requested.
        yield

    setup_context = project.generated_setup_py if args.generate_setup_py else no_setup_py
    with setup_context():
        command = self.do_shell_expansions(project.directory, args.shell_commandline)
        return Executable(command[0]).call(command[1:], cwd=project.directory)
def create_db_user(self, super_user_name=None, create_with_password=True):
    """Create the MySQL user, identified either by the configured password or by
    the auth_socket mechanism."""
    super().create_db_user(super_user_name=super_user_name,
                           create_with_password=create_with_password)
    if create_with_password:
        identified = 'by \'%s\'' % self.password
    else:
        identified = 'with \'auth_sock\''
    create_statement = 'create user %s identified %s;' % (self.user_name, identified)
    Executable('mysql').check_call(
        self.login_args(login_username=super_user_name) + ['-e', create_statement])
    return 0
class SlaveProcess(object):
    """Wraps a child process (spawned via Executable) with lifecycle helpers.

    Supports starting, terminating (with a kill fallback on timeout) and
    restarting, and registers an atexit hook so the child is not left orphaned
    when this process exits.
    """
    def __init__(self, command, args):
        # self.process holds the Popen object once start() has been called.
        self.process = None
        self.args = args
        self.executable = Executable(command)

    def terminate(self, timeout=5):
        """Ask the process to terminate; kill it if it has not died within `timeout` seconds."""
        logging.getLogger(__name__).debug('Terminating process with PID[%s]' % self.process.pid)
        self.process.terminate()
        self.wait_to_die(timeout=timeout)

    def wait_to_die(self, timeout):
        """Wait up to `timeout` seconds for the process to exit, then kill it."""
        # Python 2's Popen.wait() has no timeout parameter, hence the two code paths
        # and the project-defined Py2TimeoutExpired stand-in exception.
        TimeoutExpired = Py2TimeoutExpired if six.PY2 else subprocess.TimeoutExpired
        try:
            if six.PY2:
                self.py2_process_wait_within_timeout(timeout)
            else:
                self.process.wait(timeout=timeout)
        except TimeoutExpired:
            # Did not exit in time: escalate to an unconditional kill.
            self.process.kill()

    def py2_process_wait_within_timeout(self, timeout):
        """Emulate wait(timeout=...) on Python 2 by waiting in a helper thread."""
        thread = Thread(target=self.process.wait)
        thread.start()
        thread.join(timeout)
        # If the thread is still alive, the wait() has not returned within timeout.
        if thread.isAlive():
            raise Py2TimeoutExpired()

    def spawn_new_process(self):
        # Inherit (a copy of) the current environment.
        return self.executable.Popen(self.args, env=os.environ.copy())

    def is_running(self):
        # poll() returns None while the process is still alive.
        return self.process.poll() is None

    def start(self):
        """Spawn the child and register a cleanup hook that kills it at interpreter exit."""
        self.process = self.spawn_new_process()
        self.register_orphan_killer(self.create_orphan_killer(self.process))
        logging.getLogger(__name__).debug('Starting process with PID[%s]' % self.process.pid)

    def create_orphan_killer(self, process):
        """Return a zero-arg callable that kills `process` if it is still alive.

        The process is bound via functools.partial so the killer keeps targeting
        the process it was created for, even after self.process is reassigned.
        """
        def kill_orphan_on_exit(possible_orphan_process):
            logging.getLogger(__name__).debug('Cleanup: ensuring process with PID[%s] has terminated' % possible_orphan_process.pid)
            try:
                possible_orphan_process.kill()
                logging.getLogger(__name__).debug('Had to kill process(orphan) with PID[%s]' % possible_orphan_process.pid)
            except (OSError if six.PY2 else ProcessLookupError):
                # Already gone: kill() raises when the process no longer exists.
                logging.getLogger(__name__).debug('Process with PID[%s] seems terminated already, no need to kill it' % possible_orphan_process.pid)
        return functools.partial(kill_orphan_on_exit, process)

    def register_orphan_killer(self, kill_function):
        atexit.register(kill_function)

    def restart(self):
        self.terminate()
        self.start()
def function(self, project, options, args):
    """Push this project's built package to the devpi target given as args[0]."""
    if not args:
        raise Exception('You have to supply the destination of the push as <target_spec>')
    package_spec = '%s-%s' % (project.project_name, project.version_for_setup())
    return Executable('devpi').check_call(['push', package_spec, args[0]],
                                          cwd=project.directory)
def backup_database(self, directory, super_user_name=None):
    """Dump one database with mysqldump into <directory>/<dbname>.<Weekday>.sql.gz.

    :raises subprocess.CalledProcessError: if mysqldump exits non-zero, so a failed
        dump is not silently written out as a (truncated) backup.
    """
    today = date.today()
    filename = '%s.%s.sql.gz' % (self.database_name, today.strftime('%A'))
    full_path = os.path.join(directory, filename)
    with closing(gzip.open(full_path, 'wb')) as zipped_file:
        proc = Executable('mysqldump').Popen(
            self.login_args(login_username=super_user_name) + [self.database_name],
            stdout=subprocess.PIPE)
        for line in proc.stdout:
            zipped_file.write(line)
        # Previously the exit status was never checked: a failing dump produced a
        # truncated .gz file and this method still reported success.
        if proc.wait() != 0:
            raise subprocess.CalledProcessError(proc.returncode, 'mysqldump')
    return 0
def function(self, project, options, args):
    """Run the given shell command (args) in the project directory, optionally
    inside a generated setup.py context."""
    if not args:
        print('No shell command specified to run', file=sys.stderr)
        return 1

    @contextmanager
    def no_setup_py():
        # Placeholder context used when no setup.py generation is requested.
        yield

    setup_context = project.generated_setup_py if options.generate_setup_py else no_setup_py
    with setup_context():
        command = self.do_shell_expansions(project.directory, args)
        return Executable(command[0]).call(command[1:], cwd=project.directory)
class DockerContainer:
    """Convenience wrapper around the `docker` CLI for querying a running container."""

    def __init__(self, container_name):
        self.container_name = container_name
        self.docker = Executable('docker', verbose=True)

    def get_ssh_args(self):
        """SSH options that disable host-key checking (container keys are ephemeral)."""
        return ['-o', 'UserKnownHostsFile=/dev/null',
                '-o', 'StrictHostKeyChecking=no']

    def _run_for_output(self, docker_args):
        # Run docker and return its stripped stdout as text.
        result = self.docker.run(docker_args, stdout=subprocess.PIPE, encoding='utf-8')
        return result.stdout.strip()

    @property
    def ip_address(self):
        """IP address of the container on its network(s), via `docker inspect`."""
        inspect_format = '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}'
        return self._run_for_output(['inspect', '-f', inspect_format, self.container_name])

    @property
    def user_name(self):
        """Value of the REAHL_USER environment variable inside the container."""
        return self._run_for_output(['container', 'exec', self.container_name,
                                     'printenv', 'REAHL_USER'])

    @property
    def ssh_to(self):
        """'user@ip' string suitable for ssh'ing into the container."""
        return '%s@%s' % (self.user_name, self.ip_address)
def backup_all_databases(self, directory):
    """Dump the whole cluster with pg_dumpall into <directory>/<host>-all.<Weekday>.sql.gz.

    :raises subprocess.CalledProcessError: if pg_dumpall exits non-zero, instead of
        silently leaving a truncated backup behind.
    """
    today = date.today()
    hostname = self.host
    if hostname == 'localhost':
        # Use the real machine name in the backup filename rather than 'localhost'.
        hostname = socket.gethostname()
    filename = '%s-all.%s.sql.gz' % (hostname, today.strftime('%A'))
    full_path = os.path.join(directory, filename)
    with closing(gzip.open(full_path, 'wb')) as zipped_file:
        proc = Executable('pg_dumpall').Popen(['-o'] + self.login_args,
                                              stdout=subprocess.PIPE)
        for line in proc.stdout:
            zipped_file.write(line)
        # Previously the exit status was ignored; fail loudly on a bad dump.
        if proc.wait() != 0:
            raise subprocess.CalledProcessError(proc.returncode, 'pg_dumpall')
    return 0
def new_chrome_options(self):
    """Build selenium chrome Options configured for our test runs."""
    from selenium.webdriver.chrome.options import Options
    options = Options()
    arguments = [
        '--disable-preconnect',
        '--dns-prefetch-disable',
        # '--start-maximized'  # This breaks xpra pair programming currently.
        '--no-sandbox',  # Needed to be able to run a user-installed version of chromium on travis
    ]
    for argument in arguments:
        options.add_argument(argument)
    # To run a custom-installed chromium as picked up by the PATH
    options.binary_location = Executable('chromium-browser').executable_file
    #--enable-http-pipelining
    #--learning
    #--single-process
    return options