def hook_install(self):
    cfg = self.config
    self.hook_uninstall()
    self.generate_locales((u'fr_CH.UTF-8',))
    try_makedirs(u'/etc/mysql')
    debconf, mysql = u'debconf-set-selections', u'mysql-server mysql-server'
    # Tip : http://ubuntuforums.org/showthread.php?t=981801
    self.cmd(debconf, input=u'{0}/root_password select {1}'.format(mysql, cfg.mysql_root_password))
    self.cmd(debconf, input=u'{0}/root_password_again select {1}'.format(mysql, cfg.mysql_root_password))
    self.install_packages(WebuiHooks.PACKAGES)
    self.restart_ntp()
    self.info(u'Import Web UI database and create user')
    hostname = socket.gethostname()
    self.cmd(u'service mysql start', fail=False)
    self.mysql_do(u"DROP USER ''@'localhost'; DROP USER ''@'{0}';".format(hostname), fail=False)
    self.mysql_do(u"GRANT ALL PRIVILEGES ON *.* TO 'root'@'%%' WITH GRANT OPTION;")
    self.mysql_do(u'DROP DATABASE IF EXISTS webui')
    self.mysql_do(cli_input=open(self.local_config.site_database_file, u'r', u'utf-8').read())
    self.mysql_do(u"GRANT ALL ON webui.* TO 'webui'@'%%' IDENTIFIED BY '{0}';".format(cfg.mysql_user_password))
    self.info(u'Configure Apache 2')
    self.cmd(u'a2enmod rewrite')
    self.info(u'Copy and pre-configure Web UI')
    rsync(u'www/', self.local_config.site_directory, archive=True, delete=True, exclude_vcs=True, recursive=True)
    chown(self.local_config.site_directory, DAEMON_USER, DAEMON_GROUP, recursive=True)
    self.local_config.encryption_key = WebuiHooks.randpass(32)
    self.info(u'Expose Apache 2 service')
    self.open_port(80, u'TCP')
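# The two debconf pre-seed lines piped to debconf-set-selections above expand to the following
# (hypothetical password value shown for illustration only, not part of the original charm):
#
#   mysql-server mysql-server/root_password select s3cr3t
#   mysql-server mysql-server/root_password_again select s3cr3t
#
# which lets the mysql-server package install non-interactively with the configured root password.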
def test_transcode_the_media_assets(self):
    with mock.patch('encodebox.celeryconfig.CELERY_ALWAYS_EAGER', True, create=True):
        from encodebox import tasks
        media_filenames = sorted(f for f in os.listdir(MEDIA_INPUTS_DIRECTORY) if not f.startswith('.git'))
        for index, filename in enumerate(media_filenames, 1):
            index, name = unicode(index), basename(filename)
            in_relpath = join('2', index, 'uploaded', name)
            in_abspath = join(LOCAL_DIRECTORY, in_relpath)
            unguessable = generate_unguessable_filename(SETTINGS['filenames_seed'], name)
            try_makedirs(dirname(in_abspath))
            shutil.copy(join(MEDIA_INPUTS_DIRECTORY, filename), in_abspath)
            tasks.transcode(json.dumps(in_relpath))
            ok_(exists(join(LOCAL_DIRECTORY, '2', index, 'completed', name)))
            ok_(self.is_empty(join(LOCAL_DIRECTORY, '2', index, 'failed')))
            ok_(self.is_empty(join(LOCAL_DIRECTORY, '2', index, 'uploaded')))
            ok_(exists(join(REMOTE_DIRECTORY, '2', index, unguessable + '.smil')))
            rsync(source=join(REMOTE_DIRECTORY, '2', index), destination=join(MEDIA_REMOTE_DIRECTORY, filename),
                  destination_is_dir=True, archive=True, delete=True, makedest=True, recursive=True)
def download_media(self, media, destination_path):
    u"""
    Download a media asset by rsync-ing its directory from the shared storage mount point of the orchestrator!
    """
    # FIXME detect name based on hostname?
    os.chmod(self.id_rsa, 0600)
    api_host, local_cfg = self.api_host, self.api_local_config
    src_path = local_cfg.storage_medias_path(media)
    if not src_path:
        raise ValueError(to_bytes(u'Unable to retrieve shared storage medias directory.'))
    # Mirror the remote directory of the media from the source directory of the shared storage
    rsync(u'{0}:{1}'.format(api_host, os.path.dirname(src_path)), destination_path, cli_output=True,
          makedest=True, archive=True, progress=True, rsync_path=u'sudo rsync',
          extra='ssh -i {0}'.format(self.id_rsa))
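# Hypothetical usage sketch (the `hooks` instance, `media` object and destination path below are
# assumptions for illustration, not part of the original module):
#
#   hooks.download_media(media, u'/tmp/oscied-download/')
#
# This mirrors <api_host>:<dirname(storage_medias_path(media))> into the local destination over
# SSH, authenticating with the charm's private key (id_rsa) and running `sudo rsync` remotely.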
def hook_install(self):
    local_cfg = self.local_config
    self.hook_uninstall()
    self.generate_locales((u'fr_CH.UTF-8',))
    self.install_packages(OrchestraHooks.PACKAGES + OrchestraHooks.JUJU_PACKAGES, ppas=OrchestraHooks.PPAS)
    self.restart_ntp()
    self.info(u'Copy Orchestra and the local charms repository of OSCIED')
    rsync(local_cfg.api_path, local_cfg.site_directory, **self.rsync_kwargs)
    chown(local_cfg.site_directory, DAEMON_USER, DAEMON_GROUP, recursive=True)
    self.info(u'Expose RESTful API, MongoDB & RabbitMQ services')
    self.open_port(80, u'TCP')     # Orchestra RESTful API
    self.open_port(27017, u'TCP')  # MongoDB port for mongod and mongos instances
    #self.open_port(27018, u'TCP')  # MongoDB port when running with the shardsvr setting
    #self.open_port(27019, u'TCP')  # MongoDB port when running with the configsvr setting
    #self.open_port(28017, u'TCP')  # MongoDB port for the web status page. This is always +1000
    self.open_port(5672, u'TCP')   # RabbitMQ service
def upload_media(self, filepath, backup_in_remote=True):
    u"""Upload a media asset by rsync-ing the local file to the shared storage mount point of the orchestrator!"""
    # FIXME detect name based on hostname?
    os.chmod(self.id_rsa, 0600)
    api_host, local_cfg = self.api_host, self.api_local_config
    bkp_path = local_cfg.storage_uploads_path + u'_bkp/'
    dst_path = local_cfg.storage_uploads_path
    if not dst_path:
        raise ValueError(to_bytes(u'Unable to retrieve shared storage uploads directory.'))
    if backup_in_remote:
        # Mirror the local file into a 'backup' directory on the shared storage, then into the destination directory
        rsync(filepath, u'{0}:{1}'.format(api_host, bkp_path), cli_output=True, makedest=True, archive=True,
              progress=True, rsync_path=u'sudo rsync', extra='ssh -i {0}'.format(self.id_rsa))
        sync_bkp_to_upload = u'sudo rsync -ah --progress {0} {1}'.format(bkp_path, dst_path)
        ssh(api_host, cli_output=True, id=self.id_rsa, remote_cmd=sync_bkp_to_upload)
    else:
        # Mirror the local file into the destination directory of the shared storage
        rsync(filepath, u'{0}:{1}'.format(api_host, dst_path), cli_output=True, makedest=True, archive=True,
              progress=True, rsync_path=u'sudo rsync', extra='ssh -i {0}'.format(self.id_rsa))
    ssh(api_host, id=self.id_rsa, remote_cmd=u'sudo chown www-data:www-data {0} -R'.format(dst_path))
    return u'{0}://{1}/{2}/uploads/{3}'.format(u'glusterfs', local_cfg.storage_address, local_cfg.storage_mountpoint,
                                               os.path.basename(filepath))
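# Hypothetical usage sketch (the instance name and file path are assumptions for illustration):
#
#   uri = hooks.upload_media(u'/tmp/movie.mp4')
#   # uri == u'glusterfs://<storage_address>/<storage_mountpoint>/uploads/movie.mp4'
#
# With backup_in_remote=True the file is first mirrored into <storage_uploads_path>_bkp/ on the
# shared storage, then copied into the uploads directory by a remote `sudo rsync` over SSH.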
def process_role(role, roles):
    directory = ROLES_TARGET_DIRECTORY / f'ansible-role-{role}'
    if not directory.exists():
        print('Create role', role)
        try:
            rsync(LIBRARY_DIRECTORY, directory, destination_is_dir=True)
            os.chdir(directory)
            paths = PATHS + [f'roles/{r}' for r in roles - {role}]
            print('\tFiltering')
            subprocess.check_output([
                'git', 'filter-branch', '--force', '--index-filter',
                f"git rm --cached --ignore-unmatch -r {' '.join(paths)}",
                '--prune-empty', '--tag-name-filter', 'cat', '--', '--all'
            ])
            for path in (Path('roles') / role).glob('*'):
                print('\tMove directory', path.name)
                subprocess.check_output(['git', 'mv', path, path.name])
            subprocess.check_output(['git', 'clean', '-f', '-d'])
            print('\tGenerate README')
            filesystem.from_template(README_TEMPLATE, 'README.md', values={
                'has_meta': Path('meta').exists(),
                'role': role
            }, jinja2=True)
            subprocess.check_output(['git', 'add', 'README.md'])
            subprocess.check_output(['git', 'commit', '-m', MESSAGE])
            print('\tJob done!')
        except Exception:
            filesystem.remove(directory, recursive=True)
            raise
    print('Push role', role)
    os.chdir(directory)
    url = create_github_repo(role)
    subprocess.check_call(['git', 'remote', 'remove', 'origin'])
    subprocess.check_call(['git', 'remote', 'add', 'origin', url])
    subprocess.check_call(['git', 'push', '--all'])
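# Hypothetical driver sketch (the role names are assumptions, not part of the original script):
# the full set of roles is passed so that every other role's roles/<r> path is filtered out of
# the extracted repository's history.
#
#   roles = {'ntp', 'postgresql', 'redis'}
#   for role in sorted(roles):
#       process_role(role, roles)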
def transcode(in_relpath_json):
    u"""Convert an input media file to 3 (SD) or 5 (HD) output files."""
    logger = get_task_logger(u'encodebox.tasks.transcode')
    report = None
    in_abspath = None
    failed_abspath = None
    temporary_directory = None
    outputs_directory = None
    final_state = states.FAILURE
    final_url = None
    try:
        settings = load_settings()
        in_relpath = json.loads(in_relpath_json)
        in_abspath = join(settings[u'local_directory'], in_relpath)
        try:
            in_directories = in_relpath.split(os.sep)
            assert len(in_directories) == 4
            publisher_id = in_directories[0]
            product_id = in_directories[1]
            assert in_directories[2] == u'uploaded'
            filename = in_directories[3]
            name, extension = splitext(filename)
        except:
            raise ValueError(to_bytes(
                u'Input file path does not respect template publisher_id/product_id/uploaded/filename'))

        # Generate an unguessable filename using a seed and the original filename
        name = generate_unguessable_filename(settings[u'filenames_seed'], filename)

        completed_abspath = join(settings[u'local_directory'], publisher_id, product_id, u'completed', filename)
        failed_abspath = join(settings[u'local_directory'], publisher_id, product_id, u'failed', filename)
        temporary_directory = join(settings[u'local_directory'], publisher_id, product_id, u'temporary', filename)
        outputs_directory = join(settings[u'local_directory'], publisher_id, product_id, u'outputs', filename)
        remote_directory = join(settings[u'remote_directory'], publisher_id, product_id)
        remote_url = settings[u'remote_url'].format(publisher_id=publisher_id, product_id=product_id, name=name)

        report = TranscodeProgressReport(settings[u'api_servers'], publisher_id, product_id, filename,
                                         getsize(in_abspath), logger)
        report.send_report(states.STARTED, counter=0)

        logger.info(u'Create outputs directories')
        for path in (completed_abspath, failed_abspath, temporary_directory, outputs_directory):
            shutil.rmtree(path, ignore_errors=True)
        try_makedirs(temporary_directory)
        try_makedirs(outputs_directory)

        resolution = get_media_resolution(in_abspath)
        if not resolution:
            raise IOError(to_bytes(u'Unable to detect resolution of video "{0}"'.format(in_relpath)))

        quality = u'hd' if resolution[HEIGHT] >= HD_HEIGHT else u'sd'
        template_transcode_passes = settings[quality + u'_transcode_passes']
        template_smil_filename = settings[quality + u'_smil_template']
        logger.info(u'Media {0} {1}p {2}'.format(quality.upper(), resolution[HEIGHT], in_relpath))

        logger.info(u'Generate SMIL file from template SMIL file')
        from_template(template_smil_filename, join(outputs_directory, name + u'.smil'), {u'name': name})

        logger.info(u'Generate transcoding passes from templated transcoding passes')
        transcode_passes = passes_from_template(template_transcode_passes, input=in_abspath, name=name,
                                                out=outputs_directory, tmp=temporary_directory)
        report.transcode_passes = transcode_passes

        logger.info(u'Execute transcoding passes')
        for counter, transcode_pass in enumerate(transcode_passes, 1):
            if transcode_pass[0] in (u'ffmpeg', u'x264'):
                encoder_module = globals()[transcode_pass[0]]
                for statistics in encoder_module.encode(transcode_pass[1], transcode_pass[2], transcode_pass[3]):
                    status = statistics.pop(u'status').upper()
                    if status == u'PROGRESS':
                        for info in (u'output', u'returncode', u'sanity'):
                            statistics.pop(info, None)
                        report.send_report(states.ENCODING, counter=counter, statistics=statistics)
                    elif status == u'ERROR':
                        raise RuntimeError(statistics)
            else:
                try:
                    check_call(transcode_pass)
                except OSError:
                    raise OSError(to_bytes(u'Missing encoder ' + transcode_pass[0]))

        logger.info(u'Move the input file to the completed directory and send outputs to the remote host')
        move(in_abspath, completed_abspath)
        try:
            report.send_report(states.TRANSFERRING)
            is_remote = u':' in remote_directory
            if is_remote:
                # Create directory in remote host
                username_host, directory = remote_directory.split(u':')
                username, host = username_host.split(u'@')
                ssh_client = paramiko.SSHClient()
                ssh_client.load_system_host_keys()
                ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())  # FIXME man-in-the-middle attack
                ssh_client.connect(host, username=username)
                ssh_client.exec_command(u'mkdir -p "{0}"'.format(directory))
            else:
                # Create directory in local host
                try_makedirs(remote_directory)
            rsync(source=outputs_directory, destination=remote_directory, source_is_dir=True,
                  destination_is_dir=True, archive=True, progress=True, recursive=True,
                  extra=u'ssh' if is_remote else None)
            final_state, final_url = states.SUCCESS, remote_url
        except Exception as e:
            logger.exception(u'Transfer of outputs to remote host failed')
            final_state = states.TRANSFER_ERROR
            with open(join(outputs_directory, u'transfer-error.log'), u'w', u'utf-8') as log:
                log.write(repr(e))
    except Exception as e:
        logger.exception(u'Transcoding task failed')
        try:
            logger.info(u'Report the error by e-mail')
            send_error_email(exception=e, filename=in_abspath, settings=settings)
        except:
            logger.exception(u'Unable to report the error by e-mail')
        logger.info(u'Move the input file to the failed directory and remove the outputs')
        if in_abspath and failed_abspath:
            move(in_abspath, failed_abspath)
        if outputs_directory and exists(outputs_directory):
            shutil.rmtree(outputs_directory)
        raise
    finally:
        if report:
            report.send_report(final_state, url=final_url)
        logger.info(u'Remove the temporary files')
        if temporary_directory and exists(temporary_directory):
            shutil.rmtree(temporary_directory)
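# Hypothetical invocation sketch (the publisher/product identifiers and the filename are
# assumptions): the relative path handed to the task is JSON-encoded and must follow the layout
# asserted above, i.e. publisher_id/product_id/uploaded/filename.
#
#   from encodebox import tasks
#   tasks.transcode(json.dumps(u'2/1/uploaded/movie.mp4'))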
def hook_config_changed(self):
    cfg, local_cfg = self.config, self.local_config
    # Apache site files must end with .conf for a2ensite to work
    site_file = self.name_slug + u'.conf'

    self.info(u'Start MongoDB and RabbitMQ daemons')
    self.cmd(u'service mongodb start', fail=False)
    self.cmd(u'service rabbitmq-server start', fail=False)

    self.info(u'Configure JuJu Service Orchestrator')
    juju_config_path = dirname(local_cfg.juju_config_file)
    rsync(local_cfg.juju_template_path, juju_config_path, **self.rsync_kwargs)
    chown(juju_config_path, DAEMON_USER, DAEMON_GROUP, recursive=True)

    self.info(u'Configure Secure Shell')
    rsync(local_cfg.ssh_template_path, local_cfg.ssh_config_path, **self.rsync_kwargs)
    chown(local_cfg.ssh_config_path, DAEMON_USER, DAEMON_GROUP, recursive=True)

    self.info(u'Configure Apache 2')
    self.template2config(local_cfg.htaccess_template_file, local_cfg.htaccess_config_file, {})
    self.template2config(local_cfg.site_template_file, join(local_cfg.sites_available_path, site_file), {
        u'alias': self.api_alias,
        u'directory': local_cfg.site_directory,
        u'domain': self.public_address,
        u'wsgi': local_cfg.api_wsgi
    })
    self.cmd(u'a2dissite 000-default')
    self.cmd(u'a2ensite {0}'.format(site_file))

    self.info(u'Configure MongoDB Scalable NoSQL DB')
    with open(u'f.js', u'w', u'utf-8') as mongo_f:
        mongo_f.write(u"db.addUser('admin', '{0}', false);".format(cfg.mongo_admin_password))
    with open(u'g.js', u'w', u'utf-8') as mongo_g:
        mongo_g.write(u"db.addUser('node', '{0}', false);".format(cfg.mongo_node_password))
    self.cmd(u'mongo f.js')
    self.cmd(u'mongo orchestra f.js')
    self.cmd(u'mongo celery g.js')
    for f in (u'f.js', u'g.js'):
        os.remove(f)
    mongo_config = ConfigObj(local_cfg.mongo_config_file)
    mongo_config[u'bind_ip'] = u'0.0.0.0'
    mongo_config[u'noauth'] = u'false'
    mongo_config[u'auth'] = u'true'
    mongo_config.write()
    self.configure_rabbitmq()

    if cfg.plugit_api_url:
        self.info(u'Configure PlugIt server')
        infos = {
            u'api_url': cfg.plugit_api_url,
            u'debug': cfg.verbose,
            u'base_url': cfg.plugit_base_url,
            u'allowed_networks': u'", "'.join(cfg.plugit_allowed_networks.split(u','))
        }
        self.template2config(local_cfg.plugit_template_file, local_cfg.plugit_config_file, infos)

    self.info(u'Configure Orchestra the Orchestrator')
    local_cfg.verbose = cfg.verbose
    local_cfg.api_url = self.api_url(local=False)
    local_cfg.charms_release = cfg.charms_release
    local_cfg.node_secret = cfg.node_secret
    local_cfg.root_secret = cfg.root_secret
    local_cfg.mongo_admin_connection = self.mongo_admin_connection
    local_cfg.mongo_node_connection = self.mongo_node_connection
    local_cfg.rabbit_connection = self.rabbit_connection
    infos = {
        u'rabbit': unicode(self.rabbit_connection),
        u'port': unicode(27017),
        u'username': u'node',
        u'password': unicode(cfg.mongo_node_password),
    }
    self.template2config(local_cfg.celery_template_file, local_cfg.celery_config_file, infos)
    local_cfg.email_server = cfg.email_server
    local_cfg.email_tls = cfg.email_tls
    local_cfg.email_address = cfg.email_address
    local_cfg.email_username = cfg.email_username
    local_cfg.email_password = cfg.email_password
    local_cfg.plugit_api_url = cfg.plugit_api_url
    self.remark(u'Orchestrator successfully configured')

    self.info(u'Symlink charms default directory to directory for release {0}'.format(cfg.charms_release))
    try_symlink(abspath(local_cfg.charms_default_path), abspath(local_cfg.charms_release_path))

    self.info(u'Ensure that the Apache sites directory is owned by the right user')
    chown(local_cfg.sites_directory, DAEMON_USER, DAEMON_GROUP, recursive=True)

    self.info(u'Configure Cronjob')
    self.template2config(local_cfg.cronjob_template_file, '/etc/cron.d/cron_enco', {})

    self.storage_remount()
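# The ConfigObj update in hook_config_changed rewrites the MongoDB configuration file
# (local_cfg.mongo_config_file) so that it contains key/value lines equivalent to the following,
# binding to all interfaces and enabling authentication (sketch of the resulting file, not copied
# from the original charm):
#
#   bind_ip = 0.0.0.0
#   noauth = false
#   auth = true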