def handle_noargs(self, **options):
  """Install the Pig examples.

  Copies the example binaries and the shared sample data into the remote
  HDFS sample directory, creates the sample user, loads the Pig example
  fixtures, and — when the new editor is enabled — links the sample pig
  script document into the sample user's examples directory and shares it
  with the default group.
  """
  fs = cluster.get_hdfs()
  create_directories(fs, [REMOTE_SAMPLE_DIR.get()])
  remote_dir = REMOTE_SAMPLE_DIR.get()

  # Copy examples binaries
  for name in os.listdir(LOCAL_SAMPLE_DIR.get()):
    local_dir = fs.join(LOCAL_SAMPLE_DIR.get(), name)
    remote_data_dir = fs.join(remote_dir, name)
    LOG.info(_('Copying examples %(local_dir)s to %(remote_data_dir)s\n') % {
        'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
    # Uploads are performed via do_as_user as fs.DEFAULT_USER.
    fs.do_as_user(fs.DEFAULT_USER, fs.copyFromLocal, local_dir, remote_data_dir)

  # Copy sample data
  local_dir = paths.get_thirdparty_root("sample_data")
  remote_data_dir = fs.join(remote_dir, 'data')
  LOG.info(_('Copying data %(local_dir)s to %(remote_data_dir)s\n') % {
      'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
  fs.do_as_user(fs.DEFAULT_USER, fs.copyFromLocal, local_dir, remote_data_dir)

  # Load jobs
  sample_user = install_sample_user()
  management.call_command('loaddata', 'initial_pig_examples.json', verbosity=2)
  Document.objects.sync()

  if USE_NEW_EDITOR.get():
    # Get or create sample user directories
    home_dir = Directory.objects.get_home_directory(sample_user)
    examples_dir, created = Directory.objects.get_or_create(
      parent_directory=home_dir,
      owner=sample_user,
      name=Document2.EXAMPLES_DIR)

    try:
      # Don't overwrite
      doc = Document.objects.get(object_id=1100713)
      doc2 = Document2.objects.get(owner=sample_user, name=doc.name, type='link-pigscript')
      # If document exists but has been trashed, recover from Trash
      if doc2.parent_directory != examples_dir:
        doc2.parent_directory = examples_dir
        doc2.save()
    except Document.DoesNotExist:
      # FIX: Logger.warn is a deprecated alias; use Logger.warning.
      LOG.warning('Sample pig script document not found.')
    except Document2.DoesNotExist:
      # `doc` is always bound here: Document.objects.get succeeded before
      # the Document2 lookup raised.
      if doc.content_object:
        data = doc.content_object.dict
        data.update({'content_type': doc.content_type.model, 'object_id': doc.object_id})
        data = json.dumps(data)

        doc2 = Document2.objects.create(
          owner=sample_user,
          parent_directory=examples_dir,
          name=doc.name,
          type='link-pigscript',
          description=doc.description,
          data=data)
        LOG.info('Successfully installed sample link to pig script: %s' % (doc2.name,))

    # Share with default group
    examples_dir.share(sample_user, Document2Permission.READ_PERM, groups=[get_default_user_group()])
def handle_noargs(self, **options):
  """Install the Oozie examples.

  Copies the example binaries and sample data into the remote HDFS sample
  directory, ensures the fixed-id sample user exists, then loads the Oozie
  example fixtures under that user.
  """
  fs = cluster.get_hdfs()
  create_directories(fs, [REMOTE_SAMPLE_DIR.get()])
  remote_dir = REMOTE_SAMPLE_DIR.get()

  # Copy examples binaries
  for name in os.listdir(LOCAL_SAMPLE_DIR.get()):
    local_dir = fs.join(LOCAL_SAMPLE_DIR.get(), name)
    remote_data_dir = fs.join(remote_dir, name)
    LOG.info(_('Copying examples %(local_dir)s to %(remote_data_dir)s\n') % {
        'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
    # Uploads run via do_as_user as fs.DEFAULT_USER.
    fs.do_as_user(fs.DEFAULT_USER, fs.copyFromLocal, local_dir, remote_data_dir)

  # Copy sample data
  local_dir = LOCAL_SAMPLE_DATA_DIR.get()
  remote_data_dir = fs.join(remote_dir, 'data')
  LOG.info(_('Copying data %(local_dir)s to %(remote_data_dir)s\n') % {
      'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
  fs.do_as_user(fs.DEFAULT_USER, fs.copyFromLocal, local_dir, remote_data_dir)

  # Load jobs
  USERNAME = '******'
  try:
    sample_user = User.objects.get(username=USERNAME)
  except User.DoesNotExist:
    # NOTE(review): the password is stored as the raw placeholder (not set via
    # set_password) and the account is created inactive; the fixed id/pk pins
    # the fixture owner. Confirm this matches install_sample_user() elsewhere.
    sample_user = User.objects.create(username=USERNAME, password='******', is_active=False, is_superuser=False, id=1100713, pk=1100713)
  management.call_command('loaddata', 'initial_oozie_examples.json', verbosity=2)
def handle_noargs(self, **options):
  """Set up the Pig examples.

  Uploads the example binaries and the shared sample data into the remote
  HDFS sample directory, creates the sample user, loads the Pig example
  fixtures and synchronizes the Document index.
  """
  hdfs = cluster.get_hdfs()
  create_directories(hdfs, [REMOTE_SAMPLE_DIR.get()])
  remote_home = REMOTE_SAMPLE_DIR.get()

  # Upload every example binary directory into the remote sample dir.
  for entry in os.listdir(LOCAL_SAMPLE_DIR.get()):
    src_path = hdfs.join(LOCAL_SAMPLE_DIR.get(), entry)
    dst_path = hdfs.join(remote_home, entry)
    LOG.info(_('Copying examples %(local_dir)s to %(remote_data_dir)s\n') % {
      'local_dir': src_path,
      'remote_data_dir': dst_path
    })
    hdfs.do_as_user(hdfs.DEFAULT_USER, hdfs.copyFromLocal, src_path, dst_path)

  # Upload the shared sample data set.
  src_path = paths.get_thirdparty_root("sample_data")
  dst_path = hdfs.join(remote_home, 'data')
  LOG.info(_('Copying data %(local_dir)s to %(remote_data_dir)s\n') % {
    'local_dir': src_path,
    'remote_data_dir': dst_path
  })
  hdfs.do_as_user(hdfs.DEFAULT_USER, hdfs.copyFromLocal, src_path, dst_path)

  # Create the sample user, then load the example jobs and sync documents.
  install_sample_user()
  management.call_command('loaddata', 'initial_pig_examples.json', verbosity=2)
  Document.objects.sync()
def handle_noargs(self, **options):
  """Install the Pig examples.

  Copies the example binaries and shared sample data into the remote HDFS
  sample directory, ensures the fixed-id sample user exists, loads the Pig
  example fixtures and synchronizes the Document index.
  """
  fs = cluster.get_hdfs()
  create_directories(fs, [REMOTE_SAMPLE_DIR.get()])
  remote_dir = REMOTE_SAMPLE_DIR.get()

  # Copy examples binaries
  for name in os.listdir(LOCAL_SAMPLE_DIR.get()):
    local_dir = fs.join(LOCAL_SAMPLE_DIR.get(), name)
    remote_data_dir = fs.join(remote_dir, name)
    LOG.info(_('Copying examples %(local_dir)s to %(remote_data_dir)s\n') % {
        'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
    fs.do_as_user(fs.DEFAULT_USER, fs.copyFromLocal, local_dir, remote_data_dir)

  # Copy sample data
  local_dir = paths.get_thirdparty_root("sample_data")
  remote_data_dir = fs.join(remote_dir, 'data')
  LOG.info(_('Copying data %(local_dir)s to %(remote_data_dir)s\n') % {
      'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
  fs.do_as_user(fs.DEFAULT_USER, fs.copyFromLocal, local_dir, remote_data_dir)

  # Load jobs
  USERNAME = '******'
  try:
    sample_user = User.objects.get(username=USERNAME)
  except User.DoesNotExist:
    # NOTE(review): password stored as the raw placeholder and account created
    # inactive; fixed id/pk pins the fixture owner — confirm this duplicates
    # install_sample_user() intentionally.
    sample_user = User.objects.create(username=USERNAME, password='******', is_active=False, is_superuser=False, id=1100713, pk=1100713)
  management.call_command('loaddata', 'initial_pig_examples.json', verbosity=2)
  Document.objects.sync()
def handle_noargs(self, **options):
  """Install the Oozie examples.

  Creates the sample user, copies example binaries and sample data into the
  remote HDFS sample directory, loads the v2 example fixtures when the v2
  editor is enabled, then installs the remaining examples and syncs the
  Document index.
  """
  self.user = install_sample_user()
  self.fs = cluster.get_hdfs()

  LOG.info(_("Creating sample directory '%s' in HDFS") % REMOTE_SAMPLE_DIR.get())
  create_directories(self.fs, [REMOTE_SAMPLE_DIR.get()])
  remote_dir = REMOTE_SAMPLE_DIR.get()

  # Copy examples binaries
  for name in os.listdir(LOCAL_SAMPLE_DIR.get()):
    local_dir = self.fs.join(LOCAL_SAMPLE_DIR.get(), name)
    remote_data_dir = self.fs.join(remote_dir, name)
    LOG.info(_('Copying examples %(local_dir)s to %(remote_data_dir)s\n') % {
        'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
    self.fs.do_as_user(self.fs.DEFAULT_USER, self.fs.copyFromLocal, local_dir, remote_data_dir)

  # Copy sample data
  local_dir = LOCAL_SAMPLE_DATA_DIR.get()
  remote_data_dir = self.fs.join(remote_dir, 'data')
  LOG.info(_('Copying data %(local_dir)s to %(remote_data_dir)s\n') % {
      'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
  self.fs.do_as_user(self.fs.DEFAULT_USER, self.fs.copyFromLocal, local_dir, remote_data_dir)

  # Load jobs
  LOG.info(_("Installing examples..."))

  if ENABLE_V2.get():
    management.call_command('loaddata', 'initial_oozie_examples.json', verbosity=2)
  # NOTE(review): install_examples() and the Document sync appear to run
  # regardless of ENABLE_V2 — confirm this gating is intended.
  self.install_examples()

  Document.objects.sync()
def handle(self, *args, **options):
  """Install the Pig examples for Hue 4.

  Uploads the example binaries and shared sample data to HDFS as the sample
  user, installs the editor pig script sample and, when the new editor is
  enabled, files it under the sample user's examples directory and shares
  that directory with the default group.
  """
  hdfs = cluster.get_hdfs()
  create_directories(hdfs, [REMOTE_SAMPLE_DIR.get()])
  remote_root = REMOTE_SAMPLE_DIR.get()

  user = install_sample_user()

  # Upload each example binary directory.
  for entry in os.listdir(LOCAL_SAMPLE_DIR.get()):
    src = hdfs.join(LOCAL_SAMPLE_DIR.get(), entry)
    dst = hdfs.join(remote_root, entry)
    LOG.info(_('Copying examples %(local_dir)s to %(remote_data_dir)s\n') % {
      'local_dir': src,
      'remote_data_dir': dst
    })
    hdfs.do_as_user(user.username, hdfs.copyFromLocal, src, dst)

  # Upload the shared sample data set.
  src = paths.get_thirdparty_root("sample_data")
  dst = hdfs.join(remote_root, 'data')
  LOG.info(_('Copying data %(local_dir)s to %(remote_data_dir)s\n') % {
    'local_dir': src,
    'remote_data_dir': dst
  })
  hdfs.do_as_user(user.username, hdfs.copyFromLocal, src, dst)

  # Holds the installed sample document, whether editor script or link.
  pig_doc = None

  # Install the editor pig script without a doc1 link.
  LOG.info("Using Hue 4, will install pig editor sample.")
  pig_doc = self.install_pig_script(user)

  if USE_NEW_EDITOR.get():
    # Ensure the sample user's home and examples directories exist.
    LOG.info("Creating sample user directories.")
    home = Directory.objects.get_home_directory(user)
    examples, _created = Directory.objects.get_or_create(
      parent_directory=home,
      owner=user,
      name=Document2.EXAMPLES_DIR)

    # A previously trashed sample document is recovered into examples.
    if pig_doc and pig_doc.parent_directory != examples:
      pig_doc.parent_directory = examples
      pig_doc.save()

    # Make the examples directory readable by the default group.
    examples.share(user, Document2Permission.READ_PERM, groups=[get_default_user_group()])
def handle_noargs(self, **options):
  """Install the Oozie examples.

  Creates the sample user, copies example binaries and sample data to HDFS
  and, when the v2 editor is enabled, loads the v2 fixtures, moves the
  sample user's orphaned oozie example documents under the examples
  directory and shares it with the default group. Finally installs the
  remaining examples and syncs the Document index.
  """
  self.user = install_sample_user()
  self.fs = cluster.get_hdfs()

  LOG.info(_("Creating sample directory '%s' in HDFS") % REMOTE_SAMPLE_DIR.get())
  create_directories(self.fs, [REMOTE_SAMPLE_DIR.get()])
  remote_dir = REMOTE_SAMPLE_DIR.get()

  # Copy examples binaries
  for name in os.listdir(LOCAL_SAMPLE_DIR.get()):
    local_dir = self.fs.join(LOCAL_SAMPLE_DIR.get(), name)
    remote_data_dir = self.fs.join(remote_dir, name)
    LOG.info(_('Copying examples %(local_dir)s to %(remote_data_dir)s\n') % {
        'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
    self.fs.do_as_user(self.fs.DEFAULT_USER, self.fs.copyFromLocal, local_dir, remote_data_dir)

  # Copy sample data
  local_dir = LOCAL_SAMPLE_DATA_DIR.get()
  remote_data_dir = self.fs.join(remote_dir, 'data')
  LOG.info(_('Copying data %(local_dir)s to %(remote_data_dir)s\n') % {
      'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
  self.fs.do_as_user(self.fs.DEFAULT_USER, self.fs.copyFromLocal, local_dir, remote_data_dir)

  # Load jobs
  LOG.info(_("Installing examples..."))

  if ENABLE_V2.get():
    management.call_command('loaddata', 'initial_oozie_examples.json', verbosity=2)

    # Get or create sample user directories
    home_dir = Directory.objects.get_home_directory(self.user)
    examples_dir, created = Directory.objects.get_or_create(
      parent_directory=home_dir,
      owner=self.user,
      name=Document2.EXAMPLES_DIR
    )

    # Share oozie examples with default group
    # Documents with no parent directory yet are adopted into examples_dir.
    oozie_examples = Document2.objects.filter(
      type__in=['oozie-workflow2', 'oozie-coordinator2', 'oozie-bundle2'],
      owner=self.user,
      parent_directory=None
    )
    oozie_examples.update(parent_directory=examples_dir)
    examples_dir.share(self.user, Document2Permission.READ_PERM, groups=[get_default_user_group()])

  self.install_examples()

  Document.objects.sync()
def handle(self, *args, **options):
  """Install the Pig examples for Hue 4.

  Uploads the example binaries and shared sample data to HDFS as the sample
  user, installs the editor pig script sample and, when the new editor is
  enabled, files it under the sample user's examples directory and shares
  that directory with the default group.
  """
  fs = cluster.get_hdfs()
  create_directories(fs, [REMOTE_SAMPLE_DIR.get()])
  remote_dir = REMOTE_SAMPLE_DIR.get()

  sample_user = install_sample_user()

  # Copy examples binaries
  for name in os.listdir(LOCAL_SAMPLE_DIR.get()):
    local_dir = fs.join(LOCAL_SAMPLE_DIR.get(), name)
    remote_data_dir = fs.join(remote_dir, name)
    LOG.info(_('Copying examples %(local_dir)s to %(remote_data_dir)s\n') % {
        'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
    # Uploads run as the sample user rather than the HDFS default user.
    fs.do_as_user(sample_user.username, fs.copyFromLocal, local_dir, remote_data_dir)

  # Copy sample data
  local_dir = paths.get_thirdparty_root("sample_data")
  remote_data_dir = fs.join(remote_dir, 'data')
  LOG.info(_('Copying data %(local_dir)s to %(remote_data_dir)s\n') % {
      'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
  fs.do_as_user(sample_user.username, fs.copyFromLocal, local_dir, remote_data_dir)

  # Initialize doc2, whether editor script or link
  doc2 = None

  # Install editor pig script without doc1 link
  LOG.info("Using Hue 4, will install pig editor sample.")
  doc2 = self.install_pig_script(sample_user)

  if USE_NEW_EDITOR.get():
    # Get or create sample user directories
    LOG.info("Creating sample user directories.")
    home_dir = Directory.objects.get_home_directory(sample_user)
    examples_dir, created = Directory.objects.get_or_create(
      parent_directory=home_dir,
      owner=sample_user,
      name=Document2.EXAMPLES_DIR)

    # If document exists but has been trashed, recover from Trash
    if doc2 and doc2.parent_directory != examples_dir:
      doc2.parent_directory = examples_dir
      doc2.save()

    # Share with default group
    examples_dir.share(sample_user, Document2Permission.READ_PERM, groups=[get_default_user_group()])
def handle_noargs(self, **options):
  """Install the Oozie examples as the sandbox sample user.

  The filesystem client is switched to the sandbox user via setuser(), the
  example binaries and sample data are uploaded, then the example job
  fixtures are loaded.
  """
  hdfs = cluster.get_hdfs()
  sandbox_user = CreateSandboxUserCommand().handle_noargs()
  hdfs.setuser(sandbox_user)

  create_directories(hdfs, [REMOTE_SAMPLE_DIR.get()])
  remote_root = REMOTE_SAMPLE_DIR.get()

  # Upload every example binary directory.
  for entry in os.listdir(LOCAL_SAMPLE_DIR.get()):
    src = hdfs.join(LOCAL_SAMPLE_DIR.get(), entry)
    dst = hdfs.join(remote_root, entry)
    LOG.info(_('Copying examples %(local_dir)s to %(remote_data_dir)s\n') % {
      'local_dir': src,
      'remote_data_dir': dst
    })
    hdfs.copyFromLocal(src, dst)

  # Upload the shared sample data set.
  src = LOCAL_SAMPLE_DATA_DIR.get()
  dst = hdfs.join(remote_root, 'data')
  LOG.info(_('Copying data %(local_dir)s to %(remote_data_dir)s\n') % {
    'local_dir': src,
    'remote_data_dir': dst
  })
  hdfs.copyFromLocal(src, dst)

  # Load the example jobs.
  management.call_command('loaddata', 'initial_oozie_examples.json', verbosity=2)
def handle_noargs(self, **options):
  """Install the Pig examples.

  Pushes the example binaries and the shared sample data into the remote
  HDFS sample directory, creates the sample user, loads the Pig fixtures
  and synchronizes the Document index.
  """
  dfs = cluster.get_hdfs()
  create_directories(dfs, [REMOTE_SAMPLE_DIR.get()])
  target_root = REMOTE_SAMPLE_DIR.get()

  # Push each example binary directory up to HDFS.
  for item in os.listdir(LOCAL_SAMPLE_DIR.get()):
    source = dfs.join(LOCAL_SAMPLE_DIR.get(), item)
    target = dfs.join(target_root, item)
    LOG.info(_('Copying examples %(local_dir)s to %(remote_data_dir)s\n') % {
      'local_dir': source, 'remote_data_dir': target})
    dfs.do_as_user(dfs.DEFAULT_USER, dfs.copyFromLocal, source, target)

  # Push the shared sample data set up to HDFS.
  source = paths.get_thirdparty_root("sample_data")
  target = dfs.join(target_root, 'data')
  LOG.info(_('Copying data %(local_dir)s to %(remote_data_dir)s\n') % {
    'local_dir': source, 'remote_data_dir': target})
  dfs.do_as_user(dfs.DEFAULT_USER, dfs.copyFromLocal, source, target)

  # Create the sample user, load the example jobs, sync documents.
  install_sample_user()
  management.call_command('loaddata', 'initial_pig_examples.json', verbosity=2)
  Document.objects.sync()
def handle_noargs(self, **options):
  """Install the Oozie examples.

  Creates the sample user, copies example binaries and sample data to HDFS
  and, when the v2 editor is enabled, loads the v2 fixtures, adopts the
  sample user's orphaned oozie example documents into the examples directory
  and shares it with the default group. Finally installs the remaining
  examples and syncs the Document index.
  """
  self.user = install_sample_user()
  self.fs = cluster.get_hdfs()

  LOG.info(_("Creating sample directory '%s' in HDFS") % REMOTE_SAMPLE_DIR.get())
  create_directories(self.fs, [REMOTE_SAMPLE_DIR.get()])
  remote_dir = REMOTE_SAMPLE_DIR.get()

  # Copy examples binaries
  for name in os.listdir(LOCAL_SAMPLE_DIR.get()):
    local_dir = self.fs.join(LOCAL_SAMPLE_DIR.get(), name)
    remote_data_dir = self.fs.join(remote_dir, name)
    LOG.info(_('Copying examples %(local_dir)s to %(remote_data_dir)s\n') % {
        'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
    self.fs.do_as_user(self.fs.DEFAULT_USER, self.fs.copyFromLocal, local_dir, remote_data_dir)

  # Copy sample data
  local_dir = LOCAL_SAMPLE_DATA_DIR.get()
  remote_data_dir = self.fs.join(remote_dir, 'data')
  LOG.info(_('Copying data %(local_dir)s to %(remote_data_dir)s\n') % {
      'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
  self.fs.do_as_user(self.fs.DEFAULT_USER, self.fs.copyFromLocal, local_dir, remote_data_dir)

  # Load jobs
  LOG.info(_("Installing examples..."))

  if ENABLE_V2.get():
    management.call_command('loaddata', 'initial_oozie_examples.json', verbosity=2)

    # Get or create sample user directories
    home_dir = Directory.objects.get_home_directory(self.user)
    examples_dir, created = Directory.objects.get_or_create(
      parent_directory=home_dir,
      owner=self.user,
      name=Document2.EXAMPLES_DIR)

    # Share oozie examples with default group
    oozie_examples = Document2.objects.filter(
      type__in=['oozie-workflow2', 'oozie-coordinator2', 'oozie-bundle2'],
      owner=self.user,
      parent_directory=None)
    oozie_examples.update(parent_directory=examples_dir)
    examples_dir.share(self.user, Document2Permission.READ_PERM, groups=[get_default_user_group()])

  self.install_examples()

  Document.objects.sync()
def handle_noargs(self, **options):
  """Install the Oozie examples.

  Creates the sample user, copies example binaries and sample data to HDFS
  and, when the v2 editor is enabled, loads the v2 fixtures, links the
  sample user's old Job Designs into the examples directory (new editor
  only) and shares the directory with the default group. Finally installs
  the remaining examples and syncs the Document index.
  """
  self.user = install_sample_user()
  self.fs = cluster.get_hdfs()

  LOG.info(_("Creating sample directory '%s' in HDFS") % REMOTE_SAMPLE_DIR.get())
  create_directories(self.fs, [REMOTE_SAMPLE_DIR.get()])
  remote_dir = REMOTE_SAMPLE_DIR.get()

  # Copy examples binaries
  for name in os.listdir(LOCAL_SAMPLE_DIR.get()):
    local_dir = self.fs.join(LOCAL_SAMPLE_DIR.get(), name)
    remote_data_dir = self.fs.join(remote_dir, name)
    LOG.info(_('Copying examples %(local_dir)s to %(remote_data_dir)s\n') % {
        'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
    self.fs.do_as_user(self.fs.DEFAULT_USER, self.fs.copyFromLocal, local_dir, remote_data_dir)

  # Copy sample data
  local_dir = LOCAL_SAMPLE_DATA_DIR.get()
  remote_data_dir = self.fs.join(remote_dir, 'data')
  LOG.info(_('Copying data %(local_dir)s to %(remote_data_dir)s\n') % {
      'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
  self.fs.do_as_user(self.fs.DEFAULT_USER, self.fs.copyFromLocal, local_dir, remote_data_dir)

  # Load jobs
  LOG.info(_("Installing examples..."))

  if ENABLE_V2.get():
    management.call_command('loaddata', 'initial_oozie_examples.json', verbosity=2)

    # Get or create sample user directories
    home_dir = Directory.objects.get_home_directory(self.user)
    examples_dir, created = Directory.objects.get_or_create(
      parent_directory=home_dir,
      owner=self.user,
      name=Document2.EXAMPLES_DIR
    )

    if USE_NEW_EDITOR.get():
      # Link each of the sample user's old Job Designs into the new editor.
      docs = Document.objects.get_docs(self.user, Workflow).filter(owner=self.user)
      for doc in docs:
        if doc.content_object:
          data = doc.content_object.data_dict
          data.update({'content_type': doc.content_type.model, 'object_id': doc.object_id})
          data = json.dumps(data)

          doc2 = Document2.objects.create(
            owner=self.user,
            parent_directory=examples_dir,
            name=doc.name,
            type='link-workflow',
            description=doc.description,
            data=data)

          LOG.info('Successfully installed sample link to jobsub: %s' % (doc2.name,))

    # Share oozie examples with default group
    # NOTE(review): only documents without a parent directory are adopted.
    oozie_examples = Document2.objects.filter(
      type__in=['oozie-workflow2', 'oozie-coordinator2', 'oozie-bundle2'],
      owner=self.user,
      parent_directory=None
    )
    oozie_examples.update(parent_directory=examples_dir)
    examples_dir.share(self.user, Document2Permission.READ_PERM, groups=[get_default_user_group()])

  self.install_examples()

  Document.objects.sync()
def handle(self, *args, **options):
  """Install the Oozie editor examples (Hue 4).

  Uploads example binaries and sample data to HDFS as the sample user,
  ensures the sample user's examples directory exists, loads the v2
  fixtures when enabled, installs the editor example jobs, recovers any
  trashed example documents and shares the directory with the default
  group.
  """
  self.user = install_sample_user()
  self.fs = cluster.get_hdfs()

  LOG.info(_("Creating sample directory '%s' in HDFS") % REMOTE_SAMPLE_DIR.get())
  create_directories(self.fs, [REMOTE_SAMPLE_DIR.get()])
  remote_dir = REMOTE_SAMPLE_DIR.get()

  # Copy examples binaries
  for name in os.listdir(LOCAL_SAMPLE_DIR.get()):
    local_dir = self.fs.join(LOCAL_SAMPLE_DIR.get(), name)
    remote_data_dir = self.fs.join(remote_dir, name)
    LOG.info(_('Copying examples %(local_dir)s to %(remote_data_dir)s\n') % {
        'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
    self.fs.do_as_user(self.user.username, self.fs.copyFromLocal, local_dir, remote_data_dir)

  # Copy sample data
  local_dir = LOCAL_SAMPLE_DATA_DIR.get()
  remote_data_dir = self.fs.join(remote_dir, 'data')
  LOG.info(_('Copying data %(local_dir)s to %(remote_data_dir)s\n') % {
      'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
  self.fs.do_as_user(self.user.username, self.fs.copyFromLocal, local_dir, remote_data_dir)

  # Get or create sample user directories
  home_dir = Directory.objects.get_home_directory(self.user)
  examples_dir, created = Directory.objects.get_or_create(
    parent_directory=home_dir,
    owner=self.user,
    name=Document2.EXAMPLES_DIR
  )

  # Load jobs
  LOG.info(_("Installing examples..."))

  if ENABLE_V2.get():
    with transaction.atomic():
      # NOTE(review): loaddata's `commit` option was removed in newer Django
      # releases — confirm against the project's Django version.
      management.call_command('loaddata', 'initial_oozie_examples.json', verbosity=2, commit=False)

  # Install editor oozie examples without doc1 link
  LOG.info("Using Hue 4, will install oozie editor samples.")
  example_jobs = []
  example_jobs.append(self._install_mapreduce_example())
  example_jobs.append(self._install_java_example())
  example_jobs.append(self._install_spark_example())
  example_jobs.append(self._install_pyspark_example())

  # If documents exist but have been trashed, recover from Trash
  for doc in example_jobs:
    if doc is not None and doc.parent_directory != examples_dir:
      doc.parent_directory = examples_dir
      doc.save()

  # Share oozie examples with default group
  oozie_examples = Document2.objects.filter(
    type__in=['oozie-workflow2', 'oozie-coordinator2', 'oozie-bundle2'],
    owner=self.user,
    parent_directory=None
  )
  oozie_examples.update(parent_directory=examples_dir)
  examples_dir.share(self.user, Document2Permission.READ_PERM, groups=[get_default_user_group()])
def handle(self, *args, **options):
  """Install the Oozie editor examples for Hue 4.

  Copies example binaries and sample data into HDFS as the sample user,
  creates the examples directory if missing, loads the v2 fixtures when the
  v2 editor is enabled, installs the editor sample jobs, restores any that
  were trashed, and shares the examples directory with the default group.
  """
  self.user = install_sample_user()
  self.fs = cluster.get_hdfs()

  LOG.info(_("Creating sample directory '%s' in HDFS") % REMOTE_SAMPLE_DIR.get())
  create_directories(self.fs, [REMOTE_SAMPLE_DIR.get()])
  remote_dir = REMOTE_SAMPLE_DIR.get()

  # Copy examples binaries
  for name in os.listdir(LOCAL_SAMPLE_DIR.get()):
    local_dir = self.fs.join(LOCAL_SAMPLE_DIR.get(), name)
    remote_data_dir = self.fs.join(remote_dir, name)
    LOG.info(_('Copying examples %(local_dir)s to %(remote_data_dir)s\n') % {
        'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
    self.fs.do_as_user(self.user.username, self.fs.copyFromLocal, local_dir, remote_data_dir)

  # Copy sample data
  local_dir = LOCAL_SAMPLE_DATA_DIR.get()
  remote_data_dir = self.fs.join(remote_dir, 'data')
  LOG.info(_('Copying data %(local_dir)s to %(remote_data_dir)s\n') % {
      'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
  self.fs.do_as_user(self.user.username, self.fs.copyFromLocal, local_dir, remote_data_dir)

  # Get or create sample user directories
  home_dir = Directory.objects.get_home_directory(self.user)
  examples_dir, created = Directory.objects.get_or_create(
    parent_directory=home_dir,
    owner=self.user,
    name=Document2.EXAMPLES_DIR)

  # Load jobs
  LOG.info(_("Installing examples..."))

  if ENABLE_V2.get():
    with transaction.atomic():
      # NOTE(review): loaddata's `commit` option was removed in newer Django
      # releases — confirm against the project's Django version.
      management.call_command('loaddata', 'initial_oozie_examples.json', verbosity=2, commit=False)

  # Install editor oozie examples without doc1 link
  LOG.info("Using Hue 4, will install oozie editor samples.")
  example_jobs = []
  example_jobs.append(self._install_mapreduce_example())
  example_jobs.append(self._install_java_example())
  example_jobs.append(self._install_spark_example())
  example_jobs.append(self._install_pyspark_example())

  # If documents exist but have been trashed, recover from Trash
  for doc in example_jobs:
    if doc is not None and doc.parent_directory != examples_dir:
      doc.parent_directory = examples_dir
      doc.save()

  # Share oozie examples with default group
  oozie_examples = Document2.objects.filter(
    type__in=['oozie-workflow2', 'oozie-coordinator2', 'oozie-bundle2'],
    owner=self.user,
    parent_directory=None)
  oozie_examples.update(parent_directory=examples_dir)
  examples_dir.share(self.user, Document2Permission.READ_PERM, groups=[get_default_user_group()])
def handle_noargs(self, **options):
  """Install the Pig examples.

  Copies the example binaries and shared sample data into the remote HDFS
  sample directory, creates the sample user, loads the Pig example fixtures
  and, when the new editor is enabled, links the sample pig script document
  into the sample user's examples directory and shares it with the default
  group.
  """
  fs = cluster.get_hdfs()
  create_directories(fs, [REMOTE_SAMPLE_DIR.get()])
  remote_dir = REMOTE_SAMPLE_DIR.get()

  # Copy examples binaries
  for name in os.listdir(LOCAL_SAMPLE_DIR.get()):
    local_dir = fs.join(LOCAL_SAMPLE_DIR.get(), name)
    remote_data_dir = fs.join(remote_dir, name)
    LOG.info(_('Copying examples %(local_dir)s to %(remote_data_dir)s\n') % {
        'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
    fs.do_as_user(fs.DEFAULT_USER, fs.copyFromLocal, local_dir, remote_data_dir)

  # Copy sample data
  local_dir = paths.get_thirdparty_root("sample_data")
  remote_data_dir = fs.join(remote_dir, 'data')
  LOG.info(_('Copying data %(local_dir)s to %(remote_data_dir)s\n') % {
      'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
  fs.do_as_user(fs.DEFAULT_USER, fs.copyFromLocal, local_dir, remote_data_dir)

  # Load jobs
  sample_user = install_sample_user()
  management.call_command('loaddata', 'initial_pig_examples.json', verbosity=2)
  Document.objects.sync()

  if USE_NEW_EDITOR.get():
    # Get or create sample user directories
    home_dir = Directory.objects.get_home_directory(sample_user)
    examples_dir, created = Directory.objects.get_or_create(
      parent_directory=home_dir,
      owner=sample_user,
      name=Document2.EXAMPLES_DIR)

    try:
      # Don't overwrite
      doc = Document.objects.get(object_id=1100713)
      doc2 = Document2.objects.get(owner=sample_user, name=doc.name, type='link-pigscript')
      # If document exists but has been trashed, recover from Trash
      if doc2.parent_directory != examples_dir:
        doc2.parent_directory = examples_dir
        doc2.save()
    except Document.DoesNotExist:
      # FIX: Logger.warn is a deprecated alias; use Logger.warning.
      LOG.warning('Sample pig script document not found.')
    except Document2.DoesNotExist:
      # `doc` is always bound here: Document.objects.get succeeded before
      # the Document2 lookup raised.
      if doc.content_object:
        data = doc.content_object.dict
        data.update({
          'content_type': doc.content_type.model,
          'object_id': doc.object_id
        })
        data = json.dumps(data)

        doc2 = Document2.objects.create(
          owner=sample_user,
          parent_directory=examples_dir,
          name=doc.name,
          type='link-pigscript',
          description=doc.description,
          data=data)
        LOG.info('Successfully installed sample link to pig script: %s' % (doc2.name,))

    # Share with default group
    examples_dir.share(sample_user, Document2Permission.READ_PERM, groups=[get_default_user_group()])
def handle(self, *args, **options):
  """Install the Oozie examples for Hue 3 or Hue 4.

  Uploads example binaries and sample data to HDFS as the sample user and
  ensures the examples directory exists. With the v2 editor enabled it
  loads the v2 fixtures, then either installs the Hue 4 editor samples or
  links the old Job Designs (new editor), and shares the examples with the
  default group. On Hue 3 the legacy examples are installed; the Document
  index is synced last.
  """
  self.user = install_sample_user()
  self.fs = cluster.get_hdfs()

  LOG.info(_("Creating sample directory '%s' in HDFS") % REMOTE_SAMPLE_DIR.get())
  create_directories(self.fs, [REMOTE_SAMPLE_DIR.get()])
  remote_dir = REMOTE_SAMPLE_DIR.get()

  # Copy examples binaries
  for name in os.listdir(LOCAL_SAMPLE_DIR.get()):
    local_dir = self.fs.join(LOCAL_SAMPLE_DIR.get(), name)
    remote_data_dir = self.fs.join(remote_dir, name)
    LOG.info(_('Copying examples %(local_dir)s to %(remote_data_dir)s\n') % {
        'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
    self.fs.do_as_user(self.user.username, self.fs.copyFromLocal, local_dir, remote_data_dir)

  # Copy sample data
  local_dir = LOCAL_SAMPLE_DATA_DIR.get()
  remote_data_dir = self.fs.join(remote_dir, 'data')
  LOG.info(_('Copying data %(local_dir)s to %(remote_data_dir)s\n') % {
      'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
  self.fs.do_as_user(self.user.username, self.fs.copyFromLocal, local_dir, remote_data_dir)

  # Get or create sample user directories
  home_dir = Directory.objects.get_home_directory(self.user)
  examples_dir, created = Directory.objects.get_or_create(
    parent_directory=home_dir,
    owner=self.user,
    name=Document2.EXAMPLES_DIR
  )

  # Load jobs
  LOG.info(_("Installing examples..."))

  if ENABLE_V2.get():
    with transaction.atomic():
      # NOTE(review): loaddata's `commit` option was removed in newer Django
      # releases — confirm against the project's Django version.
      management.call_command('loaddata', 'initial_oozie_examples.json', verbosity=2, commit=False)

    if IS_HUE_4.get():
      # Install editor oozie examples without doc1 link
      LOG.info("Using Hue 4, will install oozie editor samples.")
      example_jobs = []
      example_jobs.append(self._install_mapreduce_example())
      example_jobs.append(self._install_java_example())
      example_jobs.append(self._install_spark_example())
      example_jobs.append(self._install_pyspark_example())

      # If documents exist but have been trashed, recover from Trash
      for doc in example_jobs:
        if doc is not None and doc.parent_directory != examples_dir:
          doc.parent_directory = examples_dir
          doc.save()
    elif USE_NEW_EDITOR.get():
      # Install as link-workflow doc2 to old Job Designs
      docs = Document.objects.get_docs(self.user, Workflow).filter(owner=self.user)
      for doc in docs:
        if doc.content_object:
          data = doc.content_object.data_dict
          data.update({'content_type': doc.content_type.model, 'object_id': doc.object_id})
          data = json.dumps(data)

          # Don't overwrite
          doc2, created = Document2.objects.get_or_create(
            owner=self.user,
            parent_directory=examples_dir,
            name=doc.name,
            type='link-workflow',
            description=doc.description,
            data=data
          )
          LOG.info('Successfully installed sample link to jobsub: %s' % (doc2.name,))

    # Share oozie examples with default group
    oozie_examples = Document2.objects.filter(
      type__in=['oozie-workflow2', 'oozie-coordinator2', 'oozie-bundle2'],
      owner=self.user,
      parent_directory=None
    )
    oozie_examples.update(parent_directory=examples_dir)
    examples_dir.share(self.user, Document2Permission.READ_PERM, groups=[get_default_user_group()])

  if not IS_HUE_4.get():
    # Hue 3: install the legacy (doc1) examples.
    self.install_examples()

  Document.objects.sync()
def handle(self, *args, **options):
  """Install the Pig examples for Hue 3 or Hue 4.

  Uploads the example binaries and shared sample data to HDFS as the sample
  user. On Hue 4 the editor pig script sample is installed directly; on
  Hue 3 the legacy fixture is loaded and, with the new editor enabled, a
  link document is created for it. The sample document is recovered from
  Trash if needed and the examples directory is shared with the default
  group.
  """
  fs = cluster.get_hdfs()
  create_directories(fs, [REMOTE_SAMPLE_DIR.get()])
  remote_dir = REMOTE_SAMPLE_DIR.get()

  sample_user = install_sample_user()

  # Copy examples binaries
  for name in os.listdir(LOCAL_SAMPLE_DIR.get()):
    local_dir = fs.join(LOCAL_SAMPLE_DIR.get(), name)
    remote_data_dir = fs.join(remote_dir, name)
    LOG.info(_('Copying examples %(local_dir)s to %(remote_data_dir)s\n') % {
        'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
    fs.do_as_user(sample_user.username, fs.copyFromLocal, local_dir, remote_data_dir)

  # Copy sample data
  local_dir = paths.get_thirdparty_root("sample_data")
  remote_data_dir = fs.join(remote_dir, 'data')
  LOG.info(_('Copying data %(local_dir)s to %(remote_data_dir)s\n') % {
      'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
  fs.do_as_user(sample_user.username, fs.copyFromLocal, local_dir, remote_data_dir)

  # Initialize doc2, whether editor script or link
  doc2 = None

  if IS_HUE_4.get():
    # Install editor pig script without doc1 link
    LOG.info("Using Hue 4, will install pig editor sample.")
    doc2 = self.install_pig_script(sample_user)
  else:
    # Install old pig script fixture
    LOG.info("Using Hue 3, will install pig script fixture.")
    with transaction.atomic():
      management.call_command('loaddata', 'initial_pig_examples.json', verbosity=2, commit=False)
    Document.objects.sync()

  if USE_NEW_EDITOR.get():
    # Get or create sample user directories
    LOG.info("Creating sample user directories.")
    home_dir = Directory.objects.get_home_directory(sample_user)
    examples_dir, created = Directory.objects.get_or_create(
      parent_directory=home_dir,
      owner=sample_user,
      name=Document2.EXAMPLES_DIR)

    if not IS_HUE_4.get():
      try:
        # Don't overwrite
        doc = Document.objects.get(object_id=1100713)
        doc2 = Document2.objects.get(owner=sample_user, name=doc.name, type='link-pigscript')
      except Document.DoesNotExist:
        # FIX: Logger.warn is a deprecated alias; use Logger.warning.
        LOG.warning('Sample pig script document not found.')
      except Document2.DoesNotExist:
        # `doc` is always bound here: Document.objects.get succeeded before
        # the Document2 lookup raised.
        if doc.content_object:
          data = doc.content_object.dict
          data.update({'content_type': doc.content_type.model, 'object_id': doc.object_id})
          data = json.dumps(data)

          doc2 = Document2.objects.create(
            owner=sample_user,
            parent_directory=examples_dir,
            name=doc.name,
            type='link-pigscript',
            description=doc.description,
            data=data)
          LOG.info('Successfully installed sample link to pig script: %s' % (doc2.name,))

    # If document exists but has been trashed, recover from Trash
    if doc2 and doc2.parent_directory != examples_dir:
      doc2.parent_directory = examples_dir
      doc2.save()

    # Share with default group
    examples_dir.share(sample_user, Document2Permission.READ_PERM, groups=[get_default_user_group()])
def handle_noargs(self, **options):
  """Install the Oozie examples for Hue 3 or Hue 4.

  Uploads example binaries and sample data to HDFS as the sample user and
  ensures the examples directory exists. With the v2 editor enabled it
  loads the v2 fixtures, then either installs the Hue 4 editor samples or
  links the old Job Designs (new editor), and shares the examples with the
  default group. On Hue 3 the legacy examples are installed; the Document
  index is synced last.
  """
  self.user = install_sample_user()
  self.fs = cluster.get_hdfs()

  LOG.info(_("Creating sample directory '%s' in HDFS") % REMOTE_SAMPLE_DIR.get())
  create_directories(self.fs, [REMOTE_SAMPLE_DIR.get()])
  remote_dir = REMOTE_SAMPLE_DIR.get()

  # Copy examples binaries
  for name in os.listdir(LOCAL_SAMPLE_DIR.get()):
    local_dir = self.fs.join(LOCAL_SAMPLE_DIR.get(), name)
    remote_data_dir = self.fs.join(remote_dir, name)
    LOG.info(_('Copying examples %(local_dir)s to %(remote_data_dir)s\n') % {
        'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
    self.fs.do_as_user(self.user.username, self.fs.copyFromLocal, local_dir, remote_data_dir)

  # Copy sample data
  local_dir = LOCAL_SAMPLE_DATA_DIR.get()
  remote_data_dir = self.fs.join(remote_dir, 'data')
  LOG.info(_('Copying data %(local_dir)s to %(remote_data_dir)s\n') % {
      'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
  self.fs.do_as_user(self.user.username, self.fs.copyFromLocal, local_dir, remote_data_dir)

  # Get or create sample user directories
  home_dir = Directory.objects.get_home_directory(self.user)
  examples_dir, created = Directory.objects.get_or_create(
    parent_directory=home_dir,
    owner=self.user,
    name=Document2.EXAMPLES_DIR
  )

  # Load jobs
  LOG.info(_("Installing examples..."))

  if ENABLE_V2.get():
    management.call_command('loaddata', 'initial_oozie_examples.json', verbosity=2)

    if IS_HUE_4.get():
      # Install editor oozie examples without doc1 link
      LOG.info("Using Hue 4, will install oozie editor samples.")
      example_jobs = []
      example_jobs.append(self._install_mapreduce_example())
      example_jobs.append(self._install_java_example())
      example_jobs.append(self._install_spark_example())
      example_jobs.append(self._install_pyspark_example())

      # If documents exist but have been trashed, recover from Trash
      for doc in example_jobs:
        if doc is not None and doc.parent_directory != examples_dir:
          doc.parent_directory = examples_dir
          doc.save()
    elif USE_NEW_EDITOR.get():
      # Install as link-workflow doc2 to old Job Designs
      docs = Document.objects.get_docs(self.user, Workflow).filter(owner=self.user)
      for doc in docs:
        if doc.content_object:
          data = doc.content_object.data_dict
          data.update({'content_type': doc.content_type.model, 'object_id': doc.object_id})
          data = json.dumps(data)

          # Don't overwrite
          doc2, created = Document2.objects.get_or_create(
            owner=self.user,
            parent_directory=examples_dir,
            name=doc.name,
            type='link-workflow',
            description=doc.description,
            data=data
          )
          LOG.info('Successfully installed sample link to jobsub: %s' % (doc2.name,))

    # Share oozie examples with default group
    oozie_examples = Document2.objects.filter(
      type__in=['oozie-workflow2', 'oozie-coordinator2', 'oozie-bundle2'],
      owner=self.user,
      parent_directory=None
    )
    oozie_examples.update(parent_directory=examples_dir)
    examples_dir.share(self.user, Document2Permission.READ_PERM, groups=[get_default_user_group()])

  if not IS_HUE_4.get():
    # Hue 3: install the legacy (doc1) examples.
    self.install_examples()

  Document.objects.sync()
def handle_noargs(self, **options):
  """Install the Oozie examples.

  Creates the sample user, copies example binaries and sample data to HDFS
  and, when the v2 editor is enabled, loads the v2 fixtures, links the
  sample user's old Job Designs into the examples directory (new editor
  only) and shares the directory with the default group. Finally installs
  the remaining examples and syncs the Document index.
  """
  self.user = install_sample_user()
  self.fs = cluster.get_hdfs()

  LOG.info(_("Creating sample directory '%s' in HDFS") % REMOTE_SAMPLE_DIR.get())
  create_directories(self.fs, [REMOTE_SAMPLE_DIR.get()])
  remote_dir = REMOTE_SAMPLE_DIR.get()

  # Copy examples binaries
  for name in os.listdir(LOCAL_SAMPLE_DIR.get()):
    local_dir = self.fs.join(LOCAL_SAMPLE_DIR.get(), name)
    remote_data_dir = self.fs.join(remote_dir, name)
    LOG.info(_('Copying examples %(local_dir)s to %(remote_data_dir)s\n') % {
        'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
    self.fs.do_as_user(self.fs.DEFAULT_USER, self.fs.copyFromLocal, local_dir, remote_data_dir)

  # Copy sample data
  local_dir = LOCAL_SAMPLE_DATA_DIR.get()
  remote_data_dir = self.fs.join(remote_dir, 'data')
  LOG.info(_('Copying data %(local_dir)s to %(remote_data_dir)s\n') % {
      'local_dir': local_dir, 'remote_data_dir': remote_data_dir})
  self.fs.do_as_user(self.fs.DEFAULT_USER, self.fs.copyFromLocal, local_dir, remote_data_dir)

  # Load jobs
  LOG.info(_("Installing examples..."))

  if ENABLE_V2.get():
    management.call_command('loaddata', 'initial_oozie_examples.json', verbosity=2)

    # Get or create sample user directories
    home_dir = Directory.objects.get_home_directory(self.user)
    examples_dir, created = Directory.objects.get_or_create(
      parent_directory=home_dir,
      owner=self.user,
      name=Document2.EXAMPLES_DIR)

    if USE_NEW_EDITOR.get():
      # Link each of the sample user's old Job Designs into the new editor.
      docs = Document.objects.get_docs(self.user, Workflow).filter(owner=self.user)
      for doc in docs:
        if doc.content_object:
          data = doc.content_object.data_dict
          data.update({'content_type': doc.content_type.model, 'object_id': doc.object_id})
          data = json.dumps(data)

          doc2 = Document2.objects.create(
            owner=self.user,
            parent_directory=examples_dir,
            name=doc.name,
            type='link-workflow',
            description=doc.description,
            data=data)

          LOG.info('Successfully installed sample link to jobsub: %s' % (doc2.name, ))

    # Share oozie examples with default group
    # NOTE(review): only documents without a parent directory are adopted.
    oozie_examples = Document2.objects.filter(
      type__in=['oozie-workflow2', 'oozie-coordinator2', 'oozie-bundle2'],
      owner=self.user,
      parent_directory=None)
    oozie_examples.update(parent_directory=examples_dir)
    examples_dir.share(self.user, Document2Permission.READ_PERM, groups=[get_default_user_group()])

  self.install_examples()

  Document.objects.sync()