def setUp(self):
    super(TestJobManager, self).setUp()
    p.patch_minidom_writexml()
    self.override_config('plugins', ['fake'])
    pb.setup_plugins()
    castellan.validate_config()
    ds_manager.setup_data_sources()
def setUp(self):
    super(TestPigWorkflowCreator, self).setUp()
    p.patch_minidom_writexml()
    self.prepare = {"delete": ["delete_dir_1", "delete_dir_2"],
                    "mkdir": ["mkdir_1"]}
    self.job_xml = "job_xml.xml"
    self.configuration = {"conf_param_1": "conf_value_1",
                          "conf_param_2": "conf_value_3"}
    self.files = ["file1", "file2"]
    self.archives = ["arch1"]
    self.streaming = {"mapper": "/usr/bin/cat",
                      "reducer": "/usr/bin/wc"}
def setUp(self):
    p.patch_minidom_writexml()
    self.prepare = {'delete': ['delete_dir_1', 'delete_dir_2'],
                    'mkdir': ['mkdir_1']}
    self.job_xml = 'job_xml.xml'
    self.configuration = {'conf_param_1': 'conf_value_1',
                          'conf_param_2': 'conf_value_3'}
    self.files = ['file1', 'file2']
    self.archives = ['arch1']
    self.streaming = {'mapper': '/usr/bin/cat',
                      'reducer': '/usr/bin/wc'}
def setUp(self):
    p.patch_minidom_writexml()
    self.prepare = {
        'delete': ['delete_dir_1', 'delete_dir_2'],
        'mkdir': ['mkdir_1']
    }
    self.job_xml = 'job_xml.xml'
    self.configuration = {
        'conf_param_1': 'conf_value_1',
        'conf_param_2': 'conf_value_3'
    }
    self.files = ['file1', 'file2']
    self.archives = ['arch1']
    self.streaming = {'mapper': '/usr/bin/cat', 'reducer': '/usr/bin/wc'}
def setUp(self):
    p.patch_minidom_writexml()
def setUp(self):
    super(XMLUtilsTestCase, self).setUp()
    p.patch_minidom_writexml()
def setUp(self):
    super(TestJobManager, self).setUp()
    p.patch_minidom_writexml()
    pb.setup_plugins()
def setUp(self):
    super(TestJobManager, self).setUp()
    p.patch_minidom_writexml()
    pb.setup_plugins()
    castellan.validate_config()
def setUp(self):
    super(MinidomPatchesTest, self).setUp()
    patches.patch_minidom_writexml()
from oslo.config import cfg  # needed for the cfg.StrOpt options declared below

from sahara.middleware import log_exchange
from sahara.openstack.common import log
from sahara.plugins import base as plugins_base
from sahara.service import api as service_api
from sahara.service import periodic
from sahara.utils import api as api_utils
from sahara.utils import patches
from sahara.utils import remote

LOG = log.getLogger(__name__)

# Patch minidom's writexml to avoid excess whitespace in the generated XML
# configuration files, which breaks Hadoop.
patches.patch_minidom_writexml()

opts = [
    cfg.StrOpt('os_auth_protocol',
               default='http',
               help='Protocol used to access OpenStack Identity service.'),
    cfg.StrOpt('os_auth_host',
               default='127.0.0.1',
               help='IP or hostname of machine on which OpenStack Identity '
                    'service is located.'),
    cfg.StrOpt('os_auth_port',
               default='5000',
               help='Port of OpenStack Identity service.'),
    cfg.StrOpt('os_admin_username',
               default='admin',
               help='This OpenStack user is used to verify provided tokens. '
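# --- Hedged sketch, not the actual sahara.utils.patches source -----------
# The snippets here only show the call sites of patch_minidom_writexml().
# The helper itself is assumed to monkey-patch xml.dom.minidom.Element.writexml
# so that an element with a single text child is written on one line, keeping
# stray indentation out of the values that end up in Hadoop's *-site.xml
# files.  The names below (_writexml_single_line_text,
# patch_minidom_writexml_sketch) are illustrative; the approach mirrors the
# well-known fix for CPython issue 4147 and relies on minidom's private
# _write_data helper for attribute escaping.
import xml.dom.minidom as minidom


def _writexml_single_line_text(self, writer, indent="", addindent="", newl=""):
    """writexml variant that never indents inside text-only elements."""
    writer.write(indent + "<" + self.tagName)
    attrs = self._get_attributes()
    for a_name in sorted(attrs.keys()):
        writer.write(' %s="' % a_name)
        minidom._write_data(writer, attrs[a_name].value)
        writer.write('"')
    if not self.childNodes:
        writer.write("/>%s" % newl)
        return
    if (len(self.childNodes) == 1 and
            self.childNodes[0].nodeType == minidom.Node.TEXT_NODE):
        # <name>value</name> -- no newline/indent wrapped around the value
        writer.write(">")
        self.childNodes[0].writexml(writer, "", "", "")
        writer.write("</%s>%s" % (self.tagName, newl))
    else:
        writer.write(">%s" % newl)
        for node in self.childNodes:
            node.writexml(writer, indent + addindent, addindent, newl)
        writer.write("%s</%s>%s" % (indent, self.tagName, newl))


def patch_minidom_writexml_sketch():
    # Monkey-patch applies to every minidom document serialized afterwards,
    # which is why the call sites above run it once, as early as possible.
    minidom.Element.writexml = _writexml_single_line_text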
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from sahara.utils import patches
patches.patch_minidom_writexml()

import os
import sys

# If ../sahara/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
possible_topdir = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
                                                os.pardir,
                                                os.pardir))
if os.path.exists(os.path.join(possible_topdir, 'sahara', '__init__.py')):
    sys.path.insert(0, possible_topdir)
def setUp(self):
    patches.patch_minidom_writexml()
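# --- Hypothetical usage example; the test class and method names are
# --- illustrative, not taken from sahara --------------------------------
# Shows what a setUp() like the ones above buys a test: once
# patch_minidom_writexml() has run, pretty-printing does not inject
# indentation inside text-only elements such as Hadoop <name>/<value> tags.
import unittest
import xml.dom.minidom as minidom

from sahara.utils import patches


class MinidomPatchExampleTest(unittest.TestCase):
    def setUp(self):
        super(MinidomPatchExampleTest, self).setUp()
        patches.patch_minidom_writexml()

    def test_text_value_stays_on_one_line(self):
        doc = minidom.parseString(
            "<property><name>fs.defaultFS</name></property>")
        pretty = doc.toprettyxml(indent="  ")
        # With the patch applied, the element value is not wrapped in the
        # newline/indent pair that pretty-printing would otherwise add.
        self.assertIn("<name>fs.defaultFS</name>", pretty)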