Example 1
 def test_default_cwd(self):
     try:
         cmd = Shell('')
         out = cmd.run('bash --version')
         out = cmd.run('bash', '--version')
     except Exception as e:
         self.fail('Exception should not have been raised. ' + str(e))
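
Judging from the other examples in this collection, Shell.run returns a two-element sequence with stdout at [0] and stderr at [1]. A minimal usage sketch under that assumption (the Shell import path is not shown in the source):

# Sketch, assuming run() returns a (stdout, stderr) pair as the
# out[0]/out[1] indexing in the examples below suggests.
cmd = Shell('/tmp')
out = cmd.run('bash --version')
if len(out[1]) == 0:      # empty stderr: the command succeeded
    print(out[0])         # stdout carries the bash version banner
else:
    print('Command failed: ' + str(out[1]))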
Example 2
def create_demo_kafka_topic():
    '''Creates a Kafka topic for the demo if it doesn't already exist.

    The caveat in using this is that Kafka must be installed on the same
    machine as the demo, and thus the same machine as Ambari as well. The
    function will try to start the Kafka service through Ambari, and once the
    service is started it will use the Kafka topics script to create the
    topic.

    The name for the topic is specified in ``global.conf``.

    Args:
        N/A

    Returns:
        bool: True if the creation is successful. False otherwise.
    '''
    conf = config.read_config('global.conf')
    am_conf = conf['AMBARI']
    amc = Ambari(am_conf['username'], am_conf['password'], am_conf['proto'],
                 am_conf['server'], am_conf['port'])

    logger.info('Starting Kafka Broker')

    if amc.service_action('Sandbox', 'KAFKA', 'START'):
        sh = Shell()
        topics_script = conf['DEMO']['kafka_topics_script']
        zk = conf['DEMO']['zk_connection']
        topic_name = conf['DEMO']['kafka_topic_name']
        logger.info('Attempting to create new Kafka Topic')
        out = sh.run(topics_script + ' --create --zookeeper ' + zk +
                     ' --replication-factor 1 --partitions 1 --topic ' +
                     topic_name)
        logger.debug(str(out))
        # An empty stderr means the topic creation succeeded.
        return len(out[1]) == 0
    return False
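
A short usage sketch for create_demo_kafka_topic, assuming it is called from the same module (the call site below is illustrative, not from the project):

# Hedged sketch of a call site.
if create_demo_kafka_topic():
    logger.info('Demo Kafka topic is ready')
else:
    logger.error('Could not create demo Kafka topic; check Ambari and Kafka')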
Example 3
 def test_simple_run(self):
     try:
         cmd = Shell('/tmp')
         out = cmd.run('bash --version')
         out = cmd.run('bash', '--version')
     except Exception as e:
         self.fail('Exception should not have been raised. ' + str(e))
Example 4
def get_kafka_topics():
    '''List the Kafka topics on the current installation.

    Requires that Kafka is installed on the same machine and that Ambari is up
    and running. Will start the Kafka service and use the Kafka scripts to
    list all of the topics.

    Args:
        N/A

    Returns:
        list: On success, the list of topic names (the script output split on
            newlines). On failure, a two-element list where [0] is an empty
            string and [1] is an error message.
    '''
    conf = config.read_config('global.conf')
    am_conf = conf['AMBARI']
    amc = Ambari(am_conf['username'], am_conf['password'], am_conf['proto'],
                 am_conf['server'], am_conf['port'])

    logger.info('Starting Kafka Broker')

    if amc.service_action('Sandbox', 'KAFKA', 'START'):
        sh = Shell()
        topics_script = conf['DEMO']['kafka_topics_script']
        zk = conf['DEMO']['zk_connection']
        logger.info('Attempting to list Kafka topics')
        out = sh.run(topics_script + ' --list --zookeeper ' + zk)

        if len(out[1]) == 0:
            topics = out[0]
            topics = topics.strip().split('\n')
            logger.info('Kafka topics output: ' + str(topics))
            return topics

    return ['', 'Unable to get topics. Kafka Broker did not start or the topics script reported an error']
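
One plausible way to combine the two helpers, checking for an existing topic before creating it (the topic name 'demo_topic' is hypothetical; the real name comes from conf['DEMO']['kafka_topic_name']):

# Sketch only. On failure get_kafka_topics() returns ['', <error>], so the
# membership test below is still safe, just false for any real topic name.
topics = get_kafka_topics()
if 'demo_topic' not in topics:  # hypothetical topic name
    create_demo_kafka_topic()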
Example 5
def kerberize():
    '''Kerberize the cluster using a script. Untested. Can take 10-15 minutes.

    This utilizes a script found at
    https://github.com/crazyadmins/useful-scripts/tree/master/ambari

    If you're running this on a cluster, check
    ``configuration/kerberos/ambari.props`` to make sure the proper values are
    present in the file, or else the script will fail.

    Args:
        N/A

    Returns:
        N/A
    '''
    script = config.get_path('kerberos/setup_kerberos.sh')
    sh = Shell()
    sh.run('bash ' + script)
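
kerberize discards the script's output. A hypothetical variant that captures and logs it, mirroring the logger.debug(str(out)) pattern used by the Kafka helpers above (this is a sketch, not the project's code):

def kerberize_logged():
    # Sketch: same behavior as kerberize() but logs the script output.
    script = config.get_path('kerberos/setup_kerberos.sh')
    sh = Shell()
    out = sh.run('bash ' + script)
    logger.debug('Kerberize output: ' + str(out))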
Example 6
    def export_hdfs(self, data):
        '''Write out data from the generator to a file **in CSV format in HDFS**.

        The file to write to is found in ``global.conf``. Header lines are not
        written to the file. All data is appended to a single file.

        When a new data generator starts, the file is essentially 'wiped out',
        so make sure to copy the data elsewhere before stopping/restarting the
        generator.

        Args:
            data (dict): A record from the generator. It is written out as a
                line of CSV for easier ingestion into other places like Hive
                or Spark.

        Returns:
            N/A
        '''

        self.hdfs_data_pool.append(data)
        if len(self.hdfs_data_pool) > self.data_pool_size:
            # The header is built from sorted keys but intentionally never
            # written (see docstring); it documents the column order.
            header = ', '.join(sorted(self.hdfs_data_pool[0].keys()))
            # Serialize each record as one CSV line, keys in sorted order so
            # columns stay aligned across records.
            lines = '\n'.join(
                ', '.join(str(v[k]) for k in sorted(v.keys()))
                for v in self.hdfs_data_pool)
            # Unescape first so already-escaped quotes aren't double-escaped,
            # then escape every quote so the bash command below doesn't break.
            # (The original '\"' literals were no-ops: '\"' == '"' in Python.)
            lines = lines.replace('\\"', '"')
            lines = lines.replace('"', '\\"')
            self.hdfs_data_pool = []
            hdfs_file = self.export_hdfs_file
            bash = Shell()
            hdfs_cmd = 'hdfs dfs -appendToFile - ' + hdfs_file
            echo_cmd = 'echo "%s"' % (lines)
            cmd = ' | '.join([echo_cmd, hdfs_cmd])
            output = bash.run(cmd)
            logger.debug('HDFS Append Output: ' + str(output))
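
The unescape-then-escape pair makes the quoting idempotent: pre-escaped quotes are first normalized to plain quotes, then every quote is escaped exactly once. A standalone sketch of that pattern:

def escape_for_bash(text):
    # Sketch of the idempotent escaping used above, outside the class.
    text = text.replace('\\"', '"')   # normalize: strip existing escapes
    return text.replace('"', '\\"')   # escape every quote exactly once

assert escape_for_bash('say "hi"') == escape_for_bash('say \\"hi\\"')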
Example 7
 def test_default_cwd(self):
     try:
         cmd = Shell()
     except IOError as e:
         self.fail('No-argument Shell constructor should not raise IOError: ' + str(e))
Example 8
 def test_existing_directory(self):
     try:
         # Actual Directory
         cmd = Shell('/tmp')
     except IOError as e:
         self.fail('Valid path should pass here')
Example 9
 def test_nonexistent_cwd(self):
     try:
         cmd = Shell('/missing/directory')
         self.fail('Should raise IOError on setting nonexistent directory')
     except IOError as e:
         return
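
Taken together, the tests imply a Shell contract: a no-argument or empty-string constructor falls back to a default working directory, a valid path is accepted, and a nonexistent path raises IOError. A minimal sketch of a class satisfying that contract (inferred from the tests, not the library's actual implementation):

import os
import subprocess

class ShellSketch(object):
    '''Illustrative stand-in for Shell, inferred from the tests above.'''

    def __init__(self, cwd=''):
        if not cwd:
            cwd = os.getcwd()          # default/empty -> current directory
        if not os.path.isdir(cwd):
            raise IOError('No such directory: ' + cwd)
        self.cwd = cwd

    def run(self, cmd, *args):
        # Accepts both 'bash --version' and ('bash', '--version') forms,
        # returning a (stdout, stderr) pair as the other examples expect.
        parts = cmd.split() + list(args)
        proc = subprocess.Popen(parts, cwd=self.cwd,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        out, err = proc.communicate()
        return (out.decode(), err.decode())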