Example #1
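Starts Hue's PseudoHdfs4 test cluster, prints its key connection settings, and drops into an embedded IPython shell so the running cluster can be inspected before it is stopped. The imports are assumptions: PseudoHdfs4 is taken to live in Hue's hadoop.pseudo_hdfs4 test-support module.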
import logging

# Assumed import: PseudoHdfs4 is Hue's single-process pseudo-distributed
# HDFS/MapReduce test cluster, expected in hadoop.pseudo_hdfs4.
from hadoop.pseudo_hdfs4 import PseudoHdfs4


def main():
  logging.basicConfig(level=logging.DEBUG)

  # Boot the pseudo-distributed cluster in-process.
  cluster = PseudoHdfs4()
  cluster.start()

  print("%s running" % (cluster,))
  print("fs.default.name=%s" % (cluster.fs_default_name,))
  print("dfs.http.address=%s" % (cluster.dfs_http_address,))
  print("jobtracker.thrift.port=%s" % (cluster.jt_thrift_port,))
  print("mapred.job.tracker=%s" % (cluster.mapred_job_tracker,))

  # Poke at the running cluster interactively before stopping it.
  # (IPython.Shell.IPShellEmbed was removed in IPython 0.11; embed() is
  # the modern replacement.)
  from IPython import embed
  embed()

  cluster.stop()


if __name__ == "__main__":
  main()
Example #2
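A memoized accessor for a shared MiniHadoopCluster: the first call starts the cluster and registers its shutdown via atexit, and every later call returns the same instance. The original fragment omitted the enclosing def line and the imports; the function name shared_cluster() and the hadoop.mini_cluster import path below are assumptions.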
  """
  Manages _shared_cluster.
  """
  global _shared_cluster
  if _shared_cluster is None:
    _shared_cluster = MiniHadoopCluster()
    _shared_cluster.start()
    atexit.register(_shared_cluster.stop)
  return _shared_cluster

if __name__ == '__main__':
  """
  It's poor form to write tests for tests (the world-wide stack
  overflow exception), so this merely tries the code.
  """
  logging.basicConfig(level=logging.DEBUG)
  import desktop
  desktop.lib.conf.initialize([hadoop.conf])

  if True:
    cluster = MiniHadoopCluster(num_datanodes=5, num_tasktrackers=5)
    cluster.start()
    print cluster.namenode_port
    print cluster.jobtracker_port
    print cluster.config.get("dfs.thrift.address")
    cluster.dump_ini(sys.stdout)

    from IPython.Shell import IPShellEmbed
    IPShellEmbed()()
    cluster.stop()
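A minimal sketch of how such an accessor is typically consumed from tests, assuming the module above can be imported as mini_cluster_util (a hypothetical name): each test calls shared_cluster() and none of them stops the cluster itself, since atexit owns the teardown.

import mini_cluster_util


def test_namenode_is_reachable():
  # Every test grabs the same shared cluster instance.
  cluster = mini_cluster_util.shared_cluster()
  assert cluster.namenode_port is not None


def test_thrift_address_is_configured():
  cluster = mini_cluster_util.shared_cluster()  # same instance as above
  assert cluster.config.get("dfs.thrift.address")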
Example #3
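A cleanup closure fragment: it runs every callback accumulated in finish, then stops the cluster. Both finish and cluster are free variables that must come from an enclosing scope, so the fragment is not runnable on its own.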
def s():
  # Run every registered finish callback, then shut the cluster down.
  # finish and cluster are closed over from the enclosing scope.
  for f in finish:
    f()
  cluster.stop()
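For context, a minimal sketch of the kind of enclosing scope that makes the closure meaningful; the start_cluster_with_cleanups() wrapper and the contents of finish are hypothetical:

import logging

from hadoop.pseudo_hdfs4 import PseudoHdfs4  # assumed, as in Example #1


def start_cluster_with_cleanups():
  # Hypothetical wrapper: starts a cluster, accumulates cleanup
  # callbacks in finish, and returns the closure from the example
  # as a single stop handle.
  cluster = PseudoHdfs4()
  cluster.start()

  finish = [
    lambda: logging.info("shutting down %s" % (cluster,)),
  ]

  def s():
    for f in finish:
      f()
    cluster.stop()

  return cluster, s

Callers keep the returned s handle (or register it with atexit, as in Example #2) and call it exactly once when they are done with the cluster.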