Example #1
  def __init__(self, fs, path, mode="w", block_size=None):
    self.fs = fs
    self.closed = False
    assert mode == "w"
    extra_confs = []
    if block_size:
      extra_confs.append("-Ddfs.block.size=%d" % block_size)
    self.subprocess_cmd = [self.fs.hadoop_bin_path,
                           "jar",
                           hadoop.conf.SUDO_SHELL_JAR.get(),
                           self.fs.user,
                           "-Dfs.default.name=" + self.fs.uri] + \
                           extra_confs + \
                           ["-put", "-", encode_fs_path(path)]

    # Child-process environment from make_utf8_env() (UTF-8 locale settings).
    self.subprocess_env = i18n.make_utf8_env()

    if 'HADOOP_CLASSPATH' in self.subprocess_env:
      self.subprocess_env['HADOOP_CLASSPATH'] += ':' + hadoop.conf.HADOOP_EXTRA_CLASSPATH_STRING.get()
    else:
      self.subprocess_env['HADOOP_CLASSPATH'] = hadoop.conf.HADOOP_EXTRA_CLASSPATH_STRING.get()

    if hadoop.conf.HADOOP_CONF_DIR.get():
      self.subprocess_env['HADOOP_CONF_DIR'] = hadoop.conf.HADOOP_CONF_DIR.get()

    self.path = path
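    # Launch the hadoop process; everything written to its stdin is
    # streamed into the target HDFS file.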
    self.putter = subprocess.Popen(self.subprocess_cmd,
                                   stdin=subprocess.PIPE,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   close_fds=True,
                                   env=self.subprocess_env,
                                   bufsize=WRITE_BUFFER_SIZE)
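
This first snippet only shows __init__. A minimal companion sketch for the rest of the writer, assuming the surrounding class keeps the fields set above; the method names write/close and the IOError message format are assumptions, not part of the original code.

  def write(self, data):
    # "-put -" tells the hadoop command to read from stdin, so writing to
    # the subprocess pipe streams data straight into the HDFS file.
    self.putter.stdin.write(data)

  def close(self):
    if self.closed:
      return
    self.closed = True
    # Closing stdin signals EOF; communicate() waits for the upload to finish.
    stdout, stderr = self.putter.communicate()
    if self.putter.returncode != 0:
      raise IOError("Failed to write %s: %s" % (self.path, stderr))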
Example #2
  def __init__(self, fs, path, mode="w", block_size=None):
    self.fs = fs
    self.closed = False
    assert mode == "w"
    extra_confs = []
    if block_size:
      extra_confs.append("-Ddfs.block.size=%d" % block_size)
    # Older-style invocation: plain "hadoop dfs" with the deprecated
    # hadoop.job.ugi property instead of the sudo-shell jar used above.
    self.subprocess_cmd = [self.fs.hadoop_bin_path,
                           "dfs",
                           "-Dfs.default.name=" + self.fs._get_hdfs_base(),
                           "-Dhadoop.job.ugi=" + self.fs.ugi] + \
                           extra_confs + \
                           ["-put", "-", encode_fs_path(path)]
    self.path = path
    self.putter = subprocess.Popen(self.subprocess_cmd,
                                   stdin=subprocess.PIPE,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   close_fds=True,
                                   env=i18n.make_utf8_env(),
                                   bufsize=WRITE_BUFFER_SIZE)
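
Both variants stream data through "-put -" on stdin; Example #1 runs the upload as self.fs.user via the sudo-shell jar, while Example #2 uses the older plain "hadoop dfs" invocation with the deprecated hadoop.job.ugi property. A hedged usage sketch follows; the class name HdfsFileWriter and the fs handle are placeholders, since the original snippets do not name the class.

  # Hypothetical usage: stream bytes into HDFS with a 128 MB block size.
  writer = HdfsFileWriter(fs, "/user/demo/part-00000", mode="w",
                          block_size=128 * 1024 * 1024)
  writer.write("hello hdfs\n")
  writer.close()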