Example #1
def test_home(self):
    old_home = pydoop.hadoop_home()
    if os.path.isdir(old_home):
        # point HADOOP_HOME at a symlink of the real installation and
        # check that pydoop picks up the new location after a reload
        new_home = os.path.join(self.wd, 'hadoop')
        os.symlink(old_home, new_home)
        os.environ['HADOOP_HOME'] = new_home
        reload(pydoop)
        self.assertEqual(pydoop.hadoop_home(), new_home)
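Note that `reload` is a builtin on Python 2 only; a Python 3 port of this test would swap in `importlib.reload` (a one-line sketch, not from the original suite):

    import importlib
    import pydoop

    importlib.reload(pydoop)  # Python 3 replacement for the builtin reload()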
Example #2
"""

import sys
import os
import random
import uuid
import tempfile
import imp
import unittest
import shutil
import warnings

import pydoop


_HADOOP_HOME = pydoop.hadoop_home()
_HADOOP_CONF_DIR = pydoop.hadoop_conf()
_RANDOM_DATA_SIZE = 32
_DEFAULT_HDFS_HOST = "localhost"
_DEFAULT_HDFS_PORT = 8020 if pydoop.is_cloudera() else 9000
_DEFAULT_BYTES_PER_CHECKSUM = 512
HDFS_HOST = os.getenv("HDFS_HOST", _DEFAULT_HDFS_HOST)
HDFS_PORT = int(os.getenv("HDFS_PORT", _DEFAULT_HDFS_PORT))  # env values are strings


def _get_special_chr():
    """
    This is used to check unicode support.  On some systems, depending
    on locale settings, we won't be able to use non-ASCII characters
    when interacting with system calls.  Since in such cases it
    doesn't really make sense to run these tests we set UNI_CHR to a
    plain ASCII character.
    """
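The listing is truncated here and the body of `_get_special_chr` is not shown. A minimal sketch of how such a probe might look (the euro-sign candidate and the ASCII fallback are assumptions, not pydoop's actual code):

    import sys

    def _get_special_chr():
        # probe whether the filesystem encoding can represent a
        # non-ASCII character; fall back to plain ASCII if it can't
        candidate = u'\u20ac'  # euro sign (assumed test character)
        try:
            candidate.encode(sys.getfilesystemencoding())
            return candidate
        except UnicodeEncodeError:
            return u'x'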
Example #3
def test_home(self):
    # if HADOOP_HOME is set, pydoop.hadoop_home() must honor it
    if 'HADOOP_HOME' in os.environ:
        self.assertEqual(os.environ['HADOOP_HOME'], pydoop.hadoop_home())
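Both tests exercise the same contract: `HADOOP_HOME` in the environment wins, otherwise `pydoop.hadoop_home()` falls back to autodetection. A minimal sketch of that resolution order (the candidate paths are illustrative assumptions, not pydoop's actual search list):

    import os

    def guess_hadoop_home(fallback=None):
        # the environment variable takes precedence
        home = os.environ.get('HADOOP_HOME')
        if home and os.path.isdir(home):
            return home
        # illustrative guesses; pydoop's real search list may differ
        for candidate in ('/usr/lib/hadoop', '/opt/hadoop'):
            if os.path.isdir(candidate):
                return candidate
        return fallback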
Example #4
File: setup.py  Project: kmatzen/pydoop
import os

from distutils.extension import Extension
from distutils.command.build_ext import build_ext
from distutils.command.build_py import build_py
from distutils.command.clean import clean
from distutils.errors import DistutilsSetupError
from distutils import log

import pydoop
import pydoop.hadoop_utils as hu


try:
  JAVA_HOME = os.environ["JAVA_HOME"]
except KeyError:
  raise RuntimeError("java home not found, try setting JAVA_HOME")
HADOOP_HOME = pydoop.hadoop_home(fallback=None)
HADOOP_VERSION_INFO = pydoop.hadoop_version_info()
BOOST_PYTHON = os.getenv("BOOST_PYTHON", "boost_python")
PIPES_SRC = ["src/%s.cpp" % n for n in (
  "pipes",
  "pipes_context",
  "pipes_test_support",
  "pipes_serial_utils",
  "exceptions",
  "pipes_input_split",
  )]
HDFS_SRC = ["src/%s.cpp" % n for n in (
  "hdfs_fs",
  "hdfs_file",
  "hdfs_common",
  )]
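These source lists would typically feed distutils Extension objects further down in setup.py. A minimal sketch of that wiring (the extension names and the extra `hdfs` library are assumptions, not necessarily what pydoop's setup.py actually does):

    PIPES_EXT = Extension(
      "pydoop._pipes",            # extension name is an assumption
      sources=PIPES_SRC,
      libraries=[BOOST_PYTHON],   # linked against Boost.Python
      )
    HDFS_EXT = Extension(
      "pydoop._hdfs",             # extension name is an assumption
      sources=HDFS_SRC,
      libraries=[BOOST_PYTHON, "hdfs"],  # libhdfs from the Hadoop installation
      )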