コード例 #1
0
ファイル: server.py プロジェクト: dixneuf19/fuzzy-octo-disco
    def FindFaces(self, request, context):
        """Locate faces in the picture referenced by ``request``.

        Validates the request, opens the image, runs face detection and
        saves one cropped image per detected face.  Every failure mode is
        reported through the ``status`` field of the response instead of
        raising, so the gRPC call itself always completes.

        :param request: FindFacesRequest carrying a ``picture.path``.
        :param context: gRPC servicer context (unused here).
        :return: a ``find_faces_pb2.FindFacesResponse``.
        """
        logger.info("Received a request: \n%s", request)

        if request.picture is None or request.picture.path == "":
            # logger.warn() is a deprecated alias of warning() since Python 3.3.
            logger.warning("You need at least picture object with a path")
            return find_faces_pb2.FindFacesResponse(status="NO_PATH_ERROR")

        picture_path = Path(request.picture.path)

        if not picture_path.exists():
            logger.warning("The path '%s' doesnt exists." % picture_path)
            return find_faces_pb2.FindFacesResponse(status="INVALID_PATH_ERROR")

        if picture_path.ext.lower() not in (".jpg", ".jpeg", ".png", ".gif"):
            logger.warning("The type of %s isn't supported" % picture_path)
            return find_faces_pb2.FindFacesResponse(status="INVALID_FILE_EXTENSION_ERROR")

        # Finally we are good to go
        picture = Picture(picture_path.abspath())
        try:
            picture.open()
        except OSError:
            logger.warning("Error when opening the file %s" % picture_path.abspath())
            return find_faces_pb2.FindFacesResponse(status="OPEN_FILE_ERROR")

        try:
            picture.img2raw()
            find_faces(picture)
        except Exception as error:
            logger.debug(error)
            logger.warning("Error when searching the faces")
            return find_faces_pb2.FindFacesResponse(status="IMAGE_PROCESSING_ERROR")

        if len(picture.face_location) == 0:
            logger.info("No face found on this picture")
            return find_faces_pb2.FindFacesResponse(status="NO_FACE_FOUND")

        logger.info("Found %d face(s)." % len(picture.face_location))
        faces_paths = []
        for i in range(len(picture.face_location)):
            try:
                # Work on a copy so each crop starts from the full image.
                face = picture.clone()
                face.face_crop(margin=FACE_CROP_MARGIN, whichface=i)
                out_path = make_save_path(request.picture.path, index=i)
                face.save(out_path)
                faces_paths.append(out_path)
            except Exception as error:
                # Best effort: one bad face must not abort the others.
                logger.debug(error)
                logger.warning("failed to clone, crop or save face %d" % i)

        if len(faces_paths) == 0:
            logger.warning("Failed to handle ALL faces")
            return find_faces_pb2.FindFacesResponse(status="FAILED_ALL_FACES")

        faces_pictures = [find_faces_pb2.Picture(path=face_path, index=i)
                          for i, face_path in enumerate(faces_paths)]
        return find_faces_pb2.FindFacesResponse(status="SUCCESS",
                                                nb_faces=len(faces_pictures),
                                                faces=faces_pictures)
コード例 #2
0
ファイル: zWorkspace.py プロジェクト: AceyT/zTextureReducer
 def add_file(self, file: Path):
     """Insert *file* into the workspace listing if it is an existing PNG."""
     if not (file.exists() and file.ext == ".png"):
         return
     image = Image.open(file.abspath())
     dimensions = image.size
     image.close()
     self.files.insert(
         "",
         "end",
         text=file.basename(),
         values=("{}x{}".format(*dimensions), "{}".format(file.abspath())),
     )
コード例 #3
0
def minimal_config(
    name,
    executable,
    directory,
    *,
    config_ext="cfg",
    log_ext="log",
    out_ext="out",
    error_ext="err",
    use_absolute_paths=True,
    keep_env=False,
):
    """Construct Minimal Job Configuration.

    :param name: base name shared by the config/log/out/err files.
    :param executable: program the job runs (always stored as absolute).
    :param directory: directory that receives the generated files.
    :param use_absolute_paths: when True, store absolute paths in the config.
    :param keep_env: forwarded to the config's ``getenv`` flag.
    :return: the populated ``JobConfig``.
    """
    directory = Path(directory)

    # PEP 8 (E731): a named def instead of assigned lambdas.
    def build_path(ext):
        # One file per role, all named "<name>.<ext>" inside *directory*.
        candidate = directory / f"{name}.{ext}"
        return candidate.abspath() if use_absolute_paths else candidate

    config = JobConfig(path=build_path(config_ext))
    # NOTE(review): assigning property() to an *instance* attribute does not
    # create a live property (that only works on classes); callers will see
    # the property object itself.  Kept as-is to preserve behavior — confirm
    # whether JobConfig handles this specially.
    config.name = property(lambda s: name)
    with config.write_mode as config:
        config.executable = Path(executable).abspath()
        config.log = build_path(log_ext)
        config.getenv = keep_env
        config.initialdir = directory.abspath() if use_absolute_paths else directory
        config.output = build_path(out_ext)
        config.error = build_path(error_ext)
    return config
コード例 #4
0
    def is_publishable(self, path: Path) -> bool:
        """Return True unless *path* matches one of the configured ignore globs.

        Globs are resolved relative to ``self.postsdir``; comparison is done
        on absolute paths.  A set is used so the membership test is O(1)
        instead of a linear scan over every ignored file.
        """
        files_to_ignore = {
            Path(match).abspath()
            for glob_pattern in self.ignore_globs
            for match in glob(self.postsdir / glob_pattern, recursive=True)
        }
        return path.abspath() not in files_to_ignore
コード例 #5
0
class LoadFilePathStrategy(object):
    """Base class for all strategies that load configuration from a
    file.
    """
    def __init__(self, file_path, must_exist=False):
        # expand() resolves "~" and environment variables where possible;
        # abspath() then gives the canonical location used in messages.
        self._file_path = Path(file_path).expand()
        self.file_path = self._file_path.abspath()
        # When True, a missing/unresolvable file raises instead of being skipped.
        self.must_exist = must_exist

    def _process_file_path(self, config):
        # Subclass hook: parse the file and merge its contents into *config*.
        raise NotImplementedError('Subclasses must implement this method.')

    def process(self, config=None):
        """Load the file into *config* (a dict) and return it.

        Returns *config* unchanged when the file cannot be located and
        ``must_exist`` is False; raises otherwise.
        """
        if config is None:
            config = {}

        # A path still starting with "~" after expand() means the home
        # directory could not be resolved from the environment.
        if self.file_path.startswith('~'):
            if self.must_exist:
                raise Exception('Unable to load "%s". Environment home not set.' % self.file_path)

            return config

        if not self._file_path.exists():
            if self.must_exist:
                raise Exception('Config file "' + self.file_path + '" doesn\'t exist.')

            return config

        return self._process_file_path(config)
コード例 #6
0
def is_directory_excluded(directory, excluded):
    """Tell whether *directory* appears in *excluded*, matching either its
    normalized or its absolute form."""
    normalized = Path(directory).normpath()
    absolute = normalized.abspath()
    return (normalized in excluded) or (absolute in excluded)
コード例 #7
0
def _download_bitday_images():
    """Download the BitDay wallpaper pack into ~/Pictures/bitday.

    Side effects: may pip-install ``mega.py`` at runtime, changes the
    current working directory, and replaces any existing ``bitday`` folder.
    """
    try:
        from mega import Mega
    except ImportError:
        # HACK: installs the missing dependency on the fly via the shell.
        os.system('pip install --user mega.py')
        from mega import Mega
    import zipfile
    # All subsequent relative paths are resolved from ~/Pictures.
    os.chdir(Path(os.environ['HOME']) / 'Pictures')
    instance = Mega()
    instance.login_anonymous()
    instance.download_url(
        'https://mega.nz/file/L55EHRoJ#kqbzKJUlQtIiZj4QZFl5Gcp7ebu_l2CR-pdL56gthOM'
    )
    dst_folder = Path('bitday')
    # Start from a clean destination folder.
    if dst_folder.exists():
        dst_folder.rmtree()
    dst_folder.mkdir()
    with zipfile.ZipFile('./BitDay-2-1920x1080.zip', 'r') as f:
        f.extractall(dst_folder)
    # Drop the macOS metadata folder shipped inside the archive.
    (dst_folder / '__MACOSX').rmtree()
    images_folder: Path = dst_folder / '1920x1080'
    # Flatten: move the images up and remove the now-empty subfolder.
    for img in images_folder.files('*.png'):
        img.move(dst_folder)
    images_folder.rmtree()
    print('Put bitday images into', dst_folder.abspath())
コード例 #8
0
ファイル: zOptions.py プロジェクト: AceyT/zTextureReducer
 def _pick_dir(self):
     """Ask the user for an export directory and show it in the entry widget."""
     directory = tk.filedialog.askdirectory(
         initialdir="./", title="Choose a directory for where to export")
     dirpath = Path(directory)
     if dirpath.exists():
         # Replace the current entry content with the chosen absolute path.
         self.dir_entry.delete(0, 'end')
         # NOTE(review): tk.Entry.insert expects a numeric or 'end' index;
         # '' as the index looks suspicious — confirm it behaves as 0 here.
         self.dir_entry.insert('', dirpath.abspath())
コード例 #9
0
    def __init__(self, name=''):
        """Prepare a simulation working directory and its input/output files.

        :param name: directory to (re)use for the simulation; when empty, a
            fresh ``simu_*`` temporary directory is created under ``.``.
        """
        # Remember where we were started from, as an absolute path.
        self.call_dir = Path(os.getcwd()).abspath()

        if not name:
            # TODO: Add the date time rather than temp string
            name = tempfile.mkdtemp(dir='.', prefix='simu_')

        dirname = Path(name)
        if not dirname.exists():
            dirname.mkdir()
        self.dirname = dirname.abspath()
        self.name = str(self.dirname.name)

        self.copy_input()
        self.generate_output()
        # Locate the WALTer L-system source shipped with the package data.
        data = walter_data()
        if not (data / 'WALTer.lpy').exists():
            raise ImportError('could not locate walter.lpy source code')
        self.walter = str(data / 'WALTer.lpy')

        # Seed the output-selection CSV from package data on first run.
        csv = 'which_output_files.csv'
        if not (self.dirname / csv).exists():
            (walter_data() / csv).copy(self.dirname)
        self._outputs = {}  # needed for first call to which_output

        # Restore the parameter index table from a previous run, if any.
        itable = 'index-table.json'
        if (self.dirname / itable).exists():
            self.itable = OrderedDict(self.read_itable(self.dirname / itable))
        else:
            self.itable = OrderedDict()
        self._combi_params = {}
コード例 #10
0
 def on_image_select_update(self, img):
     """Remember the selected image's absolute path (None when the file is
     missing) and refresh the preview widget."""
     selected = Path(img)
     self.image_path = selected.abspath() if selected.exists() else None
     self._update_preview()
コード例 #11
0
class LoadFilePathStrategy(object):
    """Base class for all strategies that load configuration from a
    file.
    """

    def __init__(self, file_path, must_exist=False):
        expanded = Path(file_path).expand()
        self._file_path = expanded
        self.file_path = expanded.abspath()
        self.must_exist = must_exist

    def _process_file_path(self, config):
        raise NotImplementedError('Subclasses must implement this method.')

    def process(self, config=None):
        """Merge the configuration file into *config* (a dict) and return it."""
        config = {} if config is None else config

        # A leading "~" survives expand() only when the home directory
        # could not be resolved from the environment.
        unresolved_home = self.file_path.startswith('~')
        if unresolved_home:
            if self.must_exist:
                raise Exception(
                    'Unable to load "%s". Environment home not set.' %
                    self.file_path)
            return config

        if not self._file_path.exists():
            if self.must_exist:
                raise Exception('Config file "' + self.file_path +
                                '" doesn\'t exist.')
            return config

        return self._process_file_path(config)
コード例 #12
0
def _assert_files(idf_file, weather_file, working_dir,
                  idd_file, out_dir):
    """Ensure the files and directory are here and convert them as path.Path

    This function will coerce the string as a path.py Path and assert if
    mandatory files or directory are missing.

    :raises IOError: when any required file or directory is missing.
    :return: tuple ``(idf_file, weather_file, working_dir, idd_file, out_dir)``
        with every element coerced to a Path.
    """

    def get_idd(eplus_directory, idd_file):
        # Resolution order: $EPLUS_DIR/Energy+.idd when no explicit file is
        # given, then ./Energy+.idd, otherwise the explicit idd_file.
        if not idd_file and eplus_directory:
            return Path(eplus_directory) / "Energy+.idd"
        if not idd_file:
            return Path("Energy+.idd")
        return Path(idd_file)

    idd_file = get_idd(os.environ.get('EPLUS_DIR', None),
                       idd_file)
    logger.debug('looking for idd file (%s)' % idd_file.abspath())

    if not idd_file.isfile():
        raise IOError("IDD file not found")

    working_dir = Path(working_dir)
    logger.debug(
        'checking if working directory (%s) exist' %
        working_dir.abspath())
    if not working_dir.isdir():
        raise IOError("Working directory does not exist")

    out_dir = Path(out_dir)
    logger.debug(
        'checking if output directory (%s) exist' %
        out_dir.abspath())
    if not out_dir.isdir():
        raise IOError("Output directory does not exist")

    weather_file = Path(weather_file)
    logger.debug('looking for weather file (%s)' % weather_file.abspath())
    if not weather_file.isfile():
        raise IOError("Weather file not found")

    idf_file = Path(idf_file)
    logger.debug('looking for idf file (%s)' % idf_file.abspath())
    if not idf_file.isfile():
        raise IOError("IDF file not found")

    return idf_file, weather_file, working_dir, idd_file, out_dir
コード例 #13
0
def gen_data_folders(PARENT_DIRECTORY):
    """Collect the absolute paths of every directory under PARENT_DIRECTORY
    that contains at least one pcap-session file."""
    base = path(PARENT_DIRECTORY)
    folders = []
    for root, _dirs, files in os.walk(base):
        if any(is_pcap_session(join(root, name)) for name in files):
            folders.append(path.abspath(root))
    return folders
コード例 #14
0
    def __get_work_dir(self, yaml_node):
        """Return the absolute directory containing the YAML source of *yaml_node*.

        Prefers the file name recorded on the node's start mark and falls
        back to the end mark when the start mark is unusable.
        """
        try:
            work_dir = Path(yaml_node.start_mark.name).dirname()
        except Exception:
            # The bound exception variable was unused; the fallback itself
            # is kept deliberately broad to match the original behavior.
            work_dir = Path(yaml_node.end_mark.name).dirname()

        return work_dir.abspath()
コード例 #15
0
ファイル: saveload.py プロジェクト: Exodus111/Projects
 def load_file(self, name):
     """Load the JSON save file *name* from ./save and return its contents.

     Returns None (and prints a message) when the file is not present.
     """
     folder = Path("./save")
     print(folder.abspath())
     # Compare against basenames: folder.files() yields paths that include
     # the "./save" prefix, so a bare file name could never match them.
     savfiles = [f.basename() for f in folder.files("*.sav")]
     if name in savfiles:
         # Open the file inside the save folder, not relative to the cwd.
         with open(folder / name, "r") as savefile:
             return json.loads(savefile.read())
     else:
         print("File not Found")
         return None
コード例 #16
0
def dump(path, save, dbformat, debug):
    """Walk *path* and write one rendered record per file-system object to *save*.

    The output file is written via ``atomic_write``; on Ctrl-C the partial
    data is flushed before announcing the close.
    """
    path = Path(path)
    save = Path(save)

    click.echo('Path {}!'.format(path.abspath()))
    click.echo('Save Path {}'.format(save.abspath()))

    search = Search(path)

    with atomic_write(save, overwrite=True) as file_handler:
        try:
            for fso in search.walk():

                # Progress trace only; '\r' keeps it on one console line.
                if debug:
                    print('\rprocessing {}\r'.format(fso.path))

                file_handler.write(render(fso, dbformat))
        except KeyboardInterrupt:
            # Keep whatever was already rendered when the user aborts.
            file_handler.flush()
            click.echo('Closing database')
コード例 #17
0
def main():
    """Clean up each file name given on the command line and print the results."""
    parser = argparse.ArgumentParser(description='cleanup a filename, \
            replace . with spaces, delete some common shit at the end of a filename\
            ex: "actual.file.name-1080p.BluRay.x264-GECKOS[rarbg]" -> "actual filename"')
    parser.add_argument('filenames', nargs='+', help='list of files and folders to cleanup')
    args = parser.parse_args()
    for raw_name in args.filenames:
        original = Path(raw_name.strip())
        cleaned = rename(original.basename(), original.isdir())
        renamed = original.rename(Path.joinpath(original.dirname(), cleaned))
        print(renamed.abspath(), end='')
コード例 #18
0
def build(ctx, builder="html", options=""):
    """Build docs with sphinx-build"""
    sourcedir = ctx.config.sphinx.sourcedir
    destdir = (Path(ctx.config.sphinx.destdir or "build") / builder).abspath()
    with cd(sourcedir):
        # sphinx-build is invoked from inside sourcedir, so the destination
        # is expressed relative to it.
        relative_dest = Path(".").relpathto(destdir)
        command = "sphinx-build {opts} -b {builder} {sourcedir} {destdir}".format(
            builder=builder, sourcedir=".", destdir=relative_dest, opts=options)
        ctx.run(command)
コード例 #19
0
 def export_queue(self, queue_getter, export_path):
     """Process every queued image and write the results under *export_path*."""
     if not queue_getter:
         return
     queue = queue_getter()
     if len(queue) == 0:
         raise Exception("Export queue empty\nNo item in queue")
     options = convert_options(**self.options)
     for item in queue:
         source_path = Path(item[0])
         # Destination keeps the queue's target name plus the source extension.
         target_path = export_path / item[1] + source_path.ext
         image = Image.open(source_path.abspath())
         image = process_image(image, **options)
         image.save(target_path.abspath())
     messagebox.showinfo("Done", "Finish exporting queue")
コード例 #20
0
def test_destination_prefix():
    """Each module must be rendered to ``{out}/{module|lower}.txt``."""
    system = FileSystem.parse(inputPath)
    out = Path('tests/out')
    out.rmtree_p()
    out.makedirs_p()
    generator = Generator(search_path='tests/templates')
    for module in system.modules:
        dst_template = '{{out}}/{{module|lower}}.txt'
        ctx = {'out': out.abspath(), 'module': module}
        generator.write(dst_template, 'module.txt', ctx)
        path = generator.apply(dst_template, ctx)
        # "== True" on a boolean is redundant; assert the truthiness directly.
        assert Path(path).exists()
    out.rmtree_p()
コード例 #21
0
def save(im, out_path, out_tag, max_size, file_path, extension):
    """Save *im*, lowering JPEG quality until the file fits under *max_size* bytes.

    The result is written to ``<out_path><out_tag>_<namebase><extension>``.
    If quality 40 is reached and the file is still too large, it is removed.
    """
    # Coerce once up front: max_size may arrive as a string (the original
    # called int(max_size) at each comparison), and using it raw in the
    # "%d" log format below would raise a TypeError.
    max_size = int(max_size)
    out_path, out_tag = Path(out_path), Path(out_tag)
    out_path = out_path + out_tag + "_" + file_path.namebase + extension
    quality = 90
    im.save(out_path, quality=quality)
    while out_path.size > max_size and quality >= 40:
        quality -= 5
        logger.info("The file is too heavy, trying to save with quality : %f" % quality)
        im.save(out_path, quality=quality)

    if out_path.size > max_size:
        logger.info("Can't manage to reduce the size under %d bytes." % max_size)
        out_path.remove()
    else:
        logger.info("The file is succesfully saved at %s" % out_path.abspath())
コード例 #22
0
ファイル: zOptions.py プロジェクト: AceyT/zTextureReducer
 def _on_export(self):
     """Validate the directory typed in the entry widget and run the export
     callback; any failure is shown to the user in an error dialog."""
     try:
         export_path = Path(self.dir_entry.get())
         if export_path.exists() and self.export_callback:
             self.export_callback(export_path)
         elif not export_path.exists():
             err = "Invalid export path : doesn't exists\n" \
             "Given path : [{}]".format(export_path.abspath())
             raise Exception(err)
     except Exception as err:
         # Broad on purpose: any failure (bad path or a crash inside the
         # export callback) is surfaced through the dialog box below.
         err_msg = "Something went wrong\n" \
             "Look at the console and report the issue if there is anything\n\n" \
             "--- Message caught ---\n" \
             "{}".format(err)
         messagebox.showerror("Error", err_msg)
         return
コード例 #23
0
ファイル: search.py プロジェクト: nasgoncalves/fsops
class Search(object):
    """Walk a directory tree and yield an Object for every readable entry."""

    def __init__(self, path):
        self.path = Path(path)

    def walk(self):
        """Yield directories first, then files, for each visited root."""
        for root, dirnames, filenames in os.walk(self.path.abspath()):
            base = Path(root)

            for name in dirnames:
                candidate = base / name
                if candidate.access(os.R_OK):
                    yield Object.from_path(candidate)

            for name in filenames:
                candidate = base / name
                if candidate.access(os.R_OK):
                    yield Object.from_path(candidate)
コード例 #24
0
ファイル: model.py プロジェクト: jenisys/cmake-build
    def __init__(self,
                 ctx,
                 project_dir=None,
                 project_build_dir=None,
                 build_config=None,
                 cmake_generator=None):
        """Bind a CMake project model to *ctx* and load its stored configuration.

        :param ctx: invoke context used to run commands and read config.
        :param project_dir: project root (defaults to the current directory).
        :param project_build_dir: build directory; derived from the build
            config's naming schema when not given.
        :param build_config: BuildConfig to apply (a "default" one is built
            when omitted).
        :param cmake_generator: overrides the stored/default cmake generator.
        """
        if build_config is None:
            cmake_build_type = self.CMAKE_BUILD_TYPE_DEFAULT
            build_config = BuildConfig("default",
                                       cmake_build_type=cmake_build_type)

        project_dir = Path(project_dir or ".")
        project_dir = project_dir.abspath()
        if not project_build_dir:
            build_dir = make_build_dir_from_schema(ctx.config,
                                                   build_config.name)
            project_build_dir = project_dir / build_dir

        config_name = build_config.name
        cmake_generator_default = build_config.cmake_generator
        cmake_toolchain = build_config.cmake_toolchain

        self.ctx = ctx
        self.project_dir = project_dir
        self.project_build_dir = Path(project_build_dir).abspath()
        self.config = None
        self._build_config = build_config
        self._stored_config = None
        self._dirty = True
        self._placeholder_map = {}
        # load_config() populates self.config/_stored_config from disk;
        # the stored generator is then preferred unless explicitly overridden.
        self.load_config()
        self.update_from_initial_config(build_config)
        self.config.name = config_name
        if not cmake_generator:
            # -- INHERIT: Stored cmake_generator, if it is not overridden.
            cmake_generator = self._stored_config.cmake_generator or \
                              cmake_generator_default
        self.config.cmake_generator = cmake_generator
        self.config.cmake_toolchain = cmake_toolchain
        self.cmake_install_prefix = self.replace_placeholders(
            self.cmake_install_prefix)
        self.cmake_config_overrides_cmake_build_type = \
            self.CMAKE_CONFIG_OVERRIDES_CMAKE_BUILD_TYPE
        # MAYBE:
        # self.cmake_defines = self.replace_placeholders(self.cmake_defines)
        self._dirty = True
コード例 #25
0
def document(source, dest=None, interactive=True):
    """
    Import document from ``source``. Procedure will download/cache the file,
    create a directory to store metadata.
    """

    assert ' ' not in source

    if dest is not None:
        # Raw string: '\-' in a plain literal is an invalid escape sequence
        # (DeprecationWarning since Python 3.6, later a SyntaxWarning).
        assert re.match(r'^[a-zA-Z0-9\-_.]+$', dest), \
            '%r is not a valid name for a skid document.' % dest
        dest = Path(dest)

    source = Path(source)

    print(colors.blue % 'adding %s' % source)

    # store the absolute path for local files.
    if source.exists():
        source = source.abspath()

    exists = False
    try:
        cached = cache_document(source, dest=dest)

    except SkidFileExists as e:
        print('[%s] document already cached. using existing notes.' %
              colors.yellow % 'warn')
        cached = e.filename
        exists = True

    except SkidDownloadError as e:
        print('[%s] Failed to download (%s).' % (colors.red % 'error', e))
        return

    d = Document(cached)

    if not exists:
        # Only initialize notes/metadata for documents seen the first time.
        new_document(d, source, cached)

    if interactive:
        d.edit_notes()

    print("Don't forget to 'skid update'")

    return d
コード例 #26
0
ファイル: FileMonitor.py プロジェクト: hellmonky/project
 def deleteFileWithFullPath(self, deletePath):
     """List the files and sub-directories directly under *deletePath*.

     NOTE(review): despite its name, this method only prints directory
     contents — no deletion is visible in this code; confirm intent.
     """
     # Wrap the target path.
     d = Path(deletePath)
     # Check whether the path can be accessed; only proceed when reachable.
     path_state = d.access(os.F_OK)
     if path_state is True:
         print(d.abspath())
         # Count the files directly inside the directory.
         num_files = len(d.files())
         print("当前共有文件:"+str(num_files)+"个")
         # Print every file.
         for file in d.files():
             print(file)
         # Count the immediate sub-directories.
         num_dirs = len(d.dirs())
         print("当前共有文件夹:"+str(num_dirs)+"个")
         # Print every sub-directory.
         for current_dir in d.dirs():
             print(current_dir)
コード例 #27
0
 def deleteFileWithFullPath(self, deletePath):
     """Print the contents (files, then sub-directories) of *deletePath*."""
     target = Path(deletePath)
     # Only proceed when the path is reachable on the file system.
     if target.access(os.F_OK) is not True:
         return
     print(target.abspath())
     contained_files = target.files()
     print("当前共有文件:" + str(len(contained_files)) + "个")
     for entry in contained_files:
         print(entry)
     subdirectories = target.dirs()
     print("当前共有文件夹:" + str(len(subdirectories)) + "个")
     for entry in subdirectories:
         print(entry)
コード例 #28
0
ファイル: csv2fits.py プロジェクト: cbdc/veritas_lc
def csv2fits(csv_file, fits_file=None):
    """Convert an ECSV table to FITS; by default the output goes to
    ``../pub/<namebase>.fits`` next to the input file."""
    from astropy.table import Table
    from path import Path

    source = Path(csv_file)
    if fits_file is None:
        source_dir = source.abspath().dirname()
        pub_dir = source_dir.joinpath('../pub')
        target = (pub_dir.joinpath(source.namebase) + '.fits').normpath()
    else:
        target = Path(fits_file).abspath()

    table = Table.read(source, format='ascii.ecsv')

    add_radec2header(table)
    flatten_header(table)

    table.write(target, format='fits', overwrite=True)
    return table
コード例 #29
0
def make_excluded(excluded, config_dir=None, workdir=None):
    """Normalize exclusion entries into a set of comparable paths.

    Absolute entries are kept normalized; relative entries (described
    relative to *config_dir*) are stored both relative to *workdir* and
    as absolute paths.
    """
    workdir = Path(workdir or Path.getcwd())
    config_dir = Path(config_dir or workdir)

    normalized = set()
    for entry in excluded:
        assert entry, "REQUIRE: non-empty"
        entry = Path(entry)
        if entry.isabs():
            normalized.add(entry.normpath())
        else:
            # -- RELATIVE PATH:
            # Described relative to config_dir;
            # recompute it relative to the current workdir.
            rebased = workdir.relpathto(Path(config_dir) / entry)
            normalized.add(rebased.normpath())
            normalized.add(rebased.abspath())
    return normalized
コード例 #30
0
def _update_deployment(cfy,
                       manager,
                       deployment_id,
                       tenant,
                       blueprint_path,
                       tmpdir,
                       skip_reinstall=False,
                       inputs=None):
    """Update *deployment_id* to a freshly named blueprint.

    When *inputs* is a non-empty mapping it is serialized to a JSON file
    under *tmpdir* and passed along to the update call.  ``manager`` is not
    referenced here — presumably kept for signature parity with sibling
    helpers.
    """
    if inputs:
        inputs_file = Path(tmpdir) / 'deployment_update_inputs.json'
        inputs_file.write_text(json.dumps(inputs))
        kwargs = {'inputs': inputs_file.abspath()}
    else:
        kwargs = {}
    # Module-level counter of how many updates this run has performed.
    global update_counter
    update_counter += 1
    cfy.deployments.update(deployment_id,
                           blueprint_id='b-{0}'.format(uuid.uuid4()),
                           blueprint_path=blueprint_path,
                           tenant_name=tenant,
                           skip_reinstall=skip_reinstall,
                           **kwargs)
コード例 #31
0
def _update_deployment(cfy,
                       manager,
                       deployment_id,
                       tenant,
                       blueprint_path,
                       tmpdir,
                       skip_reinstall=False,
                       inputs=None):
    """Run a deployment update, optionally passing *inputs* via a JSON file."""
    kwargs = {}
    if inputs:
        inputs_file = Path(tmpdir) / 'deployment_update_inputs.json'
        inputs_file.write_text(json.dumps(inputs))
        kwargs['inputs'] = inputs_file.abspath()
    global update_counter
    update_counter += 1
    cfy.deployments.update(deployment_id,
                           blueprint_id='b-{0}'.format(uuid.uuid4()),
                           blueprint_path=blueprint_path,
                           tenant_name=tenant,
                           skip_reinstall=skip_reinstall,
                           **kwargs)
コード例 #32
0
def index(script_root):
    """Scan *script_root* for fuzz YAML files and write the command index
    to ~/.fuzzconfig.yaml."""
    script_root_path = Path(script_root)
    # NOTE(review): the result of dirs() is discarded — presumably this is
    # meant to fail fast when script_root is not a directory; confirm.
    script_root_path.dirs()
    print("Indexing from script root:", script_root_path)
    all_cmds = []

    # Using the Path as a context manager — presumably chdirs into the
    # directory so the relative walk below starts from script_root.
    with script_root_path:
        for path in Path(".").walk():
            if path.fnmatch(FUZZ_YAML_FILE_NAME):
                all_cmds.extend(extract_cmds(path))

    print("Done indexing. Saving to ~/.fuzzconfig.yaml")

    with Path("~/.fuzzconfig.yaml").expanduser().open("w") as f:
        yaml.dump(
            {
                'script-root': str(script_root_path.abspath()),
                'commands': [serialize_command(c) for c in all_cmds]
            },
            f,
            default_flow_style=False)

    print("Index complete")
コード例 #33
0
import json
from mock import MagicMock, patch, call
from path import Path
import pytest
import sys

# Resolve the hooks/ directory next to this test module.  The original used
# Path('__file__') — a literal string, not the __file__ variable — which only
# pointed at the right place when the tests ran from the module's own
# directory.
d = Path(__file__).parent.abspath() / 'hooks'
sys.path.insert(0, d.abspath())

from lib.registrator import Registrator

class TestRegistrator():

    def setup_method(self, method):
        self.r = Registrator()

    def test_data_type(self):
        if type(self.r.data) is not dict:
            pytest.fail("Invalid type")

    @patch('json.loads')
    @patch('httplib.HTTPConnection')
    def test_register(self, httplibmock, jsonmock):
        result = self.r.register('foo', 80, '/v1beta1/test')

        httplibmock.assert_called_with('foo', 80)
        requestmock = httplibmock().request
        requestmock.assert_called_with(
                "POST", "/v1beta1/test",
                json.dumps(self.r.data),
                {"Content-type": "application/json",
コード例 #34
0
ファイル: test_task.py プロジェクト: nkeim/tasker
class TestTask(unittest.TestCase):
    """End-to-end tests for the tasker pipeline: task creation, storage,
    caching, locking and progress reporting, each running in a throwaway
    temporary directory."""
    def enter_dir(self, dirname):
        """Set up a sample pipeline in a directory.
        Tests depend on sample data defined here."""
        task = TaskerSubclass(dirname)
        # Totally arbitrary configuration "scheme"
        task.conf = dict(one='one_str', two=2.0, name=task.name)
        task.one_count = 0
        @task.create_task([], storage.JSON('one.json'))
        def one(tsk, ins):
            """Docstring: One"""
            task.one_count += 1
            assert len(ins) == 0
            return task.conf['one'] # Goes to JSON
        @task.create_task(one, [storage.JSON('two.json'), storage.JSON('2b.json')])
        def two(tsk, input):
            self.assertEqual(input, task.conf['one'])
            return task.conf['two'], {'twofloat': task.conf['two'],
                    'onestr': input, 'name': task.conf['name']}
        @task.create_task([one, two], storage.Pandas('three.h5'))
        def three(tsk, ins):
            assert ins[0] == task.conf['one']
            twofloat = ins[1][1]['twofloat']
            assert twofloat == task.conf['two']
            return pandas.Series([twofloat,])
        @task.create_task({'three': three, 'td': 'three_dummy'}, 'four')
        def four(tsk, ins):
            assert ins['three'][0] == task.conf['two'] # First row of Series
            self.assertEqual(ins['td'].basename(), 'three_dummy')
            assert len(ins['td'].split()[0])
            (task.p / 'four').touch()
            return 'dummy'
        return task
    def setUp(self):
        # Fresh temp dir per test so runs cannot interfere with each other.
        os.chdir(basedir)
        self.testdir = Path(tempfile.mkdtemp())
        self.task = self.enter_dir(self.testdir)
    def tearDown(self):
        os.chdir(basedir)
        self.testdir.rmtree()
    def test_name(self):
        assert self.task.one.__name__ == 'one'
    def test_prepare_data(self):
        # Make sure we know what to do with TaskUnit and FileBase instances
        self.task.three()
        assert isinstance(self.task.one.outs[0], storage.FileBase)
        with self.task:
            self.assertEqual(self.task.one._prepare_data(self.task.one._outs_as_given),
                    'one_str')
            self.assertEqual(task._nestmap(self.task.two._prepare_data,
                self.task.one), 'one_str')
            # Returned data structure mirrors that of input
            self.assertIsInstance(task._nestmap(self.task.three._prepare_data,
                self.task.three._ins_as_given), list)
    def test_2tasks(self):
        """Make sure up-to-date tasks do not rerun"""
        self.assertEqual(self.task.one(), 'one_str')
        self.assertEqual(self.task.two()[0], 2.0)
        self.assertEqual(self.task.one_count, 1) # two() should not re-run one()
    def test_pipeline(self):
        """Make sure full depth of task tree is traversed."""
        self.task.four() # Should be making one(), two(), etc.
        assert (self.testdir / 'one.json').exists()
    def test_load(self):
        self.task.two()
        self.task.two.load()
    def test_hash(self):
        assert len(set([self.task.one, self.task.one])) == 1
    def test_docstr(self):
        self.assertEqual(self.task.one.__doc__, 'Docstring: One')
    def test_clear(self):
        # Clearing a task's output must force it to run again on next call.
        assert self.task.one() == 'one_str'
        self.task.two()
        assert self.task.one_count == 1  # two() should not re-run one()
        self.task.one.clear()
        assert self.task.one_count == 1
        assert self.task.one() == 'one_str'  # Re-runs one()
        assert self.task.one_count == 2
    def test_report(self):
        self.assertSetEqual(set(self.task.four.report()),
                            set([self.task.one, self.task.two,
                                 self.task.three, self.task.four]))
    def test_clearall(self):
        self.task.three()
        self.assertSetEqual(set(self.task.three.report()), set())
        self.task.clear()
        self.assertSetEqual(set(self.task.three.report()),
                            set([self.task.one, self.task.two,
                                 self.task.three]))
    def test_twodirs(self):
        """Make sure separate task instances do not mix paths"""
        td2 = Path(tempfile.mkdtemp())
        try:
            task2 = self.enter_dir(td2)
            self.assertNotEqual(self.task.two()[1]['name'], task2.two()[1]['name'])
            self.assertNotEqual(self.task.two()[1]['name'], task2.two()[1]['name'])
        finally:
            os.chdir(basedir)
            td2.rmtree()

    def test_context(self):
        """Text chdir context behavior"""
        assert Path('.').abspath().basename() != self.testdir.abspath().basename()
        with self.task:
            assert Path('.').abspath().basename() == self.testdir.abspath().basename()

    def test_progress(self):
        # Exercises every way a task can report progress to a Monitor.
        @self.task.create_task([], ['progress.tag'])
        def exercise_progress(tsk, ins):
            mon = progress.Monitor([self.task.p])
            def readprog():
                assert (self.task.p / progress.DEFAULT_STATUS_FILE).exists()
                stat = mon.get_statuses()
                return stat.ix[0].to_dict()
            assert readprog()['status'] == 'working'
            assert 'elapsed_time' in readprog()
            for i in tsk.progress.tally(range(10)):
                assert type(i) is int
                stat = readprog()
                assert stat['status'] == 'working'
                assert stat['current'] == i + 1
                assert 'time_per' in stat
            for i in tsk.progress.tally(range(10), 10):
                assert type(i) is int
                stat = readprog()
                assert stat['total'] == 10
                assert stat['current'] == i + 1
                assert 'time_per' in stat
            for i in range(10):
                tsk.progress.working(i)
                stat = readprog()
                assert stat['status'] == 'working'
                assert stat['current'] == i + 1
                assert 'time_per' not in stat
            for i in range(10):
                tsk.progress.working(i, 10)
                stat = readprog()
                assert stat['current'] == i + 1
            for i in range(10):
                tsk.progress.working(i, 10, {'hi': 1.5})
                stat = readprog()
                assert stat['current'] == i + 1
                assert stat['hi'] == 1.5
        self.task.exercise_progress()

    def test_locking(self):
        # A running task must hold the lock; re-entry raises LockException.
        didrun = []
        @self.task.create_task([], ['dummy.tag'])
        def try_locking(tsk, ins):
            didrun.append(True)
            assert self.task.is_working()
            assert self.task.is_working(task='try_locking')
            assert not self.task.is_working(task='something_else')
            self.task.one.run() # Should be OK
            self.assertRaises(task.LockException, try_locking)
        assert not self.task.is_working()
        self.task.try_locking()
        assert didrun
        assert not self.task.is_working()
        self.task.unlock() # Just to try it
        self.task.unlock() # Just to try it again
        assert not self.task.is_working()

    def test_menu(self):
        self.task.menu()
        self.assertEqual(self.task.one(), 'one_str')
        # Check display of completed task
        self.task.menu()
コード例 #35
0
class CreateOrUpdateSiteConfigurationTest(TestCase):
    """
    Test for the create_or_update_site_configuration management command.

    The command locates or creates a Site (via --site-id or a domain
    positional argument) and creates or updates its SiteConfiguration from
    a JSON string (--configuration) or a JSON file (-f).
    """
    # Name of the management command under test.
    command = 'create_or_update_site_configuration'

    def setUp(self):
        """
        Prepare the fixture site id, its CLI-argument form, the JSON fixture
        path, and a sample configuration dict shared by the tests below.
        """
        super(CreateOrUpdateSiteConfigurationTest, self).setUp()
        self.site_id = 1
        self.site_id_arg = ['--site-id', str(self.site_id)]
        # NOTE(review): used later with .abspath(), so Path here appears to
        # be path.py's Path, not pathlib -- confirm against this file's imports.
        self.json_file_path = Path(__file__).parent / "fixtures/config1.json"
        self.input_configuration = {
            'FEATURE_FLAG': True,
            'SERVICE_URL': 'https://foo.bar',
            'ABC': 123,
        }

    @property
    def site(self):
        """
        Return the fixture site for this test class.
        """
        return Site.objects.get(id=self.site_id)

    def assert_site_configuration_does_not_exist(self):
        """
        Assert that the site configuration for the fixture site does not exist.
        """
        with self.assertRaises(SiteConfiguration.DoesNotExist):
            SiteConfiguration.objects.get(site=self.site)

    def get_site_configuration(self):
        """
        Return the site configuration for the fixture site.
        """
        return SiteConfiguration.objects.get(site=self.site)

    def create_fixture_site_configuration(self, enabled):
        """
        Create (or update) the fixture site's SiteConfiguration with known
        values, enabled/disabled according to ``enabled``.
        """
        SiteConfiguration.objects.update_or_create(site=self.site,
                                                   defaults={
                                                       'enabled': enabled,
                                                       'values': {
                                                           'ABC': 'abc',
                                                           'B': 'b'
                                                       }
                                                   })

    def test_command_no_args(self):
        """
        Verify the error on the command with no arguments.
        """
        with self.assertRaises(CommandError) as error:
            call_command(self.command)
        self.assertIn(
            'Error: one of the arguments --site-id domain is required',
            str(error.exception))

    def test_site_created_when_site_id_non_existent(self):
        """
        Verify that a new site is created when given a site ID that doesn't exist.
        """
        non_existent_site_id = 999
        with self.assertRaises(Site.DoesNotExist):
            Site.objects.get(id=non_existent_site_id)

        call_command(self.command, '--site-id', non_existent_site_id)
        # The lookup must now succeed (raises Site.DoesNotExist otherwise).
        Site.objects.get(id=non_existent_site_id)

    def test_site_created_when_domain_non_existent(self):
        """
        Verify that a new site is created when given a domain name that does not have an existing site.
        """
        domain = 'nonexistent.com'
        with self.assertRaises(Site.DoesNotExist):
            Site.objects.get(domain=domain)
        call_command(self.command, domain)
        # The lookup must now succeed (raises Site.DoesNotExist otherwise).
        Site.objects.get(domain=domain)

    def test_both_site_id_domain_given(self):
        """
        Verify that an error is thrown when both site_id and the domain name are provided.
        """
        with self.assertRaises(CommandError) as error:
            call_command(self.command, 'domain.com', '--site-id', '1')

        self.assertIn('not allowed with argument', str(error.exception))

    def test_site_configuration_created_when_non_existent(self):
        """
        Verify that a SiteConfiguration instance is created if it doesn't exist.
        """
        self.assert_site_configuration_does_not_exist()

        call_command(self.command, *self.site_id_arg)
        site_configuration = SiteConfiguration.objects.get(site=self.site)
        # With no flags/values given, the new configuration is empty and disabled.
        self.assertFalse(site_configuration.values)
        self.assertFalse(site_configuration.enabled)

    def test_both_enabled_disabled_flags(self):
        """
        Verify the error on providing both the --enabled and --disabled flags.
        """
        with self.assertRaises(CommandError) as error:
            call_command(self.command, '--enabled', '--disabled',
                         *self.site_id_arg)
        self.assertIn(
            'argument --disabled: not allowed with argument --enabled',
            str(error.exception))

    @ddt.data(('enabled', True), ('disabled', False))
    @ddt.unpack
    def test_site_configuration_enabled_disabled(self, flag, enabled):
        """
        Verify that the SiteConfiguration instance is enabled/disabled as per the flag used.
        """
        self.assert_site_configuration_does_not_exist()
        call_command(self.command, '--{}'.format(flag), *self.site_id_arg)
        site_configuration = SiteConfiguration.objects.get(site=self.site)
        self.assertFalse(site_configuration.values)
        self.assertEqual(enabled, site_configuration.enabled)

    def test_site_configuration_created_with_parameters(self):
        """
        Verify that a SiteConfiguration instance is created with the provided values if it does not exist.
        """
        self.assert_site_configuration_does_not_exist()
        call_command(self.command, '--configuration',
                     json.dumps(self.input_configuration), *self.site_id_arg)
        site_configuration = self.get_site_configuration()
        self.assertDictEqual(site_configuration.values,
                             self.input_configuration)

    def test_site_configuration_created_with_json_file_parameters(self):
        """
        Verify that a SiteConfiguration instance is created with the values
        from the given JSON file if it does not exist.
        """
        self.assert_site_configuration_does_not_exist()
        call_command(self.command, '-f', str(self.json_file_path.abspath()),
                     *self.site_id_arg)
        site_configuration = self.get_site_configuration()
        self.assertEqual(site_configuration.values, {'ABC': 123, 'XYZ': '789'})

    @ddt.data(True, False)
    def test_site_configuration_updated_with_parameters(self, enabled):
        """
        Verify that the existing parameters are updated when provided in the command.
        """
        self.create_fixture_site_configuration(enabled)
        call_command(self.command, '--configuration',
                     json.dumps(self.input_configuration), *self.site_id_arg)
        site_configuration = self.get_site_configuration()
        # New keys are merged in; 'ABC' is overwritten, 'B' is preserved.
        self.assertEqual(
            site_configuration.values, {
                'ABC': 123,
                'B': 'b',
                'FEATURE_FLAG': True,
                'SERVICE_URL': 'https://foo.bar'
            })
        # The enabled flag is left untouched when not specified.
        self.assertEqual(site_configuration.enabled, enabled)

    @ddt.data(True, False)
    def test_site_configuration_updated_from_json_file(self, enabled):
        """
        Verify that the existing parameters are updated when provided through a JSON file.
        """
        self.create_fixture_site_configuration(enabled)
        call_command(self.command, '-f', str(self.json_file_path.abspath()),
                     *self.site_id_arg)
        site_configuration = self.get_site_configuration()
        # Existing values are the base; the file's values are merged on top.
        expected_site_configuration = {'ABC': 'abc', 'B': 'b'}
        with codecs.open(self.json_file_path, encoding='utf-8') as f:
            expected_site_configuration.update(json.load(f))
        self.assertEqual(site_configuration.values,
                         expected_site_configuration)
        self.assertEqual(site_configuration.enabled, enabled)
コード例 #36
0
ファイル: geoselect.py プロジェクト: FreekKalter/geoselect
def main():
    """Command-line entry point: select photos taken near a location.

    The target location is either parsed from a decimal-degrees string such
    as "40.783068, -73.965350", or extracted from the EXIF GPS tags of a
    reference photo named on the command line.  Candidate files come from
    --path (or stdin when --path is absent); matches are printed and,
    optionally, copied to --copy-to.  Exits with status 1 on bad input.
    """
    desc = "A script to select photos from a set, based on geographical location. Either via a decimal\
            latitude/longitude point. Or a photo taken in the desired location, it will look for\
            exif standerized gps lat- and longitude tags."
    argparser = argparse.ArgumentParser(description=desc)
    argparser.add_argument(
        "location",
        type=str,
        default="",
        help='location given in decimal degrees like: "40.783068, -73.965350",\
                                  or a path to a photo with exif gps info',
    )
    argparser.add_argument(
        "--path",
        type=str,
        help="path to look for image files, if not set files\
                           will be taken from stdin",
    )
    argparser.add_argument(
        "--extentions",
        type=str,
        default="jpg,png,jpeg,tiff",
        help="comma separated list of extension to look for in PATH",
    )
    argparser.add_argument(
        "--copy-to", dest="copyto", type=str, default="", help="path where found photos should be copied"
    )
    argparser.add_argument(
        "--time-based",
        dest="time_based",
        action="store_true",
        help="also add photos wich themselfs dont have gps information, but are taken\
                                 in a short time before or after one that has (in the right location)",
    )
    argparser.add_argument(
        "--radius",
        type=int,
        default=1,
        help="radius of area (in kilometers) to select\
                           photos from",
    )
    argparser.add_argument("-V", "--version", action="version", version="%(prog)s v" + __version__)
    args = argparser.parse_args()
    args.location = args.location.strip()
    # Raw string: "\d"/"\s" in a plain literal are invalid escape sequences
    # (SyntaxWarning on Python 3.12+).
    m = re.match(r"(-?\d+(\.\d+)?)\s*,\s*(-?\d+(\.\d+)?)", args.location)
    location = dict()
    if m:
        location["lat"], location["long"] = float(m.group(1)), float(m.group(3))
    else:
        # Not a coordinate pair: treat the argument as a reference photo and
        # pull the location from its EXIF GPS tags.
        p = Path(args.location)
        if not p.exists():
            print('Photo given to extract location from: "' + p.abspath() + '" does not exist')
            sys.exit(1)
        with open(str(p.abspath()), "rb") as f:
            tags = exifread.process_file(f)
            if "GPS GPSLongitude" not in tags.keys() or "GPS GPSLatitude" not in tags.keys():
                print("Photo does not contain gps information")
                sys.exit(1)
            location["lat"] = convert_to_decimal(str(tags["GPS GPSLatitude"]))
            location["long"] = convert_to_decimal(str(tags["GPS GPSLongitude"]))
    if not location:
        print("Invalid location given")
        argparser.print_help()
        sys.exit(1)

    # A real list (not a one-shot map iterator), so it is safe to reuse.
    extensions = [ext.strip() for ext in args.extentions.split(",")]
    # Files to be processed are either taken from a path specified on the commandline,
    # or from stdinput. The build_dict functions takes an iterator to loop over the files.
    if args.path:
        p = Path(args.path)
        if not p.exists():
            print(args.path + " does not exist")
            sys.exit(1)
        else:
            images = []
            for e in extensions:
                images += p.files("*." + e)
            files_with_tags = build_dict(iter(images))
    else:

        def readline_generator():
            # Yield one Path per stdin line until EOF (empty read).
            while True:
                line = sys.stdin.readline()
                if not line:
                    break
                yield Path(line.strip())

        files_with_tags = build_dict(readline_generator())

    on_location = location_filter(files_with_tags, location, args.radius)
    if args.time_based:
        on_location.update(add_based_on_time(files_with_tags, on_location))

    if args.copyto != "" and not Path(args.copyto).exists():
        print(args.copyto + " does not exist")
        sys.exit(1)

    for f, tags in on_location.items():
        print(f)
        if args.copyto != "":
            Path(f).copy2(args.copyto)
コード例 #37
0
from mock import patch
from path import path
from path import Path
import pytest
import subprocess
import sys

# Add the hooks directory to the python path.
# BUGFIX: Path(__file__) (the variable, not the string literal '__file__')
# resolves relative to this test file; the quoted form built a literal
# "__file__" path, making hooks_dir depend on the current working directory.
hooks_dir = Path(__file__).parent.abspath() / 'hooks'
sys.path.insert(0, hooks_dir.abspath())
# Import the module to be tested.
import kubernetes_installer


def test_run():
    """ Test the run method both with valid commands and invalid commands. """
    ls = 'ls -l {0}/kubernetes_installer.py'.format(hooks_dir)
    output = kubernetes_installer.run(ls, False)
    assert output
    assert 'kubernetes_installer.py' in output
    output = kubernetes_installer.run(ls, True)
    assert output
    assert 'kubernetes_installer.py' in output

    invalid_directory = path('/not/a/real/directory')
    assert not invalid_directory.exists()
    invalid_command = 'ls {0}'.format(invalid_directory)
    with pytest.raises(subprocess.CalledProcessError) as error:
        kubernetes_installer.run(invalid_command)
        print(error)
    with pytest.raises(subprocess.CalledProcessError) as error:
コード例 #38
0
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from mock import patch, Mock, MagicMock
from path import Path
import pytest
import sys

# Munge the python path so we can find our hook code.
# BUGFIX: Path(__file__) (the variable, not the string literal '__file__')
# resolves relative to this test file; the quoted form built a literal
# "__file__" path, making d depend on the current working directory.
d = Path(__file__).parent.abspath() / 'hooks'
sys.path.insert(0, d.abspath())

# Import the modules from the hook
import install


class TestInstallHook():
    @patch('install.path')
    def test_update_rc_files(self, pmock):
        """
        Test happy path on updating env files. Assuming everything
        exists and is in place.
        """
        pmock.return_value.lines.return_value = ['line1', 'line2']
        install.update_rc_files(['test1', 'test2'])
        pmock.return_value.write_lines.assert_called_with(