Example #1
File: setup.py Project: jnpkrn/clufter
 def _pkg_prepare_install(self):
     build_lib = self.get_finalized_command('build_binary').build_lib
     # two-staged copy in case of built_files
     for filedef in self.built_files:
         src = src_orig = self.pkg_params[filedef['src']]
         src = path_basename(src_orig)
         assert src == src_orig, "built_files contains dirs"
         src = path_join(build_lib, src)
         dst = self.pkg_params[filedef['dst']]
         dst = path_join(build_lib, path_basename(dst))
         if src != dst:
             self._pkg_prepare_file(src, dst)
         self.pkg_params[filedef['src']] = dst
     icmd = self.distribution.get_command_obj('install', create=False)
     for filedef in (self.data_files + self.built_files):
         src = self.pkg_params[filedef['src']]
         dst = self.pkg_params[filedef['dst']]
         no_glob = all(c not in path_basename(src) for c in '?*')
         if dst.startswith(sys_prefix):
             dst = path_join(icmd.install_base, dst[len(sys_prefix)+1:])
         self.distribution.data_files.append((
             path_dirname(dst), [
                 path_join(
                     path_dirname(src),
                     path_basename(dst)
                 ),
             ]
         ) if no_glob else (
             dst,
             glob(src)
         ))
         if DEBUG:
             print(DBGPFX + "\tinstall data_files: %s"
                   % self.distribution.data_files)
Example #2
File: setup.py Project: jmartign/clufter
 def _pkg_prepare_install(self):
     build_lib = self.get_finalized_command('build_binary').build_lib
     # two-staged copy in case of built_files
     for filedef in self.built_files:
         src = src_orig = self.pkg_params[filedef['src']]
         src = path_basename(src_orig)
         assert src == src_orig, "built_files contains dirs"
         src = path_join(build_lib, src)
         dst = self.pkg_params[filedef['dst']]
         dst = path_join(build_lib, path_basename(dst))
         if src != dst:
             self._pkg_prepare_file(src, dst)
         self.pkg_params[filedef['src']] = dst
     icmd = self.distribution.get_command_obj('install', create=False)
     for filedef in (self.data_files + self.built_files):
         src = self.pkg_params[filedef['src']]
         dst = self.pkg_params[filedef['dst']]
         no_glob = all(c not in path_basename(src) for c in '?*')
         if dst.startswith(sys_prefix):
             dst = path_join(icmd.install_base,
                             dst[len(sys_prefix) + 1:])
         self.distribution.data_files.append((path_dirname(dst), [
             path_join(path_dirname(src), path_basename(dst)),
         ]) if no_glob else (dst, glob(src)))
         if DEBUG:
             print(
                 DBGPFX + "\tinstall data_files: %s" %
                 self.distribution.data_files)
Example #3
File: cdb_list.py Project: zWaR/wazuh
def get_lists(filename=None, offset=0, limit=common.database_limit, select=None, sort_by=None, sort_ascending=True,
              search_text=None, complementary_search=False, search_in_fields=None, relative_dirname=None):
    """Get CDB lists content.

    Parameters
    ----------
    filename : list
        Filenames to filter by.
    offset : int
        First item to return.
    limit : int
        Maximum number of items to return.
    select : list
        List of selected fields to return.
    sort_by : dict
        Fields to sort the items by. Format: {"fields":["field1","field2"],"order":"asc|desc"}
    sort_ascending : boolean
        Sort in ascending (true) or descending (false) order.
    search_text : str
        Find items with the specified string.
    complementary_search : bool
        If True, only results NOT containing `search_text` will be returned. If False, only results that contains
        `search_text` will be returned.
    search_in_fields : str
        Name of the field to search in for the `search_text`.
    relative_dirname : str
         Filter by relative dirname.

    Returns
    -------
    result : AffectedItemsWazuhResult
        Lists content.
    """
    result = AffectedItemsWazuhResult(all_msg='All specified lists were returned',
                                      some_msg='Some lists were not returned',
                                      none_msg='No list was returned')
    dirname = join(common.ossec_path, relative_dirname) if relative_dirname else None

    lists = list()
    for path in get_filenames_paths(filename):
        # Only files which exist and whose dirname is the one specified by the user (if any), will be added to response.
        if not any([dirname is not None and path_dirname(path) != dirname, not isfile(path)]):
            lists.append({'items': [{'key': key, 'value': value} for key, value in get_list_from_file(path).items()],
                          'relative_dirname': path_dirname(to_relative_path(path)),
                          'filename': split(to_relative_path(path))[1]})

    data = process_array(lists, search_text=search_text, search_in_fields=search_in_fields,
                         complementary_search=complementary_search, sort_by=sort_by, sort_ascending=sort_ascending,
                         offset=offset, limit=limit, select=select, allowed_sort_fields=SORT_FIELDS,
                         required_fields=REQUIRED_FIELDS)
    result.affected_items = data['items']
    result.total_affected_items = data['totalItems']

    return result
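A hypothetical call to get_lists() might look as follows; the filename and limit are made up, and the function only works inside the Wazuh framework, so this is a sketch rather than a runnable snippet:

result = get_lists(filename=['audit-keys'], limit=10)
print(result.total_affected_items)          # number of matching CDB lists
for item in result.affected_items:
    print(item['filename'], item['relative_dirname'])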
Example #4
File: utils.py Project: Python3pkg/LCONF
def build_cython_extension(py_or_pyx_file_path, cython_force_rebuild=True):
    """ Build a cython extension from a `.py` or `.pyx` file

   - build will be done in a sub-folder named `_pyxbld` in the py_or_pyx_file_path

   :param py_or_pyx_file_path: (str) path to a `.py` or `.pyx` file
   :param cython_force_rebuild: (bool) If True the cython extension is rebuild even if it was already build
   :return: (tuple) cython_extension_module_path, cython_module_c_file_path, cython_build_dir_path
   """
    module_dir = path_dirname(py_or_pyx_file_path)
    module__cython_name = path_splitext(path_basename(py_or_pyx_file_path))[0]
    cython_module_c_file_path = path_join(module_dir,
                                          module__cython_name + '.c')
    cython_build_dir_path = path_join(module_dir, '_pyxbld')

    args = ['--quiet', 'build_ext', '--build-lib', module_dir]
    if cython_force_rebuild:
        args.append('--force')
    dist = Distribution({'script_name': None, 'script_args': args})
    dist.ext_modules = [
        Extension(name=module__cython_name, sources=[py_or_pyx_file_path])
    ]
    dist.cmdclass = {'build_ext': cython_build_ext}
    build = dist.get_command_obj('build')
    build.build_base = cython_build_dir_path

    try:
        dist.parse_command_line()
    except DistutilsArgError as err:
        raise Err('utils.build_cython_extension', [
            'py_or_pyx_file_path: <{}>'.format(py_or_pyx_file_path),
            '  DistutilsArgError: <{}>'.format(err)
        ])

    try:
        obj_build_ext = dist.get_command_obj('build_ext')
        dist.run_commands()
        cython_extension_module_path = obj_build_ext.get_outputs()[0]
        if path_dirname(py_or_pyx_file_path) != module_dir:
            raise Err('utils.build_cython_extension', [
                'py_or_pyx_file_path: <{}>'.format(py_or_pyx_file_path),
                '  <module_dir> differs from final <cython_module_dir>',
                '   module_dir: <{}>'.format(module_dir),
                '   cython_module_dir: <{}>'.format(
                    path_dirname(py_or_pyx_file_path))
            ])
    except Exception as err:
        raise Err('utils.build_cython_extension', [
            'py_or_pyx_file_path: <{}>'.format(py_or_pyx_file_path),
            '  Exception: <{}>'.format(err)
        ])

    return cython_extension_module_path, cython_module_c_file_path, cython_build_dir_path
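A minimal usage sketch for the helper above; the .pyx file name is hypothetical and the aliased imports (path_dirname, Distribution, Extension, cython_build_ext, Err, ...) are assumed to be in scope as in the surrounding module:

ext_path, c_path, build_dir = build_cython_extension('fast_helpers.pyx',
                                                     cython_force_rebuild=False)
print(ext_path)    # compiled extension module, built next to the source file
print(build_dir)   # the '_pyxbld' sub-folder used as the build base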
Example #5
def build_cython_extension(py_or_pyx_file_path, cython_force_rebuild=True):
   """ Build a cython extension from a `.py` or `.pyx` file

   - build will be done in a sub-folder named `_pyxbld` in the py_or_pyx_file_path

   :param py_or_pyx_file_path: (str) path to a `.py` or `.pyx` file
   :param cython_force_rebuild: (bool) If True the cython extension is rebuild even if it was already build
   :return: (tuple) cython_extension_module_path, cython_module_c_file_path, cython_build_dir_path
   """
   module_dir = path_dirname(py_or_pyx_file_path)
   module__cython_name = path_splitext(path_basename(py_or_pyx_file_path))[0]
   cython_module_c_file_path = path_join(module_dir, module__cython_name + '.c')
   cython_build_dir_path = path_join(module_dir, '_pyxbld')

   args = ['--quiet', 'build_ext', '--build-lib', module_dir]
   if cython_force_rebuild:
      args.append('--force')
   dist = Distribution({'script_name': None, 'script_args': args})
   dist.ext_modules = [Extension(name=module__cython_name, sources=[py_or_pyx_file_path])]
   dist.cmdclass = {'build_ext': cython_build_ext}
   build = dist.get_command_obj('build')
   build.build_base = cython_build_dir_path

   try:
      dist.parse_command_line()
   except DistutilsArgError as err:
      raise Err('utils.build_cython_extension', [
         'py_or_pyx_file_path: <{}>'.format(py_or_pyx_file_path),
         '  DistutilsArgError: <{}>'.format(err)
      ])

   try:
      obj_build_ext = dist.get_command_obj('build_ext')
      dist.run_commands()
      cython_extension_module_path = obj_build_ext.get_outputs()[0]
      if path_dirname(py_or_pyx_file_path) != module_dir:
         raise Err('utils.build_cython_extension', [
            'py_or_pyx_file_path: <{}>'.format(py_or_pyx_file_path),
            '  <module_dir> differs from final <cython_module_dir>',
            '   module_dir: <{}>'.format(module_dir),
            '   cython_module_dir: <{}>'.format(path_dirname(py_or_pyx_file_path))
         ])
   except Exception as err:
      raise Err('utils.build_cython_extension', [
         'py_or_pyx_file_path: <{}>'.format(py_or_pyx_file_path),
         '  Exception: <{}>'.format(err)
      ])

   return cython_extension_module_path, cython_module_c_file_path, cython_build_dir_path
Example #6
def get_versions(default=None, verbose=False):
   # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have __file__, we can work backwards from there to
   # the root. Some py2exe/bbfreeze/non-CPython implementations don't do __file__, in which case we can only use expanded
   # keywords.
   if not default:
      default = {'version': 'unknown', 'full': ''}
   keywords = {'refnames': git_refnames, 'full': git_full}
   ver = git_versions_from_keywords(keywords, tag_prefix, verbose)
   if ver:
      return ver

   try:
      root = path_abspath(__file__)
      # versionfile_source is the relative path from the top of the source tree (where the .git directory might live) to this
      # file. Invert this to find the root from __file__.
      for i in range(len(versionfile_source.split(os_sep))):
         root = path_dirname(root)
   except NameError:
      return default

   return (
      git_versions_from_vcs(tag_prefix, root, verbose)
      or versions_from_parentdir(parentdir_prefix, root, verbose)
      or default
   )
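The dirname-stripping loop above can be illustrated in isolation; the paths below are made up:

from os import sep as os_sep
from os.path import dirname as path_dirname

versionfile_source = 'mypkg/_version.py'    # two path components ...
root = '/home/user/repo/mypkg/_version.py'  # ... so dirname() is applied twice
for _ in range(len(versionfile_source.split(os_sep))):
    root = path_dirname(root)
print(root)  # -> '/home/user/repo', the top of the source tree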
Example #7
def get_versions(default=None, verbose=False):
   # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have __file__, we can work backwards from there to
   # the root. Some py2exe/bbfreeze/non-CPython implementations don't do __file__, in which case we can only use expanded
   # keywords.
   if not default:
      default = {'version': 'unknown', 'full': ''}
   keywords = {'refnames': git_refnames, 'full': git_full}
   ver = git_versions_from_keywords(keywords, tag_prefix, verbose)
   if ver:
      return ver

   try:
      root = path_abspath(__file__)
      # versionfile_source is the relative path from the top of the source tree (where the .git directory might live) to this
      # file. Invert this to find the root from __file__.
      for i in range(len(versionfile_source.split(os_sep))):
         root = path_dirname(root)
   except NameError:
      return default

   return (
      git_versions_from_vcs(tag_prefix, root, verbose)
      or versions_from_parentdir(parentdir_prefix, root, verbose)
      or default
   )
Example #8
File: setup.py Project: icjsb/Stock_test
def get_version():
    scope = {}
    with open(
            path_join(path_dirname(__file__), "jqfactor_analyzer",
                      "version.py")) as fp:
        exec(fp.read(), scope)
    return scope.get('__version__', '1.0')
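A sketch of how such a helper is typically consumed in a setup.py; the setup() call below is an assumption, not part of the quoted file:

from setuptools import setup
setup(name='jqfactor_analyzer', version=get_version())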
Example #9
    def run(self, edit):
        file_name = self.view.file_name()
        if not file_name:
            return

        file_dir = path_dirname(file_name)
        result = fn_execute(["smartcomments","-g", "-t", file_dir])
Example #10
    def run(self):
        print(' creating {}'.format(versionfile_source))
        # noinspection PyTypeChecker
        with open(versionfile_source, 'w') as file_:
            assert VCS is not None, 'please set versioneer.VCS'
            LONG = LONG_VERSION_PY[VCS]
            file_.write(
                LONG % {
                    'DOLLAR': '$',
                    'TAG_PREFIX': tag_prefix,
                    'PARENTDIR_PREFIX': parentdir_prefix,
                    'VERSIONFILE_SOURCE': versionfile_source,
                })

        # noinspection PyTypeChecker
        ipy = path_join(path_dirname(versionfile_source), '__init__.py')
        try:
            with open(ipy, 'r') as file_:
                old = file_.read()
        except EnvironmentError:
            old = ''
        if INIT_PY_SNIPPET not in old:
            print(' appending to {}'.format(ipy))
            with open(ipy, 'a') as file_:
                file_.write(INIT_PY_SNIPPET)
        else:
            print(' {} unmodified'.format(ipy))

        # Make sure both the top-level 'versioneer.py' and versionfile_source (PKG/_version.py, used by runtime code) are in
        # MANIFEST.in, so they'll be copied into source distributions. Pip won't be able to install the package without this.
        manifest_in = path_join(get_root(), 'MANIFEST.in')
        simple_includes = set()
        try:
            with open(manifest_in, 'r') as file_:
                for line in file_:
                    if line.startswith('include '):
                        for include in line.split()[1:]:
                            simple_includes.add(include)
        except EnvironmentError:
            pass
        # That doesn't cover everything MANIFEST.in can do (http://docs.python.org/2/distutils/sourcedist.html#commands), so it
        # might give some false negatives. Appending redundant 'include' lines is safe, though.
        if 'versioneer.py' not in simple_includes:
            print(' appending <versioneer.py> to MANIFEST.in')
            with open(manifest_in, 'a') as file_:
                file_.write('include versioneer.py\n')
        else:
            print(' <versioneer.py> already in MANIFEST.in')
        if versionfile_source not in simple_includes:
            print(' appending versionfile_source: <{}> to MANIFEST.in'.format(
                versionfile_source))
            with open(manifest_in, 'a') as file_:
                file_.write('include {}\n'.format(versionfile_source))
        else:
            print(' versionfile_source already in MANIFEST.in')

        # Make VCS-specific changes. For git, this means creating/changing `.gitattributes` to mark _version.py for export-time
        # keyword substitution.
        do_vcs_install(manifest_in, versionfile_source, ipy)
Example #11
    def run(self, edit):
        file_name = self.view.file_name()
        if not file_name:
            return

        file_dir = path_dirname(file_name)
        result = fn_execute([get_command(), "-g","-t", file_dir])
        print(result)
Example #12
   def run(self):
      print(' creating {}'.format(versionfile_source))
      # noinspection PyTypeChecker
      with open(versionfile_source, 'w') as file_:
         assert VCS is not None, 'please set versioneer.VCS'
         LONG = LONG_VERSION_PY[VCS]
         file_.write(LONG % {
            'DOLLAR': '$',
            'TAG_PREFIX': tag_prefix,
            'PARENTDIR_PREFIX': parentdir_prefix,
            'VERSIONFILE_SOURCE': versionfile_source,
         })

      # noinspection PyTypeChecker
      ipy = path_join(path_dirname(versionfile_source), '__init__.py')
      try:
         with open(ipy, 'r') as file_:
            old = file_.read()
      except EnvironmentError:
         old = ''
      if INIT_PY_SNIPPET not in old:
         print(' appending to {}'.format(ipy))
         with open(ipy, 'a') as file_:
            file_.write(INIT_PY_SNIPPET)
      else:
         print(' {} unmodified'.format(ipy))

      # Make sure both the top-level 'versioneer.py' and versionfile_source (PKG/_version.py, used by runtime code) are in
      # MANIFEST.in, so they'll be copied into source distributions. Pip won't be able to install the package without this.
      manifest_in = path_join(get_root(), 'MANIFEST.in')
      simple_includes = set()
      try:
         with open(manifest_in, 'r') as file_:
            for line in file_:
               if line.startswith('include '):
                  for include in line.split()[1:]:
                     simple_includes.add(include)
      except EnvironmentError:
         pass
      # That doesn't cover everything MANIFEST.in can do (http://docs.python.org/2/distutils/sourcedist.html#commands), so it
      # might give some false negatives. Appending redundant 'include' lines is safe, though.
      if 'versioneer.py' not in simple_includes:
         print(' appending <versioneer.py> to MANIFEST.in')
         with open(manifest_in, 'a') as file_:
            file_.write('include versioneer.py\n')
      else:
         print(' <versioneer.py> already in MANIFEST.in')
      if versionfile_source not in simple_includes:
         print(' appending versionfile_source: <{}> to MANIFEST.in'.format(versionfile_source))
         with open(manifest_in, 'a') as file_:
            file_.write('include {}\n'.format(versionfile_source))
      else:
         print(' versionfile_source already in MANIFEST.in')

      # Make VCS-specific changes. For git, this means creating/changing `.gitattributes` to mark _version.py for export-time
      # keyword substitution.
      do_vcs_install(manifest_in, versionfile_source, ipy)
Example #13
def get_long_description():
    with open(path_join(path_dirname(__file__), 'README.md'), 'rb') as fp:
        long_desc = fp.read().decode('utf-8')

    long_desc = long_desc.replace(
        'docs/API文档.md',
        'https://github.com/JoinQuant/jqfactor_analyzer/blob/master/docs/API%E6%96%87%E6%A1%A3.md'
    )

    return long_desc
Example #14
def create_rel_symlink(existing_file_path, link_path):
    if os.path.islink(link_path) and \
            realpath(os.path.readlink(link_path)) == realpath(existing_file_path):
        return  # Nothing to do.

    try:
        os.symlink(os.path.relpath(existing_file_path, path_dirname(link_path)), link_path)
    except OSError, e:
        logging.warning("Error while trying to create a symlink {} -> {}".format(
            link_path, existing_file_path))
        raise e
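The relpath/dirname combination above expresses the link target relative to the directory the link lives in; a standalone illustration with made-up paths:

import os
from os.path import dirname as path_dirname

existing_file_path = '/opt/yb/lib/zlib_1.2.8/libz.so.1.2.8'
link_path = '/opt/yb/lib/libz_1.2.8/libz.so.1'
print(os.path.relpath(existing_file_path, path_dirname(link_path)))
# -> '../zlib_1.2.8/libz.so.1.2.8', which is the value os.symlink() stores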
Example #15
File: setup.py Project: jnpkrn/clufter
 def _pkg_prepare_build(self):
     package_data = self.package_data or {}
     for pkg_name in package_data:
         dst_top = self.distribution.package_dir.get('', '')
         dst_pkg = path_join(
                       dst_top,
                       self.distribution.package_dir.get(pkg_name, pkg_name)
         )
         if DEBUG: print(DBGPFX + "\tbuild dst_pkg %s" % dst_pkg)
         for filedef in package_data[pkg_name]:
             self._pkg_prepare_file(
                 self.pkg_params[filedef['src']],
                 path_join(dst_pkg, self.pkg_params[filedef['dst']]),
                 filedef.get('substitute', False)
             )
             self.distribution.package_data[pkg_name].append(
                 self.pkg_params[filedef['dst']]
             )
     for filedef in (self.data_files + self.buildonly_files):
         src = self.pkg_params[filedef['src']]
         src_basename = path_basename(src)
         dst_basename = path_basename(self.pkg_params[filedef['dst']])
         substitute = filedef.get('substitute', False)
         if all(c not in src_basename for c in '?*'):
             if src_basename != dst_basename or substitute:
                 self._pkg_prepare_file(
                     self.pkg_params[filedef['src']],
                     path_join(
                         path_dirname(self.pkg_params[filedef['src']]),
                         dst_basename
                     ),
                     substitute
                 )
             # eliminate sources from which we prepared files so they
             # will not end up at build dir and, in turn, installed;
             # consider only one-level of package at maximum
             hd, tl = (lambda s, r='': (s, r))(*src.split(sep, 1))
             if not tl:
                 hd, tl = '', filedef['src']
             try:
                 self.distribution.package_data.get(hd, []).remove(tl)
             except ValueError:
                 pass
         else:
             assert not substitute
Example #16
File: cozyfs.py Project: petergtz/cozy
    def rename(self, old_path, new_path):
        self.log.debug("PARAMS: old_path = '%s', new_path = '%s'", old_path, new_path)
        self.__assert_readwrite()


        node = self.nodes.node_from_path(old_path)
        new_parent_node = self.nodes.node_from_path(path_dirname(new_path))

        os.rename(self.__plain_path(old_path), self.__plain_path(new_path))
        inode = self.inodes.inode_from_inode_number(node.inode_number)
        self.chunks.update_plain_path(inode['data'], new_path.lstrip('/'))

        node.parent_node_id = new_parent_node.node_id
        node.path = new_path

        self.nodes.update_node(node, old_path)

        ctime = time.time()
        self.inodes.update_inode_in_place(node.inode_number, {'atime': ctime, 'mtime':ctime})

        self.storage.commit()
        return 0
Example #17
File: setup.py Project: jmartign/clufter
 def _pkg_prepare_build(self):
     for pkg_name, filedefs in (self.package_data or {}).iteritems():
         dst_top = self.distribution.package_dir.get('', '')
         dst_pkg = path_join(
             dst_top,
             self.distribution.package_dir.get(pkg_name, pkg_name))
         if DEBUG: print(DBGPFX + "\tbuild dst_pkg %s" % dst_pkg)
         for filedef in filedefs:
             self._pkg_prepare_file(
                 self.pkg_params[filedef['src']],
                 path_join(dst_pkg, self.pkg_params[filedef['dst']]),
                 filedef.get('substitute', False))
             self.distribution.package_data[pkg_name].append(
                 self.pkg_params[filedef['dst']])
     for filedef in (self.data_files + self.buildonly_files):
         src = self.pkg_params[filedef['src']]
         src_basename = path_basename(src)
         dst_basename = path_basename(self.pkg_params[filedef['dst']])
         substitute = filedef.get('substitute', False)
         if all(c not in src_basename for c in '?*'):
             if src_basename != dst_basename or substitute:
                 self._pkg_prepare_file(
                     self.pkg_params[filedef['src']],
                     path_join(
                         path_dirname(self.pkg_params[filedef['src']]),
                         dst_basename), substitute)
             # eliminate sources from which we prepared files so they
             # will not end up at build dir and, in turn, installed;
             # consider only one-level of package at maximum
             hd, tl = (lambda s, r='': (s, r))(*src.split(sep, 1))
             if not tl:
                 hd, tl = '', filedef['src']
             try:
                 self.distribution.package_data.get(hd, []).remove(tl)
             except ValueError:
                 pass
         else:
             assert not substitute
Example #18
import subprocess
import sys
import urlparse

from collections import deque, defaultdict
from os.path import basename as path_basename
from os.path import dirname as path_dirname
from os.path import realpath
from distutils.dir_util import mkpath
from six.moves import urllib

sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from yb.command_util import run_program, mkdir_p  # nopep8
from yb.linuxbrew import get_linuxbrew_dir  # nopep8

MODULE_DIR = path_dirname(realpath(__file__))
YB_SRC_ROOT = realpath(os.path.join(MODULE_DIR, '..', '..'))

# A resolved shared library dependency shown by ldd.
# Example (split across two lines):
#   libmaster.so => /home/mbautin/code/yugabyte/build/debug-gcc-dynamic/lib/libmaster.so
#   (0x00007f941fa5f000)
RESOLVED_DEP_RE = re.compile(r'^\s*(\S+)\s+=>\s+(\S.*\S)\s+[(]')

SYSTEM_LIBRARY_PATH_RE = re.compile(r'^/(usr|lib|lib64)/.*')
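# Illustrative check of RESOLVED_DEP_RE against the sample ldd line quoted above
# (a sketch, not part of the original module):
#   m = RESOLVED_DEP_RE.match('  libmaster.so => /home/mbautin/code/yugabyte/'
#                             'build/debug-gcc-dynamic/lib/libmaster.so (0x00007f941fa5f000)')
#   m.group(1) == 'libmaster.so'
#   m.group(2) == '/home/mbautin/code/yugabyte/build/debug-gcc-dynamic/lib/libmaster.so'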
SYSTEM_LIBRARY_PATHS = [
    '/usr/lib',
    '/usr/lib64',
    '/lib',
    '/lib64',
    # This is used on Ubuntu
Example #19
      file_content = file_content.replace(OrigTemplateName, NEW_PROJECT_NAME)

   if OrigTemplateName__lower_case in file_content:
      file_content = file_content.replace(OrigTemplateName__lower_case, NewProjectName__lower_case)

   if OrigTemplateOneLineDescription in file_content:
      file_content = file_content.replace(OrigTemplateOneLineDescription, NEW_PROJECT_ONE_LINE_DESCRIPTION)

   with open(file_path, 'w') as file_p:
      file_p.write(file_content)


# SECOND: replace File Names
for root, file_name in FileList:
   if OrigTemplateName in file_name:
      new_file_name = file_name.replace(OrigTemplateName, NEW_PROJECT_NAME)
      os_rename(path_join(root, file_name), path_join(root, new_file_name))


# THIRD: replace Dir Names
for root, dir_ in DirList:
   if dir_ == OrigTemplateName:
      os_rename(path_join(root, dir_), path_join(root, NEW_PROJECT_NAME))


# FINALLY: rename the Root folder
NewPathName = '{}/{}'.format(path_dirname(TEMPLATE_PyPROJECT_DIR_PATH), NEW_PROJECT_NAME)
os_rename(TEMPLATE_PyPROJECT_DIR_PATH, NewPathName)

print('\nFINISHED....\n')
Example #20
# Warning : do not import the distutils extension before setuptools It does break the cythonize function calls
# https://github.com/enthought/pyql/blob/master/setup.py
from Cython.Build import cythonize
from Cython.Compiler.Options import parse_directive_list

import versioneer

versioneer.VCS = 'git'
versioneer.versionfile_source = 'LCONF/_version.py'
versioneer.versionfile_build = 'LCONF/_version.py'
versioneer.tag_prefix = ''  # tags are like 1.1.0
versioneer.parentdir_prefix = 'LCONF-'  # path_dirname like 'LCONF-1.1.0'

_version = versioneer.get_version()

SCRIPT_PATH = path_dirname(
    path_abspath(inspect_getfile(inspect_currentframe())))
PACKAGE_NAME = 'LCONF'
ROOT_PACKAGE_PATH = path_join(path_dirname(SCRIPT_PATH), PACKAGE_NAME)
MAIN_PACKAGE_PATH = path_join(ROOT_PACKAGE_PATH, PACKAGE_NAME)

from LCONF import TESTED_HOST_OS

if sys_version_info[:2] < (3, 4) or 'linux' not in sys_platform:
    print(('''

      LCONF is only tested with Python 3.4.2rc1 or higher:\n  current python version: {0:d}.{1:d}\n\n

      TESTED_HOST_OS: {3:}
      '''.format(sys_version_info[:2][0], sys_version_info[:2][1],
                 TESTED_HOST_OS)))
Example #21
File: setup.py Project: jmartign/clufter
import py_compile
orig_py_compile = py_compile.compile


def doraise_py_compile(file, cfile=None, dfile=None, doraise=False):
    orig_py_compile(file, cfile=cfile, dfile=dfile, doraise=True)


py_compile.compile = doraise_py_compile

PREFER_GITHUB = True
PREFER_FORGE = 'github' if PREFER_GITHUB else 'pagure'  # alternatively: None
DEBUG = getenv("SETUPDEBUG")
DBGPFX = str(__file__)

here = path_abs(path_dirname(path_real(__file__)))
prev_cwd = getcwd()
chdir(
    here)  # make setup.py possess expected CWD + play better with pip install

#
# Custom machinery extending setuptools/distutils with mechanism
# for parameterization (mainly paths) and even content of these files
#

true_gen = lambda this: True
# XXX copy-paste of utils_func.py
bifilter = \
    lambda fnc, seq: \
        reduce(lambda acc, x: acc[int(not fnc(x))].append(x) or acc,
               seq, ([], []))
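For reference, bifilter partitions a sequence by a predicate; a self-contained sketch (under Python 3, reduce must be imported from functools):

from functools import reduce

bifilter = lambda fnc, seq: \
    reduce(lambda acc, x: acc[int(not fnc(x))].append(x) or acc, seq, ([], []))

print(bifilter(lambda x: x % 2 == 0, [1, 2, 3, 4]))  # -> ([2, 4], [1, 3])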
Example #22
    def package_binaries(self):
        """
        The main entry point to this class. Arranges binaries (executables and shared libraries),
        starting with the given set of "seed executables", in the destination directory so that
        the executables can find all of their dependencies.
        """

        # Breadth-first search queue.
        queue = deque()

        for seed_executable_glob in self.seed_executable_patterns:
            re_match = re.match(r'^build/latest/(.*)$', seed_executable_glob)
            if re_match:
                updated_glob = path_join(self.build_dir, re_match.group(1))
                logging.info(
                    "Automatically updating seed glob to be relative to build dir: {} -> {}".format(
                        seed_executable_glob, updated_glob))
                seed_executable_glob = updated_glob
            for seed_executable in glob.glob(seed_executable_glob):
                queue.append(self.get_node_by_path(seed_executable, is_executable=True))

        # Also package Linuxbrew's dynamic linker.
        ld_path = realpath(path_join(LINUXBREW_HOME, 'lib', 'ld.so'))
        queue.append(self.get_node_by_path(ld_path))
        queue.append(self.get_node_by_path(PATCHELF_PATH, is_executable=True))

        # This will be a set of GraphNode objects. GraphNode should have proper hashing and equality
        # overrides.
        visited = set()

        while len(queue) > 0:
            node = queue.popleft()
            if node not in visited:
                visited.add(node)
                for new_dep in node.deps:
                    target_node = self.get_node_by_path(new_dep.target)
                    new_dep.target_node = target_node
                    if target_node not in visited:
                        queue.append(target_node)
                        target_node.add_reverse_dependency(node)

        nodes = self.nodes_by_digest.values()

        unique_dir_names = set()
        need_short_digest = False
        for node in nodes:
            name = node.lib_link_dir_name_prefix()
            if name in unique_dir_names:
                logging.warn(
                        "Duplicate library dir name: '{}', will use SHA256 digest prefix to "
                        "ensure directory name uniqueness".format(name))
                need_short_digest = True
            unique_dir_names.add(name)

        mkpath(path_join(self.dest_dir, 'bin'))
        dest_lib_dir = path_join(self.dest_dir, 'lib')
        dest_bin_dir = path_join(self.dest_dir, 'bin')
        for node in nodes:
            target_dir_basename = node.lib_link_dir_name_prefix()
            if need_short_digest:
                # We need to add a digest to the directory name to disambiguate between libraries
                # with the same name/version, e.g. two versions of libz 1.2.8 that come in when
                # building using Clang. Eventually we need to deduplicate libraries and remove the
                # need of doing this.
                target_dir_basename += '_' + node.digest[:8]
            node.target_dir = path_join(dest_lib_dir, target_dir_basename)
            node_basename = path_basename(node.path)
            if node.is_executable:
                node.target_path = path_join(dest_bin_dir, node_basename)
            else:
                node.target_path = path_join(node.target_dir, node_basename)
            mkpath(node.target_dir)
            if os.path.exists(node.target_path):
                os.unlink(node.target_path)
            shutil.copyfile(node.path, node.target_path)
            shutil.copymode(node.path, node.target_path)
            # Make the file writable so we can change the rpath in the file.
            os.chmod(node.target_path, os.stat(node.target_path).st_mode | stat.S_IWUSR)

        # Symlink the dynamic linker into an easy-to-find location inside lib.
        ld_node = self.find_existing_node_by_path(ld_path)
        ld_symlink_path = path_join(dest_lib_dir, 'ld.so')
        create_rel_symlink(ld_node.target_path, ld_symlink_path)

        patchelf_node = self.find_existing_node_by_path(PATCHELF_PATH)

        for node in nodes:
            if node in [ld_node, patchelf_node]:
                continue

            # Create symlinks in each node's directory that point to all of its dependencies.
            for dep in sorted(set(node.deps).union(set(node.indirect_dependencies_via_dlopen()))):
                existing_file_path = dep.target_node.target_path
                link_path = path_join(node.target_dir, os.path.basename(dep.name))
                if os.path.islink(link_path):
                    os.unlink(link_path)
                if realpath(existing_file_path) != realpath(link_path):
                    create_rel_symlink(existing_file_path, link_path)

            # Update RPATH of this binary to point to the directory that has symlinks to all the
            # right versions libraries it needs.
            new_rpath = '$ORIGIN'
            # Compute the path of the "target directory" (the directory that RPATH needs to
            # point to) relative to the binary path. If the binary is an executable, it won't be
            # that directory, because it will be in "bin" instead.
            target_dir_rel_path = os.path.relpath(
                    node.target_dir, path_dirname(node.target_path))
            if target_dir_rel_path != '.':
                if target_dir_rel_path.startswith('./'):
                    target_dir_rel_path = target_dir_rel_path[2:]
                new_rpath += '/' + target_dir_rel_path

            logging.debug("Setting RPATH of '{}' to '{}'".format(node.target_path, new_rpath))

            patchelf_result = run_patchelf('--set-rpath', new_rpath, node.target_path)
            if node.is_executable and \
               patchelf_result.stderr == PATCHELF_NOT_AN_ELF_EXECUTABLE:
                logging.info("Not an ELF executable: '{}', removing the directory '{}'.".format(
                    node.path, node.target_dir))
                shutil.rmtree(node.target_dir)
                continue

            node_metadata = dict(
               original_location=normalize_path_for_metadata(node.path),
               dependencies=[
                   d.as_metadata() for d in
                   sorted(list(node.deps), key=lambda dep: dep.target)])

            with open(path_join(node.target_dir, 'metadata.yml'), 'w') as metadata_file:
                yaml.dump(node_metadata, metadata_file)

            # Use the Linuxbrew dynamic linker for all files.
            run_patchelf('--set-interpreter', ld_symlink_path, node.target_path)

        # Create symlinks from the "lib" directory to the "libdb" library. We add the "lib"
        # directory to LD_LIBRARY_PATH, and this makes sure dlopen finds correct library
        # dependencies when it is invoked by libsasl2.
        for node in nodes:
            if node.basename().startswith('libdb-'):
                create_rel_symlink(node.target_path, path_join(dest_lib_dir, node.basename()))
        logging.info("Successfully generated a YB distribution at {}".format(self.dest_dir))
Example #23
File: setup.py Project: jnpkrn/clufter
except NameError:
    basestring = str

# bail out if any code is not valid (http://stackoverflow.com/a/2240549)
import py_compile
orig_py_compile = py_compile.compile
def doraise_py_compile(file, cfile=None, dfile=None, doraise=False):
    orig_py_compile(file, cfile=cfile, dfile=dfile, doraise=True)
py_compile.compile = doraise_py_compile

PREFER_GITHUB = True
PREFER_FORGE = 'github' if PREFER_GITHUB else 'pagure'  # alternatively: None
DEBUG = getenv("SETUPDEBUG")
DBGPFX = str(__file__)

here = path_abs(path_dirname(path_real(__file__)))
prev_cwd = getcwd()
chdir(here)  # make setup.py possess expected CWD + play better with pip install

#
# Custom machinery extending setuptools/distutils with mechanism
# for parameterization (mainly paths) and even content of these files
#

true_gen = lambda this: True
# XXX copy-paste of utils_func.py
# from functools import reduce
#bifilter = \
#    lambda fnc, seq: \
#        reduce(lambda acc, x: acc[int(not fnc(x))].append(x) or acc,
#               seq, ([], []))
Example #24
### 'os.path' Module ###
from os.path import(
    expanduser  as path_expand_user,
    dirname     as path_dirname,
    basename    as path_basename,
)

# Discover the path to a home directory.
# Performs Unix shell-like "tilde expansion". (Works on Windows too.)
print( path_expand_user( "~" ) )
# Useful for finding a user's configuration files.

# Find the directory name portion of a path.
# Like Unix 'dirname' command.
print( path_dirname( path_expand_user( "~" ) ) )
# Find the file name portion of a path.
# Like Unix 'basename' command, but doesn't filter extensions.
print( path_basename( path_join(
    path_curdir,
    "stdlib-demo" + path_extsep + "py"
) ) )

# <demo> --- stop ---

### 'os.path' Module ###
from os.path import(
    exists      as path_exists,
)

# Test for the existence of a file.
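The demo is cut off at this point; a plausible continuation, purely as a sketch in the same style:

print( path_exists( path_expand_user( "~" ) ) )
# True wherever the home directory exists.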
Example #25
# -*- coding: utf-8 -*-
from csv import reader as csv_reader
from os.path import join as path_join, dirname as path_dirname
from random import randrange as random_randrange
"""
Returns a tuple containing a header and a quote.

This code is placed in the public domain.
"""

quote_list = []
try:
    with open(path_join(path_dirname(__file__), 'futurama.csv')) as quote_file:
        for quote in csv_reader(quote_file):
            if len(quote) == 2:
                quote_list.append(quote)
except IOError:
    pass

if len(quote_list) == 0:
    quote_list.append(('Bender', "Well, we're boned!"))


def get_header():
    quote = quote_list[random_randrange(0, len(quote_list))]
    return ('X-%s' % quote[0].replace(' ', '-'), quote[1])


if __name__ == '__main__':
    print(get_header())
Example #26
def get_root():
    try:
        return path_dirname(path_abspath(__file__))
    except NameError:
        return path_dirname(path_abspath(sys_argv[0]))
Example #27
# -*- coding: utf-8 -*-
from csv import reader as csv_reader
from os.path import join as path_join, dirname as path_dirname
from random import randrange as random_randrange

"""
Returns a tuple containing a header and a quote.

This code is placed in the public domain.
"""

quote_list = []
try:
    for quote in csv_reader(open(path_join(path_dirname(__file__), 'futurama.csv'))):
        if len(quote) == 2:
            quote_list.append(quote)
except IOError:
    pass

if len(quote_list) == 0:
    quote_list.append(('Bender', "Well, we're boned!"))

def get_header():
    quote = quote_list[random_randrange(0,len(quote_list))]
    return ('X-%s' % quote[0].replace(' ', '-'), quote[1])

if __name__ == '__main__':
    print(get_header())

Example #28
# https://github.com/enthought/pyql/blob/master/setup.py
from Cython.Build import cythonize
from Cython.Compiler.Options import parse_directive_list

import versioneer


versioneer.VCS = 'git'
versioneer.versionfile_source = 'PySpeedIT/_version.py'
versioneer.versionfile_build = 'PySpeedIT/_version.py'
versioneer.tag_prefix = ''  # tags are like 1.1.0
versioneer.parentdir_prefix = 'PySpeedIT-'  # path_dirname like 'PySpeedIT-1.1.0'

_version = versioneer.get_version()

SCRIPT_PATH = path_dirname(path_abspath(inspect_getfile(inspect_currentframe())))
PACKAGE_NAME = 'PySpeedIT'
ROOT_PACKAGE_PATH = path_join(path_dirname(SCRIPT_PATH), PACKAGE_NAME)
MAIN_PACKAGE_PATH = path_join(ROOT_PACKAGE_PATH, PACKAGE_NAME)

from PySpeedIT import TESTED_HOST_OS

if sys_version_info[:2] < (3, 4) or 'linux' not in sys_platform:
   print('''

      PySpeedIT is only tested with Python 3.4.2rc1 or higher:\n  current python version: {0:d}.{1:d}\n\n

      TESTED_HOST_OS: {3:}
      '''.format(sys_version_info[:2][0], sys_version_info[:2][1], TESTED_HOST_OS))

# check some untested options
Example #29
"""
Created on Jan 12, 2014

C:/Users/Administrator/Documents/Programming/PythonSource/source/mycompiler/pyqtui/test2.py
@author: Nathan S.

Rebuild default config file settings from hard-coded defaults.
Provide access to hard-coded defaults if desired.
"""

import json
from itertools import chain
from os.path import dirname as path_dirname, exists as path_exists
from collections import OrderedDict

jsonfile = '/'.join((path_dirname(__file__), 'uicfg.json'))
JSON_INDENT = 4


def default_cflags():
    """
    @return: mapping of cflags categories to list of specific flags
    @rtype: dict
    """

    standard = ["-Wall", '-Wextra', '-std=c11']
    warnings = ["-Wall", '-Werror', '-Wextra', '-Wfatal-errors', '-Wpedantic', '-Wshadow', '-Wpointer-arith',
                '-Wcast-qual', '-Wmissing-prototypes', '-Wstrict-prototypes', '-Wuninitialized', '-Wstrict-aliasing',
                '-Wcast-align', '-Wformat=2', '-Wmissing-declarations']
    language = ['-std=c11', '-std=c99', '-std=c90', '-ansi']
    optimization = ["-O1", "-O2", "-O3"]
Example #30
from os.path import (
   abspath as path_abspath,
   dirname as path_dirname,
   join as path_join,
)
from sys import path as sys_path

# optional yaml for some example: needs pyyaml installed
try:
   # noinspection PyUnresolvedReferences
   from yaml import dump as yaml_dump
   has_yaml = True
except ImportError:
   has_yaml = False

SCRIPT_PATH = path_dirname(path_abspath(inspect_getfile(inspect_currentframe())))
PROJECT_ROOT = path_dirname(SCRIPT_PATH)

ROOT_PACKAGE_NAME = 'LCONF'
ROOT_PACKAGE_PATH = path_join(PROJECT_ROOT, ROOT_PACKAGE_NAME)

sys_path.insert(0, PROJECT_ROOT)

from LCONF.lconf_structure_classes import (
   Blk,
   BlkI,
   KVList,
   KVMap,
   Root,
   ListOT,
)
Example #31
File: setup.py Project: jmartign/clufter
 def finalize_options(self):
     build_ext.finalize_options(self)
     self.extensions = self.binaries
     self.build_lib = path_dirname(self.build_lib)
Example #32
File: setup.py Project: jnpkrn/clufter
 def finalize_options(self):
     build_ext.finalize_options(self)
     self.extensions = self.binaries
     self.build_lib = path_dirname(self.build_lib)
Example #33
def get_psphinxtheme_root_dir():
   """ :return: (str) path of the *P-SphinxTheme* root dir
   """
   return path_dirname(path_abspath(inspect_getfile(inspect_currentframe())))
Example #34
def find_elf_dependencies(elf_file_path):
    """
    Run ldd on the given ELF file and find libraries that it depends on. Also run patchelf and get
    the dynamic linker used by the file.

    @param elf_file_path: ELF file (executable/library) path
    """

    elf_file_path = realpath(elf_file_path)
    if elf_file_path.startswith('/usr/') or elf_file_path.startswith('/lib64/'):
        ldd_path = '/usr/bin/ldd'
    else:
        ldd_path = LINUXBREW_LDD_PATH

    ldd_result = run_program([ldd_path, elf_file_path], error_ok=True)
    dependencies = set()
    if ldd_result.returncode != 0:
        # Interestingly, the below error message is printed to stdout, not stderr.
        if ldd_result.stdout == 'not a dynamic executable':
            logging.debug(
                "Not a dynamic executable: {}, ignoring dependency tracking".format(elf_file_path))
            return dependencies
        raise RuntimeError(ldd_result.error_msg)

    for ldd_output_line in ldd_result.stdout.split("\n"):
        m = RESOLVED_DEP_RE.match(ldd_output_line)
        if m:
            lib_name = m.group(1)
            lib_resolved_path = realpath(m.group(2))
            dependencies.add(Dependency(lib_name, lib_resolved_path))

        tokens = ldd_output_line.split()
        if len(tokens) >= 4 and tokens[1:4] == ['=>', 'not', 'found']:
            missing_lib_name = tokens[0]
            raise RuntimeError("Library not found for '{}': {}".format(
                elf_file_path, missing_lib_name))

        # If we matched neither RESOLVED_DEP_RE or the "not found" case, that is still fine,
        # e.g. there could be a line of the following form in the ldd output:
        #   linux-vdso.so.1 =>  (0x00007ffc0f9d2000)

    elf_basename = path_basename(elf_file_path)
    elf_dirname = path_dirname(elf_file_path)
    if elf_basename.startswith('libsasl2.'):
        # TODO: don't package Berkeley DB with the product -- it has an AGPL license.
        for libdb_so_name in ['libdb.so', 'libdb-5.so']:
            libdb_so_path = path_join(path_dirname(elf_file_path), 'libdb.so')
            if os.path.exists(libdb_so_path):
                dependencies.add(Dependency('libdb.so', libdb_so_path, via_dlopen=True))
        sasl_plugin_dir = '/usr/lib64/sasl2'
        for sasl_lib_name in os.listdir(sasl_plugin_dir):
            if sasl_lib_name.endswith('.so'):
                dependencies.add(
                        Dependency(sasl_lib_name,
                                   realpath(path_join(sasl_plugin_dir, sasl_lib_name))))

    if elf_basename.startswith('libc-'):
        # glibc loads a lot of libns_... libraries using dlopen.
        for libnss_lib in glob.glob(os.path.join(elf_dirname, 'libnss_*')):
            if re.search(r'([.]so|\d+)$', libnss_lib):
                dependencies.add(Dependency(path_basename(libnss_lib),
                                            libnss_lib, via_dlopen=True))

    return dependencies
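A hypothetical call to the function above; the binary path is made up, and the Dependency objects are assumed to expose .name and .target as used in the packaging code of Example #22:

deps = find_elf_dependencies('/opt/yb/build/latest/bin/yb-master')
for dep in sorted(deps, key=lambda d: d.name):
    print(dep.name, '->', dep.target)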
Example #35
#! /usr/bin/env python
# -*- coding: utf-8 -*-

# Copyright (c) Huoty, All rights reserved
# Author: Huoty <*****@*****.**>

from __future__ import print_function

import os
from setuptools import setup
from os.path import join as path_join, dirname as path_dirname


CURRDIR = path_dirname(__file__)

setup_args = dict(
    name='kmailbox',
    version='0.0.1',
    py_modules=["kmailbox"],
    author='Huoty',
    author_email='*****@*****.**',
    maintainer="Huoty",
    maintainer_email="*****@*****.**",
    description="Python email utils",
    url="https://github.com/kuanghy/kmailbox",
    keywords=["email", "mailbox", "smtp", "imap", "sendmail"],
    zip_safe=False,
    license='Apache License v2',
    python_requires='>=2.7',
    classifiers=[
        'Programming Language :: Python',
Example #36
def get_root():
   try:
      return path_dirname(path_abspath(__file__))
   except NameError:
      return path_dirname(path_abspath(sys_argv[0]))
Example #37
def load_cfg():
    cur_dir = path_dirname(__file__)
    cfg_file = '/'.join((cur_dir, 'uicfg.json'))
    with open(cfg_file, 'r') as f:
        cfg_ops = json.load(f)
    return cfg_ops
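The same path could be built portably with os.path.join instead of '/'.join; a sketch:

from os.path import dirname as path_dirname, join as path_join
cfg_file = path_join(path_dirname(__file__), 'uicfg.json')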
Example #38
def main():
    # pylint: disable=E1103

    options = _check_options()

    locale.setlocale(locale.LC_ALL, '')

    verbose = options.verbose

    if verbose:
        logging.disable(logging.INFO)
    else:
        logging.disable(logging.WARNING)

    _add_missing_mime_types()

    try:
        game = Game(game_list=None,
                    game_path=path_abspath(path_dirname(options.input)),
                    slug=None,
                    games_root=options.cache,
                    deploy_enable=True,
                    manifest_name=path_basename(options.input))

        _check_game(game)

        silent = options.silent
        if not silent:
            log('Deploying "%s" to "%s".' % (game.slug, options.hub))

        connection = connection_from_url(options.hub, maxsize=8, timeout=8.0)

        cookie = login(connection, options)

        (project, projectversion,
         projectversion_title) = _check_project(connection, options, cookie)

        result = 0

        deploy_info = None
        deploy_thread = None

        try:
            deploy_info = Deployment(game, connection, project, projectversion,
                                     projectversion_title,
                                     _get_cookie_value(cookie), options.cache)

            deploy_thread = Thread(target=deploy_info.deploy,
                                   args=[options.ultra])
            deploy_thread.start()

            start_time = time()

            result = _progress(deploy_info, silent, verbose)
            if (0 == result):
                result = _postupload_progress(deploy_info, connection, cookie,
                                              silent, verbose)
                if (0 == result):
                    if not silent:
                        log('Deployment time: %s' % _fmt_time(
                            (time() - start_time)))
                    game.set_deployed()

        except KeyboardInterrupt:
            warning('Program stopped by user!')
            if deploy_info:
                deploy_info.cancel()
            result = -1

        except Exception as e:
            error(str(e))
            if deploy_info:
                deploy_info.cancel()
            result = -1

        if deploy_info:
            del deploy_info

        if deploy_thread:
            del deploy_thread

        logout(connection, cookie)

        return result

    except GameError:
        return -1
Example #39
def main():
    # pylint: disable=E1103

    options = _check_options()

    locale.setlocale(locale.LC_ALL, '')

    verbose = options.verbose

    if verbose:
        logging.disable(logging.INFO)
    else:
        logging.disable(logging.WARNING)

    _add_missing_mime_types()

    try:
        game = Game(game_list=None,
                    game_path=path_abspath(path_dirname(options.input)),
                    slug=None,
                    games_root=options.cache,
                    deploy_enable=True,
                    manifest_name=path_basename(options.input))

        _check_game(game)

        silent = options.silent
        if not silent:
            log('Deploying "%s" to "%s".' % (game.slug, options.hub))

        connection = connection_from_url(options.hub, maxsize=8, timeout=8.0)

        cookie = login(connection, options)

        (project, projectversion, projectversion_title) = _check_project(connection, options, cookie)

        result = 0

        deploy_info = None
        deploy_thread = None

        try:
            deploy_info = Deployment(game,
                                     connection,
                                     project,
                                     projectversion,
                                     projectversion_title,
                                     _get_cookie_value(cookie),
                                     options.cache)

            deploy_thread = Thread(target=deploy_info.deploy, args=[options.ultra])
            deploy_thread.start()

            start_time = clock()

            result = _progress(deploy_info, silent, verbose)
            if (0 == result):
                result = _postupload_progress(deploy_info, connection, cookie, silent, verbose)
                if (0 == result):
                    if not silent:
                        log('Deployment time: %s' % _fmt_time((clock() - start_time)))
                    game.set_deployed()

        except KeyboardInterrupt:
            warning('Program stopped by user!')
            if deploy_info:
                deploy_info.cancel()
            result = -1

        except Exception as e:
            error(str(e))
            if deploy_info:
                deploy_info.cancel()
            result = -1

        if deploy_info:
            del deploy_info

        if deploy_thread:
            del deploy_thread

        logout(connection, cookie)

        return result

    except GameError:
        return -1
Example #40
"""
from datetime import datetime
from inspect import (
    getfile as inspect_getfile,
    currentframe as inspect_currentframe,
)
from os.path import (
    abspath as path_abspath,
    dirname as path_dirname,
    join as path_join,
)
from sys import path as sys_path

from nose.tools import (eq_, ok_)

SCRIPT_PATH = path_dirname(
    path_abspath(inspect_getfile(inspect_currentframe())))
PROJECT_ROOT = path_dirname(SCRIPT_PATH)

ROOT_PACKAGE_NAME = 'LCONF'
ROOT_PACKAGE_PATH = path_join(PROJECT_ROOT, ROOT_PACKAGE_NAME)

sys_path.insert(0, PROJECT_ROOT)

from LCONF.lconf_classes import (
    LconfBlk,
    LconfBlkI,
    LconfKVList,
    LconfKVMap,
    LconfRoot,
    LconfListOT,
)
Example #41
ap.add_argument("-d", "--dilation", type=int, default=2, help="dilation")
ap.add_argument("-f", "--file", type=str, default=0, help="file")
ap.add_argument("-w", "--wait", type=int, default=300, help="ms to wait")
ap.add_argument("-l", "--log", type=int, default=20, help="log level")
ap.add_argument("-m",
                "--min-sample",
                type=int,
                default=5,
                help="minimum sample")
args = vars(ap.parse_args())
camera = cv2.VideoCapture(args["file"])

logging.basicConfig(level=args["log"])

logging.info("Loading {}".format(
    path_dirname(motion_gesture.__file__) +
    "haarcascade_frontalface_default.xml"))
face_cascade = cv2.CascadeClassifier(
    path_dirname(motion_gesture.__file__) +
    "/haarcascade_frontalface_default.xml")


def main():
    previousFrame = None
    previous_x, previous_y = -1, -1
    previous_dx, previous_dy = 0, 0
    largest_area_x = 0
    largest_area_y = 0

    direction_log = []
    last_movement = time.time() * 1000