Example 1
def generate_csw_connections_file():
    """generate a CSW connections file from a flat file of CSW URLs"""

    filename = options.get("filename", False)

    if not filename:
        raise ValueError("path to file of CSW URLs required")

    conns = etree.Element("qgsCSWConnections")
    conns.attrib["version"] = "1.0"

    with open(filename) as connsfh:
        for line in connsfh:
            url = line.strip()
            if not url:  # blank line
                continue
            try:
                csw = CatalogueServiceWeb(url)
                title = unicode(csw.identification.title)
                etree.SubElement(conns, "csw", name=title, url=url)
            except Exception as err:
                error("ERROR on CSW %s: %s", url, err)

    with open("%s.xml" % filename, "w") as connsxmlfh:
        connsxmlfh.write(etree.tostring(conns, encoding="utf-8"))
Example 2
def read_requirements(*filenames):
    """
    Read PIP "requirements*.txt" files.
    These files contain Python package requirements.

    :param filenames:   List of requirement files to read.
    :returns: List of packages/package requirements (list-of-strings).
    """
    package_requirements = []
    for filename in filenames:
        if not os.path.exists(filename):
            error("REQUIREMENT-FILE %s not found" % filename)
            continue
        # -- NORMAL CASE:
        with open(filename, "r") as f:
            requirements = pkg_resources.parse_requirements(f.read())
            package_requirements.extend(requirements)
#        # -- NORMAL CASE:
#        requirements_file = open(filename, "r")
#        for line in requirements_file.readlines():
#            line = line.strip()
#            if not line or line.startswith("#"):
#                continue    #< SKIP: EMPTY-LINE or COMMENT-LINE
#            package_requirements.append(line)
#        requirements_file.close()
    return package_requirements
Example 3
def generate_csw_connections_file():
    """generate a CSW connections file from a flat file of CSW URLs"""

    filename = options.get('filename', False)

    if not filename:
        raise ValueError('path to file of CSW URLs required')

    conns = etree.Element('qgsCSWConnections')
    conns.attrib['version'] = '1.0'

    with open(filename) as connsfh:
        for line in connsfh:
            url = line.strip()
            if not url:  # blank line
                continue
            try:
                csw = CatalogueServiceWeb(url)
                title = unicode(csw.identification.title)
                conn = etree.SubElement(conns, 'csw', name=title, url=url)
            except Exception as err:
                error('ERROR on CSW %s: %s', url, err)

    with open('%s.xml' % filename, 'w') as connsxmlfh:
        connsxmlfh.write(etree.tostring(conns, encoding='utf-8'))
Example 4
def pychecker(args):
    """Run pychecker on sources."""
    if not args:
        args = options.pychecker.default_args.split()

    # -- COLLECT: command options, files
    problematic = []
    cmdopts = []
    files   = []
    for arg in args:
        path_ = path(arg)
        if arg.startswith("-"):
            cmdopts.append(arg)
        elif path_.isdir():
            files.extend(path_.walkfiles("*.py"))
        elif arg.endswith(".py") and path_.exists():
            files.append(arg)
        else:
            error("UNKNOWN FILE: {0}".format(arg))
            problematic.append(arg)

    # -- EXECUTE:
    cmdopts = " ".join(cmdopts)
    for file_ in files:
        try:
            sh("pychecker {opts} {file}".format(opts=cmdopts, file=file_))
        except Exception as e:
            error("FAILURE: {0}".format(e))
            problematic.append(file_)
Example 5
def generate_csw_connections_file():
    """generate a CSW connections file from a flat file of CSW URLs"""

    filename = options.get('filename', False)

    if not filename:
        raise ValueError('path to file of CSW URLs required')

    conns = etree.Element('qgsCSWConnections')
    conns.attrib['version'] = '1.0'

    with open(filename) as connsfh:
        for line in connsfh:
            url = line.strip()
            if not url:  # blank line
                continue
            try:
                csw = CatalogueServiceWeb(url)
                title = str(csw.identification.title)
                etree.SubElement(conns, 'csw', name=title, url=url)
            except Exception as err:
                error('ERROR on CSW %s: %s', url, err)

    with open('%s.xml' % filename, 'w') as connsxmlfh:
        connsxmlfh.write(etree.tostring(conns, encoding='utf-8'))
Example 6
def migrate(args):
    """Run South's migrate command.
    """
    try:
        import south
        cmd = args and 'migrate %s' % ' '.join(options.args) or 'migrate'
        call_manage(cmd)
    except ImportError:
        error('Could not import south.')
Example 7
def schema(args):
    """Run South's schemamigration command.
    """
    try:
        import south
        cmd = args and 'schemamigration %s' % ' '.join(options.args) or 'schemamigration'
        call_manage(cmd)
    except ImportError:
        error('Could not import south.')
Example 8
    def _set_pid(self, pid):
        if pid is None:
            try:
                os.unlink(self.pid_file)
            except OSError as e:
                error('Cannot remove pid file %s: %s', self.pid_file, e)
        else:
            with open(self.pid_file, 'w') as pid_file:
                pid_file.write(str(pid))
Example 9
def upload_s3(file_path, bucket_name, file_key, force=False, acl='private'):
    """Upload a local file to S3.
    """
    file_path = path(file_path)
    bucket = open_s3(bucket_name)

    if file_path.isdir():
        # Upload the contents of the dir path.
        paths = file_path.listdir()
        paths_keys = zip(paths, ['%s/%s' % (file_key, p.name) for p in paths])
    else:
        # Upload just the given file path.
        paths_keys = [(file_path, file_key)]

    for p, k in paths_keys:
        headers = {}
        s3_key = bucket.get_key(k)
        if not s3_key:
            from boto.s3.key import Key
            s3_key = Key(bucket, k)

        content_type = mimetypes.guess_type(p)[0]
        if content_type:
            headers['Content-Type'] = content_type
        file_size = p.stat().st_size
        file_data = p.bytes()
        file_md5, file_md5_64 = s3_key.get_md5_from_hexdigest(
            hashlib.md5(file_data).hexdigest())

        # Check the hash.
        if s3_key.etag:
            s3_md5 = s3_key.etag.replace('"', '')
            if s3_md5 == file_md5:
                info('Hash is the same. Skipping %s' % file_path)
                continue
            elif not force:
                # Check if file on S3 is older than local file.
                s3_datetime = datetime.datetime(*time.strptime(
                    s3_key.last_modified, '%a, %d %b %Y %H:%M:%S %Z')[0:6])
                local_datetime = datetime.datetime.utcfromtimestamp(
                    p.stat().st_mtime)
                if local_datetime < s3_datetime:
                    info("File %s hasn't been modified since last " \
                         "being uploaded" % (file_key))
                    continue
        # File is newer, let's process and upload
        info("Uploading %s..." % (file_key))

        try:
            s3_key.set_contents_from_string(file_data,
                                            headers,
                                            policy=acl,
                                            replace=True,
                                            md5=(file_md5, file_md5_64))
        except Exception as e:
            error("Failed: %s" % e)
            raise
Example 10
def migrate(args):
    """Run South's migrate command.
    """
    try:
        import south
        cmd = args and 'migrate %s' % ' '.join(options.args) or 'migrate'
        call_manage(cmd)
    except ImportError:
        error('Could not import south.')
Example 11
File: django.py Project: eykd/paved
def migrate(args):
    """Run South's migrate command.
    """
    try:
        import south

        cmd = args and "migrate %s" % " ".join(options.args) or "migrate"
        call_manage(cmd)
    except ImportError:
        error("Could not import south.")
Example 12
File: django.py Project: eykd/paved
def schema(args):
    """Run South's schemamigration command.
    """
    try:
        import south

        cmd = args and "schemamigration %s" % " ".join(options.args) or "schemamigration"
        call_manage(cmd)
    except ImportError:
        error("Could not import south.")
Example 13
def schema(args):
    """Run South's schemamigration command.
    """
    try:
        import south
        cmd = args and 'schemamigration %s' % ' '.join(
            options.args) or 'schemamigration'
        call_manage(cmd)
    except ImportError:
        error('Could not import south.')
Example 14
def min_version(min_version):
    """
    Utility function to ensure that a minimal paver version is used.
    Aborts paver execution if expectation is not met.

    :param min_version: Minimum paver version that is required (as string).
    """
    if not (paver_version >= min_version):
        error("REQUIRE: paver >= %s (actually: %s)" % (min_version, paver_version))
        error("ABORT: Here.")
        sys.exit(1)
Example 15
def sphinx_daemon_start():
    """Start the search daemon."""
    path(options.sphinx.log_path).makedirs()
    if is_sphinx_daemon_running():
        error('Search daemon is already running.')
    else:
        command = 'searchd --config %s' % options.sphinx.config
        if sys.platform == 'win32':
            command = 'start /B ' + command
        sh(command)
        time.sleep(2)
Example 16
    def link(self):
        """Creates a link in the system"""
        try:
            self.local_service_path.symlink(self.system_service_path)
        except OSError:
            error('Cannot write %s, sudo maybe?', self.system_service_path)
            info('# ln -s {target} {link}'.format(
                target=self.local_service_path,
                link=self.system_service_path,
            ))
        else:
            self._reload()
Example 17
def min_version(min_version):
    """
    Utility function to ensure that a minimal paver version is used.
    Aborts paver execution if expectation is not met.

    :param min_version: Minimum paver version that is required (as string).
    """
    if not (paver_version >= min_version):
        error("REQUIRE: paver >= %s (actually: %s)" %
              (min_version, paver_version))
        error("ABORT: Here.")
        sys.exit(1)
Example 18
File: s3.py Project: ponty/paved
def upload_s3(file_path, bucket_name, file_key, force=False, acl='private'):
    """Upload a local file to S3.
    """
    file_path = path(file_path)
    bucket = open_s3(bucket_name)

    if file_path.isdir():
        # Upload the contents of the dir path.
        paths = file_path.listdir()
        paths_keys = zip(paths, ['%s/%s' % (file_key, p.name) for p in paths])
    else:
        # Upload just the given file path.
        paths_keys = [(file_path, file_key)]

    for p, k in paths_keys:
        headers = {}
        s3_key = bucket.get_key(k)
        if not s3_key:
            from boto.s3.key import Key
            s3_key = Key(bucket, k)

        content_type = mimetypes.guess_type(p)[0]
        if content_type:
            headers['Content-Type'] = content_type
        file_size = p.stat().st_size
        file_data = p.bytes()
        file_md5, file_md5_64 = s3_key.get_md5_from_hexdigest(hashlib.md5(file_data).hexdigest())

        # Check the hash.
        if s3_key.etag:
            s3_md5 = s3_key.etag.replace('"', '')
            if s3_md5 == file_md5:
                info('Hash is the same. Skipping %s' % file_path)
                continue
            elif not force:
                # Check if file on S3 is older than local file.
                s3_datetime = datetime.datetime(*time.strptime(
                    s3_key.last_modified, '%a, %d %b %Y %H:%M:%S %Z')[0:6])
                local_datetime = datetime.datetime.utcfromtimestamp(p.stat().st_mtime)
                if local_datetime < s3_datetime:
                    info("File %s hasn't been modified since last " \
                         "being uploaded" % (file_key))
                    continue
        # File is newer, let's process and upload
        info("Uploading %s..." % (file_key))
        
        try:
            s3_key.set_contents_from_string(file_data, headers, policy=acl, replace=True, md5=(file_md5, file_md5_64))
        except Exception as e:
            error("Failed: %s" % e)
            raise
Example 19
    def runpipe():
        kwargs = {'shell': True, 'cwd': cwd, 'env': env}
        if capture:
            kwargs['stderr'] = subprocess.STDOUT
            kwargs['stdout'] = subprocess.PIPE
        p = subprocess.Popen(command, **kwargs)
        p_stdout = p.communicate()[0]
        if p_stdout is not None:
            p_stdout = p_stdout.decode(sys.getdefaultencoding(), 'ignore')
        if p.returncode and not ignore_error:
            if capture and p_stdout is not None:
                error(p_stdout)
            raise BuildFailure("Subprocess return code: %d" % p.returncode)

        if capture:
            return p_stdout
Example 20
    def runpipe():
        kwargs = {'shell': True, 'cwd': cwd}
        if capture:
            kwargs['stderr'] = subprocess.STDOUT
            kwargs['stdout'] = subprocess.PIPE
        p = subprocess.Popen(command, **kwargs)
        p_stdout = p.communicate()[0]
        if p_stdout is not None:
            p_stdout = p_stdout.decode(sys.getdefaultencoding())
        if p.returncode and not ignore_error:
            if capture and p_stdout is not None:
                error(p_stdout)
            raise BuildFailure("Subprocess return code: %d" % p.returncode)

        if capture:
            return p_stdout
Example 21
def download_s3(bucket_name, file_key, file_path, force=False):
    """Download a remote file from S3.
    """
    file_path = path(file_path)
    bucket = open_s3(bucket_name)

    file_dir = file_path.dirname()
    file_dir.makedirs()

    s3_key = bucket.get_key(file_key)
    if file_path.exists():
        file_data = file_path.bytes()
        file_md5, file_md5_64 = s3_key.get_md5_from_hexdigest(
            hashlib.md5(file_data).hexdigest())

        # Check the hash.
        try:
            s3_md5 = s3_key.etag.replace('"', '')
        except KeyError:
            pass
        else:
            if s3_md5 == file_md5:
                info('Hash is the same. Skipping %s' % file_path)
                return

            elif not force:
                # Check if file on S3 is older than local file.
                s3_datetime = datetime.datetime(*time.strptime(
                    s3_key.last_modified, '%a, %d %b %Y %H:%M:%S %Z')[0:6])
                local_datetime = datetime.datetime.utcfromtimestamp(
                    file_path.stat().st_mtime)
                if s3_datetime < local_datetime:
                    info("File at %s is less recent than the local version." %
                         (file_key))
                    return

    # If it is newer, let's process and upload
    info("Downloading %s..." % (file_key))

    try:
        with open(file_path, 'w') as fo:
            s3_key.get_contents_to_file(fo)
    except Exception as e:
        error("Failed: %s" % e)
        raise
Example 22
def venv_bin(name=None):
    """ Get the directory for virtualenv stubs, or a full executable path
        if C{name} is provided.
    """
    if not hasattr(sys, "real_prefix"):
        easy.error("ERROR: '%s' is not a virtualenv" % (sys.executable,))
        sys.exit(1)

    for bindir in ("bin", "Scripts"):
        bindir = os.path.join(sys.prefix, bindir)
        if os.path.exists(bindir):
            if name:
                return os.path.join(bindir, name + os.path.splitext(sys.executable)[1])
            else:
                return bindir
    else:
        easy.error("ERROR: Scripts directory not found in '%s'" % (sys.prefix,))
        sys.exit(1)
Example 23
def venv_bin(name=None):  # pylint: disable=inconsistent-return-statements
    """ Get the directory for virtualenv stubs, or a full executable path
        if C{name} is provided.
    """
    if not hasattr(sys, "real_prefix"):
        easy.error("ERROR: '%s' is not a virtualenv" % (sys.executable,))
        sys.exit(1)

    for bindir in ("bin", "Scripts"):
        bindir = os.path.join(sys.prefix, bindir)
        if os.path.exists(bindir):
            if name:
                bin_ext = os.path.splitext(sys.executable)[1] if sys.platform == 'win32' else ''
                return os.path.join(bindir, name + bin_ext)
            else:
                return bindir

    easy.error("ERROR: Scripts directory not found in '%s'" % (sys.prefix,))
    sys.exit(1)
Example 24
File: s3.py Project: ponty/paved
def download_s3(bucket_name, file_key, file_path, force=False):
    """Download a remote file from S3.
    """
    file_path = path(file_path)
    bucket = open_s3(bucket_name)

    file_dir = file_path.dirname()
    file_dir.makedirs()

    s3_key = bucket.get_key(file_key)
    if file_path.exists():
        file_data = file_path.bytes()
        file_md5, file_md5_64 = s3_key.get_md5_from_hexdigest(hashlib.md5(file_data).hexdigest())

        # Check the hash.
        try:
            s3_md5 = s3_key.etag.replace('"', '')
        except KeyError:
            pass
        else:
            if s3_md5 == file_md5:
                info('Hash is the same. Skipping %s' % file_path)
                return
                
            elif not force:
                # Check if file on S3 is older than local file.
                s3_datetime = datetime.datetime(*time.strptime(
                    s3_key.last_modified, '%a, %d %b %Y %H:%M:%S %Z')[0:6])
                local_datetime = datetime.datetime.utcfromtimestamp(file_path.stat().st_mtime)
                if s3_datetime < local_datetime:
                    info("File at %s is less recent than the local version." % (file_key))
                    return
        
    # If it is newer, let's process and upload
    info("Downloading %s..." % (file_key))
    
    try:
        with open(file_path, 'w') as fo:
            s3_key.get_contents_to_file(fo)
    except Exception as e:
        error("Failed: %s" % e)
        raise
Example 25
def venv_bin(name=None):  # pylint: disable=inconsistent-return-statements
    """ Get the directory for virtualenv stubs, or a full executable path
        if C{name} is provided.
    """
    if not hasattr(sys, "real_prefix"):
        easy.error("ERROR: '%s' is not a virtualenv" % (sys.executable, ))
        sys.exit(1)

    for bindir in ("bin", "Scripts"):
        bindir = os.path.join(sys.prefix, bindir)
        if os.path.exists(bindir):
            if name:
                bin_ext = os.path.splitext(
                    sys.executable)[1] if sys.platform == 'win32' else ''
                return os.path.join(bindir, name + bin_ext)
            else:
                return bindir

    easy.error("ERROR: Scripts directory not found in '%s'" % (sys.prefix, ))
    sys.exit(1)
Example 26
def pychecker(args):
    """Run pychecker on sources."""
    if not args:
        args = options.pychecker.default_args.split()

    # -- COLLECT: command options, files
    problematic = []
    cmdopts = []
    files   = []
    for arg in args:
        path_ = path(arg)
        if arg.startswith("-"):
            cmdopts.append(arg)
        elif path_.isdir():
            files.extend(path_.walkfiles("*.py"))
        elif arg.endswith(".py") and path_.exists():
            files.append(arg)
        else:
            error("UNKNOWN FILE: {0}".format(arg))
            problematic.append(arg)
    if path(".pycheckrc").exists():
        cmdopts.insert(0, "--config=.pycheckrc")

    # -- EXECUTE:
    cmdopts = " ".join(cmdopts)
    for file_ in files:
        try:
            sh("pychecker {opts} {file}".format(opts=cmdopts, file=file_))
        except Exception as e:
            error("FAILURE: {0}".format(e))
            problematic.append(file_)

    # -- SUMMARY:
    if problematic:
        errors = len(problematic)
        error("PYCHECKER FAILED: {0} error(s) occured.".format(errors))
        error("PROBLEMATIC:")
        for file_ in problematic:
            error("  - {0}".format(file_))
    else:
        info("PYCHECKER SUCCESS: {0} file(s).".format(len(files)))
Example 27
def upload():
    """upload package zipfile to server"""

    user = options.get('user', False)
    if not user:
        raise ValueError('OSGeo userid required')

    password = getpass.getpass('Enter your password: ')
    if password.strip() == '':
        raise ValueError('password required')

    call_task('package')

    zipf = get_package_filename()

    url = 'http://%s:%s@%s:%d/%s' % (user, password, options.upload.host,
                                     options.upload.port,
                                     options.upload.endpoint)

    info('Uploading to http://%s/%s' %
         (options.upload.host, options.upload.endpoint))

    server = xmlrpclib.ServerProxy(url, verbose=False)

    try:
        with open(zipf) as zfile:
            plugin_id, version_id = \
                server.plugin.upload(xmlrpclib.Binary(zfile.read()))
            info('Plugin ID: %s', plugin_id)
            info('Version ID: %s', version_id)
    except xmlrpclib.Fault as err:
        error('ERROR: fault error')
        error('Fault code: %d', err.faultCode)
        error('Fault string: %s', err.faultString)
Example 28
def upload():
    """upload package zipfile to server"""

    user = options.get('user', False)
    if not user:
        raise ValueError('OSGeo userid required')

    password = getpass.getpass('Enter your password: ')
    if password.strip() == '':
        raise ValueError('password required')

    call_task('package')

    zipf = get_package_filename()

    url = 'http://%s:%s@%s:%d/%s' % (user, password, options.upload.host,
                                     options.upload.port,
                                     options.upload.endpoint)

    info('Uploading to http://%s/%s' % (options.upload.host,
                                        options.upload.endpoint))

    server = xmlrpclib.ServerProxy(url, verbose=False)

    try:
        with open(zipf) as zfile:
            plugin_id, version_id = \
                server.plugin.upload(xmlrpclib.Binary(zfile.read()))
            info('Plugin ID: %s', plugin_id)
            info('Version ID: %s', version_id)
    except xmlrpclib.Fault as err:
        error('ERROR: fault error')
        error('Fault code: %d', err.faultCode)
        error('Fault string: %s', err.faultString)
Example 29
def quality(options):
    """Enforces PEP8"""
    qc = QualityChecker(WARNING_CODES, ERROR_CODES)
    report = qc()
    debug('Report is %s', report)

    out = getattr(options, 'output', '-')
    if out == '-':
        outfile = sys.stdout
    else:
        outfile = open(out, 'w')
    try:
        report.write(outfile)
    finally:
        if outfile is not sys.stdout:
            outfile.close()

    if not report:
        return False

    if report.has_errors:
        error('Critical errors in quality check')
    elif report.has_failures:
        error('Errors in quality check')
    elif report.has_warnings:
        error('Warnings in quality check')

    strictness = int(getattr(options, 'strictness', 2))
    if report.level >= strictness:
        raise SystemExit(1)
Example 30
    def _build_file(self, filename):
        kwargs = self.get_compile_kwargs()
        dep_tracker = DependencyTracker()
        kwargs['importers'].append((0, dep_tracker))

        infile = self._src.joinpath(filename)
        try:
            result = libsass.compile(filename=infile, **kwargs)
        except Exception as e:
            error('Cannot build %s: %s', infile, e)
            raise

        self._deps[infile] = dep_tracker
        relative_infile = os.path.relpath(infile, self._src)

        outfile = self._dest.joinpath(relative_infile).stripext() + '.css'
        if not outfile.parent.isdir():
            outfile.parent.makedirs_p()

        info('Build %s -> %s', filename, outfile)
        with open(outfile, 'wb') as out_stream:
            out_stream.write(result.encode('utf-8'))
Example 31
def _get_module(options):
    """Return the name of module passed as arg or the default.
    """
    module = None
    args = getattr(options, 'args', [])
    if args:
        module = args[0]
        info('using argument for module: %s' % module)
    branch = _get_branch_name()
    if branch.startswith('module/'):
        module = branch.partition('/')[-1]
        info('using git branch for module: %s' % module)
    if not module:
        try:
            module = path('module').text().rstrip()
        except:
            pass
        else:
            info('read module from file: %s' % module)
    if not module:
        error('could not determine the module')
    return module
Example 32
    def parse_output(self, stdout):
        """
        Parse the output of the r.js process. It expects a marker printed by
        r.js before the list of files. It raises a ``RuntimeError`` if this
        marker is not encountered else it returns the list of files.
        """
        output = []
        for line in stdout:
            output.append(line.decode('utf-8'))
            if line.startswith(b'-------------'):
                break
        else:
            error('rjs said: %s', '\n'.join(output))
            raise RuntimeError('r.js did not write the expected marker')

        files = []
        for line in stdout:
            # resolve path
            line = line.decode('utf-8').strip()
            if line:
                files.append(line)
        return files
Example 33
def _get_module(options):
    """Return the name of module passed as arg or the default.
    """
    module = None
    args = getattr(options, 'args', [])
    if args:
        module = args[0]
        info('using argument for module: %s' % module)
    branch = _get_branch_name()
    if branch.startswith('module/'):
        module = branch.partition('/')[-1]
        info('using git branch for module: %s' % module)
    if not module:
        try:
            module = path('module').text().rstrip()
        except:
            pass
        else:
            info('read module from file: %s' % module)
    if not module:
        error('could not determine the module')
    return module
Example 34
def pychecker(args):
    """Run pychecker on sources."""
    if not args:
        args = options.pychecker.default_args.split()

    # -- COLLECT: command options, files
    problematic = []
    cmdopts = []
    files   = []
    for arg in args:
        path_ = path(arg)
        if arg.startswith("-"):
            cmdopts.append(arg)
        elif path_.isdir():
            files.extend(path_.walkfiles("*.py"))
        elif arg.endswith(".py") and path_.exists():
            files.append(arg)
        else:
            error("UNKNOWN FILE: {0}".format(arg))
            problematic.append(arg)

    # -- EXECUTE:
    cmdopts = " ".join(cmdopts)
    for file_ in files:
        try:
            sh("pychecker {opts} {file}".format(opts=cmdopts, file=file_))
        except Exception as e:
            error("FAILURE: {0}".format(e))
            problematic.append(file_)

    # -- SUMMARY:
    if problematic:
        errors = len(problematic)
        error("PYCHECKER FAILED: {0} error(s) occured.".format(errors))
        error("PROBLEMATIC:")
        for file_ in problematic:
            error("  - {0}".format(file_))
    else:
        info("PYCHECKER SUCCESS: {0} file(s).".format(len(files)))
Example 35
def setup():
    clean = getattr(options, "clean", False)
    ext_libs = options.plugin.ext_libs
    if clean:
        ext_libs.rmtree()
    ext_libs.makedirs()
    reqs = read_requirements()
    os.environ["PYTHONPATH"] = ext_libs.abspath()
    for req in reqs:
        try:
            subprocess.check_call([
                sys.executable,
                "-m",
                "pip",
                "install",
                "--no-deps",
                "--upgrade",
                "-t",
                f"{ext_libs.abspath()}",
                req,
            ])
        except subprocess.CalledProcessError:
            error(f"Error installing {req} with pip.")
            sys.exit(1)
Example 36
    def docker_wrapper():
        error('Task %s requires docker but it is not installed', task_name)
        docker()  # Triggers the runtime error
Example 37
def upload():
    """upload package zipfile to server"""

    user = options.get("user", False)
    if not user:
        raise ValueError("OSGeo userid required")

    password = getpass.getpass("Enter your password: "******"":
        raise ValueError("password required")

    call_task("package")

    zipf = get_package_filename()

    url = "http://%s:%s@%s:%d/%s" % (user, password, options.upload.host, options.upload.port, options.upload.endpoint)

    info("Uploading to http://%s/%s" % (options.upload.host, options.upload.endpoint))

    server = xmlrpclib.ServerProxy(url, verbose=False)

    try:
        with open(zipf) as zfile:
            plugin_id, version_id = server.plugin.upload(xmlrpclib.Binary(zfile.read()))
            info("Plugin ID: %s", plugin_id)
            info("Version ID: %s", version_id)
    except xmlrpclib.Fault as err:
        error("ERROR: fault error")
        error("Fault code: %d", err.faultCode)
        error("Fault string: %s", err.faultString)
    except xmlrpclib.ProtocolError as err:
        error("Error: Protocol error")
        error("%s : %s", err.errcode, err.errmsg)
        if err.errcode == 403:
            error("Invalid name and password")
Example 38
         (options.upload.host, options.upload.endpoint))

    server = xmlrpclib.ServerProxy(url, verbose=False)

    try:
        with open(zipf) as zfile:
            plugin_id, version_id = \
                server.plugin.upload(xmlrpclib.Binary(zfile.read()))
            info('Plugin ID: %s', plugin_id)
            info('Version ID: %s', version_id)
    except xmlrpclib.Fault as err:
        error('ERROR: fault error')
        error('Fault code: %d', err.faultCode)
        error('Fault string: %s', err.faultString)
    except xmlrpclib.ProtocolError as err:
        error('Error: Protocol error')
        error("%s : %s", err.errcode, err.errmsg)
        if err.errcode == 403:
            error('Invalid name and password')


@task
def test_default_csw_connections():
    """test that the default CSW connections work"""

    relpath = 'resources%sconnections-default.xml' % os.sep
    csw_connections_xml = options.base.plugin / relpath

    csws = etree.parse(csw_connections_xml)

    for csw in csws.findall('csw'):
Example 39
                                        options.upload.endpoint))

    server = xmlrpclib.ServerProxy(url, verbose=False)

    try:
        with open(zipf) as zfile:
            plugin_id, version_id = \
                server.plugin.upload(xmlrpclib.Binary(zfile.read()))
            info('Plugin ID: %s', plugin_id)
            info('Version ID: %s', version_id)
    except xmlrpclib.Fault as err:
        error('ERROR: fault error')
        error('Fault code: %d', err.faultCode)
        error('Fault string: %s', err.faultString)
    except xmlrpclib.ProtocolError as err:
        error('Error: Protocol error')
        error("%s : %s", err.errcode, err.errmsg)
        if err.errcode == 403:
            error('Invalid name and password')


@task
def test_default_csw_connections():
    """test that the default CSW connections work"""

    relpath = 'resources/connections-default.xml'
    csw_connections_xml = options.base.plugin / relpath

    csws = etree.parse(csw_connections_xml)

    for csw in csws.findall('csw'):
Example 40
            error("UNKNOWN FILE: {0}".format(arg))
            problematic.append(arg)

    # -- EXECUTE:
    cmdopts = " ".join(cmdopts)
    for file_ in files:
        try:
            sh("pychecker {opts} {file}".format(opts=cmdopts, file=file_))
        except Exception as e:
            error("FAILURE: {0}".format(e))
            problematic.append(file_)

    # -- SUMMARY:
    if problematic:
        errors = len(problematic)
        error("PYCHECKER FAILED: {0} error(s) occured.".format(errors))
        error("PROBLEMATIC:")
        for file_ in problematic:
            error("  - {0}".format(file_))
    else:
        info("PYCHECKER SUCCESS: {0} file(s).".format(len(files)))

@task
@consume_args
def pylint(args):
    """Run pylint on sources."""
    if not args:
        args = options.pychecker.default_args.split()
    cmdline = " ".join(args)
    sh("pylint %s" % cmdline)