Example #1
def test_01_upgrade_jail(
    invoke_cli, jail, skip_test, release,
    freebsd_download_server, dhcp, jail_ip, resource_selector
):
    # This scenario should work in most cases:
    # take the specified release, go down one version,
    # create a jail for that version, and then upgrade it to release.
    # If that passes as desired, we can mark this as resolved.

    req = requests.get(freebsd_download_server)
    assert req.status_code == 200

    releases = [
        StrictVersion(r) for r in re.findall(
            r'href="(\d.*)-RELEASE/"', req.content.decode('utf-8')
        )
    ]
    releases.sort()
    release = StrictVersion(release.split('-')[0])

    skip_test(release not in releases, f'{releases} does not contain {release}')

    skip_test(releases.index(release) == 0, 'Cannot execute upgrade test: no earlier release available')

    jails = resource_selector.jails_with_prop('ip4_addr', jail_ip)
    if not jails:
        jails = resource_selector.jails_with_prop('dhcp', 'on')

    skip_test(not jails)

    invoke_cli(
        ['upgrade', jail.name, '-r', release]
    )
Example #2
 def test_version_mismatch(self):
     """Test handling of configuration files from different rosrepo versions"""
     os.makedirs(os.path.join(self.wsdir, ".rosrepo"))
     cfg_file = os.path.join(self.wsdir, ".rosrepo", "config")
     with open(cfg_file, "w") as f:
         f.write("version: %s" % rosrepo_version)
     cfg = Config(self.wsdir)
     self.assertEqual(cfg.get("version"), rosrepo_version)
     with open(cfg_file, "w") as f:
         f.write("version: 3.0.0a0")
     cfg = Config(self.wsdir)
     self.assertEqual(cfg.get("version"), rosrepo_version)
     # revisions may change without affecting config compatibility
     v = StrictVersion(rosrepo_version)
     v.version = tuple([v.version[0], v.version[1], v.version[2] + 1])
     with open(cfg_file, "w") as f:
         f.write('version: "%s"' % str(v))
     cfg = Config(self.wsdir)
     self.assertEqual(cfg.get("version"), str(v))
     # major or minor version number change means incompatible configurations
     v.version = tuple([v.version[0], v.version[1] + 1, 0])
     with open(cfg_file, "w") as f:
         f.write('version: "%s"' % str(v))
     self.assertRaises(ConfigError, lambda: Config(self.wsdir))
     v.version = tuple([v.version[0] + 1, 0, 0])
     with open(cfg_file, "w") as f:
         f.write('version: "%s"' % str(v))
     self.assertRaises(ConfigError, lambda: Config(self.wsdir))
Example #3
def dipy_version():
    import dipy
    dipy_ver = dipy.__version__
    from distutils.version import StrictVersion

    dipy_ver = StrictVersion(dipy_ver.split('.dev')[0])
    return dipy_ver
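
A brief illustration of why the '.dev' suffix is stripped above: StrictVersion only accepts N.N[.N] strings with an optional aN/bN pre-release tag, so development suffixes raise ValueError. The version string below is a made-up example, not a real dipy release.

from distutils.version import StrictVersion

raw = '1.5.0.dev1234'   # hypothetical dipy.__version__ value, for illustration only
try:
    StrictVersion(raw)  # raises ValueError: '.dev1234' is not a valid pre-release tag
except ValueError:
    pass
print(StrictVersion(raw.split('.dev')[0]))  # prints 1.5 (__str__ drops a trailing .0 patch level)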
Example #4
 def __init__(self):
     StrictVersion.__init__(self)
     tversion = '0.0.0'
     ccp4_major_minor = os.path.join(os.environ["CCP4"], "lib", "ccp4", "MAJOR_MINOR")
     if os.path.isfile(ccp4_major_minor):
         with open(ccp4_major_minor, "r") as f_in:
             tversion = f_in.read().strip()
     self.parse(tversion)
Example #5
def validate_version(version):
    validator = StrictVersion()
    try:
        validator.parse(version)

    except ValueError:
        raise ValidationError('invalid version {}'.format(version))

    return version
Example #6
    def version_init(self, s):
        '''Check if s is a valid version string, using distutils.version.StrictVersion

        :param s:
        :return:
        '''
        VersionChecker = StrictVersion()
        VersionChecker.parse(s)  # raises ValueError if s is not a valid version string
        return s
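
For reference, a minimal sketch of what StrictVersion.parse accepts and rejects, in the spirit of the two validation helpers above; the version strings are arbitrary examples.

from distutils.version import StrictVersion

checker = StrictVersion()
checker.parse('1.4.0b2')   # accepted: N.N[.N] plus an optional aN/bN pre-release tag
try:
    checker.parse('v1.2')  # rejected: extra text such as a leading 'v' raises ValueError
except ValueError as exc:
    print(exc)             # invalid version number 'v1.2'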
Example #7
    def run(self):
        if self.set:
            newver = str(self.set)
        else:
            try:
                oldver = self.distribution.metadata.version
                oldver = StrictVersion(oldver)
            except ValueError as err:
                print("Error: setup.py", err, file=sys.stderr)
                sys.exit(errno.EINVAL)

            major, minor, patch = oldver.version
            pre = oldver.prerelease

            if self.alpha:
                if pre is None or pre[0] != 'a':
                    pre = ('a', 0)
                else:
                    pre = (pre[0], pre[1] + 1)
            elif self.beta:
                if pre is None or pre[0] != 'b':
                    pre = ('b', 0)
                else:
                    pre = (pre[0], pre[1] + 1)
            elif self.release:
                pre = None
            elif self.patch:
                patch = patch + 1
                pre = None
            elif self.minor:
                minor = minor + 1
                patch = 0
                pre = None
            elif self.major:
                major = major + 1
                minor = 0
                patch = 0
                pre = None
            else:
                return
            newver = StrictVersion()
            newver.version = (major, minor, patch)
            newver.prerelease = pre
            newver = str(newver)

        for dirpath, dirnames, filenames in os.walk(os.curdir):
            for filename in (f for f in filenames if os.path.splitext(f)[1] == '.py'):
                inpath = os.path.join(dirpath, filename)
                outpath = inpath + '.tmp'
                with open(inpath) as fin, open(outpath, 'w') as fout:
                    for line in fin:
                        if line.startswith("__version__"):
                            line = "__version__ = '{0}'\n".format(newver)
                        fout.write(line)
                shutil.copystat(inpath, outpath)
                os.replace(outpath, inpath)
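
A short sketch of the StrictVersion attributes the bump logic above manipulates (version and prerelease); the numbers are arbitrary examples.

from distutils.version import StrictVersion

v = StrictVersion('1.2.3b4')
print(v.version)     # (1, 2, 3)
print(v.prerelease)  # ('b', 4)

bumped = StrictVersion()        # empty instance, filled in by hand as run() does above
bumped.version = (1, 3, 0)
bumped.prerelease = ('a', 0)
print(str(bumped))   # 1.3a0 -- a zero patch level is omitted by __str__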
Example #8
 def __cmp__(self, other):
     if self.version is None or other.version is None:
         # cannot compare None version
         raise Exception("Unable to compare None versions")
     try:
         sv = StrictVersion()
         sv.parse(self.version)
         return sv.__cmp__(other.version)
     except Exception:
         return LooseVersion(self.version).__cmp__(LooseVersion(other.version))
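
Why the LooseVersion fallback above is needed: StrictVersion rejects many version strings found in the wild, while LooseVersion accepts almost anything, at the cost of occasionally surprising ordering. The strings below are illustrative only.

from distutils.version import LooseVersion, StrictVersion

try:
    StrictVersion('1.0rc1')  # ValueError: only aN/bN pre-release tags are allowed
except ValueError:
    pass

# LooseVersion parses it, but orders the release candidate *after* the final release
print(LooseVersion('1.0rc1') > LooseVersion('1.0'))  # True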
Example #9
def check_llvm_version(context):
    context.Message('Checking for LLVM version...')
    success, output = context.TryAction('$LLVM_CONFIG --version >$TARGET')
    # some releases of LLVM 3.2 shipped as version 3.2svn
    output = output.replace('svn', '')
    if not output:
        context.Result("No LLVM version could be found")
        exit(1)
    output = StrictVersion(output.rstrip())
    context.Result(str(output))
    context.env['LLVM_version'] = output
Example #10
def load_translations(lang, version=1, commcare_version=None):
    # pt => por: hack for backwards compatibility
    if lang == 'pt':
        lang = 'por'

    try:
        str(lang)
    except UnicodeEncodeError:
        return {}

    paths_to_try = []
    if commcare_version == 'latest':
        files = listdir(normpath(join(__file__, "../historical-translations-by-version/")))
        if len(files):
            files.sort()
            files.reverse()
            paths_to_try.append(
                '../historical-translations-by-version/{file}'
                .format(file=files[0])
            )
            commcare_version = None
    elif commcare_version:
        try:
            commcare_version = StrictVersion(commcare_version)
        except ValueError:
            commcare_version = None
    if version == 2 and lang == 'en' and commcare_version:
        # the earliest version we have is 2.23
        if commcare_version < StrictVersion('2.23'):
            commcare_version = StrictVersion('2.23')
        major, minor, bugfix = commcare_version.version
        while bugfix >= 0:
            commcare_version.version = major, minor, bugfix
            paths_to_try.append(
                '../historical-translations-by-version/{commcare_version}-messages_{lang}-{version}.txt'
                .format(commcare_version=commcare_version, lang=lang, version=version)
            )
            bugfix -= 1

    while version:
        paths_to_try.append('../messages_{lang}-{version}.txt'
                            .format(lang=lang, version=version))
        version -= 1

    for rel_path in paths_to_try:
        path = normpath(join(__file__, rel_path))
        try:
            with open(path, encoding='utf-8') as f:
                return load(f)
        except IOError:
            pass
    return {}
Example #11
    def test_update_testsuites(self):

        v = StrictVersion(self.testsuites['MTBC/test'].version)
        # load by full name
        testsuites = {}
        update_testsuites(testsuites,
                {'MTBC/test': str(v)},
                self.testsuite_paths
            )
        assert set(testsuites.keys()) == set(['MTBC/test'])
        # load by short name
        update_testsuites(testsuites,
                {'test': str(v)},
                self.testsuite_paths
            )
        assert set(testsuites.keys()) == set(['MTBC/test', 'test'])
        assert testsuites['test'] == testsuites['MTBC/test']

        # load compatible
        vv = list(v.version)
        vv[1] -= 1
        v.version = vv
        update_testsuites(testsuites,
                {'test': str(v)},
                self.testsuite_paths
            )
        assert set(testsuites.keys()) == set(['MTBC/test', 'test'])

        # load incompatible 1/2
        vv[1] += 2
        v.version = vv
        try:
            update_testsuites(testsuites,
                    {'test': str(v)},
                    self.testsuite_paths
                )
            assert False, 'future minor version specified; should fail'
        except TestsuiteVersionConflictException:
            pass

        # load incompatible 2/2
        vv[1] -= 1
        vv[0] -= 1
        v.version = vv
        try:
            update_testsuites(testsuites,
                    {'test': str(v)},
                    self.testsuite_paths
                )
            assert False, 'different major version specified; should fail'
        except TestsuiteVersionConflictException:
            pass
Example #12
def parse_version(vstring):
    """
    Parse vstring as a StrictVersion if possible, falling back to LooseVersion
    @type   vstring:    str
    @return:    StrictVersion if possible, otherwise LooseVersion
    @rtype:     StrictVersion or LooseVersion
    """
    try:
        version = StrictVersion(vstring)
    except ValueError:
        logging.getLogger('ipvs.common.debug').info('Strict parsing failed, falling back to LooseVersion instead')
        version = LooseVersion(vstring)
        version.version = tuple(version.version)

    return version
Example #13
    def checkRequirements(self):
        RenderChanModule.checkRequirements(self)
        if self.active:
            # The CLI features depend on the version
            with tempfile.TemporaryDirectory() as tmpPath:
                # The exporting of a fake file is a workaround for older versions which just start the program when passed only -v
                proc = subprocess.Popen([self.conf['binary'], "-v", "--export-sequence", os.path.join(tmpPath,"test")], stdout=subprocess.PIPE)
                try:
                    outs, errs = proc.communicate(timeout=5)
                except subprocess.TimeoutExpired:
                    proc.kill()
                    outs, errs = proc.communicate()
                rc = proc.poll()
            if rc == 0:
                try:
                    # Get the version from stdout. An example of the output: "Pencil2D 0.6.0\n"
                    self.version = outs.rstrip().decode("utf-8").split(" ")[-1]
                    self.version = ".".join(self.version.split(".")[0:3])
                    self.version = StrictVersion(self.version)
                except:
                    self.active = False
            else:
                self.active = False

            if not self.active:
                print("WARNING: Failed to initialize Pencil2D module. Possible reasons: a missing X connection, or an unsupported (too old?) version of Pencil2D on your system. In the latter case, please consider getting the latest version at https://www.pencil2d.org/.")

        return self.active
Example #14
 def __init__(self):
     StrictVersion.__init__(self)
     ccp4_major_minor = os.path.join(os.environ["CCP4"], "lib", "ccp4", "MAJOR_MINOR")
     if os.path.isfile(ccp4_major_minor):
         with open(ccp4_major_minor, "r") as f_in:
             tversion = f_in.read().strip()
     else:
         logger = logging.getLogger(__name__)
         logger.debug("Detecting CCP4 version via executing pdbcur")
         stdout = cexec(['pdbcur' + EXE_EXT], permit_nonzero=True)
         tversion = None
         for line in stdout.split(os.linesep):
             if line.startswith(' ### CCP4'):
                 tversion = line.split()[2].rstrip(':')
         if tversion is None:
             raise RuntimeError("Cannot determine CCP4 version")
     self.parse(tversion)
Example #15
def match_brlcad_version(brlcad_options, brlcad_installations, logger):
    """
    Iterate the brlcad installations in the order found, and try to match it
    to a set of configuration options.
    """
    for version in brlcad_options:
        min_version = version.get("min-brlcad-version", None)
        if min_version:
            min_version = StrictVersion(min_version.strip())
        max_version = version.get("max-brlcad-version", None)
        if max_version:
            max_version = StrictVersion(max_version.strip())
        logger.debug("Checking {0}: {1} -> {2}".format(version["section"], min_version, max_version))
        for brlcad_info in brlcad_installations:
            if min_version and min_version > brlcad_info["version"]:
                continue
            if max_version and max_version < brlcad_info["version"]:
                continue
            logger.debug("Found matching brlcad installation: {0}".format(brlcad_info["prefix"]))
            yield version, brlcad_info
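
A hypothetical usage sketch for match_brlcad_version; the option and installation dictionaries below are invented, but mirror the keys the function reads ('section', 'min-brlcad-version', 'max-brlcad-version', 'version', 'prefix').

import logging
from distutils.version import StrictVersion

brlcad_options = [
    {"section": "brlcad-7.26", "min-brlcad-version": "7.26.0", "max-brlcad-version": None},
]
brlcad_installations = [
    {"prefix": "/usr/brlcad", "version": StrictVersion("7.26.4")},
]

for options, info in match_brlcad_version(brlcad_options, brlcad_installations,
                                          logging.getLogger(__name__)):
    print(options["section"], "->", info["prefix"])  # brlcad-7.26 -> /usr/brlcad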
Example #16
    def __init__(self):
        RenderChanModule.__init__(self)
        if os.name == 'nt':
            self.conf['binary']=os.path.join(os.path.dirname(__file__),"..\\..\\..\\packages\\pencil2d\\pencil2d.exe")
        else:
            self.conf['binary']="pencil2d"
        self.conf["packetSize"]=0
        # Extra params
        self.extraParams["transparency"]="0"
        self.extraParams["width"]="-1"
        self.extraParams["height"]="-1"
        self.extraParams["startFrame"]="1"
        self.extraParams["endFrame"]="last"

        self.version=StrictVersion('0.5.4') #default value
Example #17
def main():
    # Change to SWIG path
    os.chdir(swig_path)

    # Check SWIG version
    for line in io.StringIO(subprocess.check_output(swig_version).decode('utf-8')):
        if line.startswith('SWIG Version '):
            *init, version = line.split('SWIG Version ')
            version = StrictVersion(version.strip())
            if version >= StrictVersion(swig_required_version):
                print('SWIG Version:', version)
                break
    else:
        fail('SWIG Version >= {} required'.format(swig_required_version), 2)

    # Map namespaces to function names
    functions = {}
    namespace = 'default'
    print('Mapping namespaces to functions')
    with open(natives_h) as natives:
        for line in natives:
            line = line.strip()

            # Namespace
            if line.startswith('namespace'):
                head, namespace, *tail = line.split(' ')
                namespace = namespace.lower()

            # Function
            if line.startswith('static'):
                *init, last = line.split(' ', maxsplit=2)
                name, *tail = last.split('(', maxsplit=1)
                functions.setdefault(namespace, set())
                functions[namespace].add(name)

    # Generate wrapper
    print('Generating wrapper')
    try:
        subprocess.check_call(swig_generate)
    except subprocess.CalledProcessError as exc:
        fail(exc, 3)

    # Rewrite Python wrapper
    last_namespace = None
    function_found = False
    skip = 0
    indent = ' ' * 4
    init = []
    middle = []
    tail = []

    def add_normal(_line):
        if function_found:
            tail.append(_line)
        else:
            init.append(_line)

    def add_class_assignment():
        if last_namespace is not None:
            middle.append('{} = _{}\n\n\n'.format(
                last_namespace, last_namespace.capitalize()))

    def maybe_add_method(_line):
        nonlocal last_namespace, function_found
        for _namespace, names in functions.items():
            for _name in names:
                if _line.startswith('def {}'.format(_name)):
                    function_found = True
                    if _namespace != last_namespace:
                        # Insert class assignment at the end of a namespace
                        add_class_assignment()

                        # Insert class declaration at the start of a namespace
                        middle.append('class _{}(_object):\n'.format(_namespace.capitalize()))
                        last_namespace = _namespace

                    # Insert staticmethod and function definition
                    *_, _last = _line.split('(', maxsplit=1)
                    middle.append(indent + '@staticmethod\n')
                    middle.append(indent + 'def {}({}'.format(
                        _name if _name.startswith('_') else _name.lower(),
                        _last
                    ))
                    return 3
        add_normal(_line)
        return 0

    # Parse generated Python wrapper
    print('Parsing generated Python wrapper')
    with open(gta_native_py_in) as natives_in:
        for line in natives_in:
            if skip > 0:
                # Return statement
                if skip == 3:
                    middle.append('{}{}\n'.format(indent, line))
                elif skip == 1 and len(line.strip()) > 0:
                    add_normal(line)
                skip -= 1
            elif line.startswith('def '):
                # Function
                skip = maybe_add_method(line)
            else:
                # Something else
                add_normal(line)
        add_class_assignment()

    # Write new Python wrapper
    print('Writing new Python wrapper')
    with open(gta_native_py_out, 'w') as natives_out:
        natives_out.writelines(init + middle + tail)

    # Remove originally generated Python wrapper
    print('Removing originally generated Python wrapper')
    os.remove(gta_native_py_in)

    # Done
    print('Done')
    sys.exit(0)
Example #18
from distutils.version import StrictVersion

from django import get_version
from django.conf import settings
from django.contrib.auth.models import Group
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.db.models.query import QuerySet
from django.utils import timezone
from jsonfield.fields import JSONField
from model_utils import Choices

from notifications import settings as notifications_settings
from notifications.signals import notify
from notifications.utils import id2slug
from swapper import load_model

if StrictVersion(get_version()) >= StrictVersion('1.8.0'):
    from django.contrib.contenttypes.fields import GenericForeignKey  # noqa
else:
    from django.contrib.contenttypes.generic import GenericForeignKey  # noqa

EXTRA_DATA = notifications_settings.get_config()['USE_JSONFIELD']


def is_soft_delete():
    return notifications_settings.get_config()['SOFT_DELETE']


def assert_soft_delete():
    if not is_soft_delete():
        # msg = """To use 'deleted' field, please set 'SOFT_DELETE'=True in settings.
        # Otherwise NotificationQuerySet.unread and NotificationQuerySet.read do NOT filter by 'deleted' field.
Example #19
from distutils.version import StrictVersion


VERSION = StrictVersion('3.1.0')
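
For context, a quick sketch of how such a module-level VERSION constant is typically compared; the '3.0.0' literal is just an arbitrary example.

from distutils.version import StrictVersion

VERSION = StrictVersion('3.1.0')
print(VERSION > '3.0.0')                # True -- comparisons accept plain strings
print(VERSION == StrictVersion('3.1'))  # True -- '3.1' and '3.1.0' parse to the same version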

Example #20
class RenderChanPencil2dModule(RenderChanModule):
    def __init__(self):
        RenderChanModule.__init__(self)
        if os.name == 'nt':
            self.conf['binary']=os.path.join(os.path.dirname(__file__),"..\\..\\..\\packages\\pencil2d\\pencil2d.exe")
        else:
            self.conf['binary']="pencil2d"
        self.conf["packetSize"]=0
        # Extra params
        self.extraParams["transparency"]="0"
        self.extraParams["width"]="-1"
        self.extraParams["height"]="-1"
        self.extraParams["startFrame"]="1"
        self.extraParams["endFrame"]="last"

        self.version=StrictVersion('0.5.4') #default value

    def checkRequirements(self):
        RenderChanModule.checkRequirements(self)
        if self.active:
            # The CLI features depend on the version
            with tempfile.TemporaryDirectory() as tmpPath:
                # The exporting of a fake file is a workaround for older versions which just start the program when passed only -v
                proc = subprocess.Popen([self.conf['binary'], "-v", "--export-sequence", os.path.join(tmpPath,"test")], stdout=subprocess.PIPE)
                try:
                    outs, errs = proc.communicate(timeout=5)
                except subprocess.TimeoutExpired:
                    proc.kill()
                    outs, errs = proc.communicate()
                rc = proc.poll()
            if rc == 0:
                try:
                    # Get the version from stdout. An example of the output: "Pencil2D 0.6.0\n"
                    self.version = outs.rstrip().decode("utf-8").split(" ")[-1]
                    self.version = ".".join(self.version.split(".")[0:3])
                    self.version = StrictVersion(self.version)
                except:
                    self.active = False
            else:
                self.active = False

            if not self.active:
                print("WARNING: Failed to initialize Pencil2D module. Possible reasons: a missing X connection, or an unsupported (too old?) version of Pencil2D on your system. In the latter case, please consider getting the latest version at https://www.pencil2d.org/.")

        return self.active

    def analyze(self, filename):
        info={ "dependencies":[] }
        if filename.endswith(".pcl"):
            with open(filename, 'r') as f:
                tree = ElementTree.parse(f)
                root = tree.getroot()

                info["dependencies"].extend((os.path.join(filename + ".data", element.get("src")) for element in root.findall(".//*[@src]")))
        else:
            # We don't actually have to do anything here because there are no dependencies and the default values
            # automatically update for changes in the internal width, height, camera etc.
            # This is how we would open it if we needed to
            """with ZipFile(filename) as zipdir:
                with zipdir.open('main.xml') as mainfile:
                    tree = ElementTree.parse(mainfile)
                    root = tree.getroot()"""

        return info

    def getInputFormats(self):
        if self.version >= StrictVersion('0.6.0'):
            return ["pcl", "pclx"]
        else:
            return ["pcl"]

    def getOutputFormats(self):
        if self.version > StrictVersion('0.6.0'):
            return ["png", "jpg", "jpeg", "tif", "tiff", "bmp", "mp4", "avi", "gif", "webm"]
        elif self.version == StrictVersion('0.6.0'):
            return ["png", "jpg", "jpeg", "tif", "tiff", "bmp"]
        else:
            return ["png"]

    def render(self, filename, outputPath, startFrame, endFrame, format, updateCompletion, extraParams={}):
        comp = 0.0
        updateCompletion(comp)

        output = os.path.join(outputPath,"file")
        if not os.path.exists(outputPath):
            os.mkdir(outputPath)

        if self.version > StrictVersion('0.6.0'):
            commandline=[self.conf['binary'], filename, "-o", output, "--width", extraParams['width'], "--height", extraParams['height'], "--start", startFrame, "--end", endFrame]
            if is_true_string(extraParams['transparency']):
                commandline.append("--transparency")
            if extraParams['camera']:
                commandline.extend(["--camera", extraParams['camera']])
        elif self.version == StrictVersion('0.6.0'):
            commandline=[self.conf['binary'], filename, "--export-sequence", output, "--width", extraParams['width'], "--height", extraParams['height']]
            if is_true_string(extraParams['transparency']):
                commandline.append("--transparency")
        else:
            commandline=[self.conf['binary'], filename, "--export-sequence", output]

        print(commandline)
        subprocess.check_call(commandline)

        updateCompletion(1.0)
Example #21
 def _cmp(self, other):
     if isinstance(other, str):
         other = OmicronVersion(other)
     return StrictVersion._cmp(self, other)
Example #22
class TestGLMRegressorConverter(unittest.TestCase):
    def test_model_linear_regression(self):
        model, X = fit_regression_model(linear_model.LinearRegression())
        model_onnx = convert_sklearn(
            model, "linear regression",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnLinearRegression-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    @unittest.skipIf(
        StrictVersion(ort_version) <= StrictVersion("0.5.0"),
        reason="old onnxruntime does not support double")
    def test_model_linear_regression_multi(self):
        model, X = fit_regression_model(linear_model.LinearRegression(),
                                        n_targets=2)
        model_onnx = convert_sklearn(
            model, "linear regression",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnLinearRegressionMulti-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    @unittest.skipIf(
        StrictVersion(ort_version) <= StrictVersion("0.5.0"),
        reason="old onnxruntime does not support double")
    def test_model_linear_regression64(self):
        model, X = fit_regression_model(linear_model.LinearRegression())
        model_onnx = convert_sklearn(model, "linear regression",
                                     [("input", DoubleTensorType(X.shape))])
        self.assertIsNotNone(model_onnx)
        self.assertIn("elem_type: 11", str(model_onnx))
        dump_data_and_model(
            X.astype(numpy.float64),
            model,
            model_onnx,
            basename="SklearnLinearRegression64-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    @unittest.skipIf(
        StrictVersion(ort_version) <= StrictVersion("0.5.0"),
        reason="old onnxruntime does not support double")
    def test_model_linear_regression64_multiple(self):
        model, X = fit_regression_model(linear_model.LinearRegression(),
                                        n_targets=2)
        model_onnx = convert_sklearn(model, "linear regression",
                                     [("input", DoubleTensorType(X.shape))])
        self.assertIsNotNone(model_onnx)
        self.assertIn("elem_type: 11", str(model_onnx))
        dump_data_and_model(
            X.astype(numpy.float64),
            model,
            model_onnx,
            basename="SklearnLinearRegression64Multi-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_linear_regression_int(self):
        model, X = fit_regression_model(
            linear_model.LinearRegression(), is_int=True)
        model_onnx = convert_sklearn(
            model, "linear regression",
            [("input", Int64TensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnLinearRegressionInt-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_linear_regression_nointercept(self):
        model, X = fit_regression_model(
            linear_model.LinearRegression(fit_intercept=False))
        model_onnx = convert_sklearn(
            model, "linear regression",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnLinearRegressionNoIntercept-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_linear_regression_bool(self):
        model, X = fit_regression_model(
            linear_model.LinearRegression(), is_bool=True)
        model_onnx = convert_sklearn(
            model, "linear regression",
            [("input", BooleanTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnLinearRegressionBool",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_linear_svr(self):
        model, X = fit_regression_model(LinearSVR())
        model_onnx = convert_sklearn(
            model, "linear SVR",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnLinearSvr-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_linear_svr_int(self):
        model, X = fit_regression_model(LinearSVR(), is_int=True)
        model_onnx = convert_sklearn(
            model, "linear SVR",
            [("input", Int64TensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnLinearSvrInt-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_linear_svr_bool(self):
        model, X = fit_regression_model(LinearSVR(), is_bool=True)
        model_onnx = convert_sklearn(
            model, "linear SVR",
            [("input", BooleanTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnLinearSVRBool",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_ridge(self):
        model, X = fit_regression_model(linear_model.Ridge())
        model_onnx = convert_sklearn(
            model, "ridge regression",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnRidge-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_ridge_int(self):
        model, X = fit_regression_model(linear_model.Ridge(), is_int=True)
        model_onnx = convert_sklearn(
            model, "ridge regression",
            [("input", Int64TensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnRidgeInt-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_ridge_bool(self):
        model, X = fit_regression_model(linear_model.Ridge(), is_bool=True)
        model_onnx = convert_sklearn(
            model, "ridge regression",
            [("input", BooleanTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnRidgeBool",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_sgd_regressor(self):
        model, X = fit_regression_model(linear_model.SGDRegressor())
        model_onnx = convert_sklearn(
            model,
            "scikit-learn SGD regression",
            [("input", FloatTensorType([None, X.shape[1]]))],
        )
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnSGDRegressor-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_sgd_regressor_int(self):
        model, X = fit_regression_model(
            linear_model.SGDRegressor(), is_int=True)
        model_onnx = convert_sklearn(
            model, "SGD regression",
            [("input", Int64TensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnSGDRegressorInt-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_sgd_regressor_bool(self):
        model, X = fit_regression_model(
            linear_model.SGDRegressor(), is_bool=True)
        model_onnx = convert_sklearn(
            model, "SGD regression",
            [("input", BooleanTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnSGDRegressorBool-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_elastic_net_regressor(self):
        model, X = fit_regression_model(linear_model.ElasticNet())
        model_onnx = convert_sklearn(
            model,
            "scikit-learn elastic-net regression",
            [("input", FloatTensorType([None, X.shape[1]]))],
        )
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnElasticNet-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_elastic_net_cv_regressor(self):
        model, X = fit_regression_model(linear_model.ElasticNetCV())
        model_onnx = convert_sklearn(
            model,
            "scikit-learn elastic-net regression",
            [("input", FloatTensorType([None, X.shape[1]]))],
        )
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnElasticNetCV-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_elastic_net_regressor_int(self):
        model, X = fit_regression_model(linear_model.ElasticNet(), is_int=True)
        model_onnx = convert_sklearn(
            model, "elastic net regression",
            [("input", Int64TensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnElasticNetRegressorInt-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_elastic_net_regressor_bool(self):
        model, X = fit_regression_model(
            linear_model.ElasticNet(), is_bool=True)
        model_onnx = convert_sklearn(
            model, "elastic net regression",
            [("input", BooleanTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnElasticNetRegressorBool",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_lars(self):
        model, X = fit_regression_model(linear_model.Lars())
        model_onnx = convert_sklearn(
            model, "lars",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnLars-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_lars_cv(self):
        model, X = fit_regression_model(linear_model.LarsCV())
        model_onnx = convert_sklearn(
            model, "lars",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnLarsCV-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_lasso_lars(self):
        model, X = fit_regression_model(linear_model.LassoLars(alpha=0.01))
        model_onnx = convert_sklearn(
            model, "lasso lars",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnLassoLars-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_lasso_lars_cv(self):
        model, X = fit_regression_model(linear_model.LassoLarsCV())
        model_onnx = convert_sklearn(
            model, "lasso lars cv",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnLassoLarsCV-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_lasso_lars_ic(self):
        model, X = fit_regression_model(linear_model.LassoLarsIC())
        model_onnx = convert_sklearn(
            model, "lasso lars cv",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnLassoLarsIC-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_lasso_cv(self):
        model, X = fit_regression_model(linear_model.LassoCV())
        model_onnx = convert_sklearn(
            model, "lasso cv",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnLassoCV-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_lasso_lars_int(self):
        model, X = fit_regression_model(linear_model.LassoLars(), is_int=True)
        model_onnx = convert_sklearn(
            model, "lasso lars",
            [("input", Int64TensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnLassoLarsInt-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_lasso_lars_bool(self):
        model, X = fit_regression_model(
            linear_model.LassoLars(), is_bool=True)
        model_onnx = convert_sklearn(
            model, "lasso lars",
            [("input", BooleanTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnLassoLarsBool",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_multi_linear_regression(self):
        model, X = fit_regression_model(linear_model.LinearRegression(),
                                        n_targets=2)
        model_onnx = convert_sklearn(
            model, "linear regression",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            verbose=False,
            basename="SklearnMultiLinearRegression-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_ard_regression(self):
        model, X = fit_regression_model(
            linear_model.ARDRegression(), factor=0.001)
        model_onnx = convert_sklearn(
            model, "ard regression",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnARDRegression-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_theilsen(self):
        model, X = fit_regression_model(linear_model.TheilSenRegressor())
        model_onnx = convert_sklearn(
            model, "thiel-sen regressor",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnTheilSen-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_bayesian_ridge(self):
        model, X = fit_regression_model(linear_model.BayesianRidge())
        model_onnx = convert_sklearn(
            model, "bayesian ridge",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnBayesianRidge-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_bayesian_ridge_return_std(self):
        model, X = fit_regression_model(linear_model.BayesianRidge(),
                                        n_features=2, n_samples=20)
        model_onnx = convert_sklearn(
            model, "bayesian ridge",
            [("input", FloatTensorType([None, X.shape[1]]))],
            options={linear_model.BayesianRidge: {'return_std': True}})
        self.assertIsNotNone(model_onnx)

        sess = InferenceSession(model_onnx.SerializeToString())
        outputs = sess.run(None, {'input': X})
        pred, std = model.predict(X, return_std=True)
        assert_almost_equal(pred, outputs[0].ravel(), decimal=4)
        assert_almost_equal(std, outputs[1].ravel(), decimal=4)

    @unittest.skipIf(StrictVersion(ort_version) < StrictVersion("1.3.0"),
                     reason="output type")
    def test_model_bayesian_ridge_return_std_double(self):
        model, X = fit_regression_model(linear_model.BayesianRidge(),
                                        n_features=2, n_samples=100,
                                        n_informative=1)
        model_onnx = convert_sklearn(
            model, "bayesian ridge",
            [("input", DoubleTensorType([None, X.shape[1]]))],
            options={linear_model.BayesianRidge: {'return_std': True}})
        self.assertIsNotNone(model_onnx)

        X = X.astype(numpy.float64)
        sess = InferenceSession(model_onnx.SerializeToString())
        outputs = sess.run(None, {'input': X})
        pred, std = model.predict(X, return_std=True)
        assert_almost_equal(pred, outputs[0].ravel())
        assert_almost_equal(std, outputs[1].ravel(), decimal=4)

    def test_model_bayesian_ridge_return_std_normalize(self):
        model, X = fit_regression_model(
            linear_model.BayesianRidge(normalize=True),
            n_features=2, n_samples=50)
        model_onnx = convert_sklearn(
            model, "bayesian ridge",
            [("input", FloatTensorType([None, X.shape[1]]))],
            options={linear_model.BayesianRidge: {'return_std': True}})
        self.assertIsNotNone(model_onnx)

        sess = InferenceSession(model_onnx.SerializeToString())
        outputs = sess.run(None, {'input': X})
        pred, std = model.predict(X, return_std=True)
        assert_almost_equal(pred, outputs[0].ravel(), decimal=4)
        assert_almost_equal(std, outputs[1].ravel(), decimal=4)

    @unittest.skipIf(StrictVersion(ort_version) < StrictVersion("1.3.0"),
                     reason="output type")
    def test_model_bayesian_ridge_return_std_normalize_double(self):
        model, X = fit_regression_model(
            linear_model.BayesianRidge(normalize=True),
            n_features=2, n_samples=50)
        model_onnx = convert_sklearn(
            model, "bayesian ridge",
            [("input", DoubleTensorType([None, X.shape[1]]))],
            options={linear_model.BayesianRidge: {'return_std': True}})
        self.assertIsNotNone(model_onnx)

        X = X.astype(numpy.float64)
        sess = InferenceSession(model_onnx.SerializeToString())
        outputs = sess.run(None, {'input': X})
        pred, std = model.predict(X, return_std=True)
        assert_almost_equal(pred, outputs[0].ravel())
        assert_almost_equal(std, outputs[1].ravel(), decimal=4)

    def test_model_huber_regressor(self):
        model, X = fit_regression_model(linear_model.HuberRegressor())
        model_onnx = convert_sklearn(
            model, "huber regressor",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            basename="SklearnHuberRegressor-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_multi_task_lasso(self):
        model, X = fit_regression_model(linear_model.MultiTaskLasso(),
                                        n_targets=2)
        model_onnx = convert_sklearn(
            model, "multi-task lasso",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            verbose=False,
            basename="SklearnMultiTaskLasso-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_multi_task_lasso_cv(self):
        model, X = fit_regression_model(linear_model.MultiTaskLassoCV(),
                                        n_targets=2)
        model_onnx = convert_sklearn(
            model, "mutli-task lasso cv",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            verbose=False,
            basename="SklearnMultiTaskLassoCV-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_multi_task_elasticnet(self):
        model, X = fit_regression_model(linear_model.MultiTaskElasticNet(),
                                        n_targets=2)
        model_onnx = convert_sklearn(
            model, "multi-task elasticnet",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            verbose=False,
            basename="SklearnMultiTaskElasticNet-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_orthogonal_matching_pursuit(self):
        model, X = fit_regression_model(
            linear_model.OrthogonalMatchingPursuit())
        model_onnx = convert_sklearn(
            model, "orthogonal matching pursuit",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            verbose=False,
            basename="SklearnOrthogonalMatchingPursuit-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_passive_aggressive_regressor(self):
        model, X = fit_regression_model(
            linear_model.PassiveAggressiveRegressor())
        model_onnx = convert_sklearn(
            model, "passive aggressive regressor",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            verbose=False,
            basename="SklearnPassiveAggressiveRegressor-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_ransac_regressor_default(self):
        model, X = fit_regression_model(
            linear_model.RANSACRegressor())
        model_onnx = convert_sklearn(
            model, "ransac regressor",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            verbose=False,
            basename="SklearnRANSACRegressor-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_ransac_regressor_mlp(self):
        model, X = fit_regression_model(
            linear_model.RANSACRegressor(
                base_estimator=MLPRegressor(solver='lbfgs')))
        model_onnx = convert_sklearn(
            model, "ransac regressor",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            verbose=False,
            basename="SklearnRANSACRegressorMLP-Dec3",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_ransac_regressor_tree(self):
        model, X = fit_regression_model(
            linear_model.RANSACRegressor(
                base_estimator=GradientBoostingRegressor()))
        model_onnx = convert_sklearn(
            model, "ransac regressor",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            verbose=False,
            basename="SklearnRANSACRegressorTree-Dec3",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_multi_task_elasticnet_cv(self):
        model, X = fit_regression_model(linear_model.MultiTaskElasticNetCV(),
                                        n_targets=2)
        model_onnx = convert_sklearn(
            model, "multi-task elasticnet cv",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            verbose=False,
            basename="SklearnMultiTaskElasticNetCV-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )

    def test_model_orthogonal_matching_pursuit_cv(self):
        model, X = fit_regression_model(
            linear_model.OrthogonalMatchingPursuitCV())
        model_onnx = convert_sklearn(
            model, "orthogonal matching pursuit cv",
            [("input", FloatTensorType([None, X.shape[1]]))])
        self.assertIsNotNone(model_onnx)
        dump_data_and_model(
            X,
            model,
            model_onnx,
            verbose=False,
            basename="SklearnOrthogonalMatchingPursuitCV-Dec4",
            allow_failure="StrictVersion("
            "onnxruntime.__version__)"
            "<= StrictVersion('0.2.1')",
        )
Example #23
# Design and test architectures

# Imports
import os
import time
import random
from distutils.version import StrictVersion

import sklearn
from sklearn.model_selection import train_test_split
assert StrictVersion(sklearn.__version__) >= StrictVersion('0.18.1')

# Import the TensorFlow backend framework
import tensorflow as tf
tf.logging.set_verbosity(tf.logging.ERROR)
assert StrictVersion(tf.__version__) >= StrictVersion('1.1.0')

# Imports from the Keras framework
import keras
assert StrictVersion(keras.__version__) >= StrictVersion('2.0.0')
from keras.models import Model, Sequential
from keras.layers import Dense, Dropout, Activation, Flatten, Input
from keras.layers import Convolution2D, MaxPooling2D, Conv2D
from keras.utils.np_utils import to_categorical
from keras.backend.tensorflow_backend import set_session

# Additional imports for working with data and numbers
import matplotlib.pylab as plt
import matplotlib.pyplot as pyplot
import numpy as np
import matplotlib
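
Note that the version asserts above only work while each package's __version__ parses as a strict version; the helper below is a sketch of a more tolerant check (an illustration added here, not part of the original), falling back to LooseVersion for strings StrictVersion cannot parse.

from distutils.version import LooseVersion, StrictVersion

def version_at_least(installed, required):
    """Compare two version strings, tolerating suffixes StrictVersion rejects."""
    try:
        return StrictVersion(installed) >= StrictVersion(required)
    except ValueError:
        return LooseVersion(installed) >= LooseVersion(required)

assert version_at_least(sklearn.__version__, '0.18.1')
assert version_at_least(tf.__version__, '1.1.0')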
Example #24
from distutils.version import StrictVersion

import weewx.engine
import weewx.units
import weeutil.config

from knxip.ip import KNXIPTunnel
from knxip.conversion import float_to_knx2, knx2_to_float, \
    knx_to_time, time_to_knx, knx_to_date, date_to_knx, datetime_to_knx,\
    knx_to_datetime
from knxip.core import KNXException, parse_group_address

VERSION = "0.1"
REQUIRED_WEEWX = "3.6.1"
REQUIRED_KNXIP = "0.3.2"

if StrictVersion(weewx.__version__) < StrictVersion(REQUIRED_WEEWX):
    raise weewx.UnsupportedFeature(
        "weewx %s or greater is required, found %s" %
        (REQUIRED_WEEWX, weewx.__version__))

#if StrictVersion(knxip.__version__) < StrictVersion(REQUIRED_KNXIP):
#    raise weewx.UnsupportedFeature("knxip %s or greater is required, found %s"
#                                   % (REQUIRED_KNXIP, knxip.__version__))

try:
    # Test for new-style weewx logging by trying to import weeutil.logger
    import weeutil.logger
    import logging
    log = logging.getLogger(__name__)

    def logdbg(msg):
Example #25
    def ensure_xcode(self):
        if self.os_version < StrictVersion('10.7'):
            if not os.path.exists('/Developer/Applications/Xcode.app'):
                print(XCODE_REQUIRED_LEGACY)

                subprocess.check_call(['open', XCODE_LEGACY])
                sys.exit(1)

        # On OS X 10.7 and later, Xcode comes from the App Store. However, users
        # can still install Xcode into any arbitrary location. We honor the
        # location of Xcode as set by xcode-select. This should also pick up
        # developer preview releases of Xcode, which can be installed into
        # paths like /Applications/Xcode5-DP6.app.
        elif self.os_version >= StrictVersion('10.7'):
            select = self.which('xcode-select')
            try:
                output = self.check_output([select, '--print-path'],
                                           stderr=subprocess.STDOUT)
            except subprocess.CalledProcessError as e:
                # This seems to appear on fresh OS X machines before any Xcode
                # has been installed. It may only occur on OS X 10.9 and later.
                if b'unable to get active developer directory' in e.output:
                    print(XCODE_NO_DEVELOPER_DIRECTORY)
                    self._install_xcode_app_store()
                    assert False  # Above should exit.

                output = e.output

            # This isn't the most robust check in the world. It relies on the
            # default value not being in an application bundle, which seems to
            # hold on at least Mavericks.
            if b'.app/' not in output:
                print(XCODE_REQUIRED)
                self._install_xcode_app_store()
                assert False  # Above should exit.

        # Once Xcode is installed, you need to agree to the license before you can
        # use it.
        try:
            output = self.check_output(['/usr/bin/xcrun', 'clang'],
                                       stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            if b'license' in e.output:
                xcodebuild = self.which('xcodebuild')
                try:
                    self.check_output([xcodebuild, '-license'],
                                      stderr=subprocess.STDOUT)
                except subprocess.CalledProcessError as e:
                    if b'requires admin privileges' in e.output:
                        self.run_as_root([xcodebuild, '-license'])

        # Even then we're not done! We need to install the Xcode command line tools.
        # As of Mountain Lion, apparently the only way to do this is to go through a
        # menu dialog inside Xcode itself. We're not making this up.
        if self.os_version >= StrictVersion('10.7'):
            if not os.path.exists('/usr/bin/clang'):
                print(XCODE_COMMAND_LINE_TOOLS_MISSING)
                print(INSTALL_XCODE_COMMAND_LINE_TOOLS_STEPS)
                sys.exit(1)

            output = self.check_output(['/usr/bin/clang', '--version'])
            match = RE_CLANG_VERSION.search(output)
            if match is None:
                raise Exception('Could not determine Clang version.')

            version = StrictVersion(match.group(1))

            if version < APPLE_CLANG_MINIMUM_VERSION:
                print(UPGRADE_XCODE_COMMAND_LINE_TOOLS)
                print(INSTALL_XCODE_COMMAND_LINE_TOOLS_STEPS)
                sys.exit(1)
Exemplo n.º 26
0
XCODE_LEGACY = ('https://developer.apple.com/downloads/download.action?path=Developer_Tools/'
                'xcode_3.2.6_and_ios_sdk_4.3__final/xcode_3.2.6_and_ios_sdk_4.3.dmg')

MACPORTS_URL = {
    '13': 'https://distfiles.macports.org/MacPorts/MacPorts-2.5.3-10.13-HighSierra.pkg',
    '12': 'https://distfiles.macports.org/MacPorts/MacPorts-2.5.3-10.12-Sierra.pkg',
    '11': 'https://distfiles.macports.org/MacPorts/MacPorts-2.5.3-10.11-ElCapitan.pkg',
    '10': 'https://distfiles.macports.org/MacPorts/MacPorts-2.5.3-10.10-Yosemite.pkg',
    '9': 'https://distfiles.macports.org/MacPorts/MacPorts-2.5.3-10.9-Mavericks.pkg',
    '8': 'https://distfiles.macports.org/MacPorts/MacPorts-2.5.3-10.8-MountainLion.pkg',
    '7': 'https://distfiles.macports.org/MacPorts/MacPorts-2.5.3-10.7-Lion.pkg',
    '6': 'https://distfiles.macports.org/MacPorts/MacPorts-2.5.3-10.6-SnowLeopard.pkg', }

RE_CLANG_VERSION = re.compile(r'Apple (?:clang|LLVM) version (\d+\.\d+)')

APPLE_CLANG_MINIMUM_VERSION = StrictVersion('4.2')

XCODE_REQUIRED = '''
Xcode is required to build Firefox. Please complete the install of Xcode
through the App Store.

It's possible Xcode is already installed on this machine but it isn't being
detected. This is possible with developer preview releases of Xcode, for
example. To correct this problem, run:

  `xcode-select --switch /path/to/Xcode.app`.

e.g. `sudo xcode-select --switch /Applications/Xcode.app`.
'''

XCODE_REQUIRED_LEGACY = '''
Exemplo n.º 27
0
from collections import OrderedDict

import networkx as nx
import pandas as pd
import numpy as np
import re

import inspect

import logging
logger = logging.getLogger(__name__)

from distutils.version import StrictVersion, LooseVersion
try:
    _nx_version = StrictVersion(nx.__version__)
except ValueError:
    _nx_version = LooseVersion(nx.__version__)

if _nx_version >= '1.12':

    class OrderedGraph(nx.MultiGraph):
        node_dict_factory = OrderedDict
        adjlist_dict_factory = OrderedDict
elif _nx_version >= '1.10':

    class OrderedGraph(nx.MultiGraph):
        node_dict_factory = OrderedDict
        adjlist_dict_factory = OrderedDict

        def __init__(self, data=None, **attr):
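
The try/except at the top of this example is a common way to cope with version strings that StrictVersion cannot parse, such as development or post-release suffixes. A minimal, self-contained sketch of the same fallback as a reusable helper; the name parse_version_lenient is illustrative and not part of the example above:

from distutils.version import LooseVersion, StrictVersion


def parse_version_lenient(version_string):
    """Return a StrictVersion when possible, otherwise fall back to LooseVersion.

    StrictVersion only accepts X.Y[.Z] with an optional a/b pre-release tag,
    so a string such as '2.8.8.dev0' raises ValueError and takes the fallback.
    """
    try:
        return StrictVersion(version_string)
    except ValueError:
        return LooseVersion(version_string)


# '1.11' parses strictly; '2.8.8.dev0' falls back to LooseVersion.
print(type(parse_version_lenient('1.11')).__name__)        # StrictVersion
print(type(parse_version_lenient('2.8.8.dev0')).__name__)  # LooseVersion
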
Exemplo n.º 28
0
    def __init__(self):
        gr.top_block.__init__(self, "Top Block")
        Qt.QWidget.__init__(self)
        self.setWindowTitle("Top Block")
        qtgui.util.check_set_qss()
        try:
            self.setWindowIcon(Qt.QIcon.fromTheme('gnuradio-grc'))
        except:
            pass
        self.top_scroll_layout = Qt.QVBoxLayout()
        self.setLayout(self.top_scroll_layout)
        self.top_scroll = Qt.QScrollArea()
        self.top_scroll.setFrameStyle(Qt.QFrame.NoFrame)
        self.top_scroll_layout.addWidget(self.top_scroll)
        self.top_scroll.setWidgetResizable(True)
        self.top_widget = Qt.QWidget()
        self.top_scroll.setWidget(self.top_widget)
        self.top_layout = Qt.QVBoxLayout(self.top_widget)
        self.top_grid_layout = Qt.QGridLayout()
        self.top_layout.addLayout(self.top_grid_layout)

        self.settings = Qt.QSettings("GNU Radio", "top_block")

        if StrictVersion(Qt.qVersion()) < StrictVersion("5.0.0"):
            self.restoreGeometry(self.settings.value("geometry").toByteArray())
        else:
            self.restoreGeometry(
                self.settings.value("geometry", type=QtCore.QByteArray))

        ##################################################
        # Variables
        ##################################################
        self.samp_rate = samp_rate = 32000
        self.message = message = list(ord(i) for i in '123456789')

        ##################################################
        # Blocks
        ##################################################
        self.comsys_pn9_whitening_0_0 = comsys.pn9_whitening(False)
        self.comsys_pn9_whitening_0 = comsys.pn9_whitening(False)
        self.comsys_crc_tagged_stream_0_0 = comsys.crc_tagged_stream(
            True, "packet_len")
        self.comsys_crc_tagged_stream_0 = comsys.crc_tagged_stream(
            False, "packet_len")
        self.blocks_vector_source_x_0_0 = blocks.vector_source_b(
            message, True, 1, [])
        self.blocks_stream_to_tagged_stream_0 = blocks.stream_to_tagged_stream(
            gr.sizeof_char, 1, len(message), "packet_len")
        self.blocks_file_sink_0 = blocks.file_sink(gr.sizeof_char * 1,
                                                   '/dev/pts/0', False)
        self.blocks_file_sink_0.set_unbuffered(True)

        ##################################################
        # Connections
        ##################################################
        self.connect((self.blocks_stream_to_tagged_stream_0, 0),
                     (self.comsys_crc_tagged_stream_0, 0))
        self.connect((self.blocks_vector_source_x_0_0, 0),
                     (self.blocks_stream_to_tagged_stream_0, 0))
        self.connect((self.comsys_crc_tagged_stream_0, 0),
                     (self.comsys_pn9_whitening_0_0, 0))
        self.connect((self.comsys_crc_tagged_stream_0_0, 0),
                     (self.blocks_file_sink_0, 0))
        self.connect((self.comsys_pn9_whitening_0, 0),
                     (self.comsys_crc_tagged_stream_0_0, 0))
        self.connect((self.comsys_pn9_whitening_0_0, 0),
                     (self.comsys_pn9_whitening_0, 0))
Exemplo n.º 29
0
def domain(domain_name):
    # Validate the domain existing in the local DB
    domain = Domain.query.filter(Domain.name == domain_name).first()
    if not domain:
        abort(404)

    # Query domain's rrsets from PowerDNS API
    rrsets = Record().get_rrsets(domain.name)
    current_app.logger.debug("Fetched rrests: \n{}".format(
        pretty_json(rrsets)))

    # API server might be down, misconfigured
    if not rrsets and domain.type != 'Slave':
        abort(500)

    quick_edit = Setting().get('record_quick_edit')
    records_allow_to_edit = Setting().get_records_allow_to_edit()
    forward_records_allow_to_edit = Setting(
    ).get_forward_records_allow_to_edit()
    reverse_records_allow_to_edit = Setting(
    ).get_reverse_records_allow_to_edit()
    ttl_options = Setting().get_ttl_options()
    records = []

    # Render the "records" to display in HTML datatable
    #
    # BUG: If we have multiple records with the same name
    # and each record has its own comment, the display of
    # [record-comment] may not be consistent, because the PDNS API
    # returns the rrsets (records, comments) in a different
    # order than its database records.
    # TODO:
    #   - Find a way to make it consistent, or
    #   - Only allow one comment for that case
    if StrictVersion(Setting().get('pdns_version')) >= StrictVersion('4.0.0'):
        for r in rrsets:
            if r['type'] in records_allow_to_edit:
                r_name = r['name'].rstrip('.')

                # If it is reverse zone and pretty_ipv6_ptr setting
                # is enabled, we reformat the name for ipv6 records.
                if Setting().get('pretty_ipv6_ptr') and r[
                        'type'] == 'PTR' and 'ip6.arpa' in r_name and '*' not in r_name:
                    r_name = dns.reversename.to_address(
                        dns.name.from_text(r_name))

                # Create the list of records in format that
                # PDA jinja2 template can understand.
                index = 0
                for record in r['records']:
                    if (len(r['comments']) > index):
                        c = r['comments'][index]['content']
                    else:
                        c = ''
                    record_entry = RecordEntry(
                        name=r_name,
                        type=r['type'],
                        status='Disabled' if record['disabled'] else 'Active',
                        ttl=r['ttl'],
                        data=record['content'],
                        comment=c,
                        is_allowed_edit=True)
                    index += 1
                    records.append(record_entry)
    else:
        # Unsupported version
        abort(500)

    if not re.search(r'ip6\.arpa|in-addr\.arpa$', domain_name):
        editable_records = forward_records_allow_to_edit
    else:
        editable_records = reverse_records_allow_to_edit

    return render_template('domain.html',
                           domain=domain,
                           records=records,
                           editable_records=editable_records,
                           quick_edit=quick_edit,
                           ttl_options=ttl_options,
                           current_user=current_user)
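
The index-based pairing of records and comments in the loop above is exactly what the BUG/TODO comment warns about: if the API returns them in a different order, the wrong comment can end up next to a record. A hedged sketch of a more defensive pairing; pair_records_with_comments is an illustrative helper, not part of PowerDNS-Admin:

from itertools import zip_longest


def pair_records_with_comments(rrset):
    """Yield (record, comment_text) pairs for one rrset.

    Missing comments are padded with an empty string, so the two lists
    no longer have to be the same length.
    """
    comments = [c.get('content', '') for c in rrset.get('comments', [])]
    for record, comment in zip_longest(rrset.get('records', []), comments,
                                       fillvalue=''):
        if record == '':  # more comments than records; nothing left to pair
            break
        yield record, comment


# Usage sketch with a hand-made rrset dict in the PowerDNS API shape:
rrset = {'records': [{'content': '192.0.2.1', 'disabled': False}],
         'comments': []}
for record, comment in pair_records_with_comments(rrset):
    print(record['content'], repr(comment))
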
Exemplo n.º 30
0
def changelog(domain_name):
    g.user = current_user
    login_manager.anonymous_user = Anonymous
    domain = Domain.query.filter(Domain.name == domain_name).first()
    if not domain:
        abort(404)

    # Query domain's rrsets from PowerDNS API
    rrsets = Record().get_rrsets(domain.name)
    current_app.logger.debug("Fetched rrests: \n{}".format(
        pretty_json(rrsets)))

    # API server might be down, misconfigured
    if not rrsets and domain.type != 'Slave':
        abort(500)

    records_allow_to_edit = Setting().get_records_allow_to_edit()
    records = []

    # get all changelogs for this domain, in descending order
    if current_user.role.name in ['Administrator', 'Operator']:
        histories = History.query.filter(
            History.domain_id == domain.id).order_by(
                History.created_on.desc()).all()
    else:
        # if the user isn't an administrator or operator,
        # allow_user_view_history must be enabled to get here,
        # so include history for the domains for the user
        histories = db.session.query(History) \
            .join(Domain, History.domain_id == Domain.id) \
            .outerjoin(DomainUser, Domain.id == DomainUser.domain_id) \
            .outerjoin(Account, Domain.account_id == Account.id) \
            .outerjoin(AccountUser, Account.id == AccountUser.account_id) \
            .order_by(History.created_on.desc()) \
            .filter(
                db.and_(db.or_(
                                DomainUser.user_id == current_user.id,
                                AccountUser.user_id == current_user.id
                        ),
                        History.domain_id == domain.id
                )
            ).all()

    if StrictVersion(Setting().get('pdns_version')) >= StrictVersion('4.0.0'):
        for r in rrsets:
            if r['type'] in records_allow_to_edit:
                r_name = r['name'].rstrip('.')

                # If it is reverse zone and pretty_ipv6_ptr setting
                # is enabled, we reformat the name for ipv6 records.
                if Setting().get('pretty_ipv6_ptr') and r[
                        'type'] == 'PTR' and 'ip6.arpa' in r_name and '*' not in r_name:
                    r_name = dns.reversename.to_address(
                        dns.name.from_text(r_name))

                # Create the list of records in format that
                # PDA jinja2 template can understand.
                index = 0
                for record in r['records']:
                    if (len(r['comments']) > index):
                        c = r['comments'][index]['content']
                    else:
                        c = ''
                    record_entry = RecordEntry(
                        name=r_name,
                        type=r['type'],
                        status='Disabled' if record['disabled'] else 'Active',
                        ttl=r['ttl'],
                        data=record['content'],
                        comment=c,
                        is_allowed_edit=True)
                    index += 1
                    records.append(record_entry)
    else:
        # Unsupported version
        abort(500)

    changes_set = dict()
    for i in range(len(histories)):
        extract_changelogs_from_a_history_entry(changes_set, histories[i], i)
        if i in changes_set and len(
                changes_set[i]) == 0:  # if empty, then remove the key
            changes_set.pop(i)
    return render_template('domain_changelog.html',
                           domain=domain,
                           allHistoryChanges=changes_set)
Exemplo n.º 31
0
eventlet.hubs.use_hub(utils.get_hub())
eventlet.patcher.monkey_patch(all=False, socket=True)
eventlet.debug.hub_exceptions(False)

from swiftclient import get_auth, http_connection

has_insecure = False
try:
    from swiftclient import __version__ as client_version
    # Prevent a ValueError in StrictVersion with '2.0.3.68.ga99c2ff'
    client_version = '.'.join(client_version.split('.')[:3])
except ImportError:
    # Pre-PBR we had version, not __version__. Anyhow...
    client_version = '1.2'
from distutils.version import StrictVersion
if StrictVersion(client_version) >= StrictVersion('2.0'):
    has_insecure = True


config = {}
web_front_end = None
normalized_urls = None

# If no config was read, we will fall back to old school env vars
swift_test_auth_version = None
swift_test_auth = os.environ.get('SWIFT_TEST_AUTH')
swift_test_user = [os.environ.get('SWIFT_TEST_USER'), None, None, '', '', '']
swift_test_key = [os.environ.get('SWIFT_TEST_KEY'), None, None, '', '', '']
swift_test_tenant = ['', '', '', '', '', '']
swift_test_perm = ['', '', '', '', '', '']
swift_test_domain = ['', '', '', '', '', '']
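
StrictVersion raises ValueError on strings such as '2.0.3.68.ga99c2ff', which is why the snippet above keeps only the first three dot-separated fields before comparing. The same sanitisation step on its own, as a small sketch (the sample version strings are made up):

from distutils.version import StrictVersion


def strict_prefix(version_string, parts=3):
    """Keep only the leading numeric components so StrictVersion can parse it."""
    return '.'.join(version_string.split('.')[:parts])


assert StrictVersion(strict_prefix('2.0.3.68.ga99c2ff')) >= StrictVersion('2.0')
assert StrictVersion(strict_prefix('1.2')) < StrictVersion('2.0')
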
Exemplo n.º 32
0
#
# Any use of this code is strictly unauthorized without the written consent
# by Voxmeter A/S. This code is proprietary of Voxmeter A/S.
# 
################################################################################
import shutil, datetime, os.path, copy, operator, math, sys
from distutils.version import StrictVersion

from openpyxl import load_workbook
from openpyxl.utils import coordinate_from_string, column_index_from_string, get_column_letter
#from openpyxl.styles import PatternFill, Border, Side, Alignment, Protection, Font
from openpyxl.styles import Font, PatternFill, Border

# Check version
from openpyxl import __version__
test_version = StrictVersion(__version__) < StrictVersion("2.5.0")
if test_version:
    print("You need to have openpyxl version 2.5.0. You have %s"%__version__)
    sys.exit()

# Set standards
SKIP_THESE = [
    '[+] og [-] = signifikant forskel på 95% eller mere',
]
Col_width_max = 177 # 100+50+20+5+2

# Map number to danish month
month_dic = {'1':'januar','2':'februar','3':'marts','4':'april','5':'maj','6':'juni',
            '7':'juli','8':'august','9':'september','10':'oktober','11':'november','12':'december'}

Exemplo n.º 33
0
def create_app(config='CTFd.config.Config'):
    app = CTFdFlask(__name__)
    with app.app_context():
        app.config.from_object(config)

        theme_loader = ThemeLoader(os.path.join(app.root_path, 'themes'),
                                   followlinks=True)
        app.jinja_loader = theme_loader

        from CTFd.models import db, Teams, Solves, Challenges, Fails, Flags, Tags, Files, Tracking

        url = create_database()

        # This allows any changes to the SQLALCHEMY_DATABASE_URI to get pushed back in
        # This is mostly so we can force MySQL's charset
        app.config['SQLALCHEMY_DATABASE_URI'] = str(url)

        # Register database
        db.init_app(app)

        # Register Flask-Migrate
        migrations.init_app(app, db)

        # Alembic sqlite support is lacking so we should just create_all anyway
        if url.drivername.startswith('sqlite'):
            db.create_all()
            stamp()
        else:
            # This creates tables instead of db.create_all()
            # Allows migrations to happen properly
            upgrade()

        from CTFd.models import ma

        ma.init_app(app)

        app.db = db
        app.VERSION = __version__

        from CTFd.cache import cache

        cache.init_app(app)
        app.cache = cache

        # If you have multiple workers you must have a shared cache
        socketio.init_app(app,
                          async_mode=app.config.get('SOCKETIO_ASYNC_MODE'),
                          message_queue=app.config.get('CACHE_REDIS_URL'))

        if app.config.get('REVERSE_PROXY'):
            app.wsgi_app = ProxyFix(app.wsgi_app)

        version = utils.get_config('ctf_version')

        # Upgrading from an older version of CTFd
        if version and (StrictVersion(version) < StrictVersion(__version__)):
            if confirm_upgrade():
                run_upgrade()
            else:
                exit()

        if not version:
            utils.set_config('ctf_version', __version__)

        if not utils.get_config('ctf_theme'):
            utils.set_config('ctf_theme', 'core')

        update_check(force=True)

        init_request_processors(app)
        init_template_filters(app)
        init_template_globals(app)

        # Importing here allows tests to use sensible names (e.g. api instead of api_bp)
        from CTFd.views import views
        from CTFd.teams import teams
        from CTFd.users import users
        from CTFd.challenges import challenges
        from CTFd.scoreboard import scoreboard
        from CTFd.auth import auth
        from CTFd.admin import admin
        from CTFd.api import api
        from CTFd.events import events
        from CTFd.errors import page_not_found, forbidden, general_error, gateway_error

        app.register_blueprint(views)
        app.register_blueprint(teams)
        app.register_blueprint(users)
        app.register_blueprint(challenges)
        app.register_blueprint(scoreboard)
        app.register_blueprint(auth)
        app.register_blueprint(api)
        app.register_blueprint(events)

        app.register_blueprint(admin)

        app.register_error_handler(404, page_not_found)
        app.register_error_handler(403, forbidden)
        app.register_error_handler(500, general_error)
        app.register_error_handler(502, gateway_error)

        init_logs(app)
        init_plugins(app)

        return app
Exemplo n.º 34
0
import os
import sys
import tarfile
import tensorflow as tf
import zipfile

from distutils.version import StrictVersion
from collections import defaultdict
from io import StringIO
from matplotlib import pyplot as plt
from PIL import Image

# This is needed since the notebook is stored in the object_detection folder.
sys.path.append("..")
from object_detection.utils import ops as utils_ops

if StrictVersion(tf.__version__) < StrictVersion('1.9.0'):
  raise ImportError('Please upgrade your TensorFlow installation to v1.9.* or later!')
from object_detection.utils import label_map_util

from object_detection.utils import visualization_utils as vis_util
MODEL_NAME = 'ssd_mobilenet_v1_coco_2017_11_17'
MODEL_FILE = MODEL_NAME + '.tar.gz'
DOWNLOAD_BASE = 'http://download.tensorflow.org/models/object_detection/'

# Path to frozen detection graph. This is the actual model that is used for the object detection.
PATH_TO_FROZEN_GRAPH = MODEL_NAME + '/frozen_inference_graph.pb'

# List of the strings that is used to add correct label for each box.
PATH_TO_LABELS = os.path.join('data', 'mscoco_label_map.pbtxt')
#
# opener = urllib.request.URLopener()
Exemplo n.º 35
0
SECRET_KEY = 'o)04)%_us9ed1l7*cv&5@t(2*r#$^r7o(q^4p@y9@b20_ay_jv'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin', 'django.contrib.auth',
    'django.contrib.contenttypes', 'django.contrib.sessions', 'tests.testapp'
]

# Django 1.8 requires that the abstract model app be in INSTALLED_APPS
if StrictVersion('1.8') <= StrictVersion(
        django.__version__) < StrictVersion('1.9'):
    INSTALLED_APPS.append('django_lifecycle')

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'tests.urls'
Exemplo n.º 36
0
 def __init__(self, v):
     print v
     if v.endswith('a'):
         v += '0'
     StrictVersion.__init__(self, v)
Exemplo n.º 37
0
 def __init__(self, version):
     match = re.match(r"dlib-(\d+).(\d+).*", version)
     major = match.group(1)
     minor = match.group(2)
     StrictVersion.__init__(self, major + "." + minor)
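
This __init__ keeps only the major and minor numbers of a 'dlib-X.Y.Z' tag. Wrapped into a complete subclass it might look like the sketch below; the class name DlibVersion is assumed for illustration:

import re
from distutils.version import StrictVersion


class DlibVersion(StrictVersion):  # name assumed for illustration
    def __init__(self, version):
        # e.g. 'dlib-19.22.99' -> StrictVersion('19.22')
        match = re.match(r"dlib-(\d+)\.(\d+).*", version)
        StrictVersion.__init__(self, match.group(1) + "." + match.group(2))


assert DlibVersion("dlib-19.22.0") > DlibVersion("dlib-19.4")
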
Exemplo n.º 38
0
 def __init__(self, vstring):
     StrictVersion.__init__(self, vstring)
     self.prerelease = None
Exemplo n.º 39
0
            newHTML = html.replace("</td>", string2Inject, 1)
            node.text = newHTML

      except:
        arcpy.AddWarning("No attachment match for ID: {}".format(idVal))

  tree.write(docKML)
  del tree
  del docKML


if __name__ == '__main__':

  prodInfo = arcpy.GetInstallInfo()
  if prodInfo['ProductName'] == "Desktop":
    if StrictVersion(prodInfo['Version']) >= StrictVersion('10.5'):
      arcpy.AddWarning("The KML to Layer tool was enhanced to automatically include attachments \
      at the 10.5 release, effectively making this tool obsolete.")
  elif prodInfo['ProductName'] == "ArcGISPro":
    if StrictVersion(prodInfo['Version']) >= StrictVersion('1.4'):
      arcpy.AddWarning("The KML to Layer tool was enhanced to automatically include attachments \
      at the 1.4 release, effectively making this tool obsolete.")

  inputFeatures = arcpy.GetParameterAsText(0)
  outputKML = arcpy.GetParameterAsText(1)
  outputScale = arcpy.GetParameterAsText(2)
  clamped = arcpy.GetParameterAsText(3)
  uniqueID = arcpy.GetParameterAsText(4)
  height = arcpy.GetParameterAsText(5)
  width = arcpy.GetParameterAsText(6)
Exemplo n.º 40
0
from mgr_util import get_default_addr
from rbd import RBD

# Defaults for the Prometheus HTTP server.  Can also set in config-key
# see https://github.com/prometheus/prometheus/wiki/Default-port-allocations
# for Prometheus exporter port registry

DEFAULT_PORT = 9283

# When the CherryPy server in 3.2.2 (and later) starts, it attempts to verify
# that the ports it's listening on are in fact bound. When using the any address
# "::" it tries both ipv4 and ipv6, and in some environments (e.g. kubernetes)
# ipv6 isn't yet configured / supported and CherryPy throws an uncaught
# exception.
if cherrypy is not None:
    v = StrictVersion(cherrypy.__version__)
    # the issue was fixed in 3.2.3. it's present in 3.2.2 (current version on
    # centos:7) and back to at least 3.0.0.
    if StrictVersion("3.1.2") <= v < StrictVersion("3.2.3"):
        # https://github.com/cherrypy/cherrypy/issues/1100
        from cherrypy.process import servers
        servers.wait_for_occupied_port = lambda host, port: None


# cherrypy likes to sys.exit on error.  don't let it take us down too!
def os_exit_noop(*args, **kwargs):
    pass


os._exit = os_exit_noop
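
Limiting the monkey patch to the affected CherryPy releases keeps the workaround from masking real failures on fixed versions. The same half-open version-range gate as a generic sketch; the helper name and the sample numbers merely mirror the check above:

from distutils.version import StrictVersion


def needs_workaround(installed, first_bad, first_fixed):
    """True when `installed` falls inside the half-open broken range."""
    v = StrictVersion(installed)
    return StrictVersion(first_bad) <= v < StrictVersion(first_fixed)


# Mirrors the CherryPy gate above: broken in [3.1.2, 3.2.3).
assert needs_workaround('3.2.2', '3.1.2', '3.2.3')
assert not needs_workaround('3.2.3', '3.1.2', '3.2.3')
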
Exemplo n.º 41
0
def is_osx_10_10_or_greater(cls):
    import platform
    release = platform.mac_ver()[0]
    return release and StrictVersion(release) >= StrictVersion('10.10')
Exemplo n.º 42
0
class TestCompiler(TestCase):
    def setUp(self):
        self.proto_dir = TemporaryDirectory()
        self.out_dir = TemporaryDirectory()

    def add_proto_file(self, content, name='test.proto'):
        file_name = path.join(self.proto_dir.name, name)
        dir_name = path.dirname(file_name)

        if not path.exists(dir_name):
            makedirs(dir_name, exist_ok=True)

        with open(file_name, 'w') as proto_file:
            proto_file.write(content)

    def run_compiler(self, files='test.proto'):
        if not isinstance(files, list):
            files = [files]

        files = map(lambda name: path.join(self.proto_dir.name, name), files)

        new_env = environ.copy()
        new_env['PATH'] += ':' + path.normpath(
            path.join(path.dirname(__file__), '..', 'bin'))
        if 'PYTHONPATH' in new_env:
            new_env['PYTHONPATH'] += ':' + path.normpath(
                path.join(path.dirname(__file__), '..'))
        else:
            new_env['PYTHONPATH'] = path.normpath(
                path.join(path.dirname(__file__), '..'))

        args = [
            'protoc',
            '--python3_out=' + self.out_dir.name,
            '--proto_path=' + self.proto_dir.name,
        ]
        args.extend(files)

        proc = Popen(args, stderr=PIPE, env=new_env)
        proc.wait()

        if proc.returncode:
            value = proc.stderr.readline()
            while value:
                print(value)
                value = proc.stderr.readline()

            raise ValueError

    def return_module(self, name='test'):
        file_name = path.join(self.out_dir.name, name + '.py')

        sys_path.append(self.out_dir.name)
        loader = SourceFileLoader(self._testMethodName, file_name)
        module = loader.load_module(self._testMethodName)
        sys_path.remove(self.out_dir.name)

        return module

    def test_simple_fields(self):
        msg_code = '''
        message TestMsg {
            optional bool a = 1;
            optional string b = 2;
        }'''

        self.add_proto_file(msg_code)
        self.run_compiler()
        msgs = self.return_module()

        msg = msgs.TestMsg()
        msg.parse_from_bytes(b'\x08\x01\x12\x07\x74\x65\x73\x74\x69\x6E\x67')
        self.assertEqual(msg.a, True)
        self.assertEqual(msg.b, 'testing')

    def test_embedded_messages(self):
        msg_code = '''
        message TestA {
            message Foo {
                optional int32 a = 1;
            }

            optional Foo b = 3;
        }

        message Bar {
            optional int32 a = 1;
        }

        message TestB {
            optional Bar b = 3;
        }

        message TestC {
            optional TestA.Foo b = 3;
        }'''

        self.add_proto_file(msg_code)
        self.run_compiler()
        msgs = self.return_module()

        self.assertEqual(type(msgs.TestA.Foo), type)

        msgA = msgs.TestA()
        msgA.parse_from_bytes(b'\x1a\x03\x08\x96\x01')
        self.assertEqual(msgA.b.a, 150)

        msgB = msgs.TestB()
        msgB.parse_from_bytes(b'\x1a\x03\x08\x96\x01')
        self.assertEqual(msgB.b.a, 150)

        msgC = msgs.TestC()
        msgC.parse_from_bytes(b'\x1a\x03\x08\x96\x01')
        self.assertEqual(msgC.b.a, 150)

    def test_enums(self):
        msg_code = '''
        message TestA {
            enum Foo {
                Opt1 = 1;
                Opt2 = 2;
                Opt3 = 3;
            }
            optional Foo a = 1;
        }

        enum Bar {
            Opt1 = 1;
            Opt2 = 2;
        }

        message TestB {
            optional Bar a = 1;
        }'''

        self.add_proto_file(msg_code)
        self.run_compiler()
        msgs = self.return_module()

        self.assertTrue(isinstance(msgs.TestA.Foo, type))
        self.assertTrue(issubclass(msgs.TestA.Foo, Enum))

        msg_a = msgs.TestA()
        msg_a.parse_from_bytes(b'\x08\x02')
        self.assertEqual(msg_a.a, msgs.TestA.Foo.Opt2)

        msg_b = msgs.TestB()
        msg_b.parse_from_bytes(b'\x08\x02')
        self.assertEqual(msg_b.a, msgs.Bar.Opt2)

    def test_default_option(self):
        msg_code = '''
        enum Foo {
            Opt1 = 1;
            Opt2 = 2;
        }

        message TestA {
            optional bool a = 1 [default = true];
            optional string b = 2 [default = 'asd'];
            optional bytes c = 3 [default = 'q\x08e'];
            optional Foo d = 4 [default = Opt2];
            optional int32 e = 5 [default = 1];
        }'''

        self.add_proto_file(msg_code)
        self.run_compiler()
        msgs = self.return_module()

        msg_a = msgs.TestA()

        self.assertEqual(msg_a.a, True)
        self.assertEqual(msg_a.b, 'asd')
        self.assertEqual(msg_a.c, b'q\x08e')
        self.assertEqual(msg_a.d, msgs.Foo.Opt2)
        self.assertEqual(msg_a.e, 1)

    def test_message_without_fields(self):
        msg_code = '''
        message Foo {
        }'''

        self.add_proto_file(msg_code)
        self.run_compiler()

        self.assertEqual(type(self.return_module()), ModuleType)

    @skipIf(protoc_version < StrictVersion("2.5.0"),
            "allow_alias requires protoc >= 2.5.0")
    def test_enum_alias(self):
        msg_code = '''
        enum EnumAllowingAlias {
            option allow_alias = true;
            UNKNOWN = 0;
            STARTED = 1;
            RUNNING = 1;
        }'''

        # Compile without warnings
        self.add_proto_file(msg_code)
        self.run_compiler()
        self.assertEqual(type(self.return_module()), ModuleType)

        msg_code = '''
        enum EnumForciblyNotAllowingAlias {
            option allow_alias = false;
            UNKNOWN = 0;
            STARTED = 1;
            RUNNING = 1;
        }'''

        # Protoc will fail with a non-zero return code
        self.add_proto_file(msg_code)
        self.assertRaises(ValueError, self.run_compiler)

    def test_extend_message(self):
        msg_code = '''
        message Foo {
            message Bar {
                extensions 100 to 199;
            }

            extend Foo {
                optional int32 test = 104;
            }

            extensions 100 to 199;
        }

        extend Foo {
            optional int32 foo = 101;
        }

        extend Foo.Bar {
            optional int32 bar = 102;
        }'''

        self.add_proto_file(msg_code)
        self.run_compiler()
        msgs = self.return_module()

        msg_foo = msgs.Foo()
        msg_foo.parse_from_bytes(b'\xa8\x06{\xc0\x06\x95\x06')

        self.assertTrue(hasattr(msg_foo, 'foo'))
        self.assertEqual(msg_foo.foo, 123)

        self.assertTrue(hasattr(msg_foo, 'test'))
        self.assertEqual(msg_foo.test, 789)

        msg_bar = msgs.Foo.Bar()
        msg_bar.parse_from_bytes(b'\xb0\x06\xc8\x03')
        self.assertTrue(hasattr(msg_bar, 'bar'))
        self.assertEqual(msg_bar.bar, 456)

    def test_import(self):
        foo_code = '''
        message Foo {
            optional int32 a = 1;
        }'''

        bar_code = '''
        import "foo.proto";

        message Bar {
            optional Foo b = 3;
        }'''

        self.add_proto_file(foo_code, 'foo.proto')
        self.add_proto_file(bar_code, 'bar.proto')

        self.run_compiler('bar.proto')

        bar = self.return_module('bar')

        msg = bar.Bar()
        self.assertEqual(type(msg.b), bar.Foo)

        msg.parse_from_bytes(b'\x1a\x03\x08\x96\x01')
        self.assertEqual(msg.b.a, 150)

    def test_import_from_subdirectory(self):
        foo_code = '''
        message Foo {
            optional int32 a = 1;
        }'''

        bar_code = '''
        import "dir/foo.proto";

        message Bar {
            optional Foo b = 3;
        }'''

        self.add_proto_file(foo_code, 'dir/foo.proto')
        self.add_proto_file(bar_code, 'bar.proto')

        self.run_compiler('bar.proto')

        bar = self.return_module('bar')

        msg = bar.Bar()
        self.assertEqual(type(msg.b), bar.Foo)

        msg.parse_from_bytes(b'\x1a\x03\x08\x96\x01')
        self.assertEqual(msg.b.a, 150)

    @skipIf(protoc_version < StrictVersion("2.5.0"),
            "public import requires protoc >= 2.5.0")
    def test_public_import(self):
        foo_code = '''
        message Foo {
            optional int32 a = 1;
        }'''

        bar_code = '''
        import public "foo.proto";
        '''

        final_code = '''
        import "bar.proto";

        message Bar {
            optional Foo b = 3;
        }'''

        self.add_proto_file(foo_code, 'foo.proto')
        self.add_proto_file(bar_code, 'bar.proto')
        self.add_proto_file(final_code, 'final.proto')

        self.run_compiler('final.proto')

        final = self.return_module('final')

        msg = final.Bar()
        self.assertEqual(type(msg.b), final.Foo)

        msg.parse_from_bytes(b'\x1a\x03\x08\x96\x01')
        self.assertEqual(msg.b.a, 150)

    def test_package(self):
        package_code = '''
        package tutorial;

        enum PhoneType {
            MOBILE = 0;
            HOME = 1;
            WORK = 2;
        }

        message Foo {
            optional PhoneType a = 1;
        }'''

        self.add_proto_file(package_code)
        self.run_compiler()
        self.assertEqual(type(self.return_module()), ModuleType)

    def test_packages_import(self):
        foo_code = '''
        package foo;

        message Foo {
            optional int32 a = 1;
        }'''

        bar_code = '''
        package test1;

        import "foo.proto";

        message Bar {
            optional foo.Foo b = 3;
        }'''

        self.add_proto_file(foo_code, 'foo.proto')
        self.add_proto_file(bar_code, 'bar.proto')
        self.run_compiler('foo.proto')
Exemplo n.º 43
0
from distutils.version import StrictVersion
from enum import Enum
from importlib.machinery import SourceFileLoader
from os import environ, makedirs, path
from subprocess import check_output, Popen, PIPE
from sys import path as sys_path
from tempfile import TemporaryDirectory
from types import ModuleType
from unittest import TestCase, skipIf

protoc_version = StrictVersion(
    check_output("protoc --version || true", shell=True)[10:-1].decode())


class TestCompiler(TestCase):
    def setUp(self):
        self.proto_dir = TemporaryDirectory()
        self.out_dir = TemporaryDirectory()

    def add_proto_file(self, content, name='test.proto'):
        file_name = path.join(self.proto_dir.name, name)
        dir_name = path.dirname(file_name)

        if not path.exists(dir_name):
            makedirs(dir_name, exist_ok=True)

        with open(file_name, 'w') as proto_file:
            proto_file.write(content)

    def run_compiler(self, files='test.proto'):
        if not isinstance(files, list):
Exemplo n.º 44
0
ensure_in_syspath('../../../')

import salt.ext.six as six

try:
    import zmq
    from zmq.eventloop.ioloop import ZMQIOLoop
    HAS_ZMQ_IOLOOP = True
except ImportError:
    HAS_ZMQ_IOLOOP = False


@skipIf(HAS_ZMQ_IOLOOP is False,
        'PyZMQ version must be >= 14.0.1 to run these tests.')
@skipIf(
    StrictVersion(zmq.__version__) < StrictVersion('14.0.1'),
    'PyZMQ must be >= 14.0.1 to run these tests.')
class TestSaltAPIHandler(SaltnadoTestCase):
    def get_app(self):
        urls = [('/', saltnado.SaltAPIHandler)]

        application = self.build_tornado_app(urls)

        application.event_listener = saltnado.EventListener({}, self.opts)
        return application

    def test_root(self):
        '''
        Test the root path which returns the list of clients we support
        '''
        response = self.fetch(
Exemplo n.º 45
0
def main():
    argument_spec = openstack_full_argument_spec(
        name=dict(required=True),
        zone_type=dict(required=False, choices=['primary', 'secondary']),
        email=dict(required=False, default=None),
        description=dict(required=False, default=None),
        ttl=dict(required=False, default=None, type='int'),
        masters=dict(required=False, default=None, type='list'),
        state=dict(default='present', choices=['absent', 'present']),
    )

    module_kwargs = openstack_module_kwargs()
    module = AnsibleModule(argument_spec,
                           supports_check_mode=True,
                           **module_kwargs)

    if not HAS_SHADE:
        module.fail_json(msg='shade is required for this module')
    if StrictVersion(shade.__version__) < StrictVersion('1.8.0'):
        module.fail_json(msg="To utilize this module, the installed version of "
                         "the shade library MUST be >=1.8.0")

    name = module.params.get('name')
    state = module.params.get('state')

    try:
        cloud = shade.openstack_cloud(**module.params)
        zone = cloud.get_zone(name)

        if state == 'present':
            zone_type = module.params.get('zone_type')
            email = module.params.get('email')
            description = module.params.get('description')
            ttl = module.params.get('ttl')
            masters = module.params.get('masters')

            if module.check_mode:
                module.exit_json(changed=_system_state_change(
                    state, email, description, ttl, masters, zone))

            if zone is None:
                zone = cloud.create_zone(name=name,
                                         zone_type=zone_type,
                                         email=email,
                                         description=description,
                                         ttl=ttl,
                                         masters=masters)
                changed = True
            else:
                if masters is None:
                    masters = []

                pre_update_zone = zone
                changed = _system_state_change(state, email, description, ttl,
                                               masters, pre_update_zone)
                if changed:
                    zone = cloud.update_zone(name,
                                             email=email,
                                             description=description,
                                             ttl=ttl,
                                             masters=masters)
            module.exit_json(changed=changed, zone=zone)

        elif state == 'absent':
            if module.check_mode:
                module.exit_json(changed=_system_state_change(
                    state, None, None, None, None, zone))

            if zone is None:
                changed = False
            else:
                cloud.delete_zone(name)
                changed = True
            module.exit_json(changed=changed)

    except shade.OpenStackCloudException as e:
        module.fail_json(msg=str(e))
Exemplo n.º 46
0
import numpy as np
import pandas as pd
import json_tricks
from distutils.version import StrictVersion

import psychopy
from psychopy import logging
from psychopy.tools.filetools import (openOutputFile, genDelimiter,
                                      genFilenameFromDelimiter)
from psychopy.tools.fileerrortools import handleFileCollision
from psychopy.tools.arraytools import extendArr
from .utils import _getExcelCellName

try:
    import openpyxl
    if StrictVersion(openpyxl.__version__) >= StrictVersion('2.4.0'):
        # openpyxl moved get_column_letter to utils.cell
        from openpyxl.utils.cell import get_column_letter
    else:
        from openpyxl.cell import get_column_letter
    from openpyxl.reader.excel import load_workbook, Workbook
    haveOpenpyxl = True
except ImportError:
    haveOpenpyxl = False

_experiments = weakref.WeakValueDictionary()


class _ComparisonMixin(object):
    def __eq__(self, other):
        # NoneType and booleans, for example, don't have a .__dict__ attribute.
Exemplo n.º 47
0
missing_template = '''
*****
***** package {0} not found
***** Please install it (pip install {0} or conda install {0})
***** in order to use {1}
*****
'''

valueerror_template = '''
*****
***** package {0} version not understood
***** Please make sure the installed version ({1})
***** is compatible with the minimum required version ({2})
***** in order to use {3}
*****
'''

# now test the versions of extras
for extra, (module_name, min_version) in extras.items():
    try:
        module = import_module(module_name)
        if StrictVersion(module.__version__) < StrictVersion(min_version):
            print(version_template.format(module_name, min_version, extra))
    except ImportError:
        print(missing_template.format(module_name, extra))
    except ValueError:
        print(
            valueerror_template.format(module_name, module.__version__,
                                       min_version, extra))
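
The loop above relies on an `extras` mapping and a `version_template` that are defined earlier in the module and do not appear in this excerpt. A purely illustrative sketch of the shapes the loop assumes (the package names and versions below are placeholders, not the module's real requirements):

from importlib import import_module  # used by the loop above

# Placeholder values: feature name -> (importable module name, minimum version)
extras = {
    'excel output': ('openpyxl', '2.5.0'),
    'plotting': ('matplotlib', '2.0.0'),
}

# Counterpart to missing_template/valueerror_template for too-old versions.
version_template = '''
*****
***** package {0} is older than the minimum required version {1}
***** Please upgrade it in order to use {2}
*****
'''
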
Exemplo n.º 48
0
import platform
import sys

from distutils.version import StrictVersion

from hpccm.common import cpu_arch
from hpccm.common import container_type
from hpccm.common import linux_distro

# Global variables
g_cpu_arch = cpu_arch.X86_64  # CPU architecture
if platform.machine() == 'aarch64':
    g_cpu_arch = cpu_arch.AARCH64
elif platform.machine() == 'ppc64le':
    g_cpu_arch = cpu_arch.PPC64LE
g_cpu_target = None  # CPU optimization target
g_ctype = container_type.DOCKER  # Container type
g_linux_distro = linux_distro.UBUNTU  # Linux distribution
g_linux_version = StrictVersion('16.04')  # Linux distribution version
g_singularity_version = StrictVersion('2.6')  # Singularity version
g_wd = '/var/tmp'  # Working directory


def get_cpu_architecture():
    """Return the architecture string for the currently configured CPU
  architecture, e.g., `aarch64`, `ppc64le`, or `x86_64`.

  """

    this = sys.modules[__name__]
    if this.g_cpu_arch == cpu_arch.AARCH64:
        return 'aarch64'
    elif this.g_cpu_arch == cpu_arch.PPC64LE:
        if this.g_linux_distro == linux_distro.UBUNTU:
Exemplo n.º 49
0
def set_linux_distro(distro):
    """Set the Linux distribution and version

  In most cases, the `baseimage` primitive should be relied upon to
  set the Linux distribution.  Only use this function if you really
  know what you are doing.

  # Arguments

  distro (string): Valid values are `centos7`, `centos8`, `rhel7`,
  `rhel8`, `rockylinux8`, `ubuntu16`, `ubuntu18`, and `ubuntu20`.
  `ubuntu` is an alias for `ubuntu16`, `centos` is an alias for
  `centos7`, and `rhel` is an alias for `rhel7`.

  """
    this = sys.modules[__name__]
    if distro == 'centos':
        this.g_linux_distro = linux_distro.CENTOS
        this.g_linux_version = StrictVersion('7.0')
    elif distro == 'centos7':
        this.g_linux_distro = linux_distro.CENTOS
        this.g_linux_version = StrictVersion('7.0')
    elif distro == 'centos8':
        this.g_linux_distro = linux_distro.CENTOS
        this.g_linux_version = StrictVersion('8.0')
    elif distro == 'rhel':
        this.g_linux_distro = linux_distro.RHEL
        this.g_linux_version = StrictVersion('7.0')
    elif distro == 'rhel7':
        this.g_linux_distro = linux_distro.RHEL
        this.g_linux_version = StrictVersion('7.0')
    elif distro == 'rhel8':
        this.g_linux_distro = linux_distro.RHEL
        this.g_linux_version = StrictVersion('8.0')
    elif distro == 'rockylinux8':
        this.g_linux_distro = linux_distro.ROCKYLINUX
        this.g_linux_version = StrictVersion('8.0')
    elif distro == 'ubuntu':
        this.g_linux_distro = linux_distro.UBUNTU
        this.g_linux_version = StrictVersion('16.04')
    elif distro == 'ubuntu16':
        this.g_linux_distro = linux_distro.UBUNTU
        this.g_linux_version = StrictVersion('16.04')
    elif distro == 'ubuntu18':
        this.g_linux_distro = linux_distro.UBUNTU
        this.g_linux_version = StrictVersion('18.04')
    elif distro == 'ubuntu20':
        this.g_linux_distro = linux_distro.UBUNTU
        this.g_linux_version = StrictVersion('20.04')
    else:
        logging.warning(
            'Unable to determine the Linux distribution, defaulting to Ubuntu')
        this.g_linux_distro = linux_distro.UBUNTU
        this.g_linux_version = StrictVersion('16.04')
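
The if/elif chain above maps each distro alias to a (distribution, version) pair. The same table can be expressed as a dictionary lookup; the sketch below is only an illustration of that alternative and reuses the module's globals (_DISTRO_TABLE and set_linux_distro_from_table are made-up names):

import logging
import sys
from distutils.version import StrictVersion

from hpccm.common import linux_distro

# Same mapping as the if/elif chain, expressed as a lookup table.
_DISTRO_TABLE = {
    'centos': (linux_distro.CENTOS, '7.0'),
    'centos7': (linux_distro.CENTOS, '7.0'),
    'centos8': (linux_distro.CENTOS, '8.0'),
    'rhel': (linux_distro.RHEL, '7.0'),
    'rhel7': (linux_distro.RHEL, '7.0'),
    'rhel8': (linux_distro.RHEL, '8.0'),
    'rockylinux8': (linux_distro.ROCKYLINUX, '8.0'),
    'ubuntu': (linux_distro.UBUNTU, '16.04'),
    'ubuntu16': (linux_distro.UBUNTU, '16.04'),
    'ubuntu18': (linux_distro.UBUNTU, '18.04'),
    'ubuntu20': (linux_distro.UBUNTU, '20.04'),
}


def set_linux_distro_from_table(distro):
    this = sys.modules[__name__]
    if distro not in _DISTRO_TABLE:
        logging.warning(
            'Unable to determine the Linux distribution, defaulting to Ubuntu')
        distro = 'ubuntu'
    this.g_linux_distro, version = _DISTRO_TABLE[distro]
    this.g_linux_version = StrictVersion(version)
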
Exemplo n.º 50
0
def main(top_block_cls=top_block, options=None):

    from distutils.version import StrictVersion
    if StrictVersion(Qt.qVersion()) >= StrictVersion("4.5.0"):
        style = gr.prefs().get_string('qtgui', 'style', 'raster')
        Qt.QApplication.setGraphicsSystem(style)
    qapp = Qt.QApplication(sys.argv)

    ######## ACTUALLY WHERE STUFF HAPPENS #######
    tb = top_block_cls()
    tb.start()
    tb.show()
    print('Receiving ...')

    global darksky
    global darksky_exists

    def snapshot(
            int_time):  #straight snapshot over a certain integration time.
        tb.set_integration_time(int_time)
        # print 'Integration time set to '+str(int_time)+ ' seconds.'
        print 'Snapshot ' + str(int_time) + ' sec'
        vec = tb.get_variable_function_probe()  #old vector
        pointa = vec[0]
        pointb = vec[-1]
        tb.toggle_copy(True)  #start copying
        while vec[0] == pointa and vec[-1] == pointb:
            pytime.sleep(1)
            vec = tb.get_variable_function_probe()
        tb.toggle_copy(False)  #stop copying
        return np.array(vec)

    def dark_sky_calib(int_time):  #for use when pointing at the dark sky
        global darksky
        darksky = snapshot(int_time)
        global darksky_exists
        darksky_exists = True
        return

    def observe(int_time):  #dark sky calbrated snapshot
        vec = snapshot(int_time)
        wait(int_time)
        global darksky_exists
        if darksky_exists:
            calib = vec - darksky
            return calib
        else:
            print('Warning: No dark sky calibration has been performed.')
            return vec

    def wait(sec):
        pytime.sleep(sec)
        return

    def graphing(int_time, iter=float('inf')):
        plt.ion()
        plt.figure()
        vec = tb.get_variable_function_probe()  #vector
        n = len(vec)
        x = np.linspace(flo, fhi, n)
        i = 0
        while (i < iter):
            plt.pause(int_time)
            y = observe(int_time)
            plt.clf()
            plt.xlabel('Frequency (MHz)')
            plt.ylabel('Scaled power')
            plt.axvline(x=1420.406, color='black', ls='--')
            plt.ticklabel_format(useOffset=False)
            plt.plot(x, y)
            plt.draw()
            i += 1
        return ()

    def track(N):
        client.track(N)
Exemplo n.º 51
0
#    License for the specific language governing permissions and limitations
#    under the License.

from types import ModuleType

from distutils.version import StrictVersion

from neutron.plugins.ml2.drivers import type_tunnel
from neutron import version

# Some constants and verifier functions have been deprecated but are still
# used by earlier releases of neutron. In order to maintain
# backwards-compatibility with stable/mitaka this will act as a translator
# that passes constants and functions according to version number.

NEUTRON_VERSION = StrictVersion(str(version.version_info))
NEUTRON_NEWTON_VERSION = StrictVersion('9.0.0')
NEUTRON_OCATA_VERSION = StrictVersion('10.0.0')

n_c = __import__('neutron.common.constants', fromlist=['common.constants'])
constants = __import__('neutron_lib.constants', fromlist=['constants'])

if NEUTRON_VERSION >= NEUTRON_NEWTON_VERSION:
    from neutron.conf import common as base_config
    from neutron_lib.api import validators
    is_attr_set = validators.is_attr_set
    validators = validators.validators
    n_c_attr_names = getattr(n_c, "_mg__my_globals", None)
else:
    from neutron.api.v2 import attributes
    from neutron.common import config as base_config
Exemplo n.º 52
0
    files.append('build/util/LASTCHANGE.blink')
    print 'copying files to ' + third_party_chromium
    for i in xrange(len(files)):
        printProgress(i+1, len(files))
        f = files[i]
        if not isInChromiumBlacklist(f) and not isInGitBlacklist(f):
            copyFile(f, os.path.join(third_party_chromium, f))
    print("")

commandNotFound = subprocess.call(['which', 'dos2unix'])

if not commandNotFound:
    dos2unixVersion, err = subprocess.Popen(
        ['dos2unix', '-V', '| true'],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
    if not dos2unixVersion:
        raise Exception("You need dos2unix version 6.0.6 minimum.")
    dos2unixVersion = StrictVersion(dos2unixVersion.splitlines()[0].split()[1])

if commandNotFound or dos2unixVersion < StrictVersion('6.0.6'):
    raise Exception("You need dos2unix version 6.0.6 minimum.")

os.chdir(third_party)
ignore_case_setting = subprocess.Popen(['git', 'config', '--get', 'core.ignorecase'], stdout=subprocess.PIPE).communicate()[0]
if 'true' in ignore_case_setting:
    raise Exception("Your 3rdparty repository is configured to ignore case. "
                    "A snapshot created with these settings would cause problems on case sensitive file systems.")

clearDirectory(third_party)

exportNinja()
exportChromium()
Exemplo n.º 53
0
    RedisError,
    ConnectionError,
    TimeoutError,
    BusyLoadingError,
    ResponseError,
    InvalidResponse,
    AuthenticationError,
    NoScriptError,
    ExecAbortError,
    ReadOnlyError
)
from WebModel.utils.redis.utils import HIREDIS_AVAILABLE
if HIREDIS_AVAILABLE:
    import hiredis

    hiredis_version = StrictVersion(hiredis.__version__)
    HIREDIS_SUPPORTS_CALLABLE_ERRORS = \
        hiredis_version >= StrictVersion('0.1.3')
    HIREDIS_SUPPORTS_BYTE_BUFFER = \
        hiredis_version >= StrictVersion('0.1.4')

    if not HIREDIS_SUPPORTS_BYTE_BUFFER:
        msg = ("redis-py works best with hiredis >= 0.1.4. You're running "
               "hiredis %s. Please consider upgrading." % hiredis.__version__)
        warnings.warn(msg)

    HIREDIS_USE_BYTE_BUFFER = True
    # only use byte buffer if hiredis supports it and the Python version
    # is >= 2.7
    if not HIREDIS_SUPPORTS_BYTE_BUFFER or (
            sys.version_info[0] == 2 and sys.version_info[1] < 7):