Example #1
def _format_commands_diff(commands_diff):
    """Format the commands differences into a human readable format."""
    output = StringIO()
    for key, value in sorted(commands_diff.items()):
        if key == 'hammer':
            continue
        output.write('{}{}\n'.format(
            key,
            ' (new command)' if value['added_command'] else ''
        ))
        if value.get('added_subcommands'):
            output.write('  Added subcommands:\n')
            for subcommand in value.get('added_subcommands'):
                output.write('    * {}\n'.format(subcommand))
        if value.get('added_options'):
            output.write('  Added options:\n')
            for option in value.get('added_options'):
                output.write('    * {}\n'.format(option))
        if value.get('removed_subcommands'):
            output.write('  Removed subcommands:\n')
            for subcommand in value.get('removed_subcommands'):
                output.write('    * {}\n'.format(subcommand))
        if value.get('removed_options'):
            output.write('  Removed options:\n')
            for option in value.get('removed_options'):
                output.write('    * {}\n'.format(option))
        output.write('\n')
    output_value = output.getvalue()
    output.close()
    return output_value
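A minimal usage sketch for the formatter above. The input shape is inferred from the lookups in the function body; the command name and option values here are made up for illustration:

# Hypothetical diff dict, shaped after the keys the function reads.
commands_diff = {
    'host': {
        'added_command': True,
        'added_subcommands': ['create', 'delete'],
        'removed_options': ['--legacy-flag'],
    },
}
print(_format_commands_diff(commands_diff))
# host (new command)
#   Added subcommands:
#     * create
#     * delete
#   Removed options:
#     * --legacy-flag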
Example #2
    def post(self, request):
        dataio = None
        success = False
        message = u""
        data = u""
        try:
            if self.check_basic_auth():
                dataio = StringIO(request.read())

                load_categories(get_text_from_data(dataio))
                success = True
                message = u"Успешно загружены данные из 1С."

                return HttpResponse("success")
            else:
                error_message = u"Ошибка загрузки данных из 1С: неправильное имя пользователя или пароль."
                auth_message = u"HTTP_AUTHORIZATION: " + self.request.META.get('HTTP_AUTHORIZATION', ' ')
                success = False
                message = u"\n".join([error_message, auth_message])
                return HttpResponse("failure", status=401)
        except Exception as e:
            if transaction.is_dirty():
                transaction.rollback()
            success = False
            message = u"\n".join([u"Ошибка загрузки данных из 1С:", traceback.format_exc()])
            if dataio:
                data = get_text_from_data(dataio)
                dataio.close()
            raise
        finally:
            with transaction.commit_on_success():
                Log.objects.create(success=success, message=message, data=data)
Example #3
    def make_word(self, seed=None, min=3, max=30, tries=100):
        if seed is not None or not self._seeded:
            self._random.seed(seed)

        out = StringIO()
        tail = CircularBuffer(self.tail)
        tail.append(WORD_START)

        while True:
            c = self.choose_transition(self.transitions(tail.tuple()), self._random.random())

            if c == WORD_STOP:
                break
            else:
                out.write(c)
                tail.append(c)

        result = out.getvalue()
        out.close()

        if min <= len(result) <= max:
            return result
        elif tries > 0:
            return self.make_word(seed, min, max, tries - 1)
        else:
            raise MatrixError
Example #4
 def do_POST(self):
     """Serve a POST request."""
     r, info = self.deal_post_data()
     print(r, info, "by: ", self.client_address)
     f = StringIO()
     f.write('<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">')
     f.write("<html>\n<title>Upload Result Page</title>\n")
     f.write("<body>\n<h2>Upload Result Page</h2>\n")
     f.write("<hr>\n")
     if r:
         f.write("<strong>Success:</strong>")
     else:
         f.write("<strong>Failed:</strong>")
     f.write(info)
     f.write("<br><a href=\"%s\">back</a>" % self.headers['referer'])
     f.write("<hr><small>Powerd By: bones7456, check new version at ")
     f.write("<a href=\"http://li2z.cn/?s=SimpleHTTPServerWithUpload\">")
     f.write("here</a>.</small></body>\n</html>\n")
     length = f.tell()
     f.seek(0)
     self.send_response(200)
     self.send_header("Content-type", "text/html")
     self.send_header("Content-Length", str(length))
     self.end_headers()
     if f:
         self.copyfile(f, self.wfile)
         f.close()
Example #5
    def test_roundtrip_generators(self):
        # test that fasta and qual files can be streamed into memory and back
        # out to disk using generator reader and writer
        fps = list(map(lambda e: list(map(get_data_path, e)),
                       [('empty', 'empty'),
                        ('fasta_multi_seq_roundtrip',
                         'qual_multi_seq_roundtrip')]))

        for fasta_fp, qual_fp in fps:
            with open(fasta_fp, 'U') as fh:
                exp_fasta = fh.read()
            with open(qual_fp, 'U') as fh:
                exp_qual = fh.read()

            fasta_fh = StringIO()
            qual_fh = StringIO()
            _generator_to_fasta(_fasta_to_generator(fasta_fp, qual=qual_fp),
                                fasta_fh, qual=qual_fh)
            obs_fasta = fasta_fh.getvalue()
            obs_qual = qual_fh.getvalue()
            fasta_fh.close()
            qual_fh.close()

            self.assertEqual(obs_fasta, exp_fasta)
            self.assertEqual(obs_qual, exp_qual)
Example #6
 def toXml(self, filename='', compress=False):
     xml = StringIO()
     xml.write("<?xml version='1.0' encoding='UTF-8'?>\n")
     xml.write(
         "<!DOCTYPE svg PUBLIC \"-//W3C//DTD SVG 1.0//EN\" \"http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd \">\n")
     self.svg.toXml(0, xml)
     if not filename:
         if compress:
             import gzip
             f = StringIO()
             zf = gzip.GzipFile(fileobj=f, mode='wb')
             zf.write(xml.getvalue())
             zf.close()
             f.seek(0)
             return f.read()
         else:
             return xml.getvalue()
     else:
         if filename[-4:] == 'svgz':
             import gzip
             f = gzip.GzipFile(
                 filename=filename, mode="wb", compresslevel=9)
             f.write(xml.getvalue())
             f.close()
         else:
             f = open(filename, 'w')
             f.write(xml.getvalue())
             f.close()
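Note that under Python 3 the gzip branch above would fail: GzipFile.write expects bytes, while StringIO holds text. A sketch of the same in-memory compression with io.BytesIO, encoding the XML first:

import gzip
import io

xml_text = "<?xml version='1.0' encoding='UTF-8'?>\n<svg/>\n"

buf = io.BytesIO()
with gzip.GzipFile(fileobj=buf, mode='wb') as zf:
    zf.write(xml_text.encode('utf-8'))  # GzipFile needs bytes, not str
buf.seek(0)
compressed = buf.read()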
Example #7
    def test_sequence_to_fastq_kwargs_passed(self):
        for constructor in [Sequence, DNA, RNA, Protein]:
            for components, kwargs_expected_fp in self.valid_files:
                for expected_kwargs, expected_fp in kwargs_expected_fp:

                    observed_kwargs = {}
                    # TODO:
                    # some of the test files contain characters which are
                    # invalid for RNA, so don't validate for now. Need to
                    # fix this
                    if constructor is RNA:
                        observed_kwargs['validate'] = False

                    # Can't use partials for this because the read
                    # function below can't operate on partials
                    if hasattr(constructor, 'lowercase'):
                        expected_kwargs['lowercase'] = 'introns'
                        observed_kwargs['lowercase'] = 'introns'

                    fh = StringIO()
                    for c in components:
                        obj = constructor(
                            c[2],
                            metadata={'id': c[0], 'description': c[1]},
                            positional_metadata={'quality': c[3]},
                            **observed_kwargs)
                        write(obj, into=fh, format='fastq', **expected_kwargs)

                    observed = fh.getvalue()
                    fh.close()

                    with open(expected_fp, 'U') as f:
                        expected = f.read()

                    self.assertEqual(observed, expected)
Example #8
 def __call__(self, parser, namespace, values, option_string=None):
     output = StringIO()
     parser.print_help(output)
     text = output.getvalue()
     output.close()
     pydoc.pager(text)
     parser.exit()
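The signature above is argparse's custom Action protocol. A sketch of how such a pager action might be registered, assuming the class is named PagerHelpAction (the name is illustrative, not from the original project):

import argparse
import pydoc
from six import StringIO

class PagerHelpAction(argparse.Action):  # illustrative name
    def __call__(self, parser, namespace, values, option_string=None):
        output = StringIO()
        parser.print_help(output)
        text = output.getvalue()
        output.close()
        pydoc.pager(text)
        parser.exit()

parser = argparse.ArgumentParser()
# nargs=0: the flag consumes no arguments before the action fires
parser.add_argument('--pager-help', action=PagerHelpAction, nargs=0)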
Example #9
    def _in_process_execute(self, command):
        from azure.cli.main import main as cli_main
        from six import StringIO
        from vcr.errors import CannotOverwriteExistingCassetteException

        if command.startswith('az '):
            command = command[3:]

        output_buffer = StringIO()
        try:
            # issue: stderr cannot be redirect in this form, as a result some failure information
            # is lost when command fails.
            self.exit_code = cli_main(shlex.split(command), file=output_buffer) or 0
            self.output = output_buffer.getvalue()
        except CannotOverwriteExistingCassetteException as ex:
            raise AssertionError(ex)
        except CliExecutionError as ex:
            if ex.exception:
                raise ex.exception
            else:
                raise ex
        except Exception as ex:  # pylint: disable=broad-except
            self.exit_code = 1
            self.output = output_buffer.getvalue()
            self.process_error = ex
        finally:
            output_buffer.close()
Example #10
    def _save_data(self, key, hmmfile, stofile, colnames, x, cur=None):
        with open(hmmfile) as fh:
            hmmdata = fh.read()

        with open(stofile) as fh:
            stodata = fh.read()

        phylo = True if x.dtype == float else False

        coldata = json.dumps(colnames)
        xfile = StringIO()
        np.save(xfile, x)
        xdata = bz2.compress(xfile.getvalue(), 9)
        xfile.close()

        closable = False
        if cur is None:
            conn = sqlite3.connect(self.__filename)
            cur = conn.cursor()
            closable = True

        IdepiProjectData.__insert(cur, IdepiProjectData.__HMM, key, hmmdata)
        IdepiProjectData.__insert(cur, IdepiProjectData.__ALIGNMENT, key, stodata)
        IdepiProjectData.__insert(cur, IdepiProjectData.__COLUMNS, key, coldata)
        if phylo:
            IdepiProjectData.__insert(cur, IdepiProjectData.__PHYLO_DATA, key, xdata)
        else:
            IdepiProjectData.__insert(cur, IdepiProjectData.__DISCRETE_DATA, key, xdata)

        if closable:
            conn.close()
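np.save emits the binary .npy format, so on Python 3 the StringIO above would raise; the snippet is from the Python 2 era. A sketch of the same serialize-and-compress step with io.BytesIO:

import bz2
import io
import numpy as np

x = np.arange(6, dtype=float).reshape(2, 3)

xfile = io.BytesIO()                       # binary buffer for the .npy bytes
np.save(xfile, x)
xdata = bz2.compress(xfile.getvalue(), 9)  # bytes in, compressed bytes out
xfile.close()

# round-trip check
restored = np.load(io.BytesIO(bz2.decompress(xdata)))
assert (restored == x).all()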
Example #11
 def open(self, filename=None):
     if filename is None:
         filename = self._base_uri
     else:
         if self._file_type == 's3':
             filename = urljoin(self._base_uri.replace(
                 's3://', 'http://'), filename.replace('\\', '/')).replace('http://', 's3://')
         elif self._file_type == 'http':
             filename = urljoin(self._base_uri, filename.replace('\\', '/'))
         else:
             filename = os.path.abspath(os.path.join(os.path.dirname(
                 self._base_uri.replace('\\', '/')), filename.replace('\\', '/')))
     f = None
     if self._file_type == 's3':
         uri_header, uri_body = filename.split('://', 1)
         us = uri_body.split('/')
         bucketname = us.pop(0)
         key = '/'.join(us)
         logger.info('Opening {}'.format(key))
         f = StringIO(self._s3_bucket.Object(key).get()['Body'].read())
     elif self._file_type == 'http':
         f = request.urlopen(filename)
     else:
         f = open(filename, 'rb')
     yield f
     f.close()
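The trailing yield/close pair suggests this method is used as a context manager, presumably decorated with contextlib.contextmanager elsewhere in the class (an assumption; the decorator is not shown in the snippet). The bare pattern looks like this:

from contextlib import contextmanager

class FileSource(object):  # illustrative stand-in for the original class
    @contextmanager
    def open(self, filename):
        f = open(filename, 'rb')  # the builtin; the method only shadows it on the instance
        yield f
        f.close()                 # note: skipped if the with-body raises

# with FileSource().open('data.bin') as f:
#     payload = f.read()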
Example #12
    def _log_metadata(self, project):
        dependencies = self._extract_dependencies(project)

        output = StringIO()
        writer = csv.DictWriter(output, delimiter=',',
                                quoting=csv.QUOTE_MINIMAL,
                                lineterminator="\n",
                                fieldnames=ordered_fieldnames)

        for dep in dependencies:
            license, homepage = self._get_pypi_license_homepage(**dep)

            if homepage and 'launchpad.net' in homepage:
                license = self._get_launchpad_license(homepage)

            if license == "UNKNOWN":
                license = ""

            if homepage == "UNKNOWN":
                homepage = ""

            info = dep
            info['license_info'] = license
            info['homepage'] = homepage
            info['project_name'] = project.name
            writer.writerow(info)

        self._project_deps[project.name] = output.getvalue()
        output.close()
Example #13
def test_config_yaml_with_env():
    #assert False, "Not Implemented"
    fileobj = StringIO(CONFIG)
    result = config_from_yaml(fileobj=fileobj, silent=True, upper_only=False, parse_env=True)
    fileobj.close()
    print(result)
    assert result['postfix']['home'] == HOME_ENV    
Example #14
    def test_roundtrip_biological_sequences(self):
        # materialize: a bare map() would be exhausted after the first
        # reader/writer pair on Python 3
        fps = list(map(get_data_path,
                       ['fasta_multi_seq_roundtrip',
                        'fasta_sequence_collection_different_type']))

        for reader, writer in ((_fasta_to_biological_sequence,
                                _biological_sequence_to_fasta),
                               (_fasta_to_nucleotide_sequence,
                                _nucleotide_sequence_to_fasta),
                               (_fasta_to_dna_sequence,
                                _dna_sequence_to_fasta),
                               (_fasta_to_rna_sequence,
                                _rna_sequence_to_fasta),
                               (_fasta_to_protein_sequence,
                                _protein_sequence_to_fasta)):
            for fp in fps:
                # read
                obj1 = reader(fp)

                # write
                fh = StringIO()
                writer(obj1, fh)
                fh.seek(0)

                # read
                obj2 = reader(fh)
                fh.close()

                self.assertTrue(obj1.equals(obj2))
Example #15
class Test_resource_validators(unittest.TestCase):

    def setUp(self):
        self.io = StringIO()

    def tearDown(self):
        self.io.close()

    def test_generate_deployment_name_from_file(self):
        #verify auto-gen from uri
        namespace = mock.MagicMock()
        namespace.template_uri = 'https://templates/template123.json?foo=bar'
        namespace.template_file = None
        namespace.deployment_name = None
        validate_deployment_name(namespace)
        self.assertEqual('template123', namespace.deployment_name)

        namespace = mock.MagicMock()
        namespace.template_file = __file__
        namespace.template_uri = None
        namespace.deployment_name = None
        validate_deployment_name(namespace)
        self.assertEqual('test_validators', namespace.deployment_name)

        #verify the default name is used when template_file holds file content
        namespace = mock.MagicMock()
        namespace.template_file = '{"foo":"bar"}'
        namespace.template_uri = None
        namespace.deployment_name = None
        validate_deployment_name(namespace)
        self.assertEqual('deployment1', namespace.deployment_name)
Example #16
def export_config(config=None, export_path=None, format='stdout'):
    
    if format == 'stdout':
        pprint.pprint(config)
        return

    fp = None
    stdout = False    
    try:
        if export_path and export_path != 'stdout':
            fp = open(export_path, 'wt')
        else:
            stdout = True
            fp = StringIO()
    
        if format == 'yaml':
            yaml_dump(config, fp, Dumper=YAMLDumper, explicit_start=False, default_flow_style=False)
        
        elif format == 'json':
            jsonmod.dump(config, fp, indent=3)
    except:
        raise
    finally:
        if fp:
            if stdout:
                print(fp.getvalue())
            
            fp.close()
Example #17
    def test_roundtrip_sequence_collections_and_alignments(self):
        fps = list(map(lambda e: list(map(get_data_path, e)),
                       [('empty', 'empty'),
                        ('fasta_sequence_collection_different_type',
                         'qual_sequence_collection_different_type')]))

        for reader, writer in ((_fasta_to_sequence_collection,
                                _sequence_collection_to_fasta),
                               (_fasta_to_alignment,
                                _alignment_to_fasta)):
            for fasta_fp, qual_fp in fps:
                # read
                obj1 = reader(fasta_fp, qual=qual_fp)

                # write
                fasta_fh = StringIO()
                qual_fh = StringIO()
                writer(obj1, fasta_fh, qual=qual_fh)
                fasta_fh.seek(0)
                qual_fh.seek(0)

                # read
                obj2 = reader(fasta_fh, qual=qual_fh)
                fasta_fh.close()
                qual_fh.close()

                # TODO remove this custom equality testing code when
                # SequenceCollection has an equals method (part of #656).
                # We need this method to include IDs and description in the
                # comparison (not part of SequenceCollection.__eq__).
                self.assertEqual(obj1, obj2)
                for s1, s2 in zip(obj1, obj2):
                    self.assertTrue(s1.equals(s2))
Example #18
class TestCorpus(unittest.TestCase):

    def setUp(self):
        self.feature_table = FeatureTable.load(get_feature_table_fixture("feature_table.json"))
        self.corpus = Corpus.load(get_corpus_fixture("corpus.txt"))

    def test_number_of_words(self):
        self.assertEqual(len(self.corpus), 82)

    def test_get_item(self):
        self.assertEqual(self.corpus[3], "bab")
        self.assertEqual(self.corpus[5], "aab")

    def test_unicode_str(self):
        self.assertEqual(str(self.corpus), "Corpus with 82 words")
        if sys.version_info < (3, 0):
            self.assertEqual(unicode(self.corpus), "Corpus with 82 words")

    def test_get_list_corpus(self):
        corpus = Corpus.load(get_corpus_fixture("test_list_corpus.txt"))
        self.assertEqual(len(corpus), 5)


    def test_print_corpus(self):
        self.out = StringIO()
        self.saved_stdout = sys.stdout
        sys.stdout = self.out      # temporarily take over sys.stdout
        self.corpus.print_corpus()
        self.assertEqual(len(self.out.getvalue().split("\n")), 9)
        self.out.close()
        sys.stdout = self.saved_stdout
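Swapping sys.stdout by hand works, but if the assertion fails before the restore line, stdout stays hijacked for the rest of the test run. On Python 3 the same capture could be written more safely with contextlib.redirect_stdout (redirect_stderr covers the stderr variant used in other examples here):

import contextlib
import io

# drop-in rewrite of the test method above
def test_print_corpus(self):
    out = io.StringIO()
    with contextlib.redirect_stdout(out):  # restores sys.stdout even on failure
        self.corpus.print_corpus()
    self.assertEqual(len(out.getvalue().split("\n")), 9)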
Example #19
def run_tests(modules, parallel, run_live, tests):

    if not modules and not tests:
        display('No tests set to run.')
        sys.exit(1)

    display("""
=============
  Run Tests
=============
""")
    if modules:
        display('Modules: {}'.format(', '.join(name for name, _, _ in modules)))

    # set environment variable
    if run_live:
        os.environ['AZURE_TEST_RUN_LIVE'] = 'True'

    test_paths = tests or [p for _, _, p in modules]

    display('Drive test by nosetests')
    from six import StringIO
    old_stderr = sys.stderr
    test_stderr = StringIO()
    sys.stderr = test_stderr
    runner = get_nose_runner(parallel=parallel, process_timeout=3600 if run_live else 600)
    results = runner([path for path in test_paths])
    stderr_val = test_stderr.getvalue()
    sys.stderr = old_stderr
    test_stderr.close()
    failed_tests = summarize_tests(stderr_val)
    return results, failed_tests
Example #20
 def test_simple_cmd_with_redirected_output_fileobject(self):
     buff = StringIO()
     ret = ProcessHelper.run_subprocess(self.ECHO_COMMAND, output=buff)
     assert ret == 0
     assert not os.path.exists(self.OUT_FILE)
     assert buff.readline().strip("\n") == self.PHRASE
     buff.close()
Example #21
    def _in_process_execute(self, cli_ctx, command, expect_failure=False):
        from six import StringIO
        from vcr.errors import CannotOverwriteExistingCassetteException

        if command.startswith('az '):
            command = command[3:]

        stdout_buf = StringIO()
        logging_buf = StringIO()
        try:
            # issue: stderr cannot be redirect in this form, as a result some failure information
            # is lost when command fails.
            self.exit_code = cli_ctx.invoke(shlex.split(command), out_file=stdout_buf) or 0
            self.output = stdout_buf.getvalue()
            self.applog = logging_buf.getvalue()

        except CannotOverwriteExistingCassetteException as ex:
            raise AssertionError(ex)
        except CliExecutionError as ex:
            if expect_failure:
                self.exit_code = 1
                self.output = stdout_buf.getvalue()
                self.applog = logging_buf.getvalue()
            elif ex.exception:
                raise ex.exception
            else:
                raise ex
        except Exception as ex:  # pylint: disable=broad-except
            self.exit_code = 1
            self.output = stdout_buf.getvalue()
            self.process_error = ex
        finally:
            stdout_buf.close()
            logging_buf.close()
Example #22
def get_correct_indentation_diff(code, filename):
    """
    Generate a diff to make code correctly indented.

    :param code: a string containing a file's worth of Python code
    :param filename: the filename being considered (used in diff generation only)
    :returns: a unified diff to make code correctly indented, or
              None if code is already correctedly indented
    """
    code_buffer = StringIO(code)
    output_buffer = StringIO()
    reindenter = reindent.Reindenter(code_buffer)
    reindenter.run()
    reindenter.write(output_buffer)
    reindent_output = output_buffer.getvalue()
    output_buffer.close()
    if code != reindent_output:
        diff_generator = difflib.unified_diff(code.splitlines(True), reindent_output.splitlines(True),
                                              fromfile=filename, tofile=filename + " (reindented)")
        # work around http://bugs.python.org/issue2142
        diff_tuple = map(clean_diff_line_for_python_bug_2142, diff_generator)
        diff = "".join(diff_tuple)
        return diff
    else:
        return None
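A hedged usage sketch. reindent here is the Reindenter from CPython's Tools/scripts/reindent.py, which normalizes indentation to four-space steps, so an over-indented body should produce a diff (clean_diff_line_for_python_bug_2142 is defined elsewhere in the original module):

badly_indented = "def f():\n        return 1\n"  # 8-space body
diff = get_correct_indentation_diff(badly_indented, "example.py")
if diff is not None:
    print(diff)  # unified diff moving the body to 4 spaces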
Example #23
    def test_roundtrip_sequence_collections_and_alignments(self):
        fps = list(map(lambda e: list(map(get_data_path, e)),
                       [('empty', 'empty'),
                        ('fasta_sequence_collection_different_type',
                         'qual_sequence_collection_different_type')]))

        for reader, writer in ((_fasta_to_sequence_collection,
                                _sequence_collection_to_fasta),
                               (_fasta_to_alignment,
                                _alignment_to_fasta)):
            for fasta_fp, qual_fp in fps:
                # read
                obj1 = reader(fasta_fp, qual=qual_fp)

                # write
                fasta_fh = StringIO()
                qual_fh = StringIO()
                writer(obj1, fasta_fh, qual=qual_fh)
                fasta_fh.seek(0)
                qual_fh.seek(0)

                # read
                obj2 = reader(fasta_fh, qual=qual_fh)
                fasta_fh.close()
                qual_fh.close()

                self.assertEqual(obj1, obj2)
Example #24
    def cmd(self, command, checks=None, allowed_exceptions=None, debug=False): #pylint: disable=no-self-use
        allowed_exceptions = allowed_exceptions or []
        if not isinstance(allowed_exceptions, list):
            allowed_exceptions = [allowed_exceptions]

        if self._debug or debug:
            print('\n\tRUNNING: {}'.format(command))
        command_list = shlex.split(command)
        output = StringIO()
        try:
            cli_main(command_list, file=output)
        except Exception as ex: # pylint: disable=broad-except
            ex_msg = str(ex)
            if not next((x for x in allowed_exceptions if x in ex_msg), None):
                raise ex
        self._track_executed_commands(command_list)
        result = output.getvalue().strip()
        output.close()

        if self._debug or debug:
            print('\tRESULT: {}\n'.format(result))

        if checks:
            checks = [checks] if not isinstance(checks, list) else checks
            for check in checks:
                check.compare(result)

        result = result or '{}'
        try:
            return json.loads(result)
        except Exception: # pylint: disable=broad-except
            return result
Example #25
    def test_roundtrip_biological_sequences(self):
        fps = list(map(lambda e: list(map(get_data_path, e)),
                       [('fasta_multi_seq_roundtrip',
                         'qual_multi_seq_roundtrip'),
                        ('fasta_sequence_collection_different_type',
                         'qual_sequence_collection_different_type')]))

        for reader, writer in ((_fasta_to_biological_sequence,
                                _biological_sequence_to_fasta),
                               (partial(_fasta_to_dna_sequence,
                                        validate=False),
                                _dna_sequence_to_fasta),
                               (partial(_fasta_to_rna_sequence,
                                        validate=False),
                                _rna_sequence_to_fasta),
                               (partial(_fasta_to_protein_sequence,
                                        validate=False),
                                _protein_sequence_to_fasta)):
            for fasta_fp, qual_fp in fps:
                # read
                obj1 = reader(fasta_fp, qual=qual_fp)

                # write
                fasta_fh = StringIO()
                qual_fh = StringIO()
                writer(obj1, fasta_fh, qual=qual_fh)
                fasta_fh.seek(0)
                qual_fh.seek(0)

                # read
                obj2 = reader(fasta_fh, qual=qual_fh)
                fasta_fh.close()
                qual_fh.close()

                self.assertEqual(obj1, obj2)
Example #26
    def fetch_data(self):
        # create a data frame directly from the full text of
        # the response from the returned file-descriptor.
        data = self.fetch_url(self.url)
        fd = StringIO()

        if isinstance(data, str):
            fd.write(data)
        else:
            for chunk in data:
                fd.write(chunk)

        self.fetch_size = fd.tell()

        fd.seek(0)

        try:
            # see if pandas can parse csv data
            frames = read_csv(fd, **self.pandas_kwargs)

            frames_hash = hashlib.md5(str(fd.getvalue()).encode('utf-8'))
            self.fetch_hash = frames_hash.hexdigest()
        except pd.parser.CParserError:
            # could not parse the data, raise exception
            raise Exception('Error parsing remote CSV data.')
        finally:
            fd.close()

        return frames
Example #27
 def write_to_db(self, db, transaction=None, commit=True):
     if transaction is None:
         transaction = db
     fp = StringIO()
     if len(self) < Timeseries.MAX_ALL_BOTTOM:
         top = ''
         middle = None
         self.write(fp)
         bottom = fp.getvalue()
     else:
         dates = sorted(self.keys())
         self.write(fp, end=dates[Timeseries.ROWS_IN_TOP_BOTTOM - 1])
         top = fp.getvalue()
         fp.truncate(0)
         fp.seek(0)
         self.write(fp, start=dates[Timeseries.ROWS_IN_TOP_BOTTOM],
                    end=dates[-(Timeseries.ROWS_IN_TOP_BOTTOM + 1)])
         middle = self.blob_create(
             zlib.compress(fp.getvalue().encode('ascii')))
         fp.truncate(0)
         fp.seek(0)
         self.write(fp, start=dates[-Timeseries.ROWS_IN_TOP_BOTTOM])
         bottom = fp.getvalue()
     fp.close()
     c = db.cursor()
     c.execute("DELETE FROM ts_records WHERE id=%d" % (self.id))
     c.execute("""INSERT INTO ts_records (id, top, middle, bottom)
                  VALUES (%s, %s, %s, %s)""", (self.id, top, middle,
               bottom))
     c.close()
     if commit:
         transaction.commit()
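The truncate(0)/seek(0) pair is what makes the buffer reusable. With Python 3's io.StringIO, truncate alone does not move the file position, so the next write lands past the new end and the gap is zero-filled:

import io

buf = io.StringIO()
buf.write('first chunk')
buf.truncate(0)              # drops the data, position stays at 11
buf.write('second')
print(repr(buf.getvalue()))  # '\x00' * 11 + 'second'

buf = io.StringIO()
buf.write('first chunk')
buf.truncate(0)
buf.seek(0)                  # rewind as well, as write_to_db does
buf.write('second')
print(repr(buf.getvalue()))  # 'second'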
Example #28
class Test_resource_validators(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        pass
        
    @classmethod
    def tearDownClass(cls):
        pass

    def setUp(self):
        self.io = StringIO()
        
    def tearDown(self):
        self.io.close()

    def test_resource_type_valid(self):
        input = 'Test.Namespace/testtype'
        actual = validate_resource_type(input)
        self.assertEqual(actual.namespace, 'Test.Namespace')
        self.assertEqual(actual.type, 'testtype')

    def test_resource_type_invalid(self):
        pass

    def test_parent_valid(self):
        input = 'testtype/mytesttype'
        actual = validate_parent(input)
        self.assertEqual(actual.type, 'testtype')
        self.assertEqual(actual.name, 'mytesttype')        

    def test_parent_invalid(self):
        pass
Example #29
def qtree(q):
    q = optimize(q)
    f = StringIO()
    q.print_tree(out=f)
    f.seek(0)
    out = f.read()
    f.close()
    return out
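The seek(0)/read() pair is equivalent to getvalue(), which skips the rewind; a minimal rewrite of the same helper (optimize and print_tree come from the surrounding query library):

def qtree(q):
    q = optimize(q)
    f = StringIO()
    q.print_tree(out=f)
    out = f.getvalue()  # the whole buffer, no seek needed
    f.close()
    return out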
Example #30
 def test_newick_to_tree_node_convert_underscores(self):
     fh = StringIO('(_:0.1, _a, _b)__;')
     tree = _newick_to_tree_node(fh, convert_underscores=False)
     fh2 = StringIO()
     _tree_node_to_newick(tree, fh2)
     self.assertEqual(fh2.getvalue(), "('_':0.1,'_a','_b')'__';\n")
     fh2.close()
     fh.close()
Example #31
#!/usr/bin/env python

from six import StringIO
from six import print_

output = StringIO()
output.write('First line.\n')
print_('Second line.', file=output)

# Retrieve file contents -- this will be
# 'First line.\nSecond line.\n'
contents = output.getvalue()
print(contents)

# Close object and discard memory buffer --
# .getvalue() will now raise an exception.
output.close()
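six pairs StringIO (text) with BytesIO (binary); the same round trip with bytes looks like this:

from six import BytesIO

output = BytesIO()
output.write(b'First line.\n')
output.write(b'Second line.\n')

contents = output.getvalue()  # b'First line.\nSecond line.\n'
print(contents)

output.close()  # getvalue() raises ValueError after this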
Example #32
 def __repr__(self):
     str_buffer = StringIO()
     self._pprint_impl(0, str_buffer)
     contents = str_buffer.getvalue()
     str_buffer.close()
     return contents
Example #33
        def toXml(self, filename='', compress=False):
            """drawing.toXml()        ---->to the screen
            drawing.toXml(filename)---->to the file
            writes a svg drawing to the screen or to a file
            compresses if filename ends with svgz or if compress is true
            """
            doctype = implementation.createDocumentType(
                'svg', "-//W3C//DTD SVG 1.0//EN""", 'http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd ')

            global root
            # root is defined global so it can be used by the appender. It's also possible to pass it as an
            # argument, but that is a bit messy.
            root = implementation.createDocument(None, None, doctype)
            # Create the xml document.
            global appender

            def appender(element, elementroot):
                """This recursive function appends elements to an element and sets the attributes
                and type. It stops when all elements have been appended"""
                if element.namespace:
                    e = root.createElementNS(element.namespace, element.type)
                else:
                    e = root.createElement(element.type)
                if element.text:
                    textnode = root.createTextNode(element.text)
                    e.appendChild(textnode)
                # in element.attributes is supported from python 2.2
                for attribute in list(element.attributes.keys()):
                    e.setAttribute(
                        attribute, str(element.attributes[attribute]))
                if element.elements:
                    for el in element.elements:
                        e = appender(el, e)
                elementroot.appendChild(e)
                return elementroot
            root = appender(self.svg, root)
            if not filename:
                xml = StringIO()
                PrettyPrint(root, xml)
                if compress:
                    import gzip
                    f = StringIO()
                    zf = gzip.GzipFile(fileobj=f, mode='wb')
                    zf.write(xml.getvalue())
                    zf.close()
                    f.seek(0)
                    return f.read()
                else:
                    return xml.getvalue()
            else:
                try:
                    if filename[-4:] == 'svgz':
                        import gzip
                        xml = StringIO()
                        PrettyPrint(root, xml)
                        f = gzip.GzipFile(
                            filename=filename, mode='wb', compresslevel=9)
                        f.write(xml.getvalue())
                        f.close()
                    else:
                        f = open(filename, 'w')
                        PrettyPrint(root, f)
                        f.close()
                except:
                    print("Cannot write SVG file: " + filename)
Example #34
def need_to_install_distro(remote):
    """
    Installing kernels on rpm won't set up grub to boot into them.  This installs
    the newest kernel package and checks its version and compares against
    the running kernel (uname -r).  Similar check for deb.

    :returns: False if running the newest distro kernel. Returns the version of
              the newest if it is not running.
    """
    dist_release = remote.os.name
    package_type = remote.os.package_type
    current = get_version_of_running_kernel(remote)
    log.info("Running kernel on {node}: {version}".format(
        node=remote.shortname, version=current))
    installed_version = None
    if package_type == 'rpm':
        if dist_release in ['opensuse', 'sle']:
            install_stdout = remote.sh(
                'sudo zypper --non-interactive install kernel-default')
        else:
            install_stdout = remote.sh('sudo yum install -y kernel')
            match = re.search("Package (.*) already installed",
                              install_stdout,
                              flags=re.MULTILINE)
            if 'Nothing to do' in install_stdout:
                installed_version = match.groups()[0] if match else ''
                err_mess = StringIO()
                err_mess.truncate(0)
                remote.run(args=[
                    'echo', 'no',
                    run.Raw('|'), 'sudo', 'yum', 'reinstall', 'kernel',
                    run.Raw('||'), 'true'
                ],
                           stderr=err_mess)
                reinstall_stderr = err_mess.getvalue()
                err_mess.close()
                if 'Skipping the running kernel' in reinstall_stderr:
                    running_version = re.search(
                        "Skipping the running kernel: (.*)",
                        reinstall_stderr,
                        flags=re.MULTILINE).groups()[0]
                    if installed_version == running_version:
                        log.info(
                            'Newest distro kernel already installed and running'
                        )
                        return False
                else:
                    remote.run(args=[
                        'sudo', 'yum', 'reinstall', '-y', 'kernel',
                        run.Raw('||'), 'true'
                    ])
        newest = get_latest_image_version_rpm(remote)

    if package_type == 'deb':
        newest = get_latest_image_version_deb(remote, dist_release)

    if current in newest or current.replace('-', '_') in newest:
        log.info('Newest distro kernel installed and running')
        return False
    log.info('Not newest distro kernel. Current: {cur} Expected: {new}'.format(
        cur=current, new=newest))
    return newest
Example #35
def get_latest_image_version_deb(remote, ostype):
    """
    Get kernel image version of the newest kernel deb package.
    Used for distro case.

    Round-about way to get the newest kernel uname -r compliant version string
    from the virtual package which is the newest kernel for debian/ubuntu.
    """
    remote.run(args=['sudo', 'apt-get', 'clean'])
    remote.run(args=['sudo', 'apt-get', 'update'])
    output = StringIO()
    newest = ''
    # The Depends field of the virtual package has the uname -r output in the package name. Grab that.
    # Note that a dependency list may have multiple comma-separated entries,
    # but also each entry may be an alternative (pkg1 | pkg2)
    if 'debian' in ostype:
        remote.run(
            args=['sudo', 'apt-get', '-y', 'install', 'linux-image-amd64'],
            stdout=output)
        remote.run(args=['dpkg', '-s', 'linux-image-amd64'], stdout=output)
        for line in output.getvalue().split('\n'):
            if 'Depends:' in line:
                newest = line.split('linux-image-')[1]
                output.close()
                return newest
    # On Ubuntu the kernel is a dependency of a dependency.
    if 'ubuntu' in ostype:
        try:
            remote.run(args=[
                'sudo', 'DEBIAN_FRONTEND=noninteractive', 'apt-get', '-y',
                'install', 'linux-image-current-generic'
            ])
            remote.run(args=['dpkg', '-s', 'linux-image-current-generic'],
                       stdout=output)
            for line in output.getvalue().split('\n'):
                if 'Depends:' in line:
                    depends = line.split('Depends: ')[1]
                    remote.run(
                        args=['sudo', 'apt-get', '-y', 'install', depends])
            remote.run(args=['dpkg', '-s', depends], stdout=output)
        except run.CommandFailedError:
            # Non precise ubuntu machines (like trusty) don't have
            # linux-image-current-generic so use linux-image-generic instead.
            remote.run(args=[
                'sudo', 'DEBIAN_FRONTEND=noninteractive', 'apt-get', '-y',
                'install', 'linux-image-generic'
            ],
                       stdout=output)
            remote.run(args=['dpkg', '-s', 'linux-image-generic'],
                       stdout=output)
        for line in output.getvalue().split('\n'):
            if 'Depends:' in line:
                newest = line.split('linux-image-')[1]
                if ',' in newest:
                    newest = newest.split(',')[0]
                if '|' in newest:
                    # not strictly correct, as any of the |-joined
                    # packages may satisfy the dependency
                    newest = newest.split('|')[0].strip()
    output.close()
    return newest
Example #36
class TestParser(unittest.TestCase):
    def setUp(self):
        self.io = StringIO()

    def tearDown(self):
        self.io.close()

    def test_register_simple_commands(self):
        def test_handler1():
            pass

        def test_handler2():
            pass

        command = CliCommand('command the-name', test_handler1)
        command2 = CliCommand('sub-command the-second-name', test_handler2)
        cmd_table = {
            'command the-name': command,
            'sub-command the-second-name': command2
        }

        parser = AzCliCommandParser()
        parser.load_command_table(cmd_table)
        args = parser.parse_args('command the-name'.split())
        self.assertIs(args.func, test_handler1)

        args = parser.parse_args('sub-command the-second-name'.split())
        self.assertIs(args.func, test_handler2)

        AzCliCommandParser.error = VerifyError(self)
        parser.parse_args('sub-command'.split())
        self.assertTrue(AzCliCommandParser.error.called)

    def test_required_parameter(self):
        def test_handler(args):  # pylint: disable=unused-argument
            pass

        command = CliCommand('test command', test_handler)
        command.add_argument('req', '--req', required=True)
        cmd_table = {'test command': command}

        parser = AzCliCommandParser()
        parser.load_command_table(cmd_table)

        args = parser.parse_args('test command --req yep'.split())
        self.assertIs(args.func, test_handler)

        AzCliCommandParser.error = VerifyError(self)
        parser.parse_args('test command'.split())
        self.assertTrue(AzCliCommandParser.error.called)

    def test_nargs_parameter(self):
        def test_handler():
            pass

        command = CliCommand('test command', test_handler)
        command.add_argument('req', '--req', required=True, nargs=2)
        cmd_table = {'test command': command}

        parser = AzCliCommandParser()
        parser.load_command_table(cmd_table)

        args = parser.parse_args('test command --req yep nope'.split())
        self.assertIs(args.func, test_handler)

        AzCliCommandParser.error = VerifyError(self)
        parser.parse_args('test command -req yep'.split())
        self.assertTrue(AzCliCommandParser.error.called)
Example #37
    def from_directories(cls,
                         directories,
                         pattern=None,
                         ignore=(),
                         write=None,
                         relative_to=None):
        """
        convert directories to a simple manifest; returns ManifestParser instance

        pattern -- shell pattern (glob) or patterns of filenames to match
        ignore -- directory names to ignore
        write -- filename or file-like object of manifests to write;
                 if `None` then a StringIO instance will be created
        relative_to -- write paths relative to this path;
                       if false then the paths are absolute
        """

        # determine output
        opened_manifest_file = None  # name of opened manifest file
        absolute = not relative_to  # whether to output absolute path names as names
        if isinstance(write, string_types):
            opened_manifest_file = write
            write = open(write, 'w')
        if write is None:
            write = StringIO()

        # walk the directories, generating manifests
        def callback(directory, dirpath, dirnames, filenames):

            # absolute paths
            filenames = [
                os.path.join(dirpath, filename) for filename in filenames
            ]
            # ensure new manifest isn't added
            filenames = [
                filename for filename in filenames
                if filename != opened_manifest_file
            ]
            # normalize paths
            if not absolute and relative_to:
                filenames = [
                    relpath(filename, relative_to) for filename in filenames
                ]

            # write to manifest
            write_content = '\n'.join([
                '[{}]'.format(denormalize_path(filename))
                for filename in filenames
            ])
            print(write_content, file=write)

        cls._walk_directories(directories,
                              callback,
                              pattern=pattern,
                              ignore=ignore)

        if opened_manifest_file:
            # close file
            write.close()
            manifests = [opened_manifest_file]
        else:
            # manifests/write is a file-like object;
            # rewind buffer
            write.flush()
            write.seek(0)
            manifests = [write]

        # make a ManifestParser instance
        return cls(manifests=manifests)
Example #38
class Test_storage_validators(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        pass
        
    @classmethod
    def tearDownClass(cls):
        pass

    def setUp(self):
        self.io = StringIO()
        
    def tearDown(self):
        self.io.close()

    def test_permission_validator(self):
        from azure.storage.blob.models import ContainerPermissions
        from argparse import Namespace
        
        ns1 = Namespace(permission='rwdl')
        ns2 = Namespace(permission='abc')
        get_permission_validator(ContainerPermissions)(ns1)
        self.assertTrue(isinstance(ns1.permission, ContainerPermissions))
        with self.assertRaises(ValueError):
            get_permission_validator(ContainerPermissions)(ns2)

    def test_datetime_string_type(self):
        input = "2017-01-01T12:30Z"
        actual = datetime_string_type(input)
        expected = "2017-01-01T12:30Z"
        self.assertEqual(actual, expected)

        input = "2017-01-01 12:30"
        with self.assertRaises(ValueError):
            actual = datetime_string_type(input)

    def test_datetime_type(self):
        input = "2017-01-01T12:30Z"
        actual = datetime_type(input)
        expected = datetime(2017, 1, 1, 12, 30, 0)
        self.assertEqual(actual, expected)

        input = "2017-01-01 12:30"
        with self.assertRaises(ValueError):
            actual = datetime_type(input)

    def test_ipv4_range_type(self):
        input = "111.22.3.111"
        actual = ipv4_range_type(input)
        expected = input
        self.assertEqual(actual, expected)

        input = "111.22.3.111-222.11.44.111"
        actual = ipv4_range_type(input)
        expected = input
        self.assertEqual(actual, expected)

        input = "111.22"
        with self.assertRaises(ValueError):        
            actual = ipv4_range_type(input)
    
        input = "111.22.33.44-"
        with self.assertRaises(ValueError):
            actual = ipv4_range_type(input)

    def test_resource_types_type(self):
        input = "sso"
        actual = str(resource_type_type(input))
        expected = "so"
        self.assertEqual(actual, expected)

        input = "blob"
        with self.assertRaises(ValueError):
            actual = resource_type_type(input)

    def test_services_type(self):
        input = "ttfqbqtf"
        actual = str(services_type(input))
        expected = "bqtf"
        self.assertEqual(actual, expected)

        input = "everything"
        with self.assertRaises(ValueError):
            actual = services_type(input)
Example #39
def table2string(table, out=None):
    """Given list of lists figure out their common widths and print to out

    Parameters
    ----------
    table : list of lists of strings
      What is aimed to be printed
    out : None or stream
      Where to print. If None -- will print and return string

    Returns
    -------
    string if out was None
    """

    print2string = out is None
    if print2string:
        out = StringIO()

    # equalize number of elements in each row
    Nelements_max = len(table) \
                    and max(len(x) for x in table)

    for i, table_ in enumerate(table):
        table[i] += [''] * (Nelements_max - len(table_))

    # figure out lengths within each column
    atable = np.asarray(table).astype(str)
    # eat whole entry while computing width for @w (for wide)
    markup_strip = re.compile('^@([lrc]|w.*)')
    col_width = [
        max([len(markup_strip.sub('', x)) for x in column])
        for column in atable.T
    ]
    string = ""
    for i, table_ in enumerate(table):
        string_ = ""
        for j, item in enumerate(table_):
            item = str(item)
            if item.startswith('@'):
                align = item[1]
                item = item[2:]
                if align not in ['l', 'r', 'c', 'w']:
                    raise ValueError('Unknown alignment %s. Known are l,r,c,w' %
                                     align)
            else:
                align = 'c'

            NspacesL = max(ceil((col_width[j] - len(item)) / 2.0), 0)
            NspacesR = max(col_width[j] - NspacesL - len(item), 0)

            if align in ['w', 'c']:
                pass
            elif align == 'l':
                NspacesL, NspacesR = 0, NspacesL + NspacesR
            elif align == 'r':
                NspacesL, NspacesR = NspacesL + NspacesR, 0
            else:
                raise RuntimeError('Should not get here with align=%s' % align)

            string_ += "%%%ds%%s%%%ds " \
                       % (NspacesL, NspacesR) % ('', item, '')
        string += string_.rstrip() + '\n'
    out.write(string)

    if print2string:
        value = out.getvalue()
        out.close()
        return value
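A small usage sketch of the alignment markup handled in the loop above ('@l', '@r', '@c' prefixes; the values here are made up):

rows = [['@lname', '@rcount'],
        ['alpha', '10'],
        ['beta', '2']]
print(table2string(rows))  # returns the rendered string when out is None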
Example #40
    def csv(self, number_of_events, event_type='random_event'):
        print("Generating CSV test data with {} events.".format(
            number_of_events))
        event = getattr(self, event_type)  # attribute lookup; avoids eval()

        csv = StringIO()
        header = [
            schema.Required.event().id,
            schema.Required.seqno().id,
            schema.Required.timezone_sign().id,
            schema.Required.timezone_hour().id,
            schema.Required.timezone_min().id,
            schema.Required.locale().id,
            schema.Required.uid().id,
            schema.Required.message_id().id,
            schema.Required.build_id().id,
            schema.Required.platform_id().id,
            schema.Required.timestamp_utc().id,
            schema.Required.source().id,
            schema.Required.session_id().id
        ]
        custom = []
        for attr in event["attributes"]:
            header.append(attr["id"])
            custom.append(str(attr["value"]))
        header = self.__context[c.KEY_SEPERATOR_CSV].join(header)
        custom = self.__context[c.KEY_SEPERATOR_CSV].join(custom)
        csv.write(header + c.NEW_LINE)

        for i in range(0, number_of_events):
            csv.write("{}{}".format(event["id"],
                                    self.__context[c.KEY_SEPERATOR_CSV]))
            csv.write("{}{}".format(i, self.__context[c.KEY_SEPERATOR_CSV]))
            csv.write("-{}".format(self.__context[c.KEY_SEPERATOR_CSV]))
            csv.write("3{}".format(self.__context[c.KEY_SEPERATOR_CSV]))
            csv.write("0{}".format(self.__context[c.KEY_SEPERATOR_CSV]))
            csv.write("{}{}".format(
                self.__loc[randint(0,
                                   len(self.__loc) - 1)],
                self.__context[c.KEY_SEPERATOR_CSV]))
            csv.write("{}{}".format(
                self.__player_id[randint(0,
                                         len(self.__player_id) - 1)],
                self.__context[c.KEY_SEPERATOR_CSV]))
            csv.write("{}{}".format(str(self.uuid),
                                    self.__context[c.KEY_SEPERATOR_CSV]))
            csv.write("{}{}".format(
                self.__buildid[randint(0,
                                       len(self.__buildid) - 1)],
                self.__context[c.KEY_SEPERATOR_CSV]))
            csv.write("{}{}".format(
                self.__platforms[randint(0,
                                         len(self.__platforms) - 1)],
                self.__context[c.KEY_SEPERATOR_CSV]))
            csv.write("{}{}".format(time.time(),
                                    self.__context[c.KEY_SEPERATOR_CSV]))
            csv.write("{}{}".format(self.__source,
                                    self.__context[c.KEY_SEPERATOR_CSV]))
            if len(custom) == 0:
                csv.write("{}".format(self.__session_id[randint(
                    0,
                    len(self.__session_id) - 1)]))
            else:
                csv.write("{}{}".format(
                    self.__session_id[randint(0,
                                              len(self.__session_id) - 1)],
                    self.__context[c.KEY_SEPERATOR_CSV]))
            csv.write(custom + c.NEW_LINE)

        result = csv.getvalue()
        csv.close()
        return result
Example #41
def get_asos(time_window, lat_range=None, lon_range=None, station=None):
    """
    Returns all of the station observations from the Iowa Mesonet from either
    a given latitude and longitude window or a given station code.

    Parameters
    ----------
    time_window: tuple
        A 2 member list or tuple containing the start and end times. The
        times must be python datetimes.
    lat_range: tuple
        The latitude window to grab all of the ASOS observations from.
    lon_range: tuple
        The longitude window to grab all of the ASOS observations from.
    station: str
        The station ID to grab the ASOS observations from.

    Returns
    -------
    asos_ds: dict of xarray datasets
        A dictionary of ACT datasets whose keys are the ASOS station IDs.

    Examples
    --------
    If you want to obtain timeseries of ASOS observations for Chicago O'Hare
    Airport, simply do::

        >>> time_window = [datetime(2020, 2, 4, 2, 0), datetime(2020, 2, 10, 10, 0)]
        >>> station = "ORD"
        >>> my_asoses = act.discovery.get_asos(time_window, station=station)
    """

    # First query the database for all of the JSON info for every station
    # Only add stations whose lat/lon are within the Grid's boundaries
    regions = """AF AL_ AI_ AQ_ AG_ AR_ AK AL AM_
        AO_ AS_ AR AW_ AU_ AT_
        AZ_ BA_ BE_ BB_ BG_ BO_ BR_ BF_
        BT_ BS_ BI_ BM_ BB_ BY_ BZ_ BJ_ BW_ AZ CA CA_AB
        CA_BC CD_ CK_ CF_ CG_ CL_ CM_ CO CO_ CN_ CR_ CT
        CU_ CV_ CY_ CZ_ DE DK_ DJ_ DM_ DO_
        DZ EE_ ET_ FK_ FM_ FJ_ FI_ FR_ GF_ PF_
        GA_ GM_ GE_ DE_ GH_ GI_ KY_ GB_ GR_ GL_ GD_
        GU_ GT_ GN_ GW_ GY_ HT_ HN_ HK_ HU_ IS_ IN_
        ID_ IR_ IQ_ IE_ IL_ IT_ CI_ JM_ JP_
        JO_ KZ_ KE_ KI_ KW_ LA_ LV_ LB_ LS_ LR_ LY_
        LT_ LU_ MK_ MG_ MW_ MY_ MV_ ML_ CA_MB
        MH_ MR_ MU_ YT_ MX_ MD_ MC_ MA_ MZ_ MM_ NA_ NP_
        AN_ NL_ CA_NB NC_ CA_NF NF_ NI_
        NE_ NG_ MP_ KP_ CA_NT NO_ CA_NS CA_NU OM_
        CA_ON PK_ PA_ PG_ PY_ PE_ PH_ PN_ PL_
        PT_ CA_PE PR_ QA_ CA_QC RO_ RU_ RW_ SH_ KN_
        LC_ VC_ WS_ ST_ CA_SK SA_ SN_ RS_ SC_
        SL_ SG_ SK_ SI_ SB_ SO_ ZA_ KR_ ES_ LK_ SD_ SR_
        SZ_ SE_ CH_ SY_ TW_ TJ_ TZ_ TH_
        TG_ TO_ TT_ TU TN_ TR_ TM_ UG_ UA_ AE_ UN_ UY_
        UZ_ VU_ VE_ VN_ VI_ YE_ CA_YT ZM_ ZW_
        EC_ EG_ FL GA GQ_ HI HR_ IA ID IL IO_ IN KS
        KH_ KY KM_ LA MA MD ME
        MI MN MO MS MT NC ND NE NH NJ NM NV NY OH OK
        OR PA RI SC SV_ SD TD_ TN TX UT VA VT VG_
        WA WI WV WY"""

    networks = ["AWOS"]
    metadata_list = {}
    if lat_range is not None and lon_range is not None:
        lon_min, lon_max = lon_range
        lat_min, lat_max = lat_range
        for region in regions.split():
            networks.append("%s_ASOS" % (region, ))

        site_list = []
        for network in networks:
            # Get metadata
            uri = ("https://mesonet.agron.iastate.edu/"
                   "geojson/network/%s.geojson") % (network, )
            data = urlopen(uri)
            jdict = json.load(data)
            for site in jdict["features"]:
                lat = site["geometry"]["coordinates"][1]
                lon = site["geometry"]["coordinates"][0]
                if lat >= lat_min and lat <= lat_max:
                    if lon >= lon_min and lon <= lon_max:
                        station_metadata_dict = {}
                        station_metadata_dict["site_latitude"] = lat
                        station_metadata_dict["site_longitude"] = lat
                        for my_keys in site["properties"]:
                            station_metadata_dict[my_keys] = site[
                                "properties"][my_keys]
                        metadata_list[site["properties"]
                                      ["sid"]] = station_metadata_dict
                        site_list.append(site["properties"]["sid"])
    elif station is not None:
        site_list = [station]
        for region in regions.split():
            networks.append("%s_ASOS" % (region, ))
        for network in networks:
            # Get metadata
            uri = ("https://mesonet.agron.iastate.edu/"
                   "geojson/network/%s.geojson") % (network, )
            data = urlopen(uri)
            jdict = json.load(data)
            for site in jdict["features"]:
                lat = site["geometry"]["coordinates"][1]
                lon = site["geometry"]["coordinates"][0]
                if site["properties"]["sid"] == station:
                    station_metadata_dict = {}
                    station_metadata_dict["site_latitude"] = lat
                    station_metadata_dict["site_longitude"] = lon
                    for my_keys in site["properties"]:
                        if my_keys == "elevation":
                            station_metadata_dict["elevation"] = \
                                '%f meter' % site["properties"][my_keys]
                        else:
                            station_metadata_dict[my_keys] = \
                                site["properties"][my_keys]
                    metadata_list[station] = station_metadata_dict

        # Get station metadata
    else:
        raise ValueError(
            "Either both lat_range and lon_range or station must " +
            "be specified!")

    # Get the timestamp for each request
    start_time = time_window[0]
    end_time = time_window[1]

    SERVICE = "http://mesonet.agron.iastate.edu/cgi-bin/request/asos.py?"
    service = SERVICE + "data=all&tz=Etc/UTC&format=comma&latlon=yes&"

    service += start_time.strftime(
        "year1=%Y&month1=%m&day1=%d&hour1=%H&minute1=%M&")
    service += end_time.strftime(
        "year2=%Y&month2=%m&day2=%d&hour2=%H&minute2=%M")
    station_obs = {}
    for stations in site_list:
        uri = "%s&station=%s" % (service, stations)
        print("Downloading: %s" % (stations, ))
        data = _download_data(uri)
        buf = StringIO()
        buf.write(data)
        buf.seek(0)

        my_df = pd.read_csv(buf, skiprows=5, na_values="M")
        buf.close()

        if len(my_df['lat'].values) == 0:
            warnings.warn(
                "No data available at station %s between time %s and %s" %
                (stations, start_time.strftime('%Y-%m-%d %H:%M:%S'),
                 end_time.strftime('%Y-%m-%d %H:%M:%S')))
        else:

            def to_datetime(x):
                return datetime.strptime(x, "%Y-%m-%d %H:%M")

            my_df["time"] = my_df["valid"].apply(to_datetime)
            my_df = my_df.set_index("time")
            my_df = my_df.drop("valid", axis=1)
            my_df = my_df.drop("station", axis=1)
            my_df = my_df.to_xarray()

            my_df.attrs = metadata_list[stations]
            my_df["lon"].attrs["units"] = "degree"
            my_df["lon"].attrs["long_name"] = "Longitude"
            my_df["lat"].attrs["units"] = "degree"
            my_df["lat"].attrs["long_name"] = "Latitude"

            my_df["tmpf"].attrs["units"] = "degrees Fahrenheit"
            my_df["tmpf"].attrs[
                "long_name"] = "Temperature in degrees Fahrenheit"

            # Fahrenheit to Celsius
            my_df["temp"] = (5. / 9. * my_df["tmpf"]) - 32.0
            my_df["temp"].attrs["units"] = "degrees Celsius"
            my_df["temp"].attrs["long_name"] = "Temperature in degrees Celsius"
            my_df["dwpf"].attrs["units"] = "degrees Fahrenheit"
            my_df["dwpf"].attrs[
                "long_name"] = "Dewpoint temperature in degrees Fahrenheit"

            # Fahrenheit to Celsius
            my_df["dwpc"] = (5. / 9. * my_df["tmpf"]) - 32.0
            my_df["dwpc"].attrs["units"] = "degrees Celsius"
            my_df["dwpc"].attrs[
                "long_name"] = "Dewpoint temperature in degrees Celsius"
            my_df["relh"].attrs["units"] = "percent"
            my_df["relh"].attrs["long_name"] = "Relative humidity"
            my_df["drct"].attrs["units"] = "degrees"
            my_df["drct"].attrs["long_name"] = "Wind speed in degrees"
            my_df["sknt"].attrs["units"] = "knots"
            my_df["sknt"].attrs["long_name"] = "Wind speed in knots"
            my_df["spdms"] = my_df["sknt"] * 0.514444
            my_df["spdms"].attrs["units"] = "m s-1"
            my_df["spdms"].attrs[
                "long_name"] = "Wind speed in meters per second"
            my_df['u'] = -np.sin(np.deg2rad(my_df["drct"])) * my_df["spdms"]
            my_df['u'].attrs["units"] = "m s-1"
            my_df['u'].attrs["long_name"] = "Zonal component of surface wind"
            my_df['v'] = -np.cos(np.deg2rad(my_df["drct"])) * my_df["spdms"]
            my_df['v'].attrs["units"] = "m s-1"
            my_df['v'].attrs[
                "long_name"] = "Meridional component of surface wind"
            my_df["mslp"].attrs["units"] = "mb"
            my_df["mslp"].attrs["long_name"] = "Mean Sea Level Pressure"
            my_df["alti"].attrs["units"] = "in Hg"
            my_df["alti"].attrs[
                "long_name"] = "Atmospheric pressure in inches of Mercury"
            my_df["vsby"].attrs["units"] = "mi"
            my_df["vsby"].attrs["long_name"] = "Visibility"
            my_df["vsbykm"] = my_df["vsby"] * 1.60934
            my_df["vsbykm"].attrs["units"] = 'km'
            my_df["vsbykm"].attrs["long_name"] = "Visibility"
            my_df["gust"] = my_df["gust"] * 0.514444
            my_df["gust"].attrs["units"] = 'm s-1'
            my_df["gust"].attrs["long_name"] = "Wind gust speed"
            my_df["skyc1"].attrs["long_name"] = "Sky level 1 coverage"
            my_df["skyc2"].attrs["long_name"] = "Sky level 2 coverage"
            my_df["skyc3"].attrs["long_name"] = "Sky level 3 coverage"
            my_df["skyc4"].attrs["long_name"] = "Sky level 4 coverage"
            my_df["skyl1"] = my_df["skyl1"] * 0.3048
            my_df["skyl2"] = my_df["skyl2"] * 0.3048
            my_df["skyl3"] = my_df["skyl3"] * 0.3048
            my_df["skyl4"] = my_df["skyl4"] * 0.3048
            my_df["skyl1"].attrs["long_name"] = "Sky level 1 altitude"
            my_df["skyl2"].attrs["long_name"] = "Sky level 2 altitude"
            my_df["skyl3"].attrs["long_name"] = "Sky level 3 altitude"
            my_df["skyl4"].attrs["long_name"] = "Sky level 4 altitude"
            my_df["skyl1"].attrs["long_name"] = "meter"
            my_df["skyl2"].attrs["long_name"] = "meter"
            my_df["skyl3"].attrs["long_name"] = "meter"
            my_df["skyl4"].attrs["long_name"] = "meter"

            my_df["wxcodes"].attrs["long_name"] = "Weather code"
            my_df["ice_accretion_1hr"] = my_df["ice_accretion_1hr"] * 2.54
            my_df["ice_accretion_1hr"].attrs["units"] = "cm"
            my_df["ice_accretion_1hr"].attrs[
                "long_name"] = "1 hour ice accretion"
            my_df["ice_accretion_3hr"] = my_df["ice_accretion_3hr"] * 2.54
            my_df["ice_accretion_3hr"].attrs["units"] = "cm"
            my_df["ice_accretion_3hr"].attrs[
                "long_name"] = "3 hour ice accretion"
            my_df["ice_accretion_6hr"] = my_df["ice_accretion_3hr"] * 2.54
            my_df["ice_accretion_6hr"].attrs["units"] = "cm"
            my_df["ice_accretion_6hr"].attrs[
                "long_name"] = "6 hour ice accretion"
            my_df["peak_wind_gust"] = my_df["peak_wind_gust"] * 0.514444
            my_df["peak_wind_gust"].attrs["units"] = 'm s-1'
            my_df["peak_wind_gust"].attrs["long_name"] = "Peak wind gust speed"
            my_df["peak_wind_drct"].attrs["drct"] = 'degree'
            my_df["peak_wind_drct"].attrs[
                "long_name"] = "Peak wind gust direction"
            my_df['u_peak'] = -np.sin(np.deg2rad(
                my_df["peak_wind_drct"])) * my_df["peak_wind_gust"]
            my_df['u_peak'].attrs["units"] = "m s-1"
            my_df['u_peak'].attrs[
                "long_name"] = "Zonal component of peak surface wind"
            my_df['v_peak'] = -np.cos(np.deg2rad(
                my_df["peak_wind_drct"])) * my_df["peak_wind_gust"]
            my_df['v_peak'].attrs["units"] = "m s-1"
            my_df['v_peak'].attrs[
                "long_name"] = "Meridional component of peak surface wind"
            my_df["metar"].attrs["long_name"] = "Raw METAR code"
            my_df.attrs['_datastream'] = stations

            station_obs[stations] = my_df
    return station_obs
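
A short usage sketch for the downloader above. The enclosing function's name is not visible in this excerpt, so get_asos is assumed here for illustration, and "ORD" is only an example station sid. The call takes a two-element time_window plus either station or lat_range/lon_range, and returns a dict of xarray Datasets keyed by sid.

from datetime import datetime

# Hypothetical function name and station id -- adjust to the real ones.
time_window = [datetime(2020, 2, 4, 2, 0), datetime(2020, 2, 4, 10, 0)]
my_asoses = get_asos(time_window, station="ORD")
ds = my_asoses["ORD"]      # xarray Dataset of observations for that station
print(ds["temp"])          # temperature, converted to degrees Celsius above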
Пример #42
0
class log_output(object):
    """Context manager that logs its output to a file.

    In the simplest case, the usage looks like this::

        with log_output('logfile.txt'):
            # do things ... output will be logged

    Any output from the with block will be redirected to ``logfile.txt``.
    If you also want the output to be echoed to ``stdout``, use the
    ``echo`` parameter::

        with log_output('logfile.txt', echo=True):
            # do things ... output will be logged and printed out

    And, if you just want to echo *some* stuff from the parent, use
    ``force_echo``::

        with log_output('logfile.txt', echo=False) as logger:
            # do things ... output will be logged

            with logger.force_echo():
                # things here will be echoed *and* logged

    Under the hood, we spawn a daemon and set up a pipe between this
    process and the daemon.  The daemon writes our output to both the
    file and to stdout (if echoing).  The parent process can communicate
    with the daemon to tell it when and when not to echo; this is what
    force_echo does.  You can also enable/disable echoing by typing 'v'.

    We try to use OS-level file descriptors to do the redirection, but if
    stdout or stderr has been set to some Python-level file object, we
    use Python-level redirection instead.  This allows the redirection to
    work within test frameworks like nose and pytest.
    """
    def __init__(self, file_like=None, echo=False, debug=False, buffer=False):
        """Create a new output log context manager.

        Args:
            file_like (str or stream): open file object or name of file where
                output should be logged
            echo (bool): whether to echo output in addition to logging it
            debug (bool): whether to enable tty debug mode during logging
            buffer (bool): pass buffer=True to skip unbuffering output; note
                this doesn't set up any *new* buffering

        log_output can take either a file object or a filename. If a
        filename is passed, the file will be opened and closed entirely
        within ``__enter__`` and ``__exit__``. If a file object is passed,
        this assumes the caller owns it and will close it.

        By default, we unbuffer sys.stdout and sys.stderr because the
        logger will include output from executed programs and from python
        calls.  If stdout and stderr are buffered, their output won't be
        printed in the right place w.r.t. output from commands.

        Logger daemon is not started until ``__enter__()``.

        """
        self.file_like = file_like
        self.echo = echo
        self.debug = debug
        self.buffer = buffer

        self._active = False  # used to prevent re-entry

    def __call__(self, file_like=None, echo=None, debug=None, buffer=None):
        """Thie behaves the same as init. It allows a logger to be reused.

        Arguments are the same as for ``__init__()``.  Args here take
        precedence over those passed to ``__init__()``.

        With the ``__call__`` function, you can save state between uses
        of a single logger.  This is useful if you want to remember,
        e.g., the echo settings for a prior ``with log_output()``::

            logger = log_output()

            with logger('foo.txt'):
                # log things; user can change echo settings with 'v'

            with logger('bar.txt'):
                # log things; logger remembers prior echo settings.

        """
        if file_like is not None:
            self.file_like = file_like
        if echo is not None:
            self.echo = echo
        if debug is not None:
            self.debug = debug
        if buffer is not None:
            self.buffer = buffer
        return self

    def __enter__(self):
        if self._active:
            raise RuntimeError("Can't re-enter the same log_output!")

        if self.file_like is None:
            raise RuntimeError(
                "file argument must be set by either __init__ or __call__")

        # set up a stream for the daemon to write to
        self.close_log_in_parent = True
        self.write_log_in_parent = False
        if isinstance(self.file_like, string_types):
            self.log_file = open(self.file_like, 'w')

        elif _file_descriptors_work(self.file_like):
            self.log_file = self.file_like
            self.close_log_in_parent = False

        else:
            self.log_file = StringIO()
            self.write_log_in_parent = True

        # record parent color settings before redirecting.  We do this
        # because color output depends on whether the *original* stdout
        # is a TTY.  New stdout won't be a TTY so we force colorization.
        self._saved_color = tty.color._force_color
        forced_color = tty.color.get_color_when()

        # also record parent debug settings -- in case the logger is
        # forcing debug output.
        self._saved_debug = tty._debug

        # OS-level pipe for redirecting output to logger
        self.read_fd, self.write_fd = os.pipe()

        # Multiprocessing pipe for communication back from the daemon
        # Currently only used to save echo value between uses
        self.parent, self.child = multiprocessing.Pipe()

        # Sets a daemon that writes to file what it reads from a pipe
        try:
            # need to pass this b/c multiprocessing closes stdin in child.
            try:
                input_stream = os.fdopen(os.dup(sys.stdin.fileno()))
            except BaseException:
                input_stream = None  # just don't forward input if this fails

            self.process = multiprocessing.Process(target=self._writer_daemon,
                                                   args=(input_stream, ))
            self.process.daemon = True  # must set before start()
            self.process.start()
            os.close(self.read_fd)  # close in the parent process

        finally:
            if input_stream:
                input_stream.close()

        # Flush immediately before redirecting so that anything buffered
        # goes to the original stream
        sys.stdout.flush()
        sys.stderr.flush()

        # Now do the actual output redirection.
        self.use_fds = _file_descriptors_work(sys.stdout, sys.stderr)
        if self.use_fds:
            # We try first to use OS-level file descriptors, as this
            # redirects output for subprocesses and system calls.

            # Save old stdout and stderr file descriptors
            self._saved_stdout = os.dup(sys.stdout.fileno())
            self._saved_stderr = os.dup(sys.stderr.fileno())

            # redirect to the pipe we created above
            os.dup2(self.write_fd, sys.stdout.fileno())
            os.dup2(self.write_fd, sys.stderr.fileno())
            os.close(self.write_fd)

        else:
            # Handle I/O the Python way. This won't redirect lower-level
            # output, but it's the best we can do, and the caller
            # shouldn't expect any better, since *they* have apparently
            # redirected I/O the Python way.

            # Save old stdout and stderr file objects
            self._saved_stdout = sys.stdout
            self._saved_stderr = sys.stderr

            # create a file object for the pipe; redirect to it.
            pipe_fd_out = os.fdopen(self.write_fd, 'w')
            sys.stdout = pipe_fd_out
            sys.stderr = pipe_fd_out

        # Unbuffer stdout and stderr at the Python level
        if not self.buffer:
            sys.stdout = Unbuffered(sys.stdout)
            sys.stderr = Unbuffered(sys.stderr)

        # Force color and debug settings now that we have redirected.
        tty.color.set_color_when(forced_color)
        tty._debug = self.debug

        # track whether we're currently inside this log_output
        self._active = True

        # return this log_output object so that the user can do things
        # like temporarily echo some output.
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Flush any buffered output to the logger daemon.
        sys.stdout.flush()
        sys.stderr.flush()

        # restore previous output settings, either the low-level way or
        # the python way
        if self.use_fds:
            os.dup2(self._saved_stdout, sys.stdout.fileno())
            os.close(self._saved_stdout)

            os.dup2(self._saved_stderr, sys.stderr.fileno())
            os.close(self._saved_stderr)
        else:
            sys.stdout = self._saved_stdout
            sys.stderr = self._saved_stderr

        # print log contents in parent if needed.
        if self.write_log_in_parent:
            string = self.parent.recv()
            self.file_like.write(string)

        if self.close_log_in_parent:
            self.log_file.close()

        # recover and store echo settings from the child before it dies
        self.echo = self.parent.recv()

        # join the daemon process. The daemon will quit automatically
        # when the write pipe is closed; we just wait for it here.
        self.process.join()

        # restore old color and debug settings
        tty.color._force_color = self._saved_color
        tty._debug = self._saved_debug

        self._active = False  # safe to enter again

    @contextmanager
    def force_echo(self):
        """Context manager to force local echo, even if echo is off."""
        if not self._active:
            raise RuntimeError(
                "Can't call force_echo() outside log_output region!")

        # This uses xon/xoff control characters to mark regions to be
        # echoed in the output. We use these rather than, say, a separate
        # pipe, because they're in-band and guaranteed to appear exactly
        # before and after the text we want to echo.
        sys.stdout.write(xon)
        sys.stdout.flush()
        yield
        sys.stdout.write(xoff)
        sys.stdout.flush()

    def _writer_daemon(self, stdin):
        """Daemon that writes output to the log file and stdout."""
        # Use line buffering (3rd param = 1) since Python 3 has a bug
        # that prevents unbuffered text I/O.
        in_pipe = os.fdopen(self.read_fd, 'r', 1)
        os.close(self.write_fd)

        echo = self.echo  # initial echo setting, user-controllable
        force_echo = False  # parent can force echo for certain output

        # list of streams to select from
        istreams = [in_pipe, stdin] if stdin else [in_pipe]

        log_file = self.log_file
        try:
            with keyboard_input(stdin):
                while True:
                    # No need to set any timeout for select.select
                    # Wait until a key press or an event on in_pipe.
                    rlist, _, _ = select.select(istreams, [], [])

                    # Allow user to toggle echo with 'v' key.
                    # Currently ignores other chars.
                    if stdin in rlist:
                        if stdin.read(1) == 'v':
                            echo = not echo

                    # Handle output from the with block process.
                    if in_pipe in rlist:
                        # If we arrive here, in_pipe was ready for reading;
                        # an empty read means the write end closed (EOF).
                        line = in_pipe.readline()
                        if not line:
                            break  # EOF

                        # find control characters and strip them.
                        controls = control.findall(line)
                        line = re.sub(control, '', line)

                        # Echo to stdout if requested or forced
                        if echo or force_echo:
                            sys.stdout.write(line)
                            sys.stdout.flush()

                        # Stripped output to log file.
                        log_file.write(_strip(line))
                        log_file.flush()

                        if xon in controls:
                            force_echo = True
                        if xoff in controls:
                            force_echo = False
        except BaseException:
            tty.error("Exception occurred in writer daemon!")
            traceback.print_exc()

        finally:
            # send written data back to parent if we used a StringIO
            if self.write_log_in_parent:
                self.child.send(log_file.getvalue())
            log_file.close()

        # send echo value back to the parent so it can be preserved.
        self.child.send(echo)
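
The pipe-and-daemon mechanism described in the docstring above can be illustrated with a minimal, POSIX-only sketch (an independent toy under stated assumptions, not Spack's actual implementation): a forked child tees everything the parent writes to stdout into a log file.

import multiprocessing
import os
import sys


def tee(read_fd, write_fd, path):
    os.close(write_fd)  # drop the inherited write end so EOF can arrive
    with os.fdopen(read_fd) as in_pipe, open(path, 'w') as log:
        for line in in_pipe:
            sys.stdout.write(line)  # echo to the original stdout
            log.write(line)         # and append to the log file


if __name__ == '__main__':
    read_fd, write_fd = os.pipe()
    ctx = multiprocessing.get_context('fork')  # fds must be inherited
    proc = ctx.Process(target=tee, args=(read_fd, write_fd, 'logfile.txt'))
    proc.start()
    os.close(read_fd)                       # parent keeps only the write end
    sys.stdout.flush()
    saved = os.dup(sys.stdout.fileno())     # remember the real stdout
    os.dup2(write_fd, sys.stdout.fileno())  # redirect stdout into the pipe
    os.close(write_fd)
    print('this line is echoed and logged')
    sys.stdout.flush()
    os.dup2(saved, sys.stdout.fileno())     # restore stdout; the pipe's last
    os.close(saved)                         # write end closes, child sees EOF
    proc.join()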
Пример #43
0
class TestStorageValidators(unittest.TestCase):
    def setUp(self):
        self.io = StringIO()

    def tearDown(self):
        self.io.close()

    def test_permission_validator(self):
        ContainerPermissions = get_sdk(ResourceType.DATA_STORAGE,
                                       'blob.models#ContainerPermissions')

        ns1 = Namespace(permission='rwdl')
        ns2 = Namespace(permission='abc')
        get_permission_validator(ContainerPermissions)(ns1)
        self.assertTrue(isinstance(ns1.permission, ContainerPermissions))
        with self.assertRaises(ValueError):
            get_permission_validator(ContainerPermissions)(ns2)

    def test_datetime_string_type(self):
        input = "2017-01-01T12:30Z"
        actual = get_datetime_type(True)(input)
        expected = "2017-01-01T12:30Z"
        self.assertEqual(actual, expected)

        input = "2017-01-01 12:30"
        with self.assertRaises(ValueError):
            actual = get_datetime_type(True)(input)

    def test_datetime_type(self):
        input = "2017-01-01T12:30Z"
        actual = get_datetime_type(False)(input)
        expected = datetime(2017, 1, 1, 12, 30, 0)
        self.assertEqual(actual, expected)

        input = "2017-01-01 12:30"
        with self.assertRaises(ValueError):
            actual = get_datetime_type(False)(input)

    def test_ipv4_range_type(self):
        input = "111.22.3.111"
        actual = ipv4_range_type(input)
        expected = input
        self.assertEqual(actual, expected)

        input = "111.22.3.111-222.11.44.111"
        actual = ipv4_range_type(input)
        expected = input
        self.assertEqual(actual, expected)

        input = "111.22"
        with self.assertRaises(ValueError):
            actual = ipv4_range_type(input)

        input = "111.22.33.44-"
        with self.assertRaises(ValueError):
            actual = ipv4_range_type(input)

    def test_resource_types_type(self):
        input = "sso"
        actual = str(resource_type_type(input))
        expected = "so"
        self.assertEqual(actual, expected)

        input = "blob"
        with self.assertRaises(ValueError):
            actual = resource_type_type(input)

    def test_services_type(self):
        input = "ttfqbqtf"
        actual = str(services_type(input))
        expected = "bqtf"
        self.assertEqual(actual, expected)

        input = "everything"
        with self.assertRaises(ValueError):
            actual = services_type(input)

    def test_storage_process_blob_source_uri_redundant_parameter(self):
        with self.assertRaises(ValueError):
            process_blob_source_uri(
                Namespace(copy_source='https://example.com',
                          source_sas='some_sas'))
        with self.assertRaises(ValueError):
            process_blob_source_uri(
                Namespace(copy_source='https://example.com',
                          source_account_name='account_name'))
Пример #44
0
class S3HandlerTestUpload(unittest.TestCase):
    """
    This class tests the ability to upload objects into an S3 bucket as
    well as multipart uploads
    """
    def setUp(self):
        self.session = botocore.session.get_session(EnvironmentVariables)
        self.service = self.session.get_service('s3')
        self.endpoint = self.service.get_endpoint('us-east-1')
        params = {'region': 'us-east-1', 'acl': ['private']}
        self.s3_handler = S3Handler(self.session, params)
        self.s3_handler_multi = S3Handler(self.session,
                                          multi_threshold=10,
                                          chunksize=2,
                                          params=params)
        self.bucket = create_bucket(self.session)
        self.loc_files = make_loc_files()
        self.s3_files = [
            self.bucket + '/text1.txt',
            self.bucket + '/another_directory/text2.txt'
        ]
        self.output = StringIO()
        self.saved_stderr = sys.stderr
        sys.stderr = self.output

    def tearDown(self):
        self.output.close()
        sys.stderr = self.saved_stderr
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_upload(self):
        # Confirm there are no objects in the bucket.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 0)
        # Create file info objects to perform upload.
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(
                FileInfo(
                    src=self.loc_files[i],
                    dest=self.s3_files[i],
                    operation_name='upload',
                    size=0,
                    service=self.service,
                    endpoint=self.endpoint,
                ))
        # Perform the upload.
        self.s3_handler.call(tasks)
        # Confirm the files were uploaded.
        self.assertEqual(len(list_contents(self.bucket, self.session)), 2)

    def test_multi_upload(self):
        files = [self.loc_files[0], self.loc_files[1]]
        tasks = []
        for i in range(len(files)):
            tasks.append(
                FileInfo(
                    src=self.loc_files[i],
                    dest=self.s3_files[i],
                    size=15,
                    operation_name='upload',
                    service=self.service,
                    endpoint=self.endpoint,
                ))

        # Nothing is uploaded because the proposed multipart upload is
        # smaller than the minimum allowed size; the expected error message
        # is printed to stderr (captured in self.output).
        self.s3_handler_multi.call(tasks)
        print_op = "Error: Your proposed upload is smaller than the minimum"
        self.assertIn(print_op, self.output.getvalue())
Пример #45
0
class TestStorageValidators(unittest.TestCase):
    def setUp(self):
        self.io = StringIO()
        self.cli = MockCLI()
        self.loader = MockLoader(self.cli)

    def tearDown(self):
        self.io.close()

    def test_permission_validator(self):
        t_container_permissions = get_sdk(self.cli, CUSTOM_DATA_STORAGE, 'blob.models#ContainerPermissions')

        ns1 = Namespace(permission='rwdl')
        ns2 = Namespace(permission='abc')
        get_permission_validator(t_container_permissions)(ns1)
        self.assertTrue(isinstance(ns1.permission, t_container_permissions))
        with self.assertRaises(ValueError):
            get_permission_validator(t_container_permissions)(ns2)

    def test_datetime_string_type(self):
        input = "2017-01-01T12:30Z"
        actual = get_datetime_type(True)(input)
        expected = "2017-01-01T12:30Z"
        self.assertEqual(actual, expected)

        input = "2017-01-01 12:30"
        with self.assertRaises(ValueError):
            get_datetime_type(True)(input)

    def test_datetime_type(self):
        import datetime
        input = "2017-01-01T12:30Z"
        actual = get_datetime_type(False)(input)
        expected = datetime.datetime(2017, 1, 1, 12, 30, 0)
        self.assertEqual(actual, expected)

        input = "2017-01-01 12:30"
        with self.assertRaises(ValueError):
            actual = get_datetime_type(False)(input)

    def test_ipv4_range_type(self):
        input = "111.22.3.111"
        actual = ipv4_range_type(input)
        expected = input
        self.assertEqual(actual, expected)

        input = "111.22.3.111-222.11.44.111"
        actual = ipv4_range_type(input)
        expected = input
        self.assertEqual(actual, expected)

        input = "111.22"
        with self.assertRaises(ValueError):
            actual = ipv4_range_type(input)

        input = "111.22.33.44-"
        with self.assertRaises(ValueError):
            actual = ipv4_range_type(input)

    def test_resource_types_type(self):
        input = "sso"
        actual = str(resource_type_type(self.loader)(input))
        expected = "so"
        self.assertEqual(actual, expected)

        input = "blob"
        with self.assertRaises(ValueError):
            actual = resource_type_type(self.loader)(input)

    def test_services_type(self):
        input = "ttfqbqtf"
        actual = str(services_type(self.loader)(input))
        if supported_api_version(self.cli, CUSTOM_DATA_STORAGE, max_api='2016-05-31') or \
           supported_api_version(self.cli, CUSTOM_DATA_STORAGE, min_api='2017-07-29'):
            expected = "bqtf"
        else:
            expected = "bqf"
        self.assertEqual(actual, expected)

        input = "everything"
        with self.assertRaises(ValueError):
            services_type(self.loader)(input)

    def test_storage_process_blob_source_uri_redundant_parameter(self):
        with self.assertRaises(ValueError):
            process_blob_source_uri(MockCmd(self.cli),
                                    Namespace(copy_source='https://example.com', source_sas='some_sas'))
        with self.assertRaises(ValueError):
            process_blob_source_uri(MockCmd(self.cli),
                                    Namespace(copy_source='https://example.com', source_account_name='account_name'))

    def test_storage_get_char_options_validator(self):
        with self.assertRaises(ValueError) as cm:
            get_char_options_validator('abc', 'no_such_property')(object())
        self.assertEqual('Missing options --no-such-property.', str(cm.exception))

        ns = Namespace(services='bcd')
        with self.assertRaises(ValueError) as cm:
            get_char_options_validator('abc', 'services')(ns)
        self.assertEqual('--services: only valid values are: a, b, c.', str(cm.exception))

        ns = Namespace(services='ab')
        get_char_options_validator('abc', 'services')(ns)

        result = getattr(ns, 'services')
        self.assertIs(type(result), set)
        self.assertEqual(result, set('ab'))
Пример #46
0
def main():
    print_env_vars()
    time_str = datetime.utcnow().strftime('%Y%m%d%H%M%S')
    az(["login"], _out=sys.stdout, _err=sys.stdout)
    resource_group = 'azurecli-release-debian-' + time_str
    vm_name = 'vm-debian-' + time_str
    print_status('Creating resource group.')
    az(['group', 'create', '-l', 'westus', '-n', resource_group],
       _out=sys.stdout,
       _err=sys.stdout)
    print_status('Creating VM.')
    az([
        'vm', 'create', '-g', resource_group, '-n', vm_name,
        '--generate-ssh-keys', '--authentication-type', 'ssh', '--image',
        'Canonical:UbuntuServer:14.04.4-LTS:latest', '--admin-username',
        'ubuntu'
    ],
       _out=sys.stdout,
       _err=sys.stdout)
    io = StringIO()
    print_status('Getting VM IP address.')
    az([
        'vm', 'list-ip-addresses', '--resource-group', resource_group,
        '--name', vm_name, '--query',
        '[0].virtualMachine.network.publicIpAddresses[0].ipAddress'
    ],
       _out=io)
    ip_address = io.getvalue().strip().replace('"', '')
    print_status('VM IP address is {}'.format(ip_address))
    io.close()
    vm_connect_str = "ubuntu@{}".format(ip_address)
    my_vm = ssh.bake(['-oStrictHostKeyChecking=no', vm_connect_str])
    print_status('Installing git.')
    my_vm([
        'sudo', 'apt-get', 'update', '&&', 'sudo', 'apt-get', 'install', '-y',
        'git'
    ],
          _out=sys.stdout,
          _err=sys.stdout)
    io = StringIO()
    my_vm(['mktemp', '-d'], _out=io)
    repo_dir = io.getvalue().strip()
    io.close()
    print_status('Cloning repo.')
    my_vm([
        'git', 'clone', 'https://github.com/{}'.format(
            script_env.get('REPO_NAME')), repo_dir
    ],
          _out=sys.stdout,
          _err=sys.stdout)
    path_to_debian_build_script = os.path.join(repo_dir, 'packaged_releases',
                                               'debian', 'debian_build.sh')
    path_to_dir_creator = os.path.join(repo_dir, 'packaged_releases', 'debian',
                                       'debian_dir_creator.sh')
    io = StringIO()
    my_vm(['mktemp', '-d'], _out=io)
    build_artifact_dir = io.getvalue().strip()
    io.close()
    print_status('Running debian build scripts.')
    my_vm(['chmod', '+x', path_to_debian_build_script, path_to_dir_creator],
          _out=sys.stdout,
          _err=sys.stdout)
    my_vm([
        'export', 'CLI_VERSION={}'.format(script_env.get('CLI_VERSION')), '&&',
        'export', 'CLI_DOWNLOAD_SHA256={}'.format(
            script_env.get('CLI_DOWNLOAD_SHA256')), '&&', 'export',
        'BUILD_ARTIFACT_DIR={}'.format(build_artifact_dir), '&&',
        path_to_debian_build_script, path_to_dir_creator
    ],
          _out=sys.stdout,
          _err=sys.stdout)
    print_status('Debian build complete.')
    io = StringIO()
    my_vm(['ls', build_artifact_dir], _out=io)
    artifact_name = io.getvalue().strip()
    io.close()
    deb_file_path = os.path.join(build_artifact_dir, artifact_name)
    print_status('Installing the .deb on the build machine')
    my_vm(['sudo', 'dpkg', '-i', deb_file_path],
          _out=sys.stdout,
          _err=sys.stdout)
    # Upload to Azure Storage
    print_status('Uploading .deb to Azure storage.')
    my_vm([
        'az', 'storage', 'container', 'create', '--name', 'repos',
        '--public-access', 'blob', '--connection-string', '"{}"'.format(
            script_env.get('AZURE_STORAGE_CONNECTION_STRING'))
    ],
          _out=sys.stdout,
          _err=sys.stdout)
    my_vm([
        'az', 'storage', 'blob', 'upload', '-f', deb_file_path, '-n',
        artifact_name, '-c', 'repos', '--connection-string', '"{}"'.format(
            script_env.get('AZURE_STORAGE_CONNECTION_STRING'))
    ],
          _out=sys.stdout,
          _err=sys.stdout)
    io = StringIO()
    my_vm([
        'az', 'storage', 'blob', 'url', '-n', artifact_name, '-c', 'repos',
        '--output', 'tsv', '--connection-string', '"{}"'.format(
            script_env.get('AZURE_STORAGE_CONNECTION_STRING'))
    ],
          _out=io)
    deb_url = io.getvalue().strip()
    io.close()
    print_status('Debian file uploaded to the following URL.')
    print_status(deb_url)
    # Publish to apt service
    my_vm(['wget', '-q', 'https://bootstrap.pypa.io/get-pip.py'],
          _out=sys.stdout,
          _err=sys.stdout)
    my_vm(['sudo', 'python', 'get-pip.py'], _out=sys.stdout, _err=sys.stdout)
    my_vm(['sudo', 'pip', 'install', '--upgrade', 'requests'],
          _out=sys.stdout,
          _err=sys.stdout)
    upload_script = REPO_UPLOAD_SCRIPT_TMPL.format(
        cli_version=script_env.get('CLI_VERSION'),
        repo_id=script_env.get('DEBIAN_REPO_ID'),
        source_url=deb_url,
        repo_package_url=script_env.get('DEBIAN_REPO_URL'),
        repo_user=script_env.get('DEBIAN_REPO_USERNAME'),
        repo_pass=script_env.get('DEBIAN_REPO_PASSWORD'))
    my_vm(['echo', '-e', '"{}"'.format(upload_script), '>>', 'repo_upload.py'],
          _out=sys.stdout,
          _err=sys.stdout)
    my_vm(['python', 'repo_upload.py'], _out=sys.stdout, _err=sys.stdout)
    print_status('Done. :)')
    give_chance_to_cancel('Delete resource group (in background)')
    az(['group', 'delete', '--name', resource_group, '--yes', '--no-wait'],
       _out=sys.stdout,
       _err=sys.stdout)
    print_status('Finished. :)')
Пример #47
0
class TestApplication(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        pass

    def setUp(self):
        self.io = StringIO()

    def tearDown(self):
        self.io.close()

    def test_application_register_and_call_handlers(self):
        handler_called = [False]

        def handler(**kwargs):
            kwargs['args'][0] = True

        def other_handler(**kwargs): # pylint: disable=unused-variable
            self.assertEqual(kwargs['args'], 'secret sauce')

        config = Configuration([])
        app = Application(config)

        app.raise_event('was_handler_called', args=handler_called)
        self.assertFalse(handler_called[0],
                         "Raising event with no handlers registered somehow failed...")

        app.register('was_handler_called', handler)
        self.assertFalse(handler_called[0])

        # Registered handler won't get called if event with different name
        # is raised...
        app.raise_event('other_handler_called', args=handler_called)
        self.assertFalse(handler_called[0], 'Wrong handler called!')

        app.raise_event('was_handler_called', args=handler_called)
        self.assertTrue(handler_called[0], "Handler didn't get called")

        app.raise_event('other_handler_called', args='secret sauce')

    def test_list_value_parameter(self):
        hellos = []

        def handler(args):
            hellos.append(args)

        command = CliCommand('test command', handler)
        command.add_argument('hello', '--hello', nargs='+', action=IterateAction)
        command.add_argument('something', '--something')
        cmd_table = {'test command': command}

        argv = 'az test command --hello world sir --something else'.split()
        config = Configuration(argv)
        config.get_command_table = lambda: cmd_table
        application = Application(config)
        application.execute(argv[1:])

        self.assertEqual(2, len(hellos))
        self.assertEqual(hellos[0]['hello'], 'world')
        self.assertEqual(hellos[0]['something'], 'else')
        self.assertEqual(hellos[1]['hello'], 'sir')
        self.assertEqual(hellos[1]['something'], 'else')
Пример #48
0
def make_html_slider(drawing, keyframes, obs_div, other, div_timeseries,
                     visualize_controls):
    nkeyframes = len(keyframes.timestamps)

    # language=html
    controls_html = """\

<div id="slidecontainer">
<div id='fixedui'>
    Select time: <input autofocus type="range" min="0" max="%s" value="0" class="slider" id="time-range" 
    onchange="showVal(this.value)" oninput="showVal(this.value)"/>
    <span id="time-display"></span>
    </div>
</div>
<style type='text/css'>
    #slidecontainer {
    height: 3em;
    }
    #time-range {
    width: 50%%;
    }
    #fixedui { 
    position: fixed; 
    width: 100%%;
    height: 3em;
    background-color: white;
    }
    .keyframe[visualize="hide"] {
        display: none;
    }
    .keyframe[visualize="show"] {
        display: inherit;
    }
    td#obs {
        padding: 1em;
        vertical-align: top;
    }
    
    #observation_sequence {
        width: 320px;
    }
    td#obs img { width: 90%%;} 
</style>
<script type='text/javascript'>
    function showVal(newVal) {
        elements = document.querySelectorAll('.keyframe');
        elements.forEach(_ => _.setAttribute('visualize', 'hide'));
        elements_show = document.querySelectorAll('.keyframe' + newVal );  
        elements_show.forEach(_ => _.setAttribute('visualize', 'show'));
    }
    document.addEventListener("DOMContentLoaded", function(event) {
        showVal(0);
    });
</script>
""" % (nkeyframes - 1)

    if nkeyframes <= 1:
        controls_html += """
        <style>
        #slidecontainer {
        display: none;
        }
        </style>
        """

    controls = bs(controls_html)

    valbox = controls.find("span", id="time-display")
    assert valbox is not None
    for i, timestamp in keyframes:
        t = Tag(name="span")
        t.attrs["class"] = "keyframe keyframe%d" % i
        t.attrs["visualize"] = "hide"
        t.append("t = %.2f" % timestamp)

        valbox.append(t)

    from six import StringIO

    f = StringIO()
    drawing.write(f, pretty=True)
    drawing_svg = f.getvalue()
    f.close()
    # drawing_svg = drawing.tostring(pretty=True)
    # language=html
    doc = """\
<html lang='en'>
<head></head>
<body>
<style>
/*svg {{ background-color: #eee;}}*/
body {{
    font-family: system-ui, sans-serif;
}}
</style>
{controls}
<table>
<tr>
<td style="width: 640px; vertical-align:top;">
{drawing}
</td>
<td id="obs" >
{visualize_controls}
<div id="observation_sequence">
{obs_div}
</div>
</td>
</tr>
</table>

<script type='text/javascript'>
    showVal(0); 
</script>

{div_timeseries}
{other}
</body>
</html>
    """.format(
        controls=str(controls),
        drawing=drawing_svg,
        obs_div=obs_div,
        other=other,
        div_timeseries=div_timeseries,
        visualize_controls=visualize_controls,
    )
    return doc
Пример #49
0
class TestParser(unittest.TestCase):

    def setUp(self):
        self.io = StringIO()

    def tearDown(self):
        self.io.close()

    def test_register_simple_commands(self):
        def test_handler1():
            pass

        def test_handler2():
            pass

        cli = DummyCli()
        cli.loader = mock.MagicMock()
        cli.loader.cli_ctx = cli

        command = AzCliCommand(cli.loader, 'command the-name', test_handler1)
        command2 = AzCliCommand(cli.loader, 'sub-command the-second-name', test_handler2)
        cmd_table = {'command the-name': command, 'sub-command the-second-name': command2}
        cli.commands_loader.command_table = cmd_table

        parser = AzCliCommandParser(cli)
        parser.load_command_table(cli.commands_loader)
        args = parser.parse_args('command the-name'.split())
        self.assertIs(args.func, command)

        args = parser.parse_args('sub-command the-second-name'.split())
        self.assertIs(args.func, command2)

        AzCliCommandParser.error = VerifyError(self,)
        parser.parse_args('sub-command'.split())
        self.assertTrue(AzCliCommandParser.error.called)

    def test_required_parameter(self):
        def test_handler(args):  # pylint: disable=unused-argument
            pass

        cli = DummyCli()
        cli.loader = mock.MagicMock()
        cli.loader.cli_ctx = cli

        command = AzCliCommand(cli.loader, 'test command', test_handler)
        command.add_argument('req', '--req', required=True)
        cmd_table = {'test command': command}
        cli.commands_loader.command_table = cmd_table

        parser = AzCliCommandParser(cli)
        parser.load_command_table(cli.commands_loader)

        args = parser.parse_args('test command --req yep'.split())
        self.assertIs(args.func, command)

        AzCliCommandParser.error = VerifyError(self)
        parser.parse_args('test command'.split())
        self.assertTrue(AzCliCommandParser.error.called)

    def test_nargs_parameter(self):
        def test_handler():
            pass

        cli = DummyCli()
        cli.loader = mock.MagicMock()
        cli.loader.cli_ctx = cli

        command = AzCliCommand(cli.loader, 'test command', test_handler)
        command.add_argument('req', '--req', required=True, nargs=2)
        cmd_table = {'test command': command}
        cli.commands_loader.command_table = cmd_table

        parser = AzCliCommandParser(cli)
        parser.load_command_table(cli.commands_loader)

        args = parser.parse_args('test command --req yep nope'.split())
        self.assertIs(args.func, command)

        AzCliCommandParser.error = VerifyError(self)
        parser.parse_args('test command -req yep'.split())
        self.assertTrue(AzCliCommandParser.error.called)

    def test_case_insensitive_enum_choices(self):
        from enum import Enum

        class TestEnum(Enum):  # pylint: disable=too-few-public-methods

            opt1 = "ALL_CAPS"
            opt2 = "camelCase"
            opt3 = "snake_case"

        def test_handler():
            pass

        cli = DummyCli()
        cli.loader = mock.MagicMock()
        cli.loader.cli_ctx = cli

        command = AzCliCommand(cli.loader, 'test command', test_handler)
        command.add_argument('opt', '--opt', required=True, **enum_choice_list(TestEnum))
        cmd_table = {'test command': command}
        cli.commands_loader.command_table = cmd_table

        parser = AzCliCommandParser(cli)
        parser.load_command_table(cli.commands_loader)

        args = parser.parse_args('test command --opt alL_cAps'.split())
        self.assertEqual(args.opt, 'ALL_CAPS')

        args = parser.parse_args('test command --opt CAMELCASE'.split())
        self.assertEqual(args.opt, 'camelCase')

        args = parser.parse_args('test command --opt sNake_CASE'.split())
        self.assertEqual(args.opt, 'snake_case')

    def _mock_import_lib(_):
        mock_obj = mock.MagicMock()
        mock_obj.__path__ = __name__
        return mock_obj

    def _mock_iter_modules(_):
        return [(None, __name__, None)]

    def _mock_extension_modname(ext_name, ext_dir):
        return ext_name

    def _mock_get_extensions():
        MockExtension = namedtuple('Extension', ['name', 'preview', 'experimental', 'path', 'get_metadata'])
        return [MockExtension(name=__name__ + '.ExtCommandsLoader', preview=False, experimental=False, path=None, get_metadata=lambda: {}),
                MockExtension(name=__name__ + '.Ext2CommandsLoader', preview=False, experimental=False, path=None, get_metadata=lambda: {})]

    def _mock_load_command_loader(loader, args, name, prefix):
        from enum import Enum

        class TestEnum(Enum):  # pylint: disable=too-few-public-methods
            enum_1 = 'enum_1'
            enum_2 = 'enum_2'

        def test_handler():
            pass

        class TestCommandsLoader(AzCommandsLoader):
            def load_command_table(self, args):
                super(TestCommandsLoader, self).load_command_table(args)
                command = AzCliCommand(loader, 'test module', test_handler)
                command.add_argument('opt', '--opt', required=True, **enum_choice_list(TestEnum))
                self.command_table['test module'] = command
                return self.command_table

        # A command from an extension
        class ExtCommandsLoader(AzCommandsLoader):

            def load_command_table(self, args):
                super(ExtCommandsLoader, self).load_command_table(args)
                command = AzCliCommand(loader, 'test extension', test_handler)
                command.add_argument('opt', '--opt', required=True, **enum_choice_list(TestEnum))
                self.command_table['test extension'] = command
                return self.command_table

        if prefix == 'azure.cli.command_modules.':
            command_loaders = {'TestCommandsLoader': TestCommandsLoader}
        else:
            command_loaders = {'ExtCommandsLoader': ExtCommandsLoader}

        module_command_table = {}
        for _, loader_cls in command_loaders.items():
            command_loader = loader_cls(cli_ctx=loader.cli_ctx)
            command_table = command_loader.load_command_table(args)
            if command_table:
                module_command_table.update(command_table)
                loader.loaders.append(command_loader)  # this will be used later by the load_arguments method
        return module_command_table, command_loader.command_group_table

    @mock.patch('importlib.import_module', _mock_import_lib)
    @mock.patch('pkgutil.iter_modules', _mock_iter_modules)
    @mock.patch('azure.cli.core.commands._load_command_loader', _mock_load_command_loader)
    @mock.patch('azure.cli.core.extension.get_extension_modname', _mock_extension_modname)
    @mock.patch('azure.cli.core.extension.get_extensions', _mock_get_extensions)
    def test_parser_error_spellchecker(self):
        cli = DummyCli()
        main_loader = MainCommandsLoader(cli)
        cli.loader = main_loader

        cli.loader.load_command_table(None)

        parser = cli.parser_cls(cli)
        parser.load_command_table(cli.loader)

        logger_msgs = []
        choice_lists = []
        original_get_close_matches = difflib.get_close_matches

        def mock_log_error(_, msg):
            logger_msgs.append(msg)

        def mock_get_close_matches(*args, **kwargs):
            choice_lists.append(original_get_close_matches(*args, **kwargs))

        # run multiple faulty commands and save error logs, as well as close matches
        with mock.patch('logging.Logger.error', mock_log_error), \
                mock.patch('difflib.get_close_matches', mock_get_close_matches):
            faulty_cmd_args = [
                'test module1 --opt enum_1',
                'test extension1 --opt enum_1',
                'test foo_bar --opt enum_3',
                'test module --opt enum_3',
                'test extension --opt enum_3'
            ]
            for text in faulty_cmd_args:
                with self.assertRaises(SystemExit):
                    parser.parse_args(text.split())
        parser.parse_args('test module --opt enum_1'.split())

        # assert the right type of error msg is logged for command vs argument parsing
        self.assertEqual(len(logger_msgs), 5)
        for msg in logger_msgs[:3]:
            self.assertIn("not in the", msg)
            self.assertIn("command group", msg)
        for msg in logger_msgs[3:]:
            self.assertIn("not a valid value for '--opt'.", msg)

        # assert the right choices are matched as "close".
        # If these don't hold, matching algorithm should be deemed flawed.
        for choices in choice_lists[:2]:
            self.assertEqual(len(choices), 1)
        self.assertEqual(len(choice_lists[2]), 0)
        for choices in choice_lists[3:]:
            self.assertEqual(len(choices), 2)
            for choice in ['enum_1', 'enum_2']:
                self.assertIn(choice, choices)
Пример #50
0
def _mol_writer(data,
                fmt='sdf',
                filepath_or_buffer=None,
                update_properties=True,
                molecule_column=None,
                columns=None):
    """Universal writing function for private use.

    .. versionadded:: 0.3

    Parameters
    ----------
    fmt : string
        The format of molecular file

    filepath_or_buffer : string or None
        File path

    update_properties : bool, optional (default=True)
        Switch to update properties from the DataFrames to the molecules
        while writing.

    molecule_column : string or None, optional (default='mol')
        Name of molecule column. If None the molecules will be skipped.

    columns : list or None, optional (default=None)
        A list of columns to write to file. If None then all available
        fields are written.

    """
    if filepath_or_buffer is None:
        out = StringIO()
    elif hasattr(filepath_or_buffer, 'write'):
        out = filepath_or_buffer
    else:
        out = oddt.toolkit.Outputfile(fmt, filepath_or_buffer, overwrite=True)
    if isinstance(data, pd.DataFrame):
        molecule_column = molecule_column or data._molecule_column
        for ix, row in data.iterrows():
            mol = row[molecule_column].clone
            if update_properties:
                new_data = row.to_dict()
                del new_data[molecule_column]
                mol.data.update(new_data)
            if columns:
                for k in mol.data.keys():
                    if k not in columns:
                        del mol.data[k]
            if filepath_or_buffer is None or hasattr(filepath_or_buffer,
                                                     'write'):
                out.write(mol.write(fmt))
            else:
                out.write(mol)
    elif isinstance(data, pd.Series):
        for mol in data:
            if filepath_or_buffer is None or hasattr(filepath_or_buffer,
                                                     'write'):
                out.write(mol.write(fmt))
            else:
                out.write(mol)
    if filepath_or_buffer is None:
        return out.getvalue()
    elif not hasattr(filepath_or_buffer, 'write'):  # don't close a foreign buffer
        out.close()
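
A hedged usage sketch for the private writer above, assuming ODDT's pandas layer (oddt.pandas.read_sdf returning a ChemDataFrame) and a local input.sdf; with no target the function returns the serialized text, otherwise it writes to the given path or buffer.

import oddt.pandas as opd

df = opd.read_sdf('input.sdf')  # assumed ChemDataFrame with a molecule column
sdf_text = _mol_writer(df)      # no target given -> returns the SDF as text
_mol_writer(df, fmt='sdf', filepath_or_buffer='output.sdf')  # writes a file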
Пример #51
0
class Test_resource_validators(unittest.TestCase):
    def setUp(self):
        self.io = StringIO()

    def tearDown(self):
        self.io.close()

    def test_validate_lock_params(self):
        valid = [{
            'test': 'just name',
            'name': 'foo'
        }, {
            'test': 'name and group',
            'name': 'foo',
            'resource_group_name': 'foo',
        }, {
            'test': 'name, group, type [compact]',
            'name': 'foo',
            'resource_group_name': 'bar',
            'resource_name': 'baz',
            'resource_type': 'Microsoft.Compute/VirtualMachines'
        }, {
            'test': 'name, group, type, namespace',
            'name': 'foo',
            'resource_group_name': 'bar',
            'resource_name': 'baz',
            'resource_type': 'VirtualMachines',
            'resource_provider_namespace': 'Microsoft.Compute',
        }, {
            'test': 'name, group, type, namespace, parent',
            'name': 'foo',
            'resource_group_name': 'bar',
            'resource_name': 'baz',
            'resource_type': 'VirtualMachines',
            'resource_provider_namespace': 'Microsoft.Compute',
            'parent_resource_path': 'Foo.Bar/baz',
        }]
        for valid_namespace in valid:
            namespace_obj = NamespaceObject()
            for key in valid_namespace:
                setattr(namespace_obj, key, valid_namespace[key])
            try:
                # If unexpected invalid, this throws, so no need for asserts
                validate_lock_parameters(namespace_obj)
            except CLIError as ex:
                self.fail('Test {} failed. {}'.format(valid_namespace['test'],
                                                      ex))

    def test_validate_lock_params_invalid(self):
        invalid = [{
            'test': 'just name and type',
            'name': 'foo',
            'resource_type': 'baz'
        }, {
            'test': 'name and group and type',
            'name': 'foo',
            'resource_group_name': 'foo',
            'resource_type': 'bar',
        }, {
            'test': 'name, group, no type',
            'name': 'foo',
            'resource_group_name': 'bar',
            'resource_name': 'baz',
        }, {
            'test': 'name, group, type, namespace',
            'name': 'foo',
            'resource_group_name': 'bar',
            'resource_name': 'baz',
            'resource_provider_namespace': 'Microsoft.Compute',
        }, {
            'test': 'name, group, type, namespace, parent',
            'name': 'foo',
            'resource_group_name': 'bar',
            'resource_type': 'VirtualMachines',
            'resource_provider_namespace': 'Microsoft.Compute',
            'parent_resource_path': 'Foo.Bar/baz',
        }]
        for invalid_namespace in invalid:
            with self.assertRaises(CLIError):
                namespace_obj = NamespaceObject()
                for key in invalid_namespace:
                    setattr(namespace_obj, key, invalid_namespace[key])
                validate_lock_parameters(namespace_obj)

    def test_generate_deployment_name_from_file(self):
        # verify auto-gen from uri
        namespace = mock.MagicMock()
        namespace.template_uri = 'https://templates/template123.json?foo=bar'
        namespace.template_file = None
        namespace.deployment_name = None
        _validate_deployment_name(namespace)
        self.assertEqual('template123', namespace.deployment_name)

        namespace = mock.MagicMock()
        namespace.template_file = __file__
        namespace.template_uri = None
        namespace.deployment_name = None
        _validate_deployment_name(namespace)

        file_base_name = os.path.basename(__file__)
        file_base_name = file_base_name[:str.find(file_base_name, '.')]
        self.assertEqual(file_base_name, namespace.deployment_name)

        # verify use default if get a file content
        namespace = mock.MagicMock()
        namespace.template_file = '{"foo":"bar"}'
        namespace.template_uri = None
        namespace.deployment_name = None
        _validate_deployment_name(namespace)
        self.assertEqual('deployment1', namespace.deployment_name)
Пример #52
0
    def run_pipe_sequence(self,
                          pipe_sequence,
                          final_ins=None,
                          final_outs=None,
                          final_errs=None,
                          environ={},
                          cwd=None):
        if self.debug:
            self.logger.debug(str(pipe_sequence))

        _, current_state = self.get_current_worker_and_state()

        n_simple_commands = len(pipe_sequence.lst)

        prev_outs = None
        for idx, simple_command in enumerate(pipe_sequence.lst):

            # The temporary_environ needs to be reset for each simple command
            # i.e. A=42 script1 | script2
            # The value of A should not be carried to script2
            current_state.temporary_environ = {}
            for assignment in simple_command.assignments:
                current_state.temporary_environ[
                    assignment.identifier] = assignment.value

            # Only update the worker's env for pure assignments
            if simple_command.cmd_word == '' and idx == 0 and n_simple_commands == 1:
                current_state.environ.update(current_state.temporary_environ)
                current_state.temporary_environ = {}

            if prev_outs:
                # If previous output has gone to a file, we use a dummy empty string as ins
                ins = StringIO() if type(prev_outs) == file else prev_outs
            else:
                ins = final_ins or current_state.sys_stdin__

            outs = current_state.sys_stdout__
            errs = current_state.sys_stderr__

            if simple_command.io_redirect:
                # Truncate file or append to file
                mode = 'w' if simple_command.io_redirect.operator == '>' else 'a'
                # For simplicity, stdout redirect works for stderr as well.
                # Note this is different from a real shell.
                if simple_command.io_redirect.filename == '&3':
                    outs = _SYS_STDOUT
                    errs = _SYS_STDERR
                else:
                    errs = outs = open(simple_command.io_redirect.filename,
                                       mode)

            elif idx < n_simple_commands - 1:  # before the last piped command
                outs = StringIO()

            else:
                if final_outs:
                    outs = final_outs
                if final_errs:
                    errs = final_errs

            if self.debug:
                self.logger.debug('io %s %s\n' % (ins, outs))

            try:
                if simple_command.cmd_word != '':
                    script_file = self.find_script_file(
                        simple_command.cmd_word)

                    if self.debug:
                        self.logger.debug('script is %s\n' % script_file)

                    if self.input_encoding_utf8:
                        # Python 2 is not fully unicode compatible. Some modules
                        # (e.g. runpy) insist on ASCII arguments. Encoding here
                        # helps eliminate possible errors caused by unicode arguments.
                        simple_command_args = [
                            arg.encode('utf-8') for arg in simple_command.args
                        ]
                    else:
                        simple_command_args = simple_command.args

                    if script_file.endswith('.py'):
                        self.exec_py_file(script_file, simple_command_args,
                                          ins, outs, errs)

                    elif is_binary_file(script_file):
                        raise ShNotExecutable(script_file)

                    else:
                        self.exec_sh_file(script_file, simple_command_args,
                                          ins, outs, errs)

                else:
                    current_state.return_value = 0

                if current_state.return_value != 0:
                    break  # break out of the pipe_sequence, but NOT pipe_sequence list

                if isinstance(outs, StringIO):
                    outs.seek(0)  # rewind for next command in the pipe sequence

                prev_outs = outs

            # These catch-all handlers are for exceptions raised outside of the
            # actual command execution (i.e. exec_py_file, exec_sh_file),
            # e.g. command not found, not executable, etc.
            except ShFileNotFound as e:
                err_msg = '%s\n' % e.args[0]
                if self.debug:
                    self.logger.debug(err_msg)
                self.stash.write_message(err_msg)
                # set exit code to 127
                current_state.return_value = 127
                break  # break out of the pipe_sequence, but NOT pipe_sequence list
            except Exception as e:
                err_msg = '%s\n' % e.args[0]
                if self.debug:
                    self.logger.debug(err_msg)
                self.stash.write_message(err_msg)
                break  # break out of the pipe_sequence, but NOT pipe_sequence list

            finally:
                if isinstance(outs, file) and not isinstance(outs, StringIO):
                    # StringIO is a subclass of IOBase in py3 but not in py2
                    outs.close()
                if isinstance(ins, StringIO):  # release the string buffer
                    ins.close()
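
The piping mechanics above boil down to one pattern: every intermediate stage writes into a StringIO, which is rewound with seek(0) and handed to the next stage as its input. A stripped-down sketch of that pattern, independent of the shell internals above:

from io import StringIO

def run_pipe(stages, text):
    """Chain stages; each one reads the previous stage's rewound buffer."""
    ins = StringIO(text)
    for stage in stages:
        outs = StringIO()
        stage(ins, outs)   # the stage writes its result into outs
        outs.seek(0)       # rewind so the next stage reads from the top
        ins.close()        # release the previous string buffer
        ins = outs
    return ins.read()

def upper(ins, outs):
    outs.write(ins.read().upper())

def shout(ins, outs):
    outs.write(ins.read() + '!')

print(run_pipe([upper, shout], 'hello'))  # -> HELLO!
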
Example #53
class TestOutput(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        pass

    def setUp(self):
        self.io = StringIO()

    def tearDown(self):
        self.io.close()

    def test_out_json_valid(self):
        """
        The JSON output when the input is a dict should be the dict serialized to JSON
        """
        output_producer = OutputProducer(formatter=format_json, file=self.io)
        output_producer.out(
            CommandResultItem({
                'active': True,
                'id': '0b1f6472'
            }))
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""{
  "active": true,
  "id": "0b1f6472"
}
"""))

    def test_out_json_from_ordered_dict(self):
        """
        The JSON output when the input is OrderedDict should be serialized to JSON
        """
        output_producer = OutputProducer(formatter=format_json, file=self.io)
        output_producer.out(
            CommandResultItem(OrderedDict({
                'active': True,
                'id': '0b1f6472'
            })))
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""{
  "active": true,
  "id": "0b1f6472"
}
"""))

    def test_out_json_byte(self):
        output_producer = OutputProducer(formatter=format_json, file=self.io)
        output_producer.out(
            CommandResultItem({
                'active': True,
                'contents': b'0b1f6472'
            }))
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""{
  "active": true,
  "contents": "0b1f6472"
}
"""))

    def test_out_json_byte_empty(self):
        output_producer = OutputProducer(formatter=format_json, file=self.io)
        output_producer.out(
            CommandResultItem({
                'active': True,
                'contents': b''
            }))
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""{
  "active": true,
  "contents": ""
}
"""))

    # TABLE output tests

    def test_out_table(self):
        output_producer = OutputProducer(formatter=format_table, file=self.io)
        obj = OrderedDict()
        obj['active'] = True
        obj['val'] = '0b1f6472'
        output_producer.out(CommandResultItem(obj))
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""  Active  Val
--------  --------
       1  0b1f6472
"""))

    def test_out_table_list_of_lists(self):
        output_producer = OutputProducer(formatter=format_table, file=self.io)
        obj = [['a', 'b'], ['c', 'd']]
        output_producer.out(CommandResultItem(obj))
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""Column1    Column2
---------  ---------
a          b
c          d
"""))

    def test_out_table_complex_obj(self):
        output_producer = OutputProducer(formatter=format_table, file=self.io)
        obj = OrderedDict()
        obj['name'] = 'qwerty'
        obj['val'] = '0b1f6472qwerty'
        obj['sub'] = {'1'}
        result_item = CommandResultItem(obj)
        output_producer.out(result_item)
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""Name    Val
------  --------------
qwerty  0b1f6472qwerty
"""))

    def test_out_table_no_query_no_transformer_order(self):
        output_producer = OutputProducer(formatter=format_table, file=self.io)
        obj = {
            'name': 'qwerty',
            'val': '0b1f6472qwerty',
            'active': True,
            'sub': '0b1f6472'
        }
        result_item = CommandResultItem(obj,
                                        table_transformer=None,
                                        is_query_active=False)
        output_producer.out(result_item)
        # Should be alphabetical order as no table transformer and query is not active.
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""  Active  Name    Sub       Val
--------  ------  --------  --------------
       1  qwerty  0b1f6472  0b1f6472qwerty
"""))

    def test_out_table_no_query_yes_transformer_order(self):
        output_producer = OutputProducer(formatter=format_table, file=self.io)
        obj = {
            'name': 'qwerty',
            'val': '0b1f6472qwerty',
            'active': True,
            'sub': '0b1f6472'
        }

        def transformer(r):
            return OrderedDict([('Name', r['name']), ('Val', r['val']),
                                ('Active', r['active']), ('Sub', r['sub'])])

        result_item = CommandResultItem(obj,
                                        table_transformer=transformer,
                                        is_query_active=False)
        output_producer.out(result_item)
        # Should be table transformer order
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""Name    Val               Active  Sub
------  --------------  --------  --------
qwerty  0b1f6472qwerty         1  0b1f6472
"""))

    # TSV output tests
    def test_output_format_dict(self):
        obj = {}
        obj['A'] = 1
        obj['B'] = 2
        result = format_tsv(CommandResultItem(obj))
        self.assertEqual(result, '1\t2\n')

    def test_output_format_dict_sort(self):
        obj = {}
        obj['B'] = 1
        obj['A'] = 2
        result = format_tsv(CommandResultItem(obj))
        self.assertEqual(result, '2\t1\n')

    def test_output_format_ordereddict_not_sorted(self):
        obj = OrderedDict()
        obj['B'] = 1
        obj['A'] = 2
        result = format_tsv(CommandResultItem(obj))
        self.assertEqual(result, '1\t2\n')

    def test_output_format_ordereddict_list_not_sorted(self):
        obj1 = OrderedDict()
        obj1['B'] = 1
        obj1['A'] = 2

        obj2 = OrderedDict()
        obj2['A'] = 3
        obj2['B'] = 4
        result = format_tsv(CommandResultItem([obj1, obj2]))
        self.assertEqual(result, '1\t2\n3\t4\n')
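
The TSV tests above imply a distinction: plain dicts are emitted in sorted key order, while an OrderedDict keeps its insertion order. A hedged re-creation of that behavior (a hypothetical tsv_line, not the actual format_tsv):

from collections import OrderedDict

def tsv_line(obj):
    # Plain dicts are key-sorted; OrderedDict keeps insertion order,
    # matching the expectations asserted in the tests above.
    keys = obj.keys() if isinstance(obj, OrderedDict) else sorted(obj)
    return '\t'.join(str(obj[k]) for k in keys) + '\n'

assert tsv_line({'B': 1, 'A': 2}) == '2\t1\n'                  # sorted: A, B
assert tsv_line(OrderedDict([('B', 1), ('A', 2)])) == '1\t2\n'
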
Example #54
class CommandArchitectureTest(S3HandlerBaseTest):
    def setUp(self):
        super(CommandArchitectureTest, self).setUp()
        self.session = FakeSession()
        self.bucket = make_s3_files(self.session)
        self.loc_files = make_loc_files()
        self.output = StringIO()
        self.saved_stdout = sys.stdout
        sys.stdout = self.output

    def tearDown(self):
        self.output.close()
        sys.stdout = self.saved_stdout

        super(CommandArchitectureTest, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_create_instructions(self):
        """
        This tests to make sure the instructions for any command are generated
        properly.
        """
        cmds = ['cp', 'mv', 'rm', 'sync', 'mb', 'rb']

        instructions = {
            'cp': ['file_generator', 's3_handler'],
            'mv': ['file_generator', 's3_handler'],
            'rm': ['file_generator', 's3_handler'],
            'sync': ['file_generator', 'comparator', 's3_handler'],
            'mb': ['s3_handler'],
            'rb': ['s3_handler']
        }

        params = {
            'filters': True,
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None
        }
        for cmd in cmds:
            cmd_arc = CommandArchitecture(self.session, cmd, {
                'region': 'us-east-1',
                'endpoint_url': None,
                'verify_ssl': None
            })
            cmd_arc.create_instructions()
            self.assertEqual(cmd_arc.instructions, instructions[cmd])

        # Test if there is a filter.
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        self.assertEqual(cmd_arc.instructions,
                         ['file_generator', 'filters', 's3_handler'])

    def test_run_cp_put(self):
        # This ensures that the architecture sets up correctly for a ``cp`` put
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': local_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 'locals3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True
        }
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_error_on_same_line_as_status(self):
        s3_file = 's3://' + 'bucket-does-not-exist' + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': False,
            'quiet': False,
            'src': local_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 'locals3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True
        }
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        # Also, we need to verify that the error message is on the *same* line
        # as the upload failed line, to make it easier to track.
        output_str = (
            "upload failed: %s to %s Error: Bucket does not exist\n" %
            (rel_local_file, s3_file))
        self.assertIn(output_str, self.output.getvalue())

    def test_run_cp_get(self):
        # This ensures that the architecture sets up correctly for a ``cp`` get
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': s3_file,
            'dest': local_file,
            'filters': filters,
            'paths_type': 's3local',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True
        }
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) download: %s to %s" % (s3_file, rel_local_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_cp_copy(self):
        # This ensures that the architecture sets up correctly for a ``cp`` copy
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': s3_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 's3s3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True
        }
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) copy: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_mv(self):
        # This ensures that the architecture sets up correctly for a ``mv``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': s3_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 's3s3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True
        }
        cmd_arc = CommandArchitecture(self.session, 'mv', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) move: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_remove(self):
        # This ensures that the architecture sets up correctly for a ``rm``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {
            'dir_op': False,
            'dryrun': True,
            'quiet': False,
            'src': s3_file,
            'dest': s3_file,
            'filters': filters,
            'paths_type': 's3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True
        }
        cmd_arc = CommandArchitecture(self.session, 'rm', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) delete: %s" % s3_file
        self.assertIn(output_str, self.output.getvalue())

    def test_run_sync(self):
        # This ensures that the architecture sets up correctly for a ``sync``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        s3_prefix = 's3://' + self.bucket + '/'
        local_dir = self.loc_files[3]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {
            'dir_op': True,
            'dryrun': True,
            'quiet': False,
            'src': local_dir,
            'dest': s3_prefix,
            'filters': filters,
            'paths_type': 'locals3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True
        }
        cmd_arc = CommandArchitecture(self.session, 'sync', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_mb(self):
        # This ensures that the architecture sets up correctly for a ``mb``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {
            'dir_op': True,
            'dryrun': True,
            'quiet': False,
            'src': s3_prefix,
            'dest': s3_prefix,
            'paths_type': 's3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True
        }
        cmd_arc = CommandArchitecture(self.session, 'mb', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) make_bucket: %s" % s3_prefix
        self.assertIn(output_str, self.output.getvalue())

    def test_run_rb(self):
        # This ensures that the architecture sets up correctly for a ``rb``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {
            'dir_op': True,
            'dryrun': True,
            'quiet': False,
            'src': s3_prefix,
            'dest': s3_prefix,
            'paths_type': 's3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True
        }
        cmd_arc = CommandArchitecture(self.session, 'rb', params)
        cmd_arc.create_instructions()
        rc = cmd_arc.run()
        output_str = "(dryrun) remove_bucket: %s" % s3_prefix
        self.assertIn(output_str, self.output.getvalue())
        self.assertEqual(rc, 0)

    def test_run_rb_nonzero_rc(self):
        # This ensures that the architecture sets up correctly for a ``rb``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {
            'dir_op': True,
            'dryrun': False,
            'quiet': False,
            'src': s3_prefix,
            'dest': s3_prefix,
            'paths_type': 's3',
            'region': 'us-east-1',
            'endpoint_url': None,
            'verify_ssl': None,
            'follow_symlinks': True
        }
        cmd_arc = CommandArchitecture(self.session, 'rb', params)
        cmd_arc.create_instructions()
        rc = cmd_arc.run()
        output_str = "remove_bucket failed: %s" % s3_prefix
        self.assertIn(output_str, self.output.getvalue())
        self.assertEqual(rc, 1)
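
The setUp/tearDown pair above captures output by swapping sys.stdout for a StringIO by hand. On Python 3 the same capture can be written with contextlib.redirect_stdout, for example:

from contextlib import redirect_stdout
from io import StringIO

buf = StringIO()
with redirect_stdout(buf):   # same effect as the manual sys.stdout swap above
    print("(dryrun) delete: s3://bucket/key")
assert "(dryrun) delete:" in buf.getvalue()
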
Example #55
def get_iem_obs(Grid, window=60.):
    """
    Returns all of the station observations from the Iowa Mesonet for a given Grid in the format
    needed for PyDDA.

    Parameters
    ----------
    Grid: pyART Grid
        The Grid to retrieve the station data for.
    window: float
        The window (in minutes) to look for the nearest observation in time.

    Returns
    -------
    station_data: list of dicts
        A list of dictionaries containing the following entries as keys:

        *lat* - Latitude of the site (float)

        *lon* - Longitude of the site (float)

        *u* - u wind at the site (float)

        *v* - v wind at the site (float)

        *w* - w wind at the site (assumed to be 0) (float)

        *site_id* - Station ID (string)

        *x*, *y*, *z* - The (x, y, z) coordinates of the site in the Grid. (floats)
    """

    # First query the database for all of the JSON info for every station
    # Only add stations whose lat/lon are within the Grid's boundaries
    regions = """AF AL_ AI_ AQ_ AG_ AR_ AK AL AM_ AO_ AS_ AR AW_ AU_ AT_ 
         AZ_ BA_ BE_ BB_ BG_ BO_ BR_ BF_ BT_ BS_ BI_ BM_ BB_ BY_ BZ_ BJ_ BW_ AZ CA CA_AB
         CA_BC CD_ CK_ CF_ CG_ CL_ CM_ CO CO_ CN_ CR_ CT CU_ CV_ CY_ CZ_ DE DK_ DJ_ DM_ DO_ 
         DZ EE_ ET_ FK_ FM_ FJ_ FI_ FR_ GF_ PF_ GA_ GM_ GE_ DE_ GH_ GI_ KY_ GB_ GR_ GL_ GD_
         GU_ GT_ GN_ GW_ GY_ HT_ HN_ HK_ HU_ IS_ IN_ ID_ IR_ IQ_ IE_ IL_ IT_ CI_ JM_ JP_ 
         JO_ KZ_ KE_ KI_ KW_ LA_ LV_ LB_ LS_ LR_ LY_ LT_ LU_ MK_ MG_ MW_ MY_ MV_ ML_ CA_MB
         MH_ MR_ MU_ YT_ MX_ MD_ MC_ MA_ MZ_ MM_ NA_ NP_ AN_ NL_ CA_NB NC_ CA_NF NF_ NI_
         NE_ NG_ MP_ KP_ CA_NT NO_ CA_NS CA_NU OM_ CA_ON PK_ PA_ PG_ PY_ PE_ PH_ PN_ PL_
         PT_ CA_PE PR_ QA_ CA_QC RO_ RU_ RW_ SH_ KN_ LC_ VC_ WS_ ST_ CA_SK SA_ SN_ RS_ SC_
         SL_ SG_ SK_ SI_ SB_ SO_ ZA_ KR_ ES_ LK_ SD_ SR_ SZ_ SE_ CH_ SY_ TW_ TJ_ TZ_ TH_
         TG_ TO_ TT_ TU TN_ TR_ TM_ UG_ UA_ AE_ UN_ UY_  UZ_ VU_ VE_ VN_ VI_ YE_ CA_YT ZM_ ZW_
         EC_ EG_ FL GA GQ_ HI HR_ IA ID IL IO_ IN KS KH_ KY KM_ LA MA MD ME
         MI MN MO MS MT NC ND NE NH NJ NM NV NY OH OK OR PA RI SC SV_ SD TD_ TN TX UT VA VT VG_
         WA WI WV WY"""

    networks = ["AWOS"]
    grid_lon_min = Grid.point_longitude["data"].min()
    grid_lon_max = Grid.point_longitude["data"].max()
    grid_lat_min = Grid.point_latitude["data"].min()
    grid_lat_max = Grid.point_latitude["data"].max()
    for region in regions.split():
        networks.append("%s_ASOS" % (region,))

    site_list = []
    for network in networks:
        # Get metadata
        uri = ("https://mesonet.agron.iastate.edu/" "geojson/network/%s.geojson"
              ) % (network,)
        data = urlopen(uri)
        jdict = json.load(data)
        for site in jdict["features"]:
            lat = site["geometry"]["coordinates"][1]
            lon = site["geometry"]["coordinates"][0]
            if lat >= grid_lat_min and lat <= grid_lat_max and lon >= grid_lon_min and lon <= grid_lon_max:
                site_list.append((site["properties"]["sid"], site["properties"]["elevation"]))


    # Get the timestamp for each request
    grid_time = datetime.strptime(Grid.time["units"],
                                  "seconds since %Y-%m-%dT%H:%M:%SZ")
    start_time = grid_time - timedelta(minutes=window / 2.)
    end_time = grid_time + timedelta(minutes=window / 2.)

    SERVICE = "http://mesonet.agron.iastate.edu/cgi-bin/request/asos.py?"
    service = SERVICE + "data=all&tz=Etc/UTC&format=comma&latlon=yes&"

    service += start_time.strftime("year1=%Y&month1=%m&day1=%d&")
    service += end_time.strftime("year2=%Y&month2=%m&day2=%d&")
    station_obs = []
    for station, elevation in site_list:
        uri = "%s&station=%s" % (service, station)
        print("Downloading: %s" % (station,))
        data = _download_data(uri)
        buf = StringIO()
        buf.write(data)
        buf.seek(0)

        my_df = pd.read_csv(buf, skiprows=5)
        stat_dict = {}
        if len(my_df['lat'].values) == 0:
            warnings.warn(
                "No data available at station %s between time %s and %s" %
                (station, start_time.strftime('%Y-%m-%d %H:%M:%S'),
                 end_time.strftime('%Y-%m-%d %H:%M:%S')))
        else:
            stat_dict['lat'] = my_df['lat'].values[0]
            stat_dict['lon'] = my_df['lon'].values[0]
            stat_dict['x'], stat_dict['y'] = pyart.core.geographic_to_cartesian(
                stat_dict['lon'], stat_dict['lat'], Grid.get_projparams())
            stat_dict['x'] = stat_dict['x'][0]
            stat_dict['y'] = stat_dict['y'][0]
            stat_dict['z'] = elevation - Grid.origin_altitude["data"][0]
            if my_df['drct'].values[0] == 'M':
                buf.close()
                continue
            drct = float(my_df['drct'].values[0])
            s_ms = float(my_df['sknt'].values[0]) * 0.514444
            stat_dict['u'] = -np.sin(np.deg2rad(drct)) * s_ms
            stat_dict['v'] = -np.cos(np.deg2rad(drct)) * s_ms
            stat_dict['site_id'] = station
            station_obs.append(stat_dict)
        buf.close()

    return station_obs
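
The u/v decomposition above follows the meteorological convention that drct is the bearing the wind blows from, with knots converted to m/s through the 0.514444 factor. A quick standalone check of those formulas:

import numpy as np

def wind_components(direction_deg, speed_knots):
    # direction_deg is the bearing the wind comes FROM; knots -> m/s.
    s_ms = speed_knots * 0.514444
    u = -np.sin(np.deg2rad(direction_deg)) * s_ms
    v = -np.cos(np.deg2rad(direction_deg)) * s_ms
    return u, v

u, v = wind_components(360.0, 10.0)   # a 10-knot northerly wind
print(round(u, 3), round(v, 3))       # -> 0.0 -5.144 (air moving southward)
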
Example #56
class TestOutput(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        pass

    def setUp(self):
        self.io = StringIO()

    def tearDown(self):
        self.io.close()

    def test_out_json_valid(self):
        """
        The JSON output when the input is a dict should be the dict serialized to JSON
        """
        output_producer = OutputProducer(formatter=format_json, file=self.io)
        output_producer.out(
            CommandResultItem({
                'active': True,
                'id': '0b1f6472'
            }))
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""{
  "active": true,
  "id": "0b1f6472"
}
"""))

    def test_out_json_from_ordered_dict(self):
        """
        The JSON output when the input is OrderedDict should be serialized to JSON
        """
        output_producer = OutputProducer(formatter=format_json, file=self.io)
        output_producer.out(
            CommandResultItem(OrderedDict({
                'active': True,
                'id': '0b1f6472'
            })))
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""{
  "active": true,
  "id": "0b1f6472"
}
"""))

    def test_out_json_byte(self):
        output_producer = OutputProducer(formatter=format_json, file=self.io)
        output_producer.out(
            CommandResultItem({
                'active': True,
                'contents': b'0b1f6472'
            }))
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""{
  "active": true,
  "contents": "0b1f6472"
}
"""))

    def test_out_json_byte_empty(self):
        output_producer = OutputProducer(formatter=format_json, file=self.io)
        output_producer.out(
            CommandResultItem({
                'active': True,
                'contents': b''
            }))
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""{
  "active": true,
  "contents": ""
}
"""))

    def test_out_boolean_valid(self):
        output_producer = OutputProducer(formatter=format_list, file=self.io)
        output_producer.out(CommandResultItem(True))
        self.assertEqual(util.normalize_newlines(self.io.getvalue()),
                         util.normalize_newlines("""True\n\n\n"""))

    # TABLE output tests

    def test_out_table(self):
        output_producer = OutputProducer(formatter=format_table, file=self.io)
        obj = OrderedDict()
        obj['active'] = True
        obj['val'] = '0b1f6472'
        output_producer.out(CommandResultItem(obj))
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""  Active  Val
--------  --------
       1  0b1f6472
"""))

    def test_out_table_list_of_lists(self):
        output_producer = OutputProducer(formatter=format_table, file=self.io)
        obj = [['a', 'b'], ['c', 'd']]
        output_producer.out(CommandResultItem(obj))
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""Column1    Column2
---------  ---------
a          b
c          d
"""))

    def test_out_table_complex_obj(self):
        output_producer = OutputProducer(formatter=format_table, file=self.io)
        obj = OrderedDict()
        obj['name'] = 'qwerty'
        obj['val'] = '0b1f6472qwerty'
        obj['sub'] = {'1'}
        result_item = CommandResultItem(obj)
        output_producer.out(result_item)
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""Name    Val
------  --------------
qwerty  0b1f6472qwerty
"""))

    def test_out_table_no_query_no_transformer_order(self):
        output_producer = OutputProducer(formatter=format_table, file=self.io)
        obj = {
            'name': 'qwerty',
            'val': '0b1f6472qwerty',
            'active': True,
            'sub': '0b1f6472'
        }
        result_item = CommandResultItem(obj,
                                        table_transformer=None,
                                        is_query_active=False)
        output_producer.out(result_item)
        # Should be alphabetical order as no table transformer and query is not active.
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""  Active  Name    Sub       Val
--------  ------  --------  --------------
       1  qwerty  0b1f6472  0b1f6472qwerty
"""))

    def test_out_table_no_query_yes_transformer_order(self):
        output_producer = OutputProducer(formatter=format_table, file=self.io)
        obj = {
            'name': 'qwerty',
            'val': '0b1f6472qwerty',
            'active': True,
            'sub': '0b1f6472'
        }

        def transformer(r):
            return OrderedDict([('Name', r['name']), ('Val', r['val']),
                                ('Active', r['active']), ('Sub', r['sub'])])

        result_item = CommandResultItem(obj,
                                        table_transformer=transformer,
                                        is_query_active=False)
        output_producer.out(result_item)
        # Should be table transformer order
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""Name    Val               Active  Sub
------  --------------  --------  --------
qwerty  0b1f6472qwerty         1  0b1f6472
"""))

    # LIST output tests

    def test_out_list_valid(self):
        output_producer = OutputProducer(formatter=format_list, file=self.io)
        output_producer.out(
            CommandResultItem({
                'active': True,
                'id': '0b1f6472'
            }))
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""Active : True
Id     : 0b1f6472


"""))

    def test_out_list_valid_caps(self):
        output_producer = OutputProducer(formatter=format_list, file=self.io)
        output_producer.out(
            CommandResultItem({
                'active': True,
                'TESTStuff': 'blah'
            }))
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""Test Stuff : blah
Active     : True


"""))

    def test_out_list_valid_none_val(self):
        output_producer = OutputProducer(formatter=format_list, file=self.io)
        output_producer.out(
            CommandResultItem({
                'active': None,
                'id': '0b1f6472'
            }))
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""Active : None
Id     : 0b1f6472


"""))

    def test_out_list_valid_empty_array(self):
        output_producer = OutputProducer(formatter=format_list, file=self.io)
        output_producer.out(
            CommandResultItem({
                'active': None,
                'id': '0b1f6472',
                'hosts': []
            }))
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""Active : None
Id     : 0b1f6472
Hosts  :
   None


"""))

    def test_out_list_valid_array_complex(self):
        output_producer = OutputProducer(formatter=format_list, file=self.io)
        output_producer.out(
            CommandResultItem([{
                'active': True,
                'id': '783yesdf'
            }, {
                'active': False,
                'id': '3hjnme32'
            }, {
                'active': False,
                'id': '23hiujbs'
            }]))
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""Active : True
Id     : 783yesdf

Active : False
Id     : 3hjnme32

Active : False
Id     : 23hiujbs


"""))

    def test_out_list_valid_str_array(self):
        output_producer = OutputProducer(formatter=format_list, file=self.io)
        output_producer.out(
            CommandResultItem(['location', 'id', 'host', 'server']))
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""location

id

host

server


"""))

    def test_out_list_valid_complex_array(self):
        output_producer = OutputProducer(formatter=format_list, file=self.io)
        output_producer.out(
            CommandResultItem({
                'active': True,
                'id': '0b1f6472',
                'myarray': ['1', '2', '3', '4']
            }))
        self.assertEqual(
            util.normalize_newlines(self.io.getvalue()),
            util.normalize_newlines("""Active  : True
Id      : 0b1f6472
Myarray :
   1
   2
   3
   4


"""))

    def test_out_list_format_key_simple(self):
        lo = ListOutput()
        self.assertEqual(lo._formatted_keys_cache, {})
        lo._get_formatted_key('locationId')
        self.assertEqual(lo._formatted_keys_cache,
                         {'locationId': 'Location Id'})

    def test_out_list_format_key_single(self):
        lo = ListOutput()
        self.assertEqual(lo._formatted_keys_cache, {})
        lo._get_formatted_key('location')
        self.assertEqual(lo._formatted_keys_cache, {'location': 'Location'})

    def test_out_list_format_key_multiple_caps(self):
        lo = ListOutput()
        self.assertEqual(lo._formatted_keys_cache, {})
        lo._get_formatted_key('fooIDS')
        self.assertEqual(lo._formatted_keys_cache, {'fooIDS': 'Foo Ids'})

    def test_out_list_format_key_multiple_words(self):
        lo = ListOutput()
        self.assertEqual(lo._formatted_keys_cache, {})
        lo._get_formatted_key('locationIdState')
        self.assertEqual(lo._formatted_keys_cache,
                         {'locationIdState': 'Location Id State'})

    # TSV output tests
    def test_output_format_dict(self):
        obj = {}
        obj['A'] = 1
        obj['B'] = 2
        result = format_tsv(CommandResultItem(obj))
        self.assertEqual(result, '1\t2\n')

    def test_output_format_dict_sort(self):
        obj = {}
        obj['B'] = 1
        obj['A'] = 2
        result = format_tsv(CommandResultItem(obj))
        self.assertEqual(result, '2\t1\n')

    def test_output_format_ordereddict_not_sorted(self):
        obj = OrderedDict()
        obj['B'] = 1
        obj['A'] = 2
        result = format_tsv(CommandResultItem(obj))
        self.assertEqual(result, '1\t2\n')

    def test_output_format_ordereddict_list_not_sorted(self):
        obj1 = OrderedDict()
        obj1['B'] = 1
        obj1['A'] = 2

        obj2 = OrderedDict()
        obj2['A'] = 3
        obj2['B'] = 4
        result = format_tsv(CommandResultItem([obj1, obj2]))
        self.assertEqual(result, '1\t2\n3\t4\n')
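
The _get_formatted_key tests imply a cached camelCase splitter: 'locationIdState' becomes 'Location Id State', and runs of capitals such as 'IDS' collapse into a single word. A hypothetical splitter with the same observable behavior (not the real ListOutput code):

import re

_key_cache = {}

def formatted_key(key):
    # Split camelCase and runs of capitals, then title-case each word.
    if key not in _key_cache:
        words = re.findall(r'[A-Z]+(?![a-z])|[A-Z]?[a-z]+', key)
        _key_cache[key] = ' '.join(w.capitalize() for w in words)
    return _key_cache[key]

assert formatted_key('locationIdState') == 'Location Id State'
assert formatted_key('fooIDS') == 'Foo Ids'
assert formatted_key('location') == 'Location'
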
Example #57
class CommandArchitectureTest(S3HandlerBaseTest):
    def setUp(self):
        super(CommandArchitectureTest, self).setUp()
        self.session = FakeSession()
        self.bucket = make_s3_files(self.session)
        self.loc_files = make_loc_files()
        self.output = StringIO()
        self.err_output = StringIO()
        self.saved_stdout = sys.stdout
        self.saved_stderr = sys.stderr
        sys.stdout = self.output
        sys.stderr = self.err_output

    def tearDown(self):
        self.output.close()
        self.err_output.close()
        sys.stdout = self.saved_stdout
        sys.stderr = self.saved_stderr

        super(CommandArchitectureTest, self).tearDown()
        clean_loc_files(self.loc_files)
        s3_cleanup(self.bucket, self.session)

    def test_set_endpoint_no_source(self):
        cmd_arc = CommandArchitecture(self.session, 'sync',
                                      {'region': 'us-west-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None,
                                       'source_region': None})
        cmd_arc.set_endpoints()
        endpoint = cmd_arc._endpoint
        source_endpoint = cmd_arc._source_endpoint
        self.assertEqual(endpoint.region_name, 'us-west-1')
        self.assertEqual(source_endpoint.region_name, 'us-west-1')

    def test_set_endpoint_with_source(self):
        cmd_arc = CommandArchitecture(self.session, 'sync',
                                      {'region': 'us-west-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None,
                                       'paths_type': 's3s3',
                                       'source_region': ['us-west-2']})
        cmd_arc.set_endpoints()
        endpoint = cmd_arc._endpoint
        source_endpoint = cmd_arc._source_endpoint
        self.assertEqual(endpoint.region_name, 'us-west-1')
        self.assertEqual(source_endpoint.region_name, 'us-west-2')

    def test_create_instructions(self):
        """
        This tests to make sure the instructions for any command are generated
        properly.
        """
        cmds = ['cp', 'mv', 'rm', 'sync', 'mb', 'rb']

        instructions = {'cp': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'mv': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'rm': ['file_generator', 'file_info_builder',
                               's3_handler'],
                        'sync': ['file_generator', 'comparator',
                                 'file_info_builder', 's3_handler'],
                        'mb': ['s3_handler'],
                        'rb': ['s3_handler']}

        params = {'filters': True, 'region': 'us-east-1', 'endpoint_url': None,
                  'verify_ssl': None, 'is_stream': False}
        for cmd in cmds:
            cmd_arc = CommandArchitecture(self.session, cmd,
                                          {'region': 'us-east-1',
                                           'endpoint_url': None,
                                           'verify_ssl': None,
                                           'is_stream': False})
            cmd_arc.create_instructions()
            self.assertEqual(cmd_arc.instructions, instructions[cmd])

        # Test if there is a filter.
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        self.assertEqual(cmd_arc.instructions, ['file_generator', 'filters',
                                                'file_info_builder',
                                                's3_handler'])

    def test_choose_sync_strategy_default(self):
        session = Mock()
        cmd_arc = CommandArchitecture(session, 'sync',
                                      {'region': 'us-east-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None})
        # Check if no plugins return their sync strategy.  Should
        # result in the default strategies
        session.emit.return_value = None
        sync_strategies = cmd_arc.choose_sync_strategies()
        self.assertEqual(
            sync_strategies['file_at_src_and_dest_sync_strategy'].__class__,
            SizeAndLastModifiedSync
        )
        self.assertEqual(
            sync_strategies['file_not_at_dest_sync_strategy'].__class__,
            MissingFileSync
        )
        self.assertEqual(
            sync_strategies['file_not_at_src_sync_strategy'].__class__,
            NeverSync
        )

    def test_choose_sync_strategy_overwrite(self):
        session = Mock()
        cmd_arc = CommandArchitecture(session, 'sync',
                                      {'region': 'us-east-1',
                                       'endpoint_url': None,
                                       'verify_ssl': None})
        # Check that the default sync strategy is overwritten if a plugin
        # returns its sync strategy.
        mock_strategy = Mock()
        mock_strategy.sync_type = 'file_at_src_and_dest'

        mock_not_at_dest_sync_strategy = Mock()
        mock_not_at_dest_sync_strategy.sync_type = 'file_not_at_dest'

        mock_not_at_src_sync_strategy = Mock()
        mock_not_at_src_sync_strategy.sync_type = 'file_not_at_src'

        responses = [(None, mock_strategy),
                     (None, mock_not_at_dest_sync_strategy),
                     (None, mock_not_at_src_sync_strategy)]

        session.emit.return_value = responses
        sync_strategies = cmd_arc.choose_sync_strategies()
        self.assertEqual(
            sync_strategies['file_at_src_and_dest_sync_strategy'],
            mock_strategy
        )
        self.assertEqual(
            sync_strategies['file_not_at_dest_sync_strategy'],
            mock_not_at_dest_sync_strategy
        )
        self.assertEqual(
            sync_strategies['file_not_at_src_sync_strategy'],
            mock_not_at_src_sync_strategy
        )

    def test_run_cp_put(self):
        # This ensures that the architecture sets up correctly for a ``cp`` put
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': local_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 'locals3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_error_on_same_line_as_status(self):
        s3_file = 's3://' + 'bucket-does-not-exist' + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': False, 'quiet': False,
                  'src': local_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 'locals3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        # Also, we need to verify that the error message is on the *same* line
        # as the upload failed line, to make it easier to track.
        output_str = (
            "upload failed: %s to %s Error: Bucket does not exist\n" % (
                rel_local_file, s3_file))
        self.assertIn(output_str, self.err_output.getvalue())

    def test_run_cp_get(self):
        # This ensures that the architecture sets up correctly for a ``cp`` get
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': local_file, 'filters': filters,
                  'paths_type': 's3local', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) download: %s to %s" % (s3_file, rel_local_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_cp_copy(self):
        # This ensures that the architecture sets up correctly for a ``cp`` copy
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 's3s3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'cp', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) copy: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_mv(self):
        # This ensures that the architecture sets up correctly for a ``mv``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 's3s3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'mv', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) move: %s to %s" % (s3_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_remove(self):
        # This ensures that the architecture sets up correctly for a ``rm``
        # command.  It is just just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        filters = [['--include', '*']]
        params = {'dir_op': False, 'dryrun': True, 'quiet': False,
                  'src': s3_file, 'dest': s3_file, 'filters': filters,
                  'paths_type': 's3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'rm', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) delete: %s" % s3_file
        self.assertIn(output_str, self.output.getvalue())

    def test_run_sync(self):
        # This ensures that the architecture sets up correctly for a ``sync``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_file = 's3://' + self.bucket + '/' + 'text1.txt'
        local_file = self.loc_files[0]
        s3_prefix = 's3://' + self.bucket + '/'
        local_dir = self.loc_files[3]
        rel_local_file = os.path.relpath(local_file)
        filters = [['--include', '*']]
        params = {'dir_op': True, 'dryrun': True, 'quiet': False,
                  'src': local_dir, 'dest': s3_prefix, 'filters': filters,
                  'paths_type': 'locals3', 'region': 'us-east-1',
                  'endpoint_url': None, 'verify_ssl': None,
                  'follow_symlinks': True, 'page_size': None,
                  'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'sync', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) upload: %s to %s" % (rel_local_file, s3_file)
        self.assertIn(output_str, self.output.getvalue())

    def test_run_mb(self):
        # This ensures that the architecture sets up correctly for a ``mb``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {'dir_op': True, 'dryrun': True, 'quiet': False,
                  'src': s3_prefix, 'dest': s3_prefix, 'paths_type': 's3',
                  'region': 'us-east-1', 'endpoint_url': None,
                  'verify_ssl': None, 'follow_symlinks': True,
                  'page_size': None, 'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'mb', params)
        cmd_arc.create_instructions()
        cmd_arc.run()
        output_str = "(dryrun) make_bucket: %s" % s3_prefix
        self.assertIn(output_str, self.output.getvalue())

    def test_run_rb(self):
        # This ensures that the architecture sets up correctly for a ``rb``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {'dir_op': True, 'dryrun': True, 'quiet': False,
                  'src': s3_prefix, 'dest': s3_prefix, 'paths_type': 's3',
                  'region': 'us-east-1', 'endpoint_url': None,
                  'verify_ssl': None, 'follow_symlinks': True,
                  'page_size': None, 'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'rb', params)
        cmd_arc.create_instructions()
        rc = cmd_arc.run()
        output_str = "(dryrun) remove_bucket: %s" % s3_prefix
        self.assertIn(output_str, self.output.getvalue())
        self.assertEqual(rc, 0)

    def test_run_rb_nonzero_rc(self):
        # This ensures that the architecture sets up correctly for a ``rb``
        # command.  It is just a dry run, but all of the components need
        # to be wired correctly for it to work.
        s3_prefix = 's3://' + self.bucket + '/'
        params = {'dir_op': True, 'dryrun': False, 'quiet': False,
                  'src': s3_prefix, 'dest': s3_prefix, 'paths_type': 's3',
                  'region': 'us-east-1', 'endpoint_url': None,
                  'verify_ssl': None, 'follow_symlinks': True,
                  'page_size': None, 'is_stream': False}
        cmd_arc = CommandArchitecture(self.session, 'rb', params)
        cmd_arc.create_instructions()
        rc = cmd_arc.run()
        output_str = "remove_bucket failed: %s" % s3_prefix
        self.assertIn(output_str, self.err_output.getvalue())
        self.assertEqual(rc, 1)
Example #58
class TestParser(unittest.TestCase):
    def setUp(self):
        self.io = StringIO()

    def tearDown(self):
        self.io.close()

    def test_register_simple_commands(self):
        def test_handler1():
            pass

        def test_handler2():
            pass

        cli = TestCli()
        cli.loader = mock.MagicMock()
        cli.loader.cli_ctx = cli

        command = AzCliCommand(cli.loader, 'command the-name', test_handler1)
        command2 = AzCliCommand(cli.loader, 'sub-command the-second-name',
                                test_handler2)
        cmd_table = {
            'command the-name': command,
            'sub-command the-second-name': command2
        }

        parser = AzCliCommandParser(cli)
        parser.load_command_table(cmd_table)
        args = parser.parse_args('command the-name'.split())
        self.assertIs(args.func, command)

        args = parser.parse_args('sub-command the-second-name'.split())
        self.assertIs(args.func, command2)

        AzCliCommandParser.error = VerifyError(self)
        parser.parse_args('sub-command'.split())
        self.assertTrue(AzCliCommandParser.error.called)

    def test_required_parameter(self):
        def test_handler(args):  # pylint: disable=unused-argument
            pass

        cli = TestCli()
        cli.loader = mock.MagicMock()
        cli.loader.cli_ctx = cli

        command = AzCliCommand(cli.loader, 'test command', test_handler)
        command.add_argument('req', '--req', required=True)
        cmd_table = {'test command': command}

        parser = AzCliCommandParser(cli)
        parser.load_command_table(cmd_table)

        args = parser.parse_args('test command --req yep'.split())
        self.assertIs(args.func, command)

        AzCliCommandParser.error = VerifyError(self)
        parser.parse_args('test command'.split())
        self.assertTrue(AzCliCommandParser.error.called)

    def test_nargs_parameter(self):
        def test_handler():
            pass

        cli = TestCli()
        cli.loader = mock.MagicMock()
        cli.loader.cli_ctx = cli

        command = AzCliCommand(cli.loader, 'test command', test_handler)
        command.add_argument('req', '--req', required=True, nargs=2)
        cmd_table = {'test command': command}

        parser = AzCliCommandParser(cli)
        parser.load_command_table(cmd_table)

        args = parser.parse_args('test command --req yep nope'.split())
        self.assertIs(args.func, command)

        AzCliCommandParser.error = VerifyError(self)
        parser.parse_args('test command -req yep'.split())
        self.assertTrue(AzCliCommandParser.error.called)

    def test_case_insensitive_enum_choices(self):
        from enum import Enum

        class TestEnum(Enum):  # pylint: disable=too-few-public-methods

            opt1 = "ALL_CAPS"
            opt2 = "camelCase"
            opt3 = "snake_case"

        def test_handler():
            pass

        cli = TestCli()
        cli.loader = mock.MagicMock()
        cli.loader.cli_ctx = cli

        command = AzCliCommand(cli.loader, 'test command', test_handler)
        command.add_argument('opt',
                             '--opt',
                             required=True,
                             **enum_choice_list(TestEnum))
        cmd_table = {'test command': command}

        parser = AzCliCommandParser(cli)
        parser.load_command_table(cmd_table)

        args = parser.parse_args('test command --opt alL_cAps'.split())
        self.assertEqual(args.opt, 'ALL_CAPS')

        args = parser.parse_args('test command --opt CAMELCASE'.split())
        self.assertEqual(args.opt, 'camelCase')

        args = parser.parse_args('test command --opt sNake_CASE'.split())
        self.assertEqual(args.opt, 'snake_case')
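The three assertions above rely on ``enum_choice_list`` normalizing any casing of a choice back to its canonical spelling. A minimal sketch of such a helper, assuming it only needs to build ``add_argument`` keywords (this is not azure-cli's actual implementation):

import argparse
from enum import Enum

def enum_choice_list(enum_cls):
    canonical = [e.value for e in enum_cls]

    def to_canonical(value):
        for item in canonical:
            if value.lower() == item.lower():
                return item
        raise argparse.ArgumentTypeError(
            'invalid choice: {!r} (choose from {})'.format(
                value, ', '.join(canonical)))

    # ``type`` runs before the ``choices`` membership test, so normalizing
    # the spelling here lets a plain list of canonical values suffice.
    return {'type': to_canonical, 'choices': canonical}

class TestEnum(Enum):
    opt1 = 'ALL_CAPS'
    opt2 = 'camelCase'

parser = argparse.ArgumentParser()
parser.add_argument('--opt', **enum_choice_list(TestEnum))
assert parser.parse_args(['--opt', 'alL_cAps']).opt == 'ALL_CAPS'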
Example #59
from io import StringIO
from cgitb import Hook


def detailed_traceback():
    """Return the current exception rendered by cgitb as plain text."""
    buf = StringIO()
    # The original's ``cgitb_hook`` is presumably a wrapper around
    # ``cgitb.Hook``; Hook(...).handle() writes sys.exc_info() into ``buf``.
    Hook(format="text", file=buf).handle()
    tb_txt = buf.getvalue()
    buf.close()
    return tb_txt
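Hypothetical usage, inside an ``except`` block so that ``sys.exc_info()`` is populated when the hook runs:

try:
    1 / 0
except ZeroDivisionError:
    report = detailed_traceback()
    # ``report`` now holds cgitb's plain-text rendering of the failure.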
Example #60
class TestApplication(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        pass

    @classmethod
    def tearDownClass(cls):
        pass

    def setUp(self):
        self.io = StringIO()

    def tearDown(self):
        self.io.close()

    def test_application_register_and_call_handlers(self):
        handler_called = [False]

        def handler(**kwargs):
            kwargs['args'][0] = True

        def other_handler(**kwargs):
            self.assertEqual(kwargs['args'], 'secret sauce')

        config = Configuration([])
        app = Application(config)

        app.raise_event('was_handler_called', args=handler_called)
        self.assertFalse(
            handler_called[0],
            "Raising event with no handlers registered somehow failed...")

        app.register('was_handler_called', handler)
        self.assertFalse(handler_called[0])

        # Registered handler won't get called if event with different name
        # is raised...
        app.raise_event('other_handler_called', args=handler_called)
        self.assertFalse(handler_called[0], 'Wrong handler called!')

        app.raise_event('was_handler_called', args=handler_called)
        self.assertTrue(handler_called[0], "Handler didn't get called")

        # Register other_handler so its assertion actually runs; without
        # this registration the event below would be raised into the void.
        app.register('other_handler_called', other_handler)
        app.raise_event('other_handler_called', args='secret sauce')

    def test_list_value_parameter(self):
        hellos = []

        def handler(args):
            hellos.append(args)

        command = CliCommand('test command', handler)
        command.add_argument('hello',
                             '--hello',
                             nargs='+',
                             action=IterateAction)
        command.add_argument('something', '--something')
        cmd_table = {'test command': command}

        argv = 'az test command --hello world sir --something else'.split()
        config = Configuration(argv)
        config.get_command_table = lambda: cmd_table
        application = Application(config)
        application.execute(argv[1:])

        self.assertEqual(2, len(hellos))
        self.assertEqual(hellos[0]['hello'], 'world')
        self.assertEqual(hellos[0]['something'], 'else')
        self.assertEqual(hellos[1]['hello'], 'sir')
        self.assertEqual(hellos[1]['something'], 'else')

    def test_expand_file_prefixed_files(self):
        f = tempfile.NamedTemporaryFile(delete=False)
        f.close()

        f_with_bom = tempfile.NamedTemporaryFile(delete=False)
        f_with_bom.close()

        with open(f.name, 'w+') as stream:
            stream.write('foo')

        from codecs import open as codecs_open
        with codecs_open(f_with_bom.name, encoding='utf-8-sig',
                         mode='w+') as stream:
            stream.write('foo')

        cases = [
            [['bar=baz'], ['bar=baz']],
            [['bar', 'baz'], ['bar', 'baz']],
            [['bar=@{}'.format(f.name)], ['bar=foo']],
            [['bar=@{}'.format(f_with_bom.name)], ['bar=foo']],
            [['bar', '@{}'.format(f.name)], ['bar', 'foo']],
            [['bar', f.name], ['bar', f.name]],
            [['[email protected]'], ['[email protected]']],
            [['bar', '*****@*****.**'], ['bar', '*****@*****.**']],
            [['bar=mymongo=@connectionstring'],
             ['bar=mymongo=@connectionstring']],
        ]

        for test_case in cases:
            try:
                args = Application._expand_file_prefixed_files(test_case[0])  # pylint: disable=protected-access
                self.assertEqual(args, test_case[1],
                                 'Failed for: {}'.format(test_case[0]))
            except CLIError as ex:
                # ``args`` may be unbound if the expansion itself raised,
                # so the failure message only reports the input and error.
                self.fail('Unexpected error for {}: {}'.format(
                    test_case[0], ex))

        os.remove(f.name)
        os.remove(f_with_bom.name)
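For reference, the cases encode a small contract: a value that begins with ``@`` (either a whole argument or the part after the first ``=``) and names an existing file is replaced by that file's contents, with any BOM stripped; everything else passes through untouched. A sketch of that contract under those assumed semantics (not azure-cli's actual ``_expand_file_prefixed_files``):

import os
from codecs import open as codecs_open

def expand_file_prefixed_files(args):
    def read(path):
        # utf-8-sig drops a BOM if present, matching the f_with_bom case.
        with codecs_open(path, encoding='utf-8-sig') as stream:
            return stream.read()

    def expand(arg):
        name, sep, value = arg.partition('=')
        if sep and value.startswith('@') and os.path.isfile(value[1:]):
            return '{}={}'.format(name, read(value[1:]))
        if not sep and arg.startswith('@') and os.path.isfile(arg[1:]):
            return read(arg[1:])
        return arg

    return [expand(arg) for arg in args]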