Example #1
    def load(self):
        self._common_kwargs = self._build_common_election_kwargs()
        self._common_kwargs['reporting_level'] = 'precinct'
        # Store result instances for bulk loading
        results = []

        with self._file_handle as csvfile:
            reader = unicodecsv.DictReader(csvfile)
                next(reader, None)  # skip the line after the header (DictReader already consumed the header row)
            for row in reader:
                if self._skip_row(row):
                    continue
                if row['votes'] == 'X':
                    continue
                rr_kwargs = self._common_kwargs.copy()
                rr_kwargs['primary_party'] = row['party'].strip()
                rr_kwargs.update(self._build_contest_kwargs(row))
                rr_kwargs.update(self._build_candidate_kwargs(row))
                jurisdiction = row['precinct'].strip()
                county_ocd_id = [
                    c for c in self.datasource._jurisdictions()
                    if c['county'].strip().upper() == row['county'].strip().upper()
                ][0]['ocd_id']
                rr_kwargs.update({
                    'party': row['party'].strip(),
                    'jurisdiction': jurisdiction,
                    'parent_jurisdiction': row['county'],
                    'ocd_id': "{}/precinct:{}".format(county_ocd_id, ocd_type_id(jurisdiction)),
                    'office': row['office'].strip(),
                    'district': row['district'].strip(),
                    'votes': int(float(row['votes']))
                })
                results.append(RawResult(**rr_kwargs))
        RawResult.objects.insert(results)
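For orientation, the ocd_id built above appends a precinct suffix to the county's OCD division ID. A rough sketch with a stand-in ocd_type_id (hypothetical; the real helper lives in openelex, and this slug rule is an assumption):

import re

def ocd_type_id(value):
    # Hypothetical stand-in for openelex's ocd_type_id helper: lowercase
    # and squeeze disallowed characters to dashes, per OCD ID conventions.
    return re.sub(r'[^a-z0-9~._-]+', '-', value.strip().lower())

county_ocd_id = 'ocd-division/country:us/state:md/county:baltimore'
print("{}/precinct:{}".format(county_ocd_id, ocd_type_id('Precinct 001-000')))
# ocd-division/country:us/state:md/county:baltimore/precinct:precinct-001-000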
Example #2
    def write_data(self, file, outfile):
        """Read rows from the input CSV and write converted output to ``outfile``.

        :param file: input CSV (path or open handle, as accepted by ``csvreader``)
        :param outfile: path of the output CSV file
        :return: None
        """
        columns = self.get_column_names()

        # Ensure removal of output file
        try:
            os.remove(outfile)
        except OSError:
            pass

        csvwriter.open(outfile)
        csvwriter.write(columns)

        global_data = OrderedDict([(col, '') for col in columns])

        csvgen = csvreader(file)

        # Skip first two lines
        try:
            headers = next(csvgen)
            # Fix file header
            headers[0] = 'V1'
            next(csvgen)
        except StopIteration:  # tolerate files with fewer than two lines
            pass
        for row in csvgen:
            subject_data = OrderedDict(list(zip(headers, row)))
            self._write_data(subject_data, global_data, columns, outfile)

        csvwriter.close()
Example #3
    def _process_callback(self, callback, args):
        """Processes a callback to a normal or a streaming function.

        In contrast with
        :meth:`sijax.response.BaseResponse._process_callback` which only
        processes normal requests properly, this can process both normal
        and streaming functions.

        Normal functions are the typical Sijax response functions,
        which don't flush content to the browser, but only push commands
        to the buffer list. Those commands are to be flushed by Sijax
        when the response function exits.

        Streaming functions are the typical Comet response functions,
        which are generators (they use yield to flush content).

        Basically this function can be seen as a converter from
        either a generator (streaming function) or a string (normal function)
        to a generator.
        """
        response = self._perform_handler_call(callback, args)
        if isinstance(response, GeneratorType):
            # Real streaming function using a generator to flush.
            # A for loop ends this generator cleanly when the handler is
            # exhausted, instead of leaking StopIteration (PEP 479).
            for _ in response:
                # we don't really care what it yields
                if len(self._commands) != 0:
                    yield self._flush()
        else:
            # Normal (non-streaming) function
            # Let's flush implicitly for such functions
            if len(self._commands) != 0:
                yield self._flush()
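The conversion the docstring describes reduces to a small standalone sketch (names here are illustrative, not the sijax API):

from types import GeneratorType

def as_stream(handler, args):
    # Illustrative reduction of the dispatch above: generators are
    # drained and re-yielded, plain functions produce a single yield.
    response = handler(*args)
    if isinstance(response, GeneratorType):
        for chunk in response:
            yield chunk
    else:
        yield response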
Example #4
        def parse(token, next):
            # Decode a quoted-string token, resolving escape sequences
            # (including surrogate pairs) between the quotes.
            s = token[2]
            if len(s) < 2 or not (s[0] == s[-1] == '"'):
                token[-1].error('No end quote on string', token)
            s = quoted_splitter(s[1:-1])
            result = parse_quoted_str(token, s[0])
            if len(s) > 1:
                result = [result]
                append = result.append
                s = iter(s)
                next = s.__next__
                next()
                for special in s:
                    nonmatch = next()
                    remap = quoted_mapper(special)
                    if remap is None:
                        if len(special) == 6:
                            uni = int(special[2:], 16)
                            if 0xd800 <= uni <= 0xdbff and allow_double:
                                uni, nonmatch = parse_double_unicode(uni, nonmatch, next, token)
                            remap = parse_encoded_chr(uni)
                        else:
                            return badstring(token, special)
                    append(remap)
                    append(parse_quoted_str(token, nonmatch))

                result = parse_join_str(result)
            if cachestrings:
                result = token[-1].stringcache(result, result)
            return result
Example #5
    def loadMovie(self, filename, log=None):
        """Load a movie from file

        :Parameters:

            filename: string
                The name of the file, including path if necessary

        Brings up a warning if avbin is not found on the computer.
        After the file is loaded MovieStim.duration is updated with the movie
        duration (in seconds).
        """
        try:
            self._movie = pyglet.media.load(filename, streaming=True)
        except Exception as e:
            # pyglet.media.riff is unavailable when avbin is present, so
            # the real exception can get masked by an import error for the
            # unknown (sub)module riff; catch any exception and improve
            # the message if it looks avbin-related
            estr = str(e)
            msg = ''
            if "avbin" in estr.lower():
                msg = ("\nIt seems that avbin was not installed correctly."
                       "\nPlease fetch/install it from "
                       "http://code.google.com/p/avbin/.")
            emsg = "Caught exception '%s' while loading file '%s'.%s"
            raise IOError(emsg % (estr, filename, msg))
        self._player.queue(self._movie)
        self.duration = self._movie.duration
        while self._player.source != self._movie:
            next(self._player)
        self.status = NOT_STARTED
        self._player.pause()  # start 'playing' on the next draw command
        self.filename = filename
        logAttrib(self, log, 'movie', filename)
Example #6
 def execute(self, context):
     self.hive = HiveCliHook(hive_cli_conn_id=self.hive_cli_conn_id)
     self.s3 = S3Hook(s3_conn_id=self.s3_conn_id)
     logging.info("Downloading S3 file")
     if self.wildcard_match:
         if not self.s3.check_for_wildcard_key(self.s3_key):
             raise AirflowException("No key matches {0}".format(self.s3_key))
         s3_key_object = self.s3.get_wildcard_key(self.s3_key)
     else:
         if not self.s3.check_for_key(self.s3_key):
             raise AirflowException(
                 "The key {0} does not exists".format(self.s3_key))
         s3_key_object = self.s3.get_key(self.s3_key)
     with NamedTemporaryFile("w") as f:
         logging.info("Dumping S3 key {0} contents to local"
                      " file {1}".format(s3_key_object.key, f.name))
         s3_key_object.get_contents_to_file(f)
         f.flush()
         self.s3.connection.close()
         if not self.headers:
             logging.info("Loading file into Hive")
             self.hive.load_file(
                 f.name,
                 self.hive_table,
                 field_dict=self.field_dict,
                 create=self.create,
                 partition=self.partition,
                 delimiter=self.delimiter,
                 recreate=self.recreate)
         else:
             with open(f.name, 'r') as tmpf:
                 if self.check_headers:
                     header_l = tmpf.readline()
                     header_line = header_l.rstrip()
                     header_list = header_line.split(self.delimiter)
                     field_names = list(self.field_dict.keys())
                     test_field_match = [h1.lower() == h2.lower() for h1, h2
                                         in zip(header_list, field_names)]
                     if not all(test_field_match):
                         logging.warning("Headers do not match field names"
                                         "File headers:\n {header_list}\n"
                                         "Field names: \n {field_names}\n"
                                         "".format(**locals()))
                         raise AirflowException("Headers do not match the "
                                         "field_dict keys")
                 with NamedTemporaryFile("w") as f_no_headers:
                     tmpf.seek(0)
                     next(tmpf)
                     for line in tmpf:
                         f_no_headers.write(line)
                     f_no_headers.flush()
                     logging.info("Loading file without headers into Hive")
                     self.hive.load_file(
                         f_no_headers.name,
                         self.hive_table,
                         field_dict=self.field_dict,
                         create=self.create,
                         partition=self.partition,
                         delimiter=self.delimiter,
                         recreate=self.recreate)
Example #7
 def read_json_dict(firsttok, next):
     result = new_object()
     append = result.append
     while 1:
         token = next()
         t0 = token[1]
          if t0 == '}':
             if result and disallow_trailing_commas:
                 error('Unexpected trailing comma', token)
             break
         key = json_value_dispatch(t0, bad_dict_key)(token, next)
         if disallow_nonstring_keys and not isinstance(key, basestring):
             error('Non-string key %s not supported' % repr(key), token)
         token = next()
         t0 = token[1]
         if t0 != ':':
             error('Expected ":" after dict key %s' % repr(key), token)
         token = next()
         t0 = token[1]
         value = json_value_dispatch(t0, bad_dict_value)(token, next)
         append([key, value])
         delim = next()
         t0 = delim[1]
         if t0 == ',':
             continue
         if t0 != '}':
             if t0 == '@':
                 error('Unterminated dict (no matching "}")', firsttok)
             error('Expected "," or "}"', delim)
         break
     return result.get_result(firsttok)
Example #8
    def testOgrDriverList(self):
        # test with drivers in recommended order
        drivers = QgsVectorFileWriter.ogrDriverList(QgsVectorFileWriter.SortRecommended)
        self.assertEqual(drivers[0].longName, 'GeoPackage')
        self.assertEqual(drivers[0].driverName, 'GPKG')
        self.assertEqual(drivers[1].longName, 'ESRI Shapefile')
        self.assertEqual(drivers[1].driverName, 'ESRI Shapefile')
        self.assertTrue('ODS' in [f.driverName for f in drivers])

        # ensure that XLSX comes before SQLite, because we should sort on longName, not driverName!
        ms_xlsx_index = next(i for i, v in enumerate(drivers) if v.driverName == 'XLSX')
        sqlite_index = next(i for i, v in enumerate(drivers) if v.driverName == 'SQLite')
        self.assertLess(ms_xlsx_index, sqlite_index)

        self.assertIn('[XLSX]', drivers[ms_xlsx_index].longName)

        # alphabetical sorting
        drivers2 = QgsVectorFileWriter.ogrDriverList(QgsVectorFileWriter.VectorFormatOptions())
        self.assertTrue(drivers2[0].longName < drivers2[1].longName)
        self.assertCountEqual([d.driverName for d in drivers], [d.driverName for d in drivers2])
        self.assertNotEqual(drivers2[0].driverName, 'GPKG')

        # skip non-spatial
        formats = QgsVectorFileWriter.ogrDriverList(QgsVectorFileWriter.SkipNonSpatialFormats)
        self.assertFalse('ODS' in [f.driverName for f in formats])
Example #9
 def _visit(self, node):
     iterator = self._get_iterator(node)
     # Defaults keep `result` bound for empty nodes and avoid an
     # uncaught StopIteration when the iterator is exhausted.
     result = None
     n = next(iterator, None)
     while n:
         result = n.accept(self)
         n = next(iterator, None)
     return result
Example #10
    def __init__(self, lexer, parser, parent):
        Token.__init__(self, self.__class__.__name__, lexer, parser, parent)

        # Create a grammar depending on the delimiting character.
        tok_type, delimiter = lexer.token()
        escaped_delimiter = '\\' + delimiter
        data = r'[^\r\n\\' + escaped_delimiter + ']+'
        delimiter_re = re.compile(escaped_delimiter)
        data_re = re.compile(data)
        grammar_with_delim = grammar_c[:]
        grammar_with_delim.append(('string_data',      data_re))
        grammar_with_delim.append(('string_delimiter', delimiter_re))
        lexer.set_grammar(grammar_with_delim)

        # Begin parsing the string.
        lexer.expect(self, 'string_delimiter')
        self.string = ''
        while 1:
            if lexer.current_is('string_data'):
                self.string += lexer.token()[1]
                next(lexer)
            elif lexer.current_is('escaped_data'):
                self.string += self._escape(lexer.token()[1])
                next(lexer)
            elif lexer.next_if('string_delimiter'):
                break
            else:
                ttype = lexer.token()[0]
                lexer.syntax_error('Expected string but got %s' % ttype, self)

        # Make sure that any variables specified in the command are declared.
        string_re.sub(self.variable_test_cb, self.string)
        self.mark_end()
        lexer.restore_grammar()
Example #11
 def exhaust_generator(gen):
     self.assertTrue(isinstance(gen, GeneratorType))
     try:
         while True:
             next(gen)
     except StopIteration:
         pass
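An equivalent way to drain a generator without an explicit next() loop is the deque(..., maxlen=0) idiom from the itertools recipes:

from collections import deque

gen = (x * x for x in range(5))
deque(gen, maxlen=0)            # consume everything, store nothing
assert next(gen, None) is None  # the generator is now exhausted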
Example #12
 def getTrianglesCoordinates(self):
     """
     A method to retrieve triplet of coordinates representing the triangles
     in lon,lat,height.
     """
     triangles = []
     self._computeVerticesCoordinates()
     indices = iter(self.indices)
     for _ in range(0, len(self.indices) - 1, 3):  # range, not Python 2's xrange
         vi1 = next(indices)
         vi2 = next(indices)
         vi3 = next(indices)
         triangle = (
             (self._longs[vi1],
              self._lats[vi1],
              self._heights[vi1]),
             (self._longs[vi2],
              self._lats[vi2],
              self._heights[vi2]),
             (self._longs[vi3],
              self._lats[vi3],
              self._heights[vi3])
         )
         triangles.append(triangle)
     if len(list(indices)) > 0:
         raise Exception('Corrupted tile')
     return triangles
Example #13
def combineCSVs(dataFile, featureFileList):
    import ast
    import csv
    returnDictionary = {}
    with open(dataFile, "r") as dataListFile:
        dataListReader = csv.reader(dataListFile, delimiter=",", skipinitialspace=True)
        dataListHeader = next(dataListReader)
        for session in dataListReader:
            sessionWithHeader = list(zip(dataListHeader, session))

            sessionDict = {}
            for (name, value) in sessionWithHeader:
                sessionDict[name] = value.strip()
            returnDictionary[sessionDict['sessionID']] = sessionDict
            returnDictionary[sessionDict['sessionID']]['featureImageDict'] = {}  # initialize

    for ft in list(featureFileList.keys()):
        with open(featureFileList[ft], "r") as featureListFile:
            featureListReader = csv.reader(featureListFile, delimiter=",", skipinitialspace=True)
            featureListHeader = next(featureListReader)
            for row in featureListReader:
                rowWithHeader = list(zip(featureListHeader, row))
                rowFeatureDict = {}
                for (name, value) in rowWithHeader:

                    rowFeatureDict[name] = value.strip()
                currSessionDict = returnDictionary[rowFeatureDict['sessionID']]
                print(rowFeatureDict)
                currSessionDict['featureImageDict'][ft] = ast.literal_eval(rowFeatureDict['featureImage'])[ft]
                returnDictionary[rowFeatureDict['sessionID']] = currSessionDict
    return returnDictionary
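To make the expected inputs concrete: the data CSV needs a sessionID column, and each feature CSV needs sessionID plus a featureImage column holding a dict literal keyed by the feature name. A hedged sketch (file names and contents are illustrative):

# data.csv:        sessionID,subject,...
# t1_features.csv: sessionID,featureImage
#                  s001,"{'t1': '/path/to/img.nii'}"
result = combineCSVs("data.csv", {"t1": "t1_features.csv"})
# result["s001"]["featureImageDict"]["t1"] == '/path/to/img.nii'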
Example #14
def __load_multipolygon(tokens, string):
    """
    Has similar inputs and return value to :func:`__load_point`, except it
    handles MULTIPOLYGON geometry.

    :returns:
        A GeoJSON `dict` MultiPolygon representation of the WKT ``string``.
    """
    open_paren = next(tokens)
    if not open_paren == '(':
        raise ValueError(INVALID_WKT_FMT % string)

    polygons = []
    while True:
        try:
            poly = __load_polygon(tokens, string)
            polygons.append(poly['coordinates'])
            t = next(tokens)
            if t == ')':
                # we're done; no more polygons.
                break
        except StopIteration:
            # If we reach this, the WKT is not valid.
            raise ValueError(INVALID_WKT_FMT % string)

    return dict(type='MultiPolygon', coordinates=polygons)
Example #15
 def ranges(self, start, end):
     while start >= self.end:
         self.start, self.end, self.value = next(self)
     yield start, min(self.end, end), self.value
     while end > self.end:
         self.start, self.end, self.value = next(self)
         yield self.start, min(self.end, end), self.value
Example #16
 def __iter__(self):
     res = super(Cursor, self).__iter__()
     # Iterate with a for loop so exhaustion of the parent cursor ends
     # this generator cleanly instead of leaking StopIteration (PEP 479).
     for item in res:
         if self.callback:
             yield self.callback(item)
         else:
             yield item
Example #17
def __load_multilinestring(tokens, string):
    """
    Has similar inputs and return value to :func:`__load_point`, except it
    handles MULTILINESTRING geometry.

    :returns:
        A GeoJSON `dict` MultiLineString representation of the WKT ``string``.
    """
    open_paren = next(tokens)
    if not open_paren == '(':
        raise ValueError(INVALID_WKT_FMT % string)

    linestrs = []
    while True:
        try:
            linestr = __load_linestring(tokens, string)
            linestrs.append(linestr['coordinates'])
            t = next(tokens)
            if t == ')':
                # we're done; no more linestrings.
                break
        except StopIteration:
            # If we reach this, the WKT is not valid.
            raise ValueError(INVALID_WKT_FMT % string)

    return dict(type='MultiLineString', coordinates=linestrs)
Example #18
 def test_dimension_operators(self):
     result = un.dimensionless  # Arbitrary starting expression
     dim_iter = cycle(dimensions)
     val_iter = cycle(single_values)
     for op in self.ops:
         if op is pow:
             val = int(next(val_iter) * 10)
             # Scale the value close to 10 to avoid overflow errors
             if val != 0.0:
                 val = val / 10 ** round(np.log10(abs(val)))
             dim = np_dim = int(val)
         else:
             dim = next(dim_iter)
             np_dim = 10 ** self._np_array(dim)
         np_result = np.log10(op(10 ** self._np_array(result),
                                    np_dim))
         new_result = op(result, dim)
         op_str = ("{}({}, {})".format(op.__name__, result, dim))
         self.assertIsInstance(result, un.Dimension,
                               op_str + " did not return a Dimension")
         self.assertTrue(
             all(self._np_array(new_result) == np_result),
             "{} not equal between Dimension ({}) and numpy ({})"
             .format(op_str, self._np_array(new_result), np_result))
         result = new_result
Example #19
    def collision(self, ball):
        """ Determines if the ball hits this obstacle

    :param ball: An instance of :class:`BallModel`
    :type ball: :class:`BallModel`
        """
        self._double_collision = False

        if ball.position[0] - ball.radius > self.max_x:
            return False
        if ball.position[0] + ball.radius < self.min_x:
            return False
        if ball.position[1] - ball.radius > self.max_y:
            return False
        if ball.position[1] + ball.radius < self.min_y:
            return False

        a, b = tee(np.vstack([np.array(self.points), self.points[0]]))
        next(b, None)
        intercept_found = False
        for pt_pair in zip(a, b):
            if self._intercept_edge(pt_pair, ball):
                if intercept_found:
                    # Ball has hit a corner
                    self._intercept = self._select_edge(
                        pt_pair,
                        self._intercept,
                        ball)
                    self._double_collision = True
                else:
                    self._intercept = pt_pair
                    intercept_found = True

        return intercept_found
Example #20
def write_seg(dframe, sample_id=None, chrom_ids=None):
    """Format a dataframe or list of dataframes as SEG.

    To put multiple samples into one SEG table, pass `dframe` and `sample_id` as
    equal-length lists of data tables and sample IDs in matching order.
    """
    assert sample_id is not None
    if isinstance(dframe, pd.DataFrame):
        first = dframe
        first_sid = sample_id
        sids = dframes = None
    else:
        assert not isinstance(sample_id, basestring)
        dframes = iter(dframe)
        sids = iter(sample_id)
        first = next(dframes)
        first_sid = next(sids)

    if chrom_ids in (None, True):
        chrom_ids = create_chrom_ids(first)
    results = [format_seg(first, first_sid, chrom_ids)]
    if dframes is not None:
        # Unpack matching lists of data and sample IDs
        results.extend(
            format_seg(subframe, sid, chrom_ids)
            for subframe, sid in zip_longest(dframes, sids))
    return pd.concat(results)
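At the call site, the dual convention documented above looks like this (the frames are placeholder DataFrames in whatever column layout format_seg expects; sketch only):

# Single sample: one DataFrame, one ID
seg_table = write_seg(df, sample_id="sample_1")

# Multiple samples: equal-length lists, zipped in matching order
seg_table = write_seg([df_a, df_b], sample_id=["sample_a", "sample_b"])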
Example #21
 def test_unit_unit_operators(self):
     result = un.unitless  # Arbitrary starting expression
     unit_iter = cycle(units)
     val_iter = cycle(single_values)
     for op in self.ops:
         if op is pow:
             val = int(next(val_iter) * 10)
             # Scale the value close to 10 to avoid overflow errors
             if val != 0:
                 val = val / 10 ** round(np.log10(abs(val)))
             unit = dim = power = val
         else:
             unit = next(unit_iter)
             dim = unit.dimension
             power = 10 ** unit.power
         dim_result = op(result.dimension, dim)
         new_result = op(result, unit)
         power_result = np.log10(op(float(10 ** result.power),
                                       float(power)))
         op_str = ("{}({}, {})".format(op.__name__, result, unit))
         self.assertIsInstance(result, un.Unit,
                               op_str + " did not return a Dimension")
         self.assertEqual(
             new_result.dimension, dim_result,
             "Dimension of {} not equal between Unit ({}) and explicit "
             "({})".format(op_str, new_result.dimension, dim_result))
         self.assertEqual(
             new_result.power, power_result,
             "Power of {} not equal between Unit ({}) and explicit ({})"
             .format(op_str, new_result.power, power_result))
         result = new_result
Example #22
def decode_item(next, token):
    if token == b"i":
        # integer: "i" value "e"
        data = int(next())
        if next() != b"e":
            raise ValueError
    elif token == b"s":
        # string: "s" value (virtual tokens)
        data = next()
        # Strings in torrent file are defined as utf-8 encoded
        try:
            data = data.decode('utf-8')
        except UnicodeDecodeError:
            # The pieces field is a byte string, and should be left as such.
            pass
    elif token == b"l" or token == b"d":
        # container: "l" (or "d") values "e"
        data = []
        tok = next()
        while tok != b"e":
            data.append(decode_item(next, tok))
            tok = next()
        if token == b"d":
            data = dict(list(zip(data[0::2], data[1::2])))
    else:
        raise ValueError
    return data
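A minimal way to drive decode_item is to hand-feed it a token stream. Here the tokens for the bencoded list "li42ei7ee" (that is, [42, 7]) are supplied directly; string decoding is skipped since it relies on the tokenizer's virtual b"s" tokens:

tokens = iter([b"i", b"42", b"e", b"i", b"7", b"e", b"e"])
# The leading b"l" marks the container; decode_item pulls the rest itself.
assert decode_item(tokens.__next__, b"l") == [42, 7]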
Example #23
    def parse_xml_file(xml_file_path):
      try:
        root = ET.parse(xml_file_path).getroot()
        for testcase in root.iter('testcase'):
          test_info = {}

          try:
            test_info.update({'time': float(testcase.attrib.get('time'))})
          except (TypeError, ValueError):
            test_info.update({'time': None})

          for attribute in testcase_attributes:
            test_info[attribute] = testcase.attrib.get(attribute)

          result = SUCCESS
          if next(testcase.iter('error'), None) is not None:
            result = ERROR
          elif next(testcase.iter('failure'), None) is not None:
            result = FAILURE
          elif next(testcase.iter('skipped'), None) is not None:
            result = SKIPPED
          test_info.update({'result_code': result})

          tests_in_path.update({testcase.attrib.get('name', ''): test_info})

      except (ET.ParseError, ValueError) as e:
        error_handler(ParseError(xml_file_path, e))
Example #24
 def _get_javac_args_from_zinc_args(zinc_args):
   javac_args = []
   i = iter(zinc_args)
   arg = next(i, None)
   output_dir = None
   while arg is not None:
     arg = arg.strip()
     if arg in ['-d', '-cp', '-classpath']:
       # These are passed through from zinc to javac.
       javac_args.append(arg)
       javac_args.append(next(i))
       if arg == '-d':
         output_dir = javac_args[-1]
     elif arg.startswith('-C'):
       javac_args.append(arg[2:])
     elif arg.endswith('.java'):
       javac_args.append(arg)
     arg = next(i, None)
   # Strip output dir from classpaths.  If we don't then javac will read annotation definitions
   # from there instead of from the source files, which will cause the vnames to reflect the .class
   # file instead of the .java file.
   if output_dir:
     for i, a in enumerate(javac_args):
       if a in ['-cp', '-classpath']:
         javac_args[i + 1] = ':'.join([p for p in javac_args[i + 1].split(':') if p != output_dir])
   return javac_args
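A quick worked example of the translation above (paths are illustrative): the -C prefix is stripped, -d/-cp values pass through, and the output dir is then removed from the classpath:

zinc_args = ['-d', '/out', '-C-g', 'Foo.java', '-cp', '/out:/lib.jar']
assert _get_javac_args_from_zinc_args(zinc_args) == [
    '-d', '/out', '-g', 'Foo.java', '-cp', '/lib.jar']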
Example #25
    def test_entities_iterators(self, _):
        # Given
        intent_entities = {
            "entity1": {
                "utterances": {
                    "entity 1": "entity 1",
                    "entity 11": "entity 11",
                    "entity 111": "entity 111",
                }
            },
            "entity2": {
                "utterances": {
                    "entity 2": "entity 2",
                    "entity 22": "entity 22",
                    "entity 222": "entity 222",
                }
            }
        }
        random_state = np.random.RandomState(1)

        # When
        it_dict = get_entities_iterators(intent_entities, random_state)

        # Then
        self.assertIn("entity1", it_dict)
        expected_seq = ["entity 1", "entity 11", "entity 111"]
        seq = [next(it_dict["entity1"]) for _ in range(len(expected_seq))]
        self.assertListEqual(expected_seq, sorted(seq))

        self.assertIn("entity2", it_dict)
        expected_seq = ["entity 2", "entity 22", "entity 222"]
        seq = [next(it_dict["entity2"]) for _ in range(len(expected_seq))]
        self.assertListEqual(expected_seq, sorted(seq))
Example #26
def pairwise(iterable):
    """s -> (s0,s1), (s1,s2), (s2,s3), ...

    copied from documentation
    """
    a, b = tee(iterable)
    next(b, None)
    return zip(a, b)
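This is the tee/next recipe from the itertools documentation; a quick sanity check:

assert list(pairwise([1, 2, 3, 4])) == [(1, 2), (2, 3), (3, 4)]
assert list(pairwise("a")) == []  # fewer than two items yields no pairs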
Example #27
 def testStart(self):
     self.testStop()
     self.assertEqual(len(self.pipeline), 1)
     item1 = object()
     self.pipeline.appendleft(item1)
     self.assertEqual(next(self.pipeline), None)
     self.pipeline.start()
     self.assertEqual(next(self.pipeline), item1)
Example #28
 def testNestedInsert(self):
     tg = QgsTransactionGroup()
     tg.addLayer(self.vl)
     self.vl.startEditing()
     it = self.vl.getFeatures()
     f = next(it)
     f['pk'] = NULL
     self.vl.addFeature(f)  # Should not deadlock during an active iteration
     f = next(it)
Example #29
def parse_fn_name(t):
    if next(t) == '(':
        # fn pointer name
        assert next(t) == '*'
        name = next(t)
        assert next(t) == ')'
    else:
        name = t.current()
    return name
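The token source is assumed to be an iterator that also exposes current() for the last-consumed token; a minimal stand-in (hypothetical) exercises both branches:

class Tokens:
    # Hypothetical token source matching the interface parse_fn_name uses.
    def __init__(self, toks):
        self._toks = iter(toks)
        self._cur = None

    def __next__(self):
        self._cur = next(self._toks)
        return self._cur

    def current(self):
        return self._cur

assert parse_fn_name(Tokens(['(', '*', 'handler', ')'])) == 'handler'
assert parse_fn_name(Tokens(['plain_name'])) == 'plain_name'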
Example #30
 def test_open_zip_returns_realpath_on_badzipfile(self):
   # In case of file corruption, deleting a Pants-constructed symlink would not resolve the error.
   with temporary_file() as not_zip:
     with temporary_dir() as tempdir:
       file_symlink = os.path.join(tempdir, 'foo')
       os.symlink(not_zip.name, file_symlink)
       self.assertEquals(os.path.realpath(file_symlink), os.path.realpath(not_zip.name))
       with self.assertRaisesRegexp(zipfile.BadZipfile, r'{}'.format(not_zip.name)):
         next(open_zip(file_symlink).gen)
Example #31
 def pop(self, last=True):
     if not self:
         raise KeyError('set is empty')
     key = next(reversed(self)) if last else next(iter(self))
     self.discard(key)
     return key
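Assuming the host class is an insertion-ordered set (an OrderedSet-style recipe; the class name here is an assumption), pop() takes from the end by default and pop(last=False) from the front:

s = OrderedSet('abc')            # OrderedSet is the assumed host class
assert s.pop() == 'c'            # last inserted
assert s.pop(last=False) == 'a'  # first inserted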
Example #32
def range_pair(field, cat, fq_filter, iterable, end, collection_facet):
    # e.g. counts":["0",17430,"1000",1949,"2000",671,"3000",404,"4000",243,"5000",165],"gap":1000,"start":0,"end":6000}
    pairs = []
    selected_values = [f['value'] for f in fq_filter]
    is_single_unit_gap = re.match(
        r'^[+-]?1[A-Za-z]*$',
        str(collection_facet['properties']['gap'])) is not None
    is_up = collection_facet['properties']['sort'] == 'asc'

    if collection_facet['properties']['sort'] == 'asc' and (
            collection_facet['type'] == 'range-up'
            or collection_facet['properties'].get('type') == 'range-up'):
        prev = None
        n = []
        for e in iterable:
            if prev is not None:
                n.append(e)
                n.append(prev)
                prev = None
            else:
                prev = e
        iterable = n
        iterable.reverse()

    a, to = itertools.tee(iterable)
    next(to, None)
    counts = iterable[1::2]
    total_counts = counts.pop(
        0) if collection_facet['properties']['sort'] == 'asc' else 0
    isDate = collection_facet['properties']['isDate']

    for element in a:
        next(to, None)
        to_value = next(to, end)
        count = next(a)

        if collection_facet['properties']['sort'] == 'asc':
            from_value = to_value
            to_value = element
        else:
            from_value = element

        pairs.append({
            'field': field,
            'from': from_value if isDate else int(element),
            'value': count,
            'to': to_value if isDate else int(to_value),
            'selected': element in selected_values,
            'exclude': all([f['exclude'] for f in fq_filter if f['value'] == element]),
            'is_single_unit_gap': is_single_unit_gap,
            'total_counts': total_counts,
            'is_up': is_up
        })
        total_counts += counts.pop(0) if counts else 0

    if collection_facet['properties']['sort'] == 'asc' and collection_facet[
            'type'] != 'range-up' and collection_facet['properties'].get(
                'type') != 'range-up':
        pairs.reverse()

    return pairs
Example #33
 def has_with_pattern(self, re_pattern):
     """Return whether if there is an item with the given pattern."""
     return bool(next(self.get_by_pattern(re_pattern), None))
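The next(..., None) default is what makes this a non-raising existence test; the same idiom works on any generator expression:

items = ['alpha', 'beta', 'gamma']
found = next((x for x in items if x.startswith('b')), None)
assert found == 'beta'
assert next((x for x in items if x.startswith('z')), None) is None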
Example #34
 def __next__(self):
     return _mongo_item_to_task(next(self.cursor))
Example #35
def main():
    """
    Main function to map vCenter VM OS types to Nuage policy groups
    """

    # Handling arguments
    args = get_args()
    clusters = []
    if args.clusters:
        clusters = args.clusters
    debug = args.debug
    log_file = None
    if args.logfile:
        log_file = args.logfile
    mapping_file = args.mapping_file
    nuage_enterprise = args.nuage_enterprise
    nuage_host = args.nuage_host
    nuage_port = args.nuage_port
    nuage_password = None
    if args.nuage_password:
        nuage_password = args.nuage_password
    nuage_username = args.nuage_username
    remove_policygroups = args.remove_policygroups
    nosslcheck = args.nosslcheck
    verbose = args.verbose
    vcenter_host = args.vcenter_host
    vcenter_https_port = args.vcenter_https_port
    vcenter_password = None
    if args.vcenter_password:
        vcenter_password = args.vcenter_password
    vcenter_username = args.vcenter_username

    # Logging settings
    if debug:
        log_level = logging.DEBUG
    elif verbose:
        log_level = logging.INFO
    else:
        log_level = logging.WARNING

    logging.basicConfig(filename=log_file,
                        format='%(asctime)s %(levelname)s %(message)s',
                        level=log_level)
    logger = logging.getLogger(__name__)

    # Getting user password for Nuage connection
    if nuage_password is None:
        logger.debug(
            'No command line Nuage password received, requesting Nuage password from user'
        )
        nuage_password = getpass.getpass(
            prompt='Enter password for Nuage host {0:s} for user {1:s}: '.
            format(nuage_host, nuage_username))

    # Getting user password for vCenter connection
    if vcenter_password is None:
        logger.debug(
            'No command line vCenter password received, requesting vCenter password from user'
        )
        vcenter_password = getpass.getpass(
            prompt='Enter password for vCenter host {0:s} for user {1:s}: '.
            format(vcenter_host, vcenter_username))

    try:
        vc = None
        nc = None

        # Connecting to Nuage
        try:
            logger.info(
                'Connecting to Nuage server {0:s}:{1:d} with username {2:s}'.
                format(nuage_host, nuage_port, nuage_username))
            nc = vsdk.NUVSDSession(username=nuage_username,
                                   password=nuage_password,
                                   enterprise=nuage_enterprise,
                                   api_url="https://{0:s}:{1:d}".format(
                                       nuage_host, nuage_port))
            nc.start()
        except IOError:
            pass

        if not nc or not nc.is_current_session():
            logger.error(
                'Could not connect to Nuage host {0:s} with user {1:s} and specified password'
                .format(nuage_host, nuage_username))
            return 1

        # Connecting to vCenter
        try:
            logger.info(
                'Connecting to vCenter server {0:s}:{1:d} with username {2:s}'.
                format(vcenter_host, vcenter_https_port, vcenter_username))
            if nosslcheck:
                vc = SmartConnectNoSSL(host=vcenter_host,
                                       user=vcenter_username,
                                       pwd=vcenter_password,
                                       port=int(vcenter_https_port))
            else:
                vc = SmartConnect(host=vcenter_host,
                                  user=vcenter_username,
                                  pwd=vcenter_password,
                                  port=int(vcenter_https_port))

        except IOError:
            pass

        if not vc:
            logger.error(
                'Could not connect to vCenter host {0:s} with user {1:s} and specified password'
                .format(vcenter_host, vcenter_username))
            return 1

        logger.debug('Registering vCenter disconnect at exit')
        atexit.register(Disconnect, vc)

        logger.info('Connected to both Nuage & vCenter servers')

    except vmodl.MethodFault as e:
        logger.critical('Caught vmodl fault: {0:s}'.format(e.msg))
        return 1

    # CSV Handling
    if not os.path.isfile(mapping_file):
        logger.critical(
            'Mapping file {0:s} does not exist, exiting'.format(mapping_file))
        return 1

    mapping_list = {}
    # CSV fields:
    # vCenter VM OS regex, Policy group
    logger.debug('Parsing mapping file {0:s}'.format(mapping_file))

    with open(mapping_file, 'r') as maplist:  # text mode so csv.reader gets str rows
        mapping_list_raw = csv.reader(maplist, delimiter=',', quotechar='"')
        for row in mapping_list_raw:
            logger.debug('Found CSV row: {0:s}'.format(','.join(row)))
            mapping_list[row[0]] = row[1]

    # Getting clusters in the current vCenter
    logger.debug(
        'Gathering all Clusters from the vCenter {0:s}'.format(vcenter_host))
    content = vc.content
    obj_view = content.viewManager.CreateContainerView(
        content.rootFolder, [vim.ClusterComputeResource], True)
    vc_cl_list = obj_view.view
    obj_view.Destroy()

    for vc_cl in vc_cl_list:
        if vc_cl.name not in clusters:
            continue

        # Getting VMs in the current vCenter Cluster
        logger.debug('Gathering all VMs from the vCenter Cluster {0:s}'.format(
            vc_cl.name))
        obj_view = content.viewManager.CreateContainerView(
            vc_cl, [vim.VirtualMachine], True)
        vc_vm_list = obj_view.view
        obj_view.Destroy()

        for vc_vm in vc_vm_list:
            # Verifying if VM matches a regex in the list
            logger.debug('Found VM {0:s}, checking'.format(vc_vm.name))

            # If the VM is a template skip it
            if vc_vm.config.template:
                logger.debug('VM {0:s} is a template, skipping'.format(
                    vc_vm.name))
                continue

            # Getting VM info
            nc_vm_properties = {}
            vc_vm_nuage_enterprise = next((x for x in vc_vm.config.extraConfig
                                           if x.key == 'nuage.enterprise'),
                                          None)
            vc_vm_nuage_domain = next((x for x in vc_vm.config.extraConfig
                                       if x.key == 'nuage.nic0.domain'), None)
            vc_vm_nuage_l2domain = next((x for x in vc_vm.config.extraConfig
                                         if x.key == 'nuage.nic0.l2domain'),
                                        None)
            vc_vm_nuage_zone = next((x for x in vc_vm.config.extraConfig
                                     if x.key == 'nuage.nic0.zone'), None)
            vc_vm_nuage_network = next((x for x in vc_vm.config.extraConfig
                                        if x.key == 'nuage.nic0.network'),
                                       None)

            # Check if all the settings for an L3 domain are present
            if vc_vm_nuage_enterprise is None or vc_vm_nuage_domain is None or vc_vm_nuage_zone is None or vc_vm_nuage_network is None:
                # Check if it is an L2 domain
                if vc_vm_nuage_enterprise is None or vc_vm_nuage_l2domain is None:
                    logger.info(
                        'VM {0:s} has no correct Nuage metadata set, assuming it is not a VM connected through Nuage and skipping it.'
                        .format(vc_vm.name))
                    continue

            nc_vm_properties['name'] = vc_vm.name
            nc_vm_properties['os'] = vc_vm.config.guestFullName
            nc_vm_properties['nuage.enterprise'] = vc_vm_nuage_enterprise.value
            # If domain is not set, it is an l2 domain
            if vc_vm_nuage_domain is not None:
                nc_vm_properties['nuage.domain'] = vc_vm_nuage_domain.value
                nc_vm_properties['nuage.l2domain'] = None
                nc_vm_domain_name = vc_vm_nuage_domain.value
            else:
                nc_vm_properties['nuage.domain'] = None
                nc_vm_properties['nuage.l2domain'] = vc_vm_nuage_l2domain.value
                nc_vm_domain_name = vc_vm_nuage_l2domain.value
            if vc_vm_nuage_zone is not None:
                nc_vm_properties['nuage.zone'] = vc_vm_nuage_zone.value
            else:
                nc_vm_properties['nuage.zone'] = None
            if vc_vm_nuage_network is not None:
                nc_vm_properties['nuage.network'] = vc_vm_nuage_network.value
            else:
                nc_vm_properties['nuage.network'] = None

            logger.debug(
                'VM {0:s} with OS {1:s} has following Nuage settings: Enterprise {2:s}, Domain {3:s}, Zone {4:s}, Subnet {5:s}'
                .format(nc_vm_properties['name'], nc_vm_properties['os'],
                        nc_vm_properties['nuage.enterprise'],
                        nc_vm_domain_name, nc_vm_properties['nuage.zone'],
                        nc_vm_properties['nuage.network']))

            # Getting VM MAC
            vc_vm_nic = next(
                (x for x in vc_vm.config.hardware.device
                 if isinstance(x, vim.vm.device.VirtualEthernetCard)), None)
            if vc_vm_nic is None:
                logger.error(
                    'VM {0:s} has no valid network interfaces, skipping it'.
                    format(nc_vm_properties['name']))
                continue

            nc_vm_properties['mac'] = vc_vm_nic.macAddress
            logger.debug('VM {0:s} has MAC {1:s}'.format(
                nc_vm_properties['name'], nc_vm_properties['mac']))

            # Getting Nuage vport for this VM
            nc_vm_properties['vm_interface'] = nc.user.vm_interfaces.get_first(
                filter="MAC == '{0:s}'".format(nc_vm_properties['mac']))
            if nc_vm_properties['vm_interface'] is None:
                logger.error(
                    'VM {0:s} with MAC address {1:s} is not known in Nuage, skipping it'
                    .format(nc_vm_properties['name'], nc_vm_properties['mac']))
                continue

            # Getting Nuage vport for this VM
            nc_vm_properties['vport'] = vsdk.NUVPort(
                id=nc_vm_properties['vm_interface'].vport_id)
            try:
                nc_vm_properties['vport'].fetch()
            except BambouHTTPError:
                logger.error(
                    'VM {0:s} with MAC address {1:s} has a vm_interface but no vport in Nuage, this should not be possible... Skipping it'
                    .format(nc_vm_properties['name'], nc_vm_properties['mac']))
                continue

            logger.debug(
                'Found vm_interface and vport for VM {0:s} with MAC address {1:s}'
                .format(nc_vm_properties['name'], nc_vm_properties['mac']))

            # Checking regex's on VMs
            nc_vm_pgs = []
            for regex in list(mapping_list.keys()):
                logger.debug(
                    'Checking regex "{0:s}" on VM {1:s} with OS {2:s}'.format(
                        regex, nc_vm_properties['name'],
                        nc_vm_properties['os']))
                pattern = re.compile(regex)
                if pattern.match(nc_vm_properties['os']):
                    logger.debug(
                        'Found match: regex "{0:s}" and VM OS "{1:s}", adding to the task list to hand over to Nuage.'
                        .format(regex, nc_vm_properties['os']))
                    nc_vm_pgs.append(mapping_list[regex])

            if len(nc_vm_pgs) > 0:
                logger.debug(
                    'Handing task over to Nuage part to set {0:d} Policy Groups on VM {1:s}'
                    .format(len(nc_vm_pgs), nc_vm_properties['name']))
                update_nuage_policy_group(
                    logger=logger,
                    nc=nc,
                    nc_vm_properties=nc_vm_properties,
                    nc_vm_pgs=nc_vm_pgs,
                    remove_policygroups=remove_policygroups)

    logger.info('All done!')
    return 0
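The mapping file parsed above is a plain two-column CSV of OS regex to policy group, e.g. a hypothetical mapping.csv:

".*Ubuntu.*",PG_LINUX
".*Windows Server.*",PG_WINDOWS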
Example #36
 def __next__(this):
     try:
         return next(this.generator)
     except StopIteration:
         return None
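Because this __next__ swallows StopIteration and returns None, callers must loop with an explicit sentinel check rather than relying on normal iteration. A self-contained stand-in mirroring the wrapper:

class NoneTerminated:
    # Stand-in with the same contract as the wrapper above.
    def __init__(self, items):
        self.generator = iter(items)

    def __next__(self):
        try:
            return next(self.generator)
        except StopIteration:
            return None

w = NoneTerminated([1, 2])
item = next(w)
while item is not None:  # sentinel test instead of a for loop
    item = next(w)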
Example #37
def convert_to_beam_type(typ):
    """Convert a given typing type to a Beam type.

  Args:
    typ (type): typing type.

  Returns:
    type: The given type converted to a Beam type as far as we can do the
    conversion.

  Raises:
    ~exceptions.ValueError: The type was malformed.
  """
    if isinstance(typ, typing.TypeVar):
        # This is a special case, as it's not parameterized by types.
        # Also, identity must be preserved through conversion (i.e. the same
        # TypeVar instance must get converted into the same TypeVariable instance).
        # A global cache should be OK as the number of distinct type variables
        # is generally small.
        if id(typ) not in _type_var_cache:
            _type_var_cache[id(typ)] = typehints.TypeVariable(typ.__name__)
        return _type_var_cache[id(typ)]
    elif getattr(typ, '__module__', None) != 'typing':
        # Only translate types from the typing module.
        return typ

    type_map = [
        _TypeMapEntry(match=_match_same_type(typing.Any),
                      arity=0,
                      beam_type=typehints.Any),
        _TypeMapEntry(match=_match_issubclass(typing.Dict),
                      arity=2,
                      beam_type=typehints.Dict),
        _TypeMapEntry(match=_match_is_exactly_iterable,
                      arity=1,
                      beam_type=typehints.Iterable),
        _TypeMapEntry(match=_match_issubclass(typing.List),
                      arity=1,
                      beam_type=typehints.List),
        _TypeMapEntry(match=_match_issubclass(typing.Set),
                      arity=1,
                      beam_type=typehints.Set),
        # NamedTuple is a subclass of Tuple, but it needs special handling.
        # We just convert it to Any for now.
        # This MUST appear before the entry for the normal Tuple.
        _TypeMapEntry(match=_match_is_named_tuple,
                      arity=0,
                      beam_type=typehints.Any),
        _TypeMapEntry(match=_match_issubclass(typing.Tuple),
                      arity=-1,
                      beam_type=typehints.Tuple),
        _TypeMapEntry(match=_match_is_union,
                      arity=-1,
                      beam_type=typehints.Union),
    ]

    # Find the first matching entry.
    matched_entry = next((entry for entry in type_map if entry.match(typ)),
                         None)
    if not matched_entry:
        # No match: return original type.
        return typ

    if matched_entry.arity == -1:
        arity = _len_arg(typ)
    else:
        arity = matched_entry.arity
        if _len_arg(typ) != arity:
            raise ValueError(
                'expecting type %s to have arity %d, had arity %d '
                'instead' % (str(typ), arity, _len_arg(typ)))
    typs = [convert_to_beam_type(_get_arg(typ, i)) for i in range(arity)]
    if arity == 0:
        # Nullary types (e.g. Any) don't accept empty tuples as arguments.
        return matched_entry.beam_type
    elif arity == 1:
        # Unary types (e.g. Set) don't accept 1-tuples as arguments
        return matched_entry.beam_type[typs[0]]
    else:
        return matched_entry.beam_type[tuple(typs)]
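A few conversions that follow directly from the type_map above (a sketch; it assumes typehints is in scope, as it is in this module, and that the constraint classes compare equal, which they do in recent Beam releases):

import typing

assert convert_to_beam_type(typing.Any) == typehints.Any
assert convert_to_beam_type(typing.List[int]) == typehints.List[int]

# TypeVar identity is preserved through the cache:
T = typing.TypeVar('T')
assert convert_to_beam_type(T) is convert_to_beam_type(T)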
Example #38
    def processAlgorithm(self, feedback):
        radius = self.getParameterValue(self.DISTANCE)
        horizontal = self.getParameterValue(self.HORIZONTAL)
        output = self.getOutputFromName(self.OUTPUT_LAYER)

        layer = dataobjects.getObjectFromUri(
            self.getParameterValue(self.INPUT_LAYER))

        writer = output.getVectorWriter(layer.fields(), layer.wkbType(),
                                        layer.crs())

        features = vector.features(layer)

        total = 100.0 / len(features)

        duplicates = dict()
        for current, f in enumerate(features):
            wkt = f.geometry().exportToWkt()
            if wkt not in duplicates:
                duplicates[wkt] = [f.id()]
            else:
                duplicates[wkt].extend([f.id()])

            feedback.setProgress(int(current * total))

        current = 0
        total = 100.0 / len(duplicates)
        feedback.setProgress(0)

        fullPerimeter = 2 * math.pi

        for (geom, fids) in list(duplicates.items()):
            count = len(fids)
            if count == 1:
                f = next(
                    layer.getFeatures(QgsFeatureRequest().setFilterFid(
                        fids[0])))
                writer.addFeature(f)
            else:
                angleStep = fullPerimeter / count
                if count == 2 and horizontal:
                    currentAngle = math.pi / 2
                else:
                    currentAngle = 0

                old_point = QgsGeometry.fromWkt(geom).asPoint()

                request = QgsFeatureRequest().setFilterFids(fids).setFlags(
                    QgsFeatureRequest.NoGeometry)
                for f in layer.getFeatures(request):
                    sinusCurrentAngle = math.sin(currentAngle)
                    cosinusCurrentAngle = math.cos(currentAngle)
                    dx = radius * sinusCurrentAngle
                    dy = radius * cosinusCurrentAngle

                    new_point = QgsPoint(old_point.x() + dx,
                                         old_point.y() + dy)
                    out_feature = QgsFeature()
                    out_feature.setGeometry(QgsGeometry.fromPoint(new_point))
                    out_feature.setAttributes(f.attributes())

                    writer.addFeature(out_feature)
                    currentAngle += angleStep

            current += 1
            feedback.setProgress(int(current * total))

        del writer
Example #39
    def processAlgorithm(self, parameters, context, feedback):
        layerPoints = QgsProcessingUtils.mapLayerFromString(
            self.getParameterValue(self.POINTS), context)
        layerHubs = QgsProcessingUtils.mapLayerFromString(
            self.getParameterValue(self.HUBS), context)
        fieldName = self.getParameterValue(self.FIELD)

        units = self.UNITS[self.getParameterValue(self.UNIT)]

        if layerPoints.source() == layerHubs.source():
            raise GeoAlgorithmExecutionException(
                self.tr('Same layer given for both hubs and spokes'))

        fields = layerPoints.fields()
        fields.append(QgsField('HubName', QVariant.String))
        fields.append(QgsField('HubDist', QVariant.Double))

        writer = self.getOutputFromName(self.OUTPUT).getVectorWriter(
            fields, QgsWkbTypes.LineString, layerPoints.crs(), context)

        index = QgsProcessingUtils.createSpatialIndex(layerHubs, context)

        distance = QgsDistanceArea()
        distance.setSourceCrs(layerPoints.crs())
        distance.setEllipsoid(QgsProject.instance().ellipsoid())

        # Scan source points, find nearest hub, and write to output file
        features = QgsProcessingUtils.getFeatures(layerPoints, context)
        total = 100.0 / layerPoints.featureCount() if layerPoints.featureCount(
        ) else 0
        for current, f in enumerate(features):
            src = f.geometry().boundingBox().center()

            neighbors = index.nearestNeighbor(src, 1)
            ft = next(
                layerHubs.getFeatures(QgsFeatureRequest().setFilterFid(
                    neighbors[0]).setSubsetOfAttributes([fieldName],
                                                        layerHubs.fields())))
            closest = ft.geometry().boundingBox().center()
            hubDist = distance.measureLine(src, closest)

            attributes = f.attributes()
            attributes.append(ft[fieldName])
            if units == 'Feet':
                attributes.append(hubDist * 3.2808399)
            elif units == 'Miles':
                attributes.append(hubDist * 0.000621371192)
            elif units == 'Kilometers':
                attributes.append(hubDist / 1000.0)
            elif units != 'Meters':
                attributes.append(
                    sqrt(
                        pow(src.x() - closest.x(), 2.0) +
                        pow(src.y() - closest.y(), 2.0)))
            else:
                attributes.append(hubDist)

            feat = QgsFeature()
            feat.setAttributes(attributes)

            feat.setGeometry(QgsGeometry.fromPolyline([src, closest]))

            writer.addFeature(feat, QgsFeatureSink.FastInsert)
            feedback.setProgress(int(current * total))

        del writer
Example #40
    def testDateTimeWriteShapefile(self):
        """Check writing date and time fields to an ESRI shapefile."""
        ml = QgsVectorLayer(
            ('Point?crs=epsg:4326&field=id:int&'
             'field=date_f:date&field=time_f:time&field=dt_f:datetime'),
            'test', 'memory')

        self.assertTrue(ml.isValid())
        provider = ml.dataProvider()
        self.assertIsNotNone(provider)

        ft = QgsFeature()
        ft.setGeometry(QgsGeometry.fromPointXY(QgsPointXY(10, 10)))
        ft.setAttributes([
            1,
            QDate(2014, 3, 5),
            QTime(13, 45, 22),
            QDateTime(QDate(2014, 3, 5), QTime(13, 45, 22))
        ])
        res, features = provider.addFeatures([ft])
        self.assertTrue(res)
        self.assertTrue(features)

        dest_file_name = os.path.join(str(QDir.tempPath()), 'datetime.shp')
        crs = QgsCoordinateReferenceSystem()
        crs.createFromId(4326, QgsCoordinateReferenceSystem.EpsgCrsId)
        write_result, error_message = QgsVectorFileWriter.writeAsVectorFormat(
            ml, dest_file_name, 'utf-8', crs, 'ESRI Shapefile')
        self.assertEqual(write_result, QgsVectorFileWriter.NoError,
                         error_message)

        # Open result and check
        created_layer = QgsVectorLayer('{}|layerid=0'.format(dest_file_name),
                                       'test', 'ogr')

        fields = created_layer.dataProvider().fields()
        self.assertEqual(
            fields.at(fields.indexFromName('date_f')).type(), QVariant.Date)
        # shapefiles do not support time types, result should be string
        self.assertEqual(
            fields.at(fields.indexFromName('time_f')).type(), QVariant.String)
        # shapefiles do not support datetime types, result should be string
        self.assertEqual(
            fields.at(fields.indexFromName('dt_f')).type(), QVariant.String)

        f = next(created_layer.getFeatures(QgsFeatureRequest()))

        date_idx = created_layer.fields().lookupField('date_f')
        self.assertIsInstance(f.attributes()[date_idx], QDate)
        self.assertEqual(f.attributes()[date_idx], QDate(2014, 3, 5))
        time_idx = created_layer.fields().lookupField('time_f')
        # shapefiles do not support time types
        self.assertIsInstance(f.attributes()[time_idx], str)
        self.assertEqual(f.attributes()[time_idx], '13:45:22')
        # shapefiles do not support datetime types
        datetime_idx = created_layer.fields().lookupField('dt_f')
        self.assertIsInstance(f.attributes()[datetime_idx], str)
        self.assertEqual(
            f.attributes()[datetime_idx],
            QDateTime(QDate(2014, 3, 5),
                      QTime(13, 45, 22)).toString("yyyy/MM/dd hh:mm:ss.zzz"))
Example #41
    def executeCmd(self, cmdVar):
        """Execute a command (of type opscore.actor.keyvar.CmdVar).
        
        Performs the following tasks:
        - Sets the command ID number
        - Sets the start time
        - Puts the command on the keyword dispatcher queue
        - Sends the command to the server

        Inputs:
        - cmdVar: the command, of class opscore.actor.keyvar.CmdVar
            
        Note:
        - Always increments cmdID because every command must have a unique command ID
          (even commands that go to different actors); this simplifies the
          dispatcher code and also makes the hub's life easier
          (since it can report certain kinds of failures using actor=hub).
        """
        if not self._isConnected:
            errReply = self.makeReply(
                dataStr="Failed; Actor=%r; Cmd=%r; Text=\"not connected\"" %
                (cmdVar.actor, cmdVar.cmdStr), )
            self._replyToCmdVar(cmdVar, errReply)
            return

        while True:
            if cmdVar.isRefresh:
                cmdID = next(self.refreshCmdIDGen)
            else:
                cmdID = next(self.userCmdIDGen)
            if cmdID not in self.cmdDict:
                break
        self.cmdDict[cmdID] = cmdVar
        cmdVar._setStartInfo(self, cmdID)

        try:
            if self.includeName:
                # internal actor; must specify the commander
                if cmdVar.forUserCmd:
                    cmdrStr = "%s.%s " % (cmdVar.forUserCmd.cmdr,
                                          self.connection.cmdr)
                else:
                    cmdrStr = "%s.%s " % (self.connection.cmdr,
                                          self.connection.cmdr)
            else:
                # external actor; do not specify the commander
                cmdrStr = ""
            fullCmdStr = "%s%d %s %s" % (cmdrStr, cmdVar.cmdID, cmdVar.actor,
                                         cmdVar.cmdStr)
            self.connection.writeLine(fullCmdStr)
        except Exception as e:
            errReply = self.makeReply(
                cmdID=cmdVar.cmdID,
                dataStr="WriteFailed; Actor=%r; Cmd=%r; Text=%r" %
                (cmdVar.actor, cmdVar.cmdStr,
                 RO.StringUtil.strFromException(e)),
            )
            self._replyToCmdVar(cmdVar, errReply)
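
The while True loop above keeps drawing IDs from a generator until it finds one
that is not already registered in cmdDict. A standalone sketch of that
allocation pattern, assuming a plain itertools.count as the ID source:

import itertools

id_gen = itertools.count(1)   # stand-in for userCmdIDGen
in_use = {1, 2, 3}            # IDs already present in cmdDict

while True:
    cmd_id = next(id_gen)     # draw the next candidate ID
    if cmd_id not in in_use:  # stop at the first free one
        break

assert cmd_id == 4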
Example #42
def _frame_alignment_base(dataset,
                          max_displacement=None,
                          method='correlation',
                          n_processes=1,
                          **method_kwargs):
    """Estimate whole-frame displacements based on pixel correlations.

    Parameters
    ----------
    max_displacement : array
        see estimate_displacements
    n_processes : int, optional
        Number of pool processes to spawn to parallelize frame alignment.
        Defaults to 1.

    Returns
    -------
    shifts : array
        (2, num_frames*num_cycles)-array of integers giving the
        estimated displacement of each frame
    correlations : array
        (num_frames*num_cycles)-array giving the correlation of
        each shifted frame with the reference

    """

    if n_processes < 1:
        raise ValueError('n_processes must be at least 1')

    global namespace
    global lock
    if n_processes > 1:
        namespace = multiprocessing.Manager().Namespace()
    else:
        namespace = Struct()
    namespace.offset = np.zeros(3, dtype=int)
    namespace.pixel_counts = np.zeros(dataset.frame_shape)  # TODO: int?
    namespace.pixel_sums = np.zeros(dataset.frame_shape).astype('float64')
    # NOTE: float64 gives nan when divided by 0
    namespace.shifts = [
        np.zeros(seq.shape[:2] + (3, ), dtype=int) for seq in dataset
    ]
    namespace.correlations = [np.empty(seq.shape[:2]) for seq in dataset]
    namespace.min_shift = np.zeros(3)
    namespace.max_shift = np.zeros(3)

    lock = multiprocessing.Lock()
    if n_processes > 1:
        pool = multiprocessing.Pool(processes=n_processes, maxtasksperchild=1)

    for cycle_idx, cycle in zip(it.count(), dataset):

        chunksize = min(1 + old_div(len(cycle), n_processes), 200)
        if n_processes > 1:
            map_generator = pool.imap_unordered(
                _align_frame,
                zip(it.count(), cycle, it.repeat(cycle_idx), it.repeat(method),
                    it.repeat(max_displacement), it.repeat(method_kwargs)),
                chunksize=chunksize)
        else:
            map_generator = map(
                _align_frame,
                zip(it.count(), cycle, it.repeat(cycle_idx), it.repeat(method),
                    it.repeat(max_displacement), it.repeat(method_kwargs)))

        # Loop over generator and calculate frame alignments
        while True:
            try:
                next(map_generator)
            except StopIteration:
                break

    if n_processes > 1:
        pool.close()
        pool.join()

    def _align_planes(shifts):
        """Align planes to minimize shifts between them."""
        mean_shift = nanmean(np.concatenate(shifts), axis=0)
        # calculate alteration of shape (num_planes, dim)
        alteration = (mean_shift - mean_shift[0]).astype(int)
        for seq in shifts:
            seq -= alteration

    shifts = [s[..., 1:] for s in namespace.shifts]
    correlations = namespace.correlations

    del namespace.pixel_counts
    del namespace.pixel_sums
    del namespace.shifts
    del namespace.correlations

    _align_planes(shifts)
    return shifts, correlations
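
The while True/next() loop in the body runs map_generator purely for its side
effects: each next() call aligns one frame, and the loop ends when the iterator
raises StopIteration. A hedged equivalent that drains any iterator the same
way:

from collections import deque

def exhaust(iterator):
    # deque with maxlen=0 pulls every item and discards it,
    # absorbing StopIteration just like the while/next loop
    deque(iterator, maxlen=0)

exhaust(print(i) for i in range(3))  # runs the generator for its prints only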
Example #43
    def processAlgorithm(self, feedback):
        layer = dataobjects.getObjectFromUri(
            self.getParameterValue(self.VECTOR))
        pointCount = float(self.getParameterValue(self.POINT_NUMBER))
        minDistance = float(self.getParameterValue(self.MIN_DISTANCE))

        fields = QgsFields()
        fields.append(QgsField('id', QVariant.Int, '', 10, 0))
        writer = self.getOutputFromName(self.OUTPUT).getVectorWriter(
            fields, QgsWkbTypes.Point, layer.crs())

        nPoints = 0
        nIterations = 0
        maxIterations = pointCount * 200
        featureCount = layer.featureCount()
        total = 100.0 / pointCount

        index = QgsSpatialIndex()
        points = dict()

        da = QgsDistanceArea()
        request = QgsFeatureRequest()

        random.seed()

        while nIterations < maxIterations and nPoints < pointCount:
            # pick random feature
            fid = random.randint(0, featureCount - 1)
            f = next(
                layer.getFeatures(
                    request.setFilterFid(fid).setSubsetOfAttributes([])))
            fGeom = f.geometry()

            if fGeom.isMultipart():
                lines = fGeom.asMultiPolyline()
                # pick random line
                lineId = random.randint(0, len(lines) - 1)
                vertices = lines[lineId]
            else:
                vertices = fGeom.asPolyline()

            # pick random segment
            if len(vertices) == 2:
                vid = 0
            else:
                vid = random.randint(0, len(vertices) - 2)
            startPoint = vertices[vid]
            endPoint = vertices[vid + 1]
            length = da.measureLine(startPoint, endPoint)
            dist = length * random.random()

            if dist > minDistance:
                d = dist / (length - dist)
                rx = (startPoint.x() + d * endPoint.x()) / (1 + d)
                ry = (startPoint.y() + d * endPoint.y()) / (1 + d)

                # generate random point
                pnt = QgsPoint(rx, ry)
                geom = QgsGeometry.fromPoint(pnt)
                if vector.checkMinDistance(pnt, index, minDistance, points):
                    f = QgsFeature(nPoints)
                    f.initAttributes(1)
                    f.setFields(fields)
                    f.setAttribute('id', nPoints)
                    f.setGeometry(geom)
                    writer.addFeature(f)
                    index.insertFeature(f)
                    points[nPoints] = pnt
                    nPoints += 1
                    feedback.setProgress(int(nPoints * total))
            nIterations += 1

        if nPoints < pointCount:
            ProcessingLog.addToLog(
                ProcessingLog.LOG_INFO,
                self.tr('Cannot generate requested number of random points. '
                        'Maximum number of attempts exceeded.'))

        del writer
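
next(layer.getFeatures(...)) above fetches exactly one feature and raises
StopIteration if the fid filter matches nothing. A small sketch of an explicit
guard for that case, with illustrative names:

def fetch_one(features):
    # surface an empty iterator as a clearer error than bare StopIteration
    try:
        return next(features)
    except StopIteration:
        raise LookupError('feature request returned no rows')

assert fetch_one(iter(['f1'])) == 'f1'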
Example #44
    def testWriteShapefileWithZ(self):
        """Check writing geometries with Z dimension to an ESRI shapefile."""

        # start by saving a memory layer and forcing z
        ml = QgsVectorLayer(('Point?crs=epsg:4326&field=id:int'), 'test',
                            'memory')

        self.assertIsNotNone(ml, 'Provider not initialized')
        self.assertTrue(ml.isValid(), 'Source layer not valid')
        provider = ml.dataProvider()
        self.assertIsNotNone(provider)

        ft = QgsFeature()
        ft.setGeometry(QgsGeometry.fromWkt('PointZ (1 2 3)'))
        ft.setAttributes([1])
        res, features = provider.addFeatures([ft])
        self.assertTrue(res)
        self.assertTrue(features)

        # check with both a standard PointZ and 25d style Point25D type
        for t in [QgsWkbTypes.PointZ, QgsWkbTypes.Point25D]:
            dest_file_name = os.path.join(
                str(QDir.tempPath()),
                'point_{}.shp'.format(QgsWkbTypes.displayString(t)))
            crs = QgsCoordinateReferenceSystem()
            crs.createFromId(4326, QgsCoordinateReferenceSystem.EpsgCrsId)
            write_result, error_message = QgsVectorFileWriter.writeAsVectorFormat(
                ml,
                dest_file_name,
                'utf-8',
                crs,
                'ESRI Shapefile',
                overrideGeometryType=t)
            self.assertEqual(write_result, QgsVectorFileWriter.NoError,
                             error_message)

            # Open result and check
            created_layer = QgsVectorLayer(
                '{}|layerid=0'.format(dest_file_name), 'test', 'ogr')
            f = next(created_layer.getFeatures(QgsFeatureRequest()))
            g = f.geometry()
            wkt = g.asWkt()
            expWkt = 'PointZ (1 2 3)'
            self.assertTrue(
                compareWkt(expWkt, wkt),
                "saving geometry with Z failed: mismatch Expected:\n%s\nGot:\n%s\n"
                % (expWkt, wkt))

            # also try saving out the shapefile version again, as an extra test
            # this checks that saving a layer with z WITHOUT explicitly telling
            # the writer to keep z values still retains the z values
            dest_file_name = os.path.join(
                str(QDir.tempPath()),
                'point_{}_copy.shp'.format(QgsWkbTypes.displayString(t)))
            crs = QgsCoordinateReferenceSystem()
            crs.createFromId(4326, QgsCoordinateReferenceSystem.EpsgCrsId)
            write_result, error_message = QgsVectorFileWriter.writeAsVectorFormat(
                created_layer, dest_file_name, 'utf-8', crs, 'ESRI Shapefile')
            self.assertEqual(write_result, QgsVectorFileWriter.NoError,
                             error_message)

            # Open result and check
            created_layer_from_shp = QgsVectorLayer(
                '{}|layerid=0'.format(dest_file_name), 'test', 'ogr')
            f = next(created_layer_from_shp.getFeatures(QgsFeatureRequest()))
            g = f.geometry()
            wkt = g.asWkt()
            self.assertTrue(
                compareWkt(expWkt, wkt),
                "saving geometry with Z failed: mismatch Expected:\n%s\nGot:\n%s\n"
                % (expWkt, wkt))
Example #45
def addFilesToWMBSInBulk(filesetId,
                         workflowName,
                         files,
                         isDBS=True,
                         conn=None,
                         transaction=None):
    """
    _addFilesToWMBSInBulk

    Do a bulk addition of files into WMBS. This is a speedup.

    Assumes files are full dao objects
    """
    if not files:
        # Nothing to do
        return 0

    daofactory = next(iter(files)).daofactory
    setParentage = daofactory(classname="Files.SetParentage")
    setFileRunLumi = daofactory(classname="Files.AddRunLumi")
    setFileLocation = daofactory(classname="Files.SetLocationForWorkQueue")
    setFileAddChecksum = daofactory(classname="Files.AddChecksumByLFN")
    addFileAction = daofactory(classname="Files.Add")
    addToFileset = daofactory(classname="Files.AddDupsToFileset")
    updateFileAction = daofactory(classname="Files.Update")

    # build up list of binds for all files then run in single transaction
    parentageBinds = []
    runLumiBinds = []
    fileCksumBinds = []
    fileLocations = []
    fileCreate = []
    fileLFNs = set()
    lfnsToCreate = set()
    lfnList = set()
    fileUpdate = []

    for wmbsFile in files:
        lfn = wmbsFile['lfn']
        lfnList.add(lfn)

        if wmbsFile.get('inFileset', True):
            fileLFNs.add(lfn)
        for parent in wmbsFile['parents']:
            parentageBinds.append({'child': lfn, 'parent': parent["lfn"]})

        selfChecksums = wmbsFile['checksums']
        if len(wmbsFile['runs']) > 0:
            runLumiBinds.append({'lfn': lfn, 'runs': wmbsFile['runs']})

        if len(wmbsFile['newlocations']) < 1:
            # Then we're in trouble
            msg = "File created in WMBS without locations!\n"
            msg += "File lfn: %s\n" % (lfn)
            logging.error(msg)
            raise RuntimeError(msg)

        for loc in wmbsFile['newlocations']:
            fileLocations.append({'lfn': lfn, 'location': loc})

        if wmbsFile.exists():
            # update events, size, first_event, merged
            fileUpdate.append([
                lfn, wmbsFile['size'], wmbsFile['events'], None,
                wmbsFile["first_event"], wmbsFile['merged']
            ])
            continue

        lfnsToCreate.add(lfn)

        if selfChecksums:
            # If we have checksums we have to create a bind
            # For each different checksum
            for entry in selfChecksums:
                fileCksumBinds.append({
                    'lfn': lfn,
                    'cksum': selfChecksums[entry],
                    'cktype': entry
                })

        fileCreate.append([
            lfn, wmbsFile['size'], wmbsFile['events'], None,
            wmbsFile["first_event"], wmbsFile['merged']
        ])

    if len(fileCreate) > 0:
        addFileAction.execute(files=fileCreate,
                              conn=conn,
                              transaction=transaction)
        setFileAddChecksum.execute(bulkList=fileCksumBinds,
                                   conn=conn,
                                   transaction=transaction)

    if len(fileUpdate) > 0:
        updateFileAction.execute(files=fileUpdate,
                                 conn=conn,
                                 transaction=transaction)

    if len(fileLocations) > 0:
        setFileLocation.execute(lfns=lfnList,
                                locations=fileLocations,
                                isDBS=isDBS,
                                conn=conn,
                                transaction=transaction)
    if len(runLumiBinds) > 0:
        setFileRunLumi.execute(file=runLumiBinds,
                               conn=conn,
                               transaction=transaction)

    if len(fileLFNs) > 0:
        addToFileset.execute(file=fileLFNs,
                             fileset=filesetId,
                             workflow=workflowName,
                             conn=conn,
                             transaction=transaction)

    if len(parentageBinds) > 0:
        setParentage.execute(binds=parentageBinds,
                             conn=conn,
                             transaction=transaction)

    return len(lfnsToCreate)
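
next(iter(files)) at the top of the function grabs one arbitrary element from
files without assuming it supports indexing, so it works equally for sets,
which have no files[0]. In isolation:

files = {'lfn1', 'lfn2', 'lfn3'}   # sets cannot be indexed

sample = next(iter(files))         # arbitrary element, set left untouched
assert sample in files
assert len(files) == 3             # nothing was removed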
Example #46
def cli():

    logging.basicConfig(level=logging.DEBUG)

    # Control-T handling
    hidden = [True]  # Nonlocal
    key_bindings = KeyBindings()

    @key_bindings.add('c-t')
    def _(event):
        ' When Control-T has been pressed, toggle visibility. '
        hidden[0] = not hidden[0]

    def prompt_pass():
        print('Type Control-T to toggle password visible.')
        password = prompt('Password/Key: ',
                          is_password=Condition(lambda: hidden[0]),
                          key_bindings=key_bindings)
        return password

    def prompt_key():
        print('Type Control-T to toggle key visible.')
        key = prompt('Key: ',
                     is_password=Condition(lambda: hidden[0]),
                     key_bindings=key_bindings)
        return key

    def prompt_pin():
        print('Press any key when finished entering PIN')
        return

    if len(sys.argv) > 1:

        if sys.argv[1] == 'settime':
            only_key.set_time(time.time())

        elif sys.argv[1] == 'init':
            while 1:
                if only_key.read_string(timeout_ms=500) != 'UNINITIALIZED':
                    break

            for msg in [Message.OKSETPIN]:
                only_key.send_message(msg=msg)
                print(only_key.read_string())
                print()
                input('Press the Enter key once you are done')
                only_key.send_message(msg=msg)
                print(only_key.read_string())
                only_key.send_message(msg=msg)
                print(only_key.read_string())
                print()
                input('Press the Enter key once you are done')
                only_key.send_message(msg=msg)
                print(only_key.read_string())
                print()

            for msg in [Message.OKSETPDPIN]:
                only_key.send_message(msg=msg)
                print(only_key.read_string() + ' for second profile')
                print()
                input('Press the Enter key once you are done')
                only_key.send_message(msg=msg)
                print(only_key.read_string() + ' for second profile')
                only_key.send_message(msg=msg)
                print(only_key.read_string())
                print()
                input('Press the Enter key once you are done')
                only_key.send_message(msg=msg)
                print(only_key.read_string())
                print()

            for msg in [Message.OKSETSDPIN]:
                only_key.send_message(msg=msg)
                print(only_key.read_string())
                print()
                input('Press the Enter key once you are done')
                only_key.send_message(msg=msg)
                print(only_key.read_string())
                only_key.send_message(msg=msg)
                print(only_key.read_string())
                print()
                input('Press the Enter key once you are done')
                only_key.send_message(msg=msg)
                print(only_key.read_string())
                print()

        elif sys.argv[1] == 'getlabels':
            tmp = {}
            for slot in only_key.getlabels():
                tmp[slot.name] = slot
            slots = iter([
                '1a', '1b', '2a', '2b', '3a', '3b', '4a', '4b', '5a', '5b',
                '6a', '6b'
            ])
            for slot_name in slots:
                print(tmp[slot_name].to_str().replace('ÿ', " "))
                print(tmp[next(slots)].to_str().replace('ÿ', " "))
                print()

        elif sys.argv[1] == 'getkeylabels':
            tmp = {}
            for slot in only_key.getkeylabels():
                tmp[slot.name] = slot
            slots = iter([
                'RSA Key 1', 'RSA Key 2', 'RSA Key 3', 'RSA Key 4',
                'ECC Key 1', 'ECC Key 2', 'ECC Key 3', 'ECC Key 4',
                'ECC Key 5', 'ECC Key 6', 'ECC Key 7', 'ECC Key 8',
                'ECC Key 9', 'ECC Key 10', 'ECC Key 11', 'ECC Key 12',
                'ECC Key 13', 'ECC Key 14', 'ECC Key 15', 'ECC Key 16'
            ])
            for slot_name in slots:
                print(tmp[slot_name].to_str().replace('ÿ', " "))

        elif sys.argv[1] == 'setslot':
            try:
                if sys.argv[2] == '1a':
                    slot_id = 1
                elif sys.argv[2] == '2a':
                    slot_id = 2
                elif sys.argv[2] == '3a':
                    slot_id = 3
                elif sys.argv[2] == '4a':
                    slot_id = 4
                elif sys.argv[2] == '5a':
                    slot_id = 5
                elif sys.argv[2] == '6a':
                    slot_id = 6
                elif sys.argv[2] == '1b':
                    slot_id = 7
                elif sys.argv[2] == '2b':
                    slot_id = 8
                elif sys.argv[2] == '3b':
                    slot_id = 9
                elif sys.argv[2] == '4b':
                    slot_id = 10
                elif sys.argv[2] == '5b':
                    slot_id = 11
                elif sys.argv[2] == '6b':
                    slot_id = 12
                elif int(sys.argv[2]) >= 25:
                    slot_id = int(sys.argv[2])
                else:
                    raise ValueError('invalid slot id')
            except (IndexError, ValueError):
                print("setslot <id> <type> [value]")
                print("<id> must be slot number 1a - 6b")
                return

            if sys.argv[3] == 'label':
                only_key.setslot(slot_id, MessageField.LABEL, sys.argv[4])
            elif sys.argv[3] == 'ecc_key_label':
                only_key.setslot(slot_id + 28, MessageField.LABEL, sys.argv[4])
            elif sys.argv[3] == 'rsa_key_label':
                only_key.setslot(slot_id + 24, MessageField.LABEL, sys.argv[4])
            elif sys.argv[3] == 'url':
                only_key.setslot(slot_id, MessageField.URL, sys.argv[4])
            elif sys.argv[3] == 'add_char2':
                only_key.setslot(slot_id, MessageField.NEXTKEY1, sys.argv[4])
            elif sys.argv[3] == 'delay1':
                only_key.setslot(slot_id, MessageField.DELAY1, sys.argv[4])
            elif sys.argv[3] == 'username':
                only_key.setslot(slot_id, MessageField.USERNAME, sys.argv[4])
            elif sys.argv[3] == 'add_char3':
                only_key.setslot(slot_id, MessageField.NEXTKEY2, sys.argv[4])
            elif sys.argv[3] == 'delay2':
                only_key.setslot(slot_id, MessageField.DELAY2, sys.argv[4])
            elif sys.argv[3] == 'password':
                password = prompt_pass()
                only_key.setslot(slot_id, MessageField.PASSWORD, password)
            elif sys.argv[3] == 'add_char5':
                only_key.setslot(slot_id, MessageField.NEXTKEY3, sys.argv[4])
            elif sys.argv[3] == 'delay3':
                only_key.setslot(slot_id, MessageField.DELAY3, sys.argv[4])
            elif sys.argv[3] == '2fa':
                only_key.setslot(slot_id, MessageField.TFATYPE, sys.argv[4])
            elif sys.argv[3] == 'gkey':
                totpkey = prompt_key()
                totpkey = base64.b32decode("".join(totpkey.split()).upper())
                totpkey = binascii.hexlify(totpkey)
                # pad with zeros for even digits
                totpkey = totpkey.zfill(len(totpkey) + len(totpkey) % 2)
                payload = [
                    int(totpkey[i:i + 2], 16)
                    for i in range(0, len(totpkey), 2)
                ]
                only_key.setslot(slot_id, MessageField.TOTPKEY, payload)
            elif sys.argv[3] == 'totpkey':
                totpkey = prompt_key()
                only_key.setslot(slot_id, MessageField.TOTPKEY, totpkey)
            elif sys.argv[3] == 'add_char1':
                only_key.setslot(slot_id, MessageField.NEXTKEY4, sys.argv[4])
            elif sys.argv[3] == 'add_char4':
                only_key.setslot(slot_id, MessageField.NEXTKEY5, sys.argv[4])
            else:
                print("setslot <id> <type> [value]")
                print(
                    "<type> must be ['label', 'ecc_key_label', 'rsa_key_label', 'url', 'add_char1', 'delay1', 'username', 'add_char2', 'delay2', 'password', 'add_char3', 'delay3', '2fa', 'totpkey', 'add_char4', 'add_char5']"
                )
            return

        elif sys.argv[1] == 'wipeslot':
            try:
                if sys.argv[2] == '1a':
                    slot_id = 1
                elif sys.argv[2] == '2a':
                    slot_id = 2
                elif sys.argv[2] == '3a':
                    slot_id = 3
                elif sys.argv[2] == '4a':
                    slot_id = 4
                elif sys.argv[2] == '5a':
                    slot_id = 5
                elif sys.argv[2] == '6a':
                    slot_id = 6
                elif sys.argv[2] == '1b':
                    slot_id = 7
                elif sys.argv[2] == '2b':
                    slot_id = 8
                elif sys.argv[2] == '3b':
                    slot_id = 9
                elif sys.argv[2] == '4b':
                    slot_id = 10
                elif sys.argv[2] == '5b':
                    slot_id = 11
                elif sys.argv[2] == '6b':
                    slot_id = 12
                elif int(sys.argv[2]) >= 25:
                    slot_id = int(sys.argv[2])
                else:
                    raise ValueError('invalid slot id')
            except (IndexError, ValueError):
                print("wipeslot <id>")
                print("<id> must be slot number 1a - 6b")
                return

            only_key.wipeslot(slot_id)

        elif sys.argv[1] == 'backupkey':
            only_key.generate_backup_key()

        elif sys.argv[1] == 'setkey':
            only_key.setkey(sys.argv[2], sys.argv[3], sys.argv[4])

        elif sys.argv[1] == 'wipekey':
            only_key.wipekey(sys.argv[2])

        elif sys.argv[1] == 'idletimeout':
            only_key.setslot(1, MessageField.IDLETIMEOUT, int(sys.argv[2]))
        elif sys.argv[1] == 'wipemode':
            only_key.setslot(1, MessageField.WIPEMODE, int(sys.argv[2]))
        elif sys.argv[1] == 'keytypespeed':
            only_key.setslot(1, MessageField.KEYTYPESPEED, int(sys.argv[2]))
        elif sys.argv[1] == 'led_brightness':
            only_key.setslot(1, MessageField.LEDBRIGHTNESS, int(sys.argv[2]))
        elif sys.argv[1] == '2nd_profile_mode':
            only_key.setslot(1, MessageField.SECPROFILEMODE, int(sys.argv[2]))
        elif sys.argv[1] == 'pgp_challenge_mode':
            only_key.setslot(1, MessageField.PGPCHALENGEMODE, int(sys.argv[2]))
        elif sys.argv[1] == 'ssh_challenge_mode':
            only_key.setslot(1, MessageField.SSHCHALENGEMODE, int(sys.argv[2]))
        elif sys.argv[1] == 'backup_key_mode':
            only_key.setslot(1, MessageField.BACKUPMODE, int(sys.argv[2]))
        elif sys.argv[1] == 'keylayout':
            only_key.setslot(1, MessageField.KEYLAYOUT, int(sys.argv[2]))

        elif sys.argv[1]:
            print('Command not found')
            print()

    else:

        # Print help.
        print('OnlyKey CLI v1.2.2')
        print('Control-D to exit.')
        print()

        def mprompt():
            return prompt('OnlyKey> ')

        nexte = mprompt

        while 1:
            print()
            raw = nexte()
            print()
            data = raw.split()
            if not data:
                continue
            if data[0] == "settime":
                only_key.set_time(time.time())
            if data[0] == "init":
                while 1:
                    if only_key.read_string(timeout_ms=500) != 'UNINITIALIZED':
                        break

                for msg in [Message.OKSETPIN]:
                    only_key.send_message(msg=msg)
                    print(only_key.read_string())
                    print()
                    input('Press the Enter key once you are done')
                    only_key.send_message(msg=msg)
                    print(only_key.read_string())
                    only_key.send_message(msg=msg)
                    print(only_key.read_string())
                    print()
                    input('Press the Enter key once you are done')
                    only_key.send_message(msg=msg)
                    print(only_key.read_string())
                    print()

                for msg in [Message.OKSETPDPIN]:
                    only_key.send_message(msg=msg)
                    print(only_key.read_string() + ' for second profile')
                    print()
                    input('Press the Enter key once you are done')
                    only_key.send_message(msg=msg)
                    print(only_key.read_string() + ' for second profile')
                    only_key.send_message(msg=msg)
                    print(only_key.read_string())
                    print()
                    input('Press the Enter key once you are done')
                    only_key.send_message(msg=msg)
                    print(only_key.read_string())
                    print()

                for msg in [Message.OKSETSDPIN]:
                    only_key.send_message(msg=msg)
                    print(only_key.read_string())
                    print()
                    input('Press the Enter key once you are done')
                    only_key.send_message(msg=msg)
                    print(only_key.read_string())
                    only_key.send_message(msg=msg)
                    print(only_key.read_string())
                    print()
                    input('Press the Enter key once you are done')
                    only_key.send_message(msg=msg)
                    print(only_key.read_string())
                    print()
            elif data[0] == 'getlabels':
                tmp = {}
                for slot in only_key.getlabels():
                    tmp[slot.name] = slot
                slots = iter([
                    '1a', '1b', '2a', '2b', '3a', '3b', '4a', '4b', '5a', '5b',
                    '6a', '6b'
                ])
                for slot_name in slots:
                    print(tmp[slot_name].to_str().replace('ÿ', " "))
                    print(tmp[next(slots)].to_str().replace('ÿ', " "))
                    print()

            elif data[0] == 'getkeylabels':
                tmp = {}
                for slot in only_key.getkeylabels():
                    tmp[slot.name] = slot
                slots = iter([
                    'RSA Key 1', 'RSA Key 2', 'RSA Key 3', 'RSA Key 4',
                    'ECC Key 1', 'ECC Key 2', 'ECC Key 3', 'ECC Key 4',
                    'ECC Key 5', 'ECC Key 6', 'ECC Key 7', 'ECC Key 8',
                    'ECC Key 9', 'ECC Key 10', 'ECC Key 11', 'ECC Key 12',
                    'ECC Key 13', 'ECC Key 14', 'ECC Key 15', 'ECC Key 16'
                ])
                for slot_name in slots:
                    print(tmp[slot_name].to_str().replace('ÿ', " "))

            elif data[0] == 'setslot':
                try:
                    if data[1] == '1a':
                        slot_id = 1
                    elif data[1] == '2a':
                        slot_id = 2
                    elif data[1] == '3a':
                        slot_id = 3
                    elif data[1] == '4a':
                        slot_id = 4
                    elif data[1] == '5a':
                        slot_id = 5
                    elif data[1] == '6a':
                        slot_id = 6
                    elif data[1] == '1b':
                        slot_id = 7
                    elif data[1] == '2b':
                        slot_id = 8
                    elif data[1] == '3b':
                        slot_id = 9
                    elif data[1] == '4b':
                        slot_id = 10
                    elif data[1] == '5b':
                        slot_id = 11
                    elif data[1] == '6b':
                        slot_id = 12
                    elif int(data[1]) >= 25:
                        slot_id = int(data[1])
                    else:
                        raise ValueError('invalid slot id')
                except (IndexError, ValueError):
                    print("setslot <id> <type> [value]")
                    print("<id> must be slot number 1a - 6b")
                    continue

                if data[2] == 'label':
                    only_key.setslot(slot_id, MessageField.LABEL, data[3])
                elif data[2] == 'ecc_key_label':
                    only_key.setslot(slot_id + 28, MessageField.LABEL, data[3])
                elif data[2] == 'rsa_key_label':
                    only_key.setslot(slot_id + 24, MessageField.LABEL, data[3])
                elif data[2] == 'url':
                    only_key.setslot(slot_id, MessageField.URL, data[3])
                elif data[2] == 'add_char2':
                    only_key.setslot(slot_id, MessageField.NEXTKEY1, data[3])
                elif data[2] == 'delay1':
                    only_key.setslot(slot_id, MessageField.DELAY1, data[3])
                elif data[2] == 'username':
                    only_key.setslot(slot_id, MessageField.USERNAME, data[3])
                elif data[2] == 'add_char3':
                    only_key.setslot(slot_id, MessageField.NEXTKEY2, data[3])
                elif data[2] == 'delay2':
                    only_key.setslot(slot_id, MessageField.DELAY2, data[3])
                elif data[2] == 'password':
                    password = prompt_pass()
                    only_key.setslot(slot_id, MessageField.PASSWORD, password)
                elif data[2] == 'add_char5':
                    only_key.setslot(slot_id, MessageField.NEXTKEY3, data[3])
                elif data[2] == 'delay3':
                    only_key.setslot(slot_id, MessageField.DELAY3, data[3])
                elif data[2] == '2fa':
                    only_key.setslot(slot_id, MessageField.TFATYPE, data[3])
                elif data[2] == 'gkey':
                    totpkey = prompt_key()
                    totpkey = base64.b32decode("".join(
                        totpkey.split()).upper())
                    totpkey = binascii.hexlify(totpkey)
                    # pad with zeros for even digits
                    totpkey = totpkey.zfill(len(totpkey) + len(totpkey) % 2)
                    payload = [
                        int(totpkey[i:i + 2], 16)
                        for i in range(0, len(totpkey), 2)
                    ]
                    only_key.setslot(slot_id, MessageField.TOTPKEY, payload)
                elif data[2] == 'totpkey':
                    totpkey = prompt_key()
                    only_key.setslot(slot_id, MessageField.TOTPKEY, totpkey)
                elif data[2] == 'add_char1':
                    only_key.setslot(slot_id, MessageField.NEXTKEY4, data[3])
                elif data[2] == 'add_char4':
                    only_key.setslot(slot_id, MessageField.NEXTKEY5, data[3])
                else:
                    print("setslot <id> <type> [value]")
                    print(
                        "<type> must be ['label', 'ecc_key_label', 'rsa_key_label', 'url', 'add_char1', 'delay1', 'username', 'add_char2', 'delay2', 'password', 'add_char3', 'delay3', '2fa', 'totpkey', 'add_char4', 'add_char5']"
                    )
                continue
            elif data[0] == 'wipeslot':
                try:
                    if data[1] == '1a':
                        slot_id = 1
                    elif data[1] == '2a':
                        slot_id = 2
                    elif data[1] == '3a':
                        slot_id = 3
                    elif data[1] == '4a':
                        slot_id = 4
                    elif data[1] == '5a':
                        slot_id = 5
                    elif data[1] == '6a':
                        slot_id = 6
                    elif data[1] == '1b':
                        slot_id = 7
                    elif data[1] == '2b':
                        slot_id = 8
                    elif data[1] == '3b':
                        slot_id = 9
                    elif data[1] == '4b':
                        slot_id = 10
                    elif data[1] == '5b':
                        slot_id = 11
                    elif data[1] == '6b':
                        slot_id = 12
                    elif int(data[1]) >= 25:
                        slot_id = int(data[1])
                    else:
                        raise ValueError('invalid slot id')
                except (IndexError, ValueError):
                    print("wipeslot <id>")
                    print("<id> must be slot number 1a - 6b")
                    continue

                only_key.wipeslot(slot_id)

            elif data[0] == 'backupkey':
                try:
                    only_key.generate_backup_key()
                except:
                    continue

            elif data[0] == 'setkey':
                try:
                    key = prompt_pass()
                    only_key.setkey(data[1], data[2], key)
                except:
                    continue

            elif data[0] == 'wipekey':
                try:
                    only_key.wipekey(data[1])
                except:
                    continue

            elif data[0] == 'idletimeout':
                try:
                    only_key.setslot(1, MessageField.IDLETIMEOUT, int(data[1]))
                except:
                    continue
            elif data[0] == 'wipemode':
                try:
                    only_key.setslot(1, MessageField.WIPEMODE, int(data[1]))
                except:
                    continue
            elif data[0] == 'keytypespeed':
                try:
                    only_key.setslot(1, MessageField.KEYTYPESPEED,
                                     int(data[1]))
                except:
                    continue
            elif data[0] == 'led_brightness':
                try:
                    only_key.setslot(1, MessageField.LEDBRIGHTNESS,
                                     int(data[1]))
                except:
                    continue
            elif data[0] == 'pgp_challenge_mode':
                try:
                    only_key.setslot(1, MessageField.PGPCHALENGEMODE,
                                     int(data[1]))
                except:
                    continue
            elif data[0] == 'ssh_challenge_mode':
                try:
                    only_key.setslot(1, MessageField.SSHCHALENGEMODE,
                                     int(data[1]))
                except:
                    continue
            elif data[0] == 'backup_key_mode':
                try:
                    only_key.setslot(1, MessageField.BACKUPMODE, int(data[1]))
                except:
                    continue
            elif data[0] == '2nd_profile_mode':
                try:
                    only_key.setslot(1, MessageField.SECPROFILEMODE,
                                     int(data[1]))
                except:
                    continue
            elif data[0] == 'keylayout':
                try:
                    only_key.setslot(1, MessageField.KEYLAYOUT, int(data[1]))
                except:
                    continue
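
The getlabels loops above call next(slots) on the very iterator the enclosing
for loop is consuming, so each pass prints an a/b pair of slots. A distilled
sketch of that two-at-a-time idiom:

slots = iter(['1a', '1b', '2a', '2b'])

for first in slots:        # the for loop pulls the 'a' slot...
    second = next(slots)   # ...and next() pulls its 'b' partner
    print(first, second)   # -> '1a 1b', then '2a 2b'

The same pairing can also be written as for first, second in zip(slots, slots)
over a fresh iterator, which stops cleanly when the sequence has odd length.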
Example #47
def test_http_operate_tests_async():
    with InteractivePlan(
            name="InteractivePlan",
            interactive_port=0,
            interactive_block=False,
            parse_cmdline=False,
            logger_level=TEST_INFO,
    ) as plan:
        plan.run()
        wait(lambda: any(plan.i.http_handler_info), 5, raise_on_timeout=True)
        addr = "http://{}:{}".format(*plan.i.http_handler_info)

        plan.add(make_multitest(1))
        plan.add(make_multitest(2))

        # TRIGGER ASYNC RUN OF TESTS -> UID
        response = post_request("{}/async/run_tests".format(addr), {}).json()
        expected = {
            "message": "Async operation performed: run_tests",
            "error": False,
            "trace": None,
            "metadata": {},
            "result": re.compile("[0-9|a-z|-]+"),
        }
        assert compare(expected, response)[0] is True
        uid = response["result"]

        # QUERY UID ASYNC OPERATION UNTIL FINISHED
        sleeper = get_sleeper(0.6,
                              raise_timeout_with_msg="Async result missing.")
        while next(sleeper):
            response = post_request("{}/async_result".format(addr),
                                    {"uid": uid})
            json_response = response.json()
            if json_response["error"] is False:
                assert response.status_code == 200
                expected = {
                    "result": None,
                    "trace": None,
                    "error": False,
                    "message": re.compile("[0-9|a-z|-]+"),
                    "metadata": {
                        "state": "Finished"
                    },
                }
                assert compare(expected, json_response)[0] is True
                break
            assert response.status_code == 400

        # REPORT VIA HTTP
        response = post_request("{}/sync/report".format(addr), {
            "serialized": True
        }).json()
        expected_response = {
            "result": plan.i.report(serialized=True),
            "error": False,
            "metadata": {},
            "trace": None,
            "message": "Sync operation performed: report",
        }
        assert compare(response, expected_response)[0] is True
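
get_sleeper above returns a generator that the while next(sleeper) loop drives:
each next() call pauses, yields a truthy value while time remains, and raises
once the deadline passes. A hedged sketch of such a sleeper, assuming a simple
interval/timeout signature rather than testplan's actual one:

import time

def make_sleeper(interval, timeout, timeout_msg):
    # yield True after each pause until the deadline, then raise
    deadline = time.time() + timeout
    while time.time() < deadline:
        time.sleep(interval)
        yield True
    raise TimeoutError(timeout_msg)

attempts = 0
sleeper = make_sleeper(0.05, 1.0, 'Async result missing.')
while next(sleeper):
    attempts += 1
    if attempts == 3:  # pretend the third poll succeeds
        break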
Example #48
def subcellularConn(self, allCellTags, allPopTags):
    """
    Distribute synapses on postsynaptic cells according to the subcellular
    connectivity rules defined in ``self.params.subConnParams``.

    Parameters
    ----------
    self : Network
        The network object whose connections are redistributed.
        **Default:** *required*

    allCellTags : dict
        Tags of all cells in the network, indexed by cell gid.
        **Default:** *required*

    allPopTags : dict
        Tags of all populations in the network.
        **Default:** *required*

    """

    from .. import sim

    sim.timing('start', 'subConnectTime')
    print('  Distributing synapses based on subcellular connectivity rules...')

    for subConnParamTemp in list(self.params.subConnParams.values()):  # for each conn rule or parameter set
        subConnParam = subConnParamTemp.copy()

        # find list of pre and post cell
        preCellsTags, postCellsTags = self._findPrePostCellsCondition(
            allCellTags, subConnParam['preConds'], subConnParam['postConds'])

        if preCellsTags and postCellsTags:
            # iterate over postsyn cells to redistribute synapses
            for postCellGid in postCellsTags:  # for each postsyn cell
                if postCellGid in self.gid2lid:
                    postCell = self.cells[self.gid2lid[postCellGid]]
                    allConns = [
                        conn for conn in postCell.conns
                        if conn['preGid'] in preCellsTags
                    ]
                    if 'NetStim' in [
                            x['cellModel'] for x in list(preCellsTags.values())
                            if 'cellModel' in x.keys()
                    ]:  # temporary fix to include netstim conns
                        allConns.extend([
                            conn for conn in postCell.conns
                            if conn['preGid'] == 'NetStim'
                        ])

                    # group synMechs so they are not distributed separately
                    if 'groupSynMechs' in subConnParam and len(
                            subConnParam['groupSynMechs']) > 1:
                        conns = []
                        connsGroup = {}
                        for conn in allConns:
                            if not conn['synMech'].startswith('__grouped__'):
                                conns.append(conn)
                                connGroupLabel = '%d_%s_%.4f' % (
                                    conn['preGid'], conn['sec'], conn['loc'])
                                if conn['synMech'] in subConnParam[
                                        'groupSynMechs']:
                                    for synMech in [
                                            s for s in
                                            subConnParam['groupSynMechs']
                                            if s != conn['synMech']
                                    ]:
                                        connGroup = next(
                                            (c for c in allConns
                                             if c['synMech'] == synMech
                                             and c['sec'] == conn['sec']
                                             and c['loc'] == conn['loc']),
                                            None)
                                        try:
                                            connGroup[
                                                'synMech'] = '__grouped__' + connGroup[
                                                    'synMech']
                                            connsGroup[
                                                connGroupLabel] = connGroup
                                        except:
                                            print(
                                                '  Warning: Grouped synMechs %s not found'
                                                % (str(connGroup)))
                    else:
                        conns = allConns

                    # sort conns so reproducible across different number of cores
                    # use sec+preGid to avoid artificial distribution based on preGid (e.g. low gids = close to soma)
                    conns = sorted(conns,
                                   key=lambda v: v['sec'] + str(v['loc']) +
                                   str(v['preGid']))

                    # set sections to be used
                    secList = postCell._setConnSections(subConnParam)

                    # Uniform distribution
                    if subConnParam.get('density', None) == 'uniform':
                        # calculate new syn positions
                        newSecs, newLocs = postCell._distributeSynsUniformly(
                            secList=secList, numSyns=len(conns))

                    # 2D map and 1D map (radial)
                    elif isinstance(
                            subConnParam.get('density', None),
                            dict) and subConnParam['density']['type'] in [
                                '2Dmap', '1Dmap'
                            ]:

                        gridY = subConnParam['density']['gridY']
                        gridSigma = subConnParam['density']['gridValues']
                        somaX, somaY, _ = self._posFromLoc(
                            postCell.secs['soma']['hObj'],
                            0.5)  # get cell pos move method to Cell!
                        if 'fixedSomaY' in subConnParam[
                                'density']:  # is fixed cell soma y, adjust y grid accordingly
                            fixedSomaY = subConnParam['density'].get(
                                'fixedSomaY')
                            gridY = [
                                y + (somaY - fixedSomaY) for y in gridY
                            ]  # adjust grid so cell soma is at fixedSomaY

                        if subConnParam['density']['type'] == '2Dmap':  # 2D
                            gridX = [
                                x - somaX
                                for x in subConnParam['density']['gridX']
                            ]  # center x at cell soma
                            segNumSyn = self._interpolateSegmentSigma(
                                postCell, secList, gridX, gridY,
                                gridSigma)  # move method to Cell!
                        elif subConnParam['density']['type'] == '1Dmap':  # 1D
                            segNumSyn = self._interpolateSegmentSigma(
                                postCell, secList, None, gridY,
                                gridSigma)  # move method to Cell!

                        totSyn = sum([
                            sum(nsyn) for nsyn in list(segNumSyn.values())
                        ])  # summed density
                        scaleNumSyn = float(
                            len(conns)) / float(totSyn) if totSyn > 0 else 0.0
                        diffList = []
                        for sec in segNumSyn:
                            for seg, x in enumerate(segNumSyn[sec]):
                                orig = float(x * scaleNumSyn)
                                scaled = int(round(x * scaleNumSyn))
                                segNumSyn[sec][seg] = scaled
                                diff = orig - scaled
                                if diff > 0:
                                    diffList.append([diff, sec, seg])

                        totSynRescale = sum(
                            [sum(nsyn) for nsyn in list(segNumSyn.values())])

                        # if missing syns due to rescaling to 0, find top values which were rounded to 0 and make 1
                        if totSynRescale < len(conns):
                            extraSyns = len(conns) - totSynRescale
                            diffList = sorted(diffList,
                                              key=lambda l: l[0],
                                              reverse=True)
                            for i in range(min(extraSyns, len(diffList))):
                                sec = diffList[i][1]
                                seg = diffList[i][2]
                                segNumSyn[sec][seg] += 1

                        # convert to list so can serialize and save
                        subConnParam['density']['gridY'] = list(
                            subConnParam['density']['gridY'])
                        subConnParam['density']['gridValues'] = list(
                            subConnParam['density']['gridValues'])

                        newSecs, newLocs = [], []
                        for sec, nsyns in segNumSyn.items():
                            for i, seg in enumerate(
                                    postCell.secs[sec]['hObj']):
                                for isyn in range(nsyns[i]):
                                    newSecs.append(sec)
                                    newLocs.append(seg.x)

                    # Distance-based
                    elif isinstance(
                            subConnParam.get('density', None), dict
                    ) and subConnParam['density']['type'] == 'distance':
                        # find origin section
                        # default
                        if 'soma' in postCell.secs:
                            secOrig = 'soma'
                        elif any([
                                secName.startswith('som')
                                for secName in list(postCell.secs.keys())
                        ]):
                            secOrig = next(
                                secName
                                for secName in list(postCell.secs.keys())
                                if secName.startswith('soma'))
                        else:
                            secOrig = list(postCell.secs.keys())[0]
                        # giving argument
                        if 'ref_sec' in subConnParam['density']:
                            if subConnParam['density']['ref_sec'] in list(
                                    postCell.secs.keys()):
                                secOrig = subConnParam['density']['ref_sec']
                            else:
                                print(
                                    '  Warning: Redistributing synapses based on nonexistent information for neuron %d - section %s not found'
                                    % (postCell.gid,
                                       subConnParam['density']['ref_sec']))

                        # find origin segment
                        segOrig = 0.5  # default
                        if 'ref_seg' in subConnParam['density']:
                            segOrig = subConnParam['density']['ref_seg']

                        # target
                        target_distance = 0.0
                        if 'target_distance' in subConnParam['density']:
                            target_distance = subConnParam['density'][
                                'target_distance']

                        newSec, newLoc = secOrig, segOrig
                        min_dist = target_distance
                        if 'coord' in subConnParam['density'] and subConnParam[
                                'density']['coord'] == 'cartesian':
                            # calculate euclidean distance from reference
                            x0, y0, z0 = self._posFromLoc(
                                postCell.secs[secOrig]['hObj'],
                                postCell.secs[secOrig]['hObj'](segOrig).x)
                            for secName in secList:
                                for seg in postCell.secs[secName]['hObj']:
                                    x, y, z = self._posFromLoc(
                                        postCell.secs[secName]['hObj'], seg.x)
                                    dist = np.sqrt((x - x0)**2 + (y - y0)**2 +
                                                   (z - z0)**2)
                                    if abs(dist - target_distance) <= min_dist:
                                        min_dist = abs(dist - target_distance)
                                        newSec, newLoc = secName, seg.x

                        else:
                            # (default) calculate distance based on the topology
                            for secName in secList:
                                for seg in postCell.secs[secName]['hObj']:
                                    dist = self.fromtodistance(
                                        postCell.secs[secOrig]['hObj'](
                                            segOrig), seg)
                                    if abs(dist - target_distance) <= min_dist:
                                        min_dist = abs(dist - target_distance)
                                        newSec, newLoc = secName, seg.x

                        newSecs = [newSec] * len(conns)
                        newLocs = [newLoc] * len(conns)

                    for i, (conn, newSec,
                            newLoc) in enumerate(zip(conns, newSecs, newLocs)):

                        # get conn group label before updating params
                        connGroupLabel = '%d_%s_%.4f' % (
                            conn['preGid'], conn['sec'], conn['loc'])

                        # update weight if weightNorm present
                        newWeightNorm = None
                        if 'weightNorm' in postCell.secs[
                                conn['sec']] and isinstance(
                                    postCell.secs[conn['sec']]['weightNorm'],
                                    list):
                            oldNseg = postCell.secs[
                                conn['sec']]['geom']['nseg']
                            oldWeightNorm = postCell.secs[
                                conn['sec']]['weightNorm'][
                                    int(round(conn['loc'] * oldNseg)) - 1]
                            newNseg = postCell.secs[newSec]['geom']['nseg']
                            newWeightNorm = postCell.secs[newSec]['weightNorm'][
                                int(round(newLoc * newNseg)) -
                                1] if 'weightNorm' in postCell.secs[
                                    newSec] else 1.0
                            conn['weight'] = conn[
                                'weight'] / oldWeightNorm * newWeightNorm

                        # avoid locs at 0.0 or 1.0 - triggers hoc error if syn needs an ion (eg. ca_ion)
                        if newLoc == 0.0: newLoc = 0.00001
                        elif newLoc == 1.0: newLoc = 0.99999

                        # update sec and loc
                        conn['sec'] = newSec
                        conn['loc'] = newLoc

                        # find grouped conns
                        if subConnParam.get('groupSynMechs', None) \
                            and len(subConnParam['groupSynMechs']) > 1 \
                            and conn['synMech'] in subConnParam['groupSynMechs']:

                            connGroup = connsGroup[
                                connGroupLabel]  # get grouped conn from previously stored dict
                            connGroup['synMech'] = connGroup['synMech'].split(
                                '__grouped__')[1]  # remove '__grouped__' label

                            connGroup['sec'] = newSec
                            connGroup['loc'] = newLoc
                            if newWeightNorm:
                                connGroup['weight'] = connGroup[
                                    'weight'] / oldWeightNorm * newWeightNorm

        sim.pc.barrier()
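
The connGroup = next((c for c in allConns if ...), None) lookup above is the
first-match-or-None idiom: the generator expression is evaluated lazily, next()
stops at the first hit, and the default comes back when nothing matches. In
isolation:

conns = [{'synMech': 'AMPA', 'loc': 0.5},
         {'synMech': 'NMDA', 'loc': 0.5}]

match = next((c for c in conns if c['synMech'] == 'NMDA'), None)
assert match is conns[1]

missing = next((c for c in conns if c['synMech'] == 'GABA'), None)
assert missing is None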
Example #49
def voronoi(siteList, context):
    try:
        edgeList = EdgeList(siteList.xmin, siteList.xmax, len(siteList))
        priorityQ = PriorityQueue(siteList.ymin, siteList.ymax, len(siteList))
        siteIter = siteList.iterator()

        bottomsite = next(siteIter)
        context.outSite(bottomsite)
        newsite = next(siteIter)
        minpt = Site(-BIG_FLOAT, -BIG_FLOAT)
        while True:
            if not priorityQ.isEmpty():
                minpt = priorityQ.getMinPt()

            if (newsite and (priorityQ.isEmpty() or cmp(newsite, minpt) < 0)):
                # newsite is smallest -  this is a site event
                context.outSite(newsite)

                # get first Halfedge to the LEFT and RIGHT of the new site
                lbnd = edgeList.leftbnd(newsite)
                rbnd = lbnd.right

                # if this halfedge has no edge, bot = bottom site (whatever that is)
                # create a new edge that bisects
                bot = lbnd.rightreg(bottomsite)
                edge = Edge.bisect(bot, newsite)
                context.outBisector(edge)

                # create a new Halfedge, setting its pm field to 0 and insert
                # this new bisector edge between the left and right vectors in
                # a linked list
                bisector = Halfedge(edge, Edge.LE)
                edgeList.insert(lbnd, bisector)

                # if the new bisector intersects with the left edge, remove
                # the left edge's vertex, and put in the new one
                p = lbnd.intersect(bisector)
                if p is not None:
                    priorityQ.delete(lbnd)
                    priorityQ.insert(lbnd, p, newsite.distance(p))

                # create a new Halfedge, setting its pm field to 1
                # insert the new Halfedge to the right of the original bisector
                lbnd = bisector
                bisector = Halfedge(edge, Edge.RE)
                edgeList.insert(lbnd, bisector)

                # if this new bisector intersects with the right Halfedge
                p = bisector.intersect(rbnd)
                if p is not None:
                    # push the Halfedge into the ordered linked list of vertices
                    priorityQ.insert(bisector, p, newsite.distance(p))

                newsite = next(siteIter)

            elif not priorityQ.isEmpty():
                # intersection is smallest - this is a vector (circle) event

                # pop the Halfedge with the lowest vector off the ordered list of
                # vectors.  Get the Halfedge to the left and right of the above HE
                # and also the Halfedge to the right of the right HE
                lbnd = priorityQ.popMinHalfedge()
                llbnd = lbnd.left
                rbnd = lbnd.right
                rrbnd = rbnd.right

                # get the Site to the left of the left HE and to the right of
                # the right HE which it bisects
                bot = lbnd.leftreg(bottomsite)
                top = rbnd.rightreg(bottomsite)

                # output the triple of sites, stating that a circle goes through them
                mid = lbnd.rightreg(bottomsite)
                context.outTriple(bot, top, mid)

                # get the vertex that caused this event and set the vertex number
                # couldn't do this earlier since we didn't know when it would be processed
                v = lbnd.vertex
                siteList.setSiteNumber(v)
                context.outVertex(v)

                # set the endpoint of the left and right Halfedge to be this vector
                if lbnd.edge.setEndpoint(lbnd.pm, v):
                    context.outEdge(lbnd.edge)

                if rbnd.edge.setEndpoint(rbnd.pm, v):
                    context.outEdge(rbnd.edge)

                # delete the lowest HE, remove all vertex events to do with the
                # right HE and delete the right HE
                edgeList.delete(lbnd)
                priorityQ.delete(rbnd)
                edgeList.delete(rbnd)

                # if the site to the left of the event is higher than the Site
                # to the right of it, then swap them and set 'pm' to RIGHT
                pm = Edge.LE
                if bot.y > top.y:
                    bot, top = top, bot
                    pm = Edge.RE

                # Create an Edge (or line) that is between the two Sites.  This
                # creates the formula of the line, and assigns a line number to it
                edge = Edge.bisect(bot, top)
                context.outBisector(edge)

                # create a HE from the edge
                bisector = Halfedge(edge, pm)

                # insert the new bisector to the right of the left HE
                # set one endpoint to the new edge to be the vector point 'v'
                # If the site to the left of this bisector is higher than the right
                # Site, then this endpoint is put in position 0; otherwise in pos 1
                edgeList.insert(llbnd, bisector)
                if edge.setEndpoint(Edge.RE - pm, v):
                    context.outEdge(edge)

                # if left HE and the new bisector don't intersect, then delete
                # the left HE, and reinsert it
                p = llbnd.intersect(bisector)
                if p is not None:
                    priorityQ.delete(llbnd)
                    priorityQ.insert(llbnd, p, bot.distance(p))

                # if right HE and the new bisector don't intersect, then reinsert it
                p = bisector.intersect(rrbnd)
                if p is not None:
                    priorityQ.insert(bisector, p, bot.distance(p))
            else:
                break

        he = edgeList.leftend.right
        while he is not edgeList.rightend:
            context.outEdge(he.edge)
            he = he.right
        Edge.EDGE_NUM = 0
    except Exception as err:
        print("######################################################")
        print(str(err))
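
The sweep line above interleaves two ordered event streams - new sites from siteIter and circle events from priorityQ - always consuming whichever is smaller, and it relies on the site iterator yielding None once exhausted. A minimal sketch of that two-stream merge using next() with a default (plain comparable values stand in for Site objects):

def merge_events(sites, circles):
    # yield events from two sorted iterables in combined order
    site_it, circle_it = iter(sites), iter(circles)
    site, circle = next(site_it, None), next(circle_it, None)
    while site is not None or circle is not None:
        if circle is None or (site is not None and site < circle):
            yield ('site', site)
            site = next(site_it, None)
        else:
            yield ('circle', circle)
            circle = next(circle_it, None)

list(merge_events([1, 4, 6], [2, 3, 7]))
# [('site', 1), ('circle', 2), ('circle', 3), ('site', 4), ('site', 6), ('circle', 7)]
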
Example #50
    def processAlgorithm(self, progress):
        layer = dataobjects.getObjectFromUri(self.getParameterValue(
            self.INPUT))

        buf = self.getParameterValue(self.BUFFER)

        writer = self.getOutputFromName(self.OUTPUT).getVectorWriter(
            layer.fields().toList(), QgsWkbTypes.Polygon, layer.crs())

        outFeat = QgsFeature()
        extent = layer.extent()
        extraX = extent.height() * (buf / 100.0)
        extraY = extent.width() * (buf / 100.0)
        height = extent.height()
        width = extent.width()
        c = voronoi.Context()
        pts = []
        ptDict = {}
        ptNdx = -1

        features = vector.features(layer)
        total = 100.0 / len(features)
        for current, inFeat in enumerate(features):
            geom = inFeat.geometry()
            point = geom.asPoint()
            x = point.x() - extent.xMinimum()
            y = point.y() - extent.yMinimum()
            pts.append((x, y))
            ptNdx += 1
            ptDict[ptNdx] = inFeat.id()
            progress.setPercentage(int(current * total))

        if len(pts) < 3:
            raise GeoAlgorithmExecutionException(
                self.tr('Input file should contain at least 3 points. Choose '
                        'another file and try again.'))

        uniqueSet = set(pts)
        ids = [pts.index(item) for item in uniqueSet]
        sl = voronoi.SiteList([
            voronoi.Site(i[0], i[1], sitenum=j)
            for (j, i) in enumerate(uniqueSet)
        ])
        voronoi.voronoi(sl, c)
        inFeat = QgsFeature()

        current = 0
        total = 100.0 / len(c.polygons)

        for (site, edges) in list(c.polygons.items()):
            request = QgsFeatureRequest().setFilterFid(ptDict[ids[site]])
            inFeat = next(layer.getFeatures(request))
            lines = self.clip_voronoi(edges, c, width, height, extent, extraX,
                                      extraY)

            geom = QgsGeometry.fromMultiPoint(lines)
            geom = QgsGeometry(geom.convexHull())
            outFeat.setGeometry(geom)
            outFeat.setAttributes(inFeat.attributes())
            writer.addFeature(outFeat)

            current += 1
            progress.setPercentage(int(current * total))

        del writer
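
The uniqueSet/ids step above deduplicates points, but each pts.index() call is a linear scan and set iteration order is arbitrary. The same idea as one deterministic pass - a sketch of the technique, not a drop-in replacement for the code above:

def first_occurrence_ids(pts):
    # map each unique point to the index of its first occurrence
    seen = {}
    for i, p in enumerate(pts):
        seen.setdefault(p, i)
    return list(seen.values())

first_occurrence_ids([(0, 0), (1, 1), (0, 0), (2, 2)])  # [0, 1, 3]
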
Example #51
    def processAlgorithm(self, progress):
        currentOs = os.name

        path = OTBUtils.otbPath()

        commands = []
        commands.append(os.path.join(path, self.cliName))

        self.roiVectors = {}
        self.roiRasters = {}
        for param in self.parameters:
            # get the given input(s)
            if param.name in ["-il", "-in"]:
                newparams = ""
                listeParameters = param.value.split(";")
                for inputParameter in listeParameters:
                    # if HDF5 file
                    if "HDF5" in inputParameter:
                        if currentOs == "posix":
                            data = inputParameter[6:]
                        else:
                            data = inputParameter[5:]
                        dataset = data

                        # on Windows the value has no closing double quote
                        if currentOs == "posix":
                            data = data[:data.index('"')]
                        else:
                            data = data[:data.index('://')]
                        if currentOs == "posix":
                            # raises ValueError if the quote is missing
                            dataset.index('"')
                            dataset = os.path.basename(
                                data) + dataset[dataset.index('"'):]
                        else:
                            dataset = dataset[dataset.index('://'):]

                        # get the index of the subdataset with gdal
                        if currentOs == "posix":
                            commandgdal = "gdalinfo " + data + " | grep '" + dataset + "$'"
                        else:
                            commandgdal = "gdalinfo " + data + " | findstr \"" + dataset + "$\""
                        resultGDAL = os.popen(commandgdal).readlines()
                        indexSubdataset = -1
                        if resultGDAL:
                            indexSubdatasetString = re.search(
                                r"SUBDATASET_(\d+)_", resultGDAL[0])
                            if indexSubdatasetString:
                                # keep the index captured between the parentheses
                                indexSubdataset = indexSubdatasetString.group(
                                    1)
                            else:
                                indexSubdataset = -1
                        else:
                            #print "Error : no match of ", dataset, "$ in gdalinfo " + data
                            indexSubdataset = -1

                        if indexSubdataset != -1:
                            indexSubdataset = int(indexSubdataset) - 1
                            newParam = "\'" + data + "?&sdataidx=" + str(
                                indexSubdataset) + "\'"

                        else:
                            newParam = inputParameter

                        newparams += newParam
                    # no hdf5
                    else:
                        newparams += inputParameter
                    newparams += ";"
                if newparams[-1] == ";":
                    newparams = newparams[:-1]
                param.value = newparams

            if param.value is None or param.value == "":
                continue
            if isinstance(param, ParameterVector):
                commands.append(param.name)
                if self.hasROI:
                    roiFile = getTempFilename('shp')
                    commands.append(roiFile)
                    self.roiVectors[param.value] = roiFile
                else:
                    commands.append("\"" + param.value + "\"")
            elif isinstance(param, ParameterRaster):
                commands.append(param.name)
                if self.hasROI:
                    roiFile = getTempFilename('tif')
                    commands.append(roiFile)
                    self.roiRasters[param.value] = roiFile
                else:
                    commands.append("\"" + param.value + "\"")
            elif isinstance(param, ParameterMultipleInput):
                commands.append(param.name)
                files = str(param.value).split(";")
                paramvalue = " ".join(["\"" + f + " \"" for f in files])
                commands.append(paramvalue)
            elif isinstance(param, ParameterSelection):
                commands.append(param.name)
                idx = int(param.value)
                commands.append(str(param.options[idx][1]))
            elif isinstance(param, ParameterBoolean):
                if param.value:
                    commands.append(param.name)
                    commands.append(str(param.value).lower())
            elif isinstance(param, ParameterExtent):
                self.roiValues = param.value.split(",")
            else:
                commands.append(param.name)
                commands.append(str(param.value))

        for out in self.outputs:
            commands.append(out.name)
            commands.append('"' + out.value + '"')
        for roiInput, roiFile in list(self.roiRasters.items()):
            startX, startY = float(self.roiValues[0]), float(self.roiValues[1])
            sizeX = float(self.roiValues[2]) - startX
            sizeY = float(self.roiValues[3]) - startY
            helperCommands = [
                "otbcli_ExtractROI", "-in", roiInput, "-out", roiFile,
                "-startx",
                str(startX), "-starty",
                str(startY), "-sizex",
                str(sizeX), "-sizey",
                str(sizeY)
            ]
            ProcessingLog.addToLog(ProcessingLog.LOG_INFO, helperCommands)
            progress.setCommand(helperCommands)
            OTBUtils.executeOtb(helperCommands, progress)

        if self.roiRasters:
            supportRaster = next(iter(list(self.roiRasters.values())))
            for roiInput, roiFile in list(self.roiVectors.items()):
                helperCommands = [
                    "otbcli_VectorDataExtractROIApplication", "-vd.in",
                    roiInput, "-io.in", supportRaster, "-io.out", roiFile,
                    "-elev.dem.path",
                    OTBUtils.otbSRTMPath()
                ]
                ProcessingLog.addToLog(ProcessingLog.LOG_INFO, helperCommands)
                progress.setCommand(helperCommands)
                OTBUtils.executeOtb(helperCommands, progress)

        loglines = []
        loglines.append(self.tr('OTB execution command'))
        for line in commands:
            loglines.append(line)
            progress.setCommand(line)

        ProcessingLog.addToLog(ProcessingLog.LOG_INFO, loglines)
        import processing.algs.otb.OTBSpecific_XMLLoading
        module = processing.algs.otb.OTBSpecific_XMLLoading

        found = False
        if 'adapt%s' % self.appkey in dir(module):
            found = True
            commands = getattr(module, 'adapt%s' % self.appkey)(commands)
        else:
            the_key = 'adapt%s' % self.appkey
            if '-' in the_key:
                base_key = the_key.split("-")[0]
                if base_key in dir(module):
                    found = True
                    commands = getattr(module, base_key)(commands)

        if not found:
            ProcessingLog.addToLog(
                ProcessingLog.LOG_INFO,
                self.tr("Adapter for %s not found") % the_key)

        OTBUtils.executeOtb(commands, progress)
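
The gdalinfo lookup above shells out through os.popen and filters with grep or findstr depending on the platform. A sketch of the same lookup with subprocess, filtering in Python so one code path serves both platforms (it assumes gdalinfo is on PATH and is not the provider's actual code):

import re
import subprocess

def find_subdataset_index(data, dataset):
    # return the 0-based subdataset index reported by gdalinfo, or -1
    result = subprocess.run(["gdalinfo", data], capture_output=True,
                            text=True, check=False)
    for line in result.stdout.splitlines():
        if line.rstrip().endswith(dataset):
            match = re.search(r"SUBDATASET_(\d+)_", line)
            if match:
                return int(match.group(1)) - 1
    return -1
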
Example #52
 def __next__(self):
     return next(self._run_list_iter)
Example #53
 def has_with_prefix(self, prefix):
     """Return whether if there is an item with the given prefix."""
     return bool(next(self.get_by_prefix(prefix), None))
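
next(iterable, default) returns the default instead of raising StopIteration, which is what makes the emptiness test above a one-liner. The same idiom over a plain generator expression; comparing against None instead of truth-testing avoids a false negative when the first match is itself falsy (e.g. 0 or ''), a subtlety the bool() version above glosses over:

def any_match(items, predicate):
    # True if at least one item satisfies predicate, consuming items lazily
    return next((x for x in items if predicate(x)), None) is not None

any_match([1, 3, 5, 6], lambda x: x % 2 == 0)  # True
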
Example #54
 def __init__(self, run_list):
     self._run_list_iter = iter(run_list)
     self.start, self.end, self.value = next(self)
Example #55
def range_pair2(facet_field,
                cat,
                fq_filter,
                iterable,
                end,
                facet,
                collection_facet=None):
    # e.g. counts":["0",17430,"1000",1949,"2000",671,"3000",404,"4000",243,"5000",165],"gap":1000,"start":0,"end":6000}
    pairs = []
    selected_values = [f['value'] for f in fq_filter]
    is_single_unit_gap = re.match(r'^[+-]?1[A-Za-z]*$',
                                  str(facet['gap'])) is not None
    is_up = facet['sort'] == 'asc'

    if facet['sort'] == 'asc' and facet['type'] == 'range-up':
        prev = None
        n = []
        for e in iterable:
            if prev is not None:
                n.append(e)
                n.append(prev)
                prev = None
            else:
                prev = e
        iterable = n
        iterable.reverse()

    a, to = itertools.tee(iterable)
    next(to, None)
    counts = iterable[1::2]
    total_counts = counts.pop(0) if facet['sort'] == 'asc' else 0
    sum_all = collection_facet and collection_facet['widgetType'] in (
        'timeline-widget', 'bucket-widget') and facet['type'] == 'range-up'

    for element in a:
        next(to, None)
        to_value = next(to, end)
        count = next(a)
        if sum_all:
            count = total_counts

        pairs.append({
            'field': facet_field,
            'from': element,
            'value': count,
            'to': to_value,
            'selected': element in selected_values,
            'exclude': all([f['exclude'] for f in fq_filter
                            if f['value'] == element]),
            'is_single_unit_gap': is_single_unit_gap,
            'total_counts': total_counts,
            'is_up': is_up
        })
        total_counts += counts.pop(0) if counts else 0

    if facet['sort'] == 'asc' and facet['type'] != 'range-up':
        pairs.reverse()

    return pairs
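
The tee/next dance at the top of the loop is the standard pairwise idiom: duplicate the iterator and advance one copy so the two run offset by one element. In isolation:

import itertools

def pairwise(iterable):
    # s -> (s0, s1), (s1, s2), (s2, s3), ...
    a, b = itertools.tee(iterable)
    next(b, None)
    return zip(a, b)

list(pairwise([0, 1000, 2000, 3000]))
# [(0, 1000), (1000, 2000), (2000, 3000)]
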
Example #56
    def processAlgorithm(self, parameters, context, feedback):
        if parameters[self.INPUT] == parameters[self.HUBS]:
            raise QgsProcessingException(
                self.tr('Same layer given for both hubs and spokes'))

        point_source = self.parameterAsSource(parameters, self.INPUT, context)
        hub_source = self.parameterAsSource(parameters, self.HUBS, context)
        fieldName = self.parameterAsString(parameters, self.FIELD, context)

        units = self.UNITS[self.parameterAsEnum(parameters, self.UNIT,
                                                context)]

        fields = point_source.fields()
        fields.append(QgsField('HubName', QVariant.String))
        fields.append(QgsField('HubDist', QVariant.Double))

        (sink, dest_id) = self.parameterAsSink(parameters, self.OUTPUT,
                                               context, fields,
                                               QgsWkbTypes.Point,
                                               point_source.sourceCrs())

        index = QgsSpatialIndex(
            hub_source.getFeatures(QgsFeatureRequest().setSubsetOfAttributes(
                []).setDestinationCrs(point_source.sourceCrs())))

        distance = QgsDistanceArea()
        distance.setSourceCrs(point_source.sourceCrs())
        distance.setEllipsoid(context.project().ellipsoid())

        # Scan source points, find nearest hub, and write to output file
        features = point_source.getFeatures()
        total = (100.0 / point_source.featureCount()
                 if point_source.featureCount() else 0)
        for current, f in enumerate(features):
            if feedback.isCanceled():
                break

            if not f.hasGeometry():
                sink.addFeature(f, QgsFeatureSink.FastInsert)
                continue

            src = f.geometry().boundingBox().center()

            neighbors = index.nearestNeighbor(src, 1)
            ft = next(
                hub_source.getFeatures(QgsFeatureRequest().setFilterFid(
                    neighbors[0]).setSubsetOfAttributes(
                        [fieldName], hub_source.fields()).setDestinationCrs(
                            point_source.sourceCrs())))
            closest = ft.geometry().boundingBox().center()
            hubDist = distance.measureLine(src, closest)

            if units != self.LAYER_UNITS:
                hub_dist_in_desired_units = distance.convertLengthMeasurement(
                    hubDist, units)
            else:
                hub_dist_in_desired_units = hubDist

            attributes = f.attributes()
            attributes.append(ft[fieldName])
            attributes.append(hub_dist_in_desired_units)

            feat = QgsFeature()
            feat.setAttributes(attributes)

            feat.setGeometry(QgsGeometry.fromPoint(src))

            sink.addFeature(feat, QgsFeatureSink.FastInsert)
            feedback.setProgress(int(current * total))

        return {self.OUTPUT: dest_id}
Example #57
 def _get_session(self, notebook, type='hive'):
     session = next(
         (session
          for session in notebook['sessions'] if session['type'] == type),
         None)
     return session
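
The generator expression with a default above is a general "first match or None" lookup that stops at the first hit instead of scanning the whole list. The same shape over any sequence of dicts:

def first_with_type(sessions, wanted):
    # first session dict whose 'type' matches, or None if none does
    return next((s for s in sessions if s['type'] == wanted), None)

first_with_type([{'type': 'impala'}, {'type': 'hive'}], 'hive')
# {'type': 'hive'}
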
Example #58
 def __getitem__(self, index):
     while index >= self.end and index > self.start:
         # condition has special case for 0-length run (fixes issue 471)
         self.start, self.end, self.value = next(self)
     return self.value
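
Examples #52, #54 and #58 are pieces of one run-length iterator: __init__ primes the first run, __next__ delegates to the underlying iterator, and __getitem__ advances runs until the index falls inside the current one. A self-contained sketch assembling them over (start, end, value) runs, assuming indices are queried in non-decreasing order as the originals require:

class RunReader:
    def __init__(self, run_list):
        self._run_list_iter = iter(run_list)
        self.start, self.end, self.value = next(self)

    def __next__(self):
        return next(self._run_list_iter)

    def __getitem__(self, index):
        # the extra start test is the zero-length-run special case noted above
        while index >= self.end and index > self.start:
            self.start, self.end, self.value = next(self)
        return self.value

runs = RunReader([(0, 3, 'a'), (3, 5, 'b'), (5, 9, 'c')])
[runs[i] for i in range(9)]
# ['a', 'a', 'a', 'b', 'b', 'c', 'c', 'c', 'c']
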
Example #59
    def testWriteShapefileWithAttributeSubsets(self):
        """Tests writing subsets of attributes to files."""
        ml = QgsVectorLayer((
            'Point?crs=epsg:4326&field=id:int&field=field1:int&field=field2:int&field=field3:int'
        ), 'test', 'memory')

        self.assertIsNotNone(ml, 'Provider not initialized')
        self.assertTrue(ml.isValid(), 'Source layer not valid')
        provider = ml.dataProvider()
        self.assertIsNotNone(provider)

        ft = QgsFeature()
        ft.setGeometry(QgsGeometry.fromWkt('Point (1 2)'))
        ft.setAttributes([1, 11, 12, 13])
        res, features = provider.addFeatures([ft])
        self.assertTrue(res)
        self.assertTrue(features)

        # first write out with all attributes
        dest_file_name = os.path.join(str(QDir.tempPath()),
                                      'all_attributes.shp')
        crs = QgsCoordinateReferenceSystem()
        crs.createFromId(4326, QgsCoordinateReferenceSystem.EpsgCrsId)
        write_result, error_message = QgsVectorFileWriter.writeAsVectorFormat(
            ml, dest_file_name, 'utf-8', crs, 'ESRI Shapefile', attributes=[])
        self.assertEqual(write_result, QgsVectorFileWriter.NoError,
                         error_message)

        # Open result and check
        created_layer = QgsVectorLayer('{}|layerid=0'.format(dest_file_name),
                                       'test', 'ogr')
        self.assertEqual(created_layer.fields().count(), 4)
        f = next(created_layer.getFeatures(QgsFeatureRequest()))
        self.assertEqual(f['id'], 1)
        self.assertEqual(f['field1'], 11)
        self.assertEqual(f['field2'], 12)
        self.assertEqual(f['field3'], 13)

        # now test writing out only a subset of attributes
        dest_file_name = os.path.join(str(QDir.tempPath()),
                                      'subset_attributes.shp')
        write_result, error_message = QgsVectorFileWriter.writeAsVectorFormat(
            ml,
            dest_file_name,
            'utf-8',
            crs,
            'ESRI Shapefile',
            attributes=[1, 3])
        self.assertEqual(write_result, QgsVectorFileWriter.NoError,
                         error_message)

        # Open result and check
        created_layer = QgsVectorLayer('{}|layerid=0'.format(dest_file_name),
                                       'test', 'ogr')
        self.assertEqual(created_layer.fields().count(), 2)
        f = next(created_layer.getFeatures(QgsFeatureRequest()))
        self.assertEqual(f['field1'], 11)
        self.assertEqual(f['field3'], 13)

        # finally test writing no attributes
        dest_file_name = os.path.join(str(QDir.tempPath()),
                                      'no_attributes.shp')
        write_result, error_message = QgsVectorFileWriter.writeAsVectorFormat(
            ml,
            dest_file_name,
            'utf-8',
            crs,
            'ESRI Shapefile',
            skipAttributeCreation=True)
        self.assertEqual(write_result, QgsVectorFileWriter.NoError,
                         error_message)

        # Open result and check
        created_layer = QgsVectorLayer('{}|layerid=0'.format(dest_file_name),
                                       'test', 'ogr')
        # expect only a default 'FID' field for shapefiles
        self.assertEqual(created_layer.fields().count(), 1)
        self.assertEqual(created_layer.fields()[0].name(), 'FID')
        # in this case we also check that the geometry exists, to make sure feature has been correctly written
        # even without attributes
        f = next(created_layer.getFeatures(QgsFeatureRequest()))
        g = f.geometry()
        wkt = g.asWkt()
        expWkt = 'Point (1 2)'
        self.assertTrue(
            compareWkt(expWkt, wkt),
            "geometry not saved correctly when saving without attributes : mismatch Expected:\n%s\nGot:\n%s\n"
            % (expWkt, wkt))
        self.assertEqual(f['FID'], 0)
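
Every next(created_layer.getFeatures(...)) call in this test omits the default, so an empty layer raises StopIteration and fails the test immediately - reasonable in a test, less so elsewhere. A sketch of the guarded variant for any feature iterator:

def first_feature(features):
    # first feature, or None instead of letting StopIteration escape
    return next(iter(features), None)

first_feature([])      # None
first_feature(['f1'])  # 'f1'
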
Example #60
    def processAlgorithm(self, context, feedback):
        layer = dataobjects.getLayerFromString(
            self.getParameterValue(self.INPUT))

        fields = [
            QgsField('POINTA', QVariant.Double, '', 24, 15),
            QgsField('POINTB', QVariant.Double, '', 24, 15),
            QgsField('POINTC', QVariant.Double, '', 24, 15)
        ]

        writer = self.getOutputFromName(self.OUTPUT).getVectorWriter(
            fields, QgsWkbTypes.Polygon, layer.crs(), context)

        pts = []
        ptDict = {}
        ptNdx = -1
        c = voronoi.Context()
        features = QgsProcessingUtils.getFeatures(layer, context)
        total = 100.0 / QgsProcessingUtils.featureCount(layer, context)
        for current, inFeat in enumerate(features):
            geom = QgsGeometry(inFeat.geometry())
            if geom.isNull():
                continue
            if geom.isMultipart():
                points = geom.asMultiPoint()
            else:
                points = [geom.asPoint()]
            for n, point in enumerate(points):
                x = point.x()
                y = point.y()
                pts.append((x, y))
                ptNdx += 1
                ptDict[ptNdx] = (inFeat.id(), n)
            feedback.setProgress(int(current * total))

        if len(pts) < 3:
            raise GeoAlgorithmExecutionException(
                self.tr('Input file should contain at least 3 points. Choose '
                        'another file and try again.'))

        uniqueSet = set(pts)
        ids = [pts.index(item) for item in uniqueSet]
        sl = voronoi.SiteList([voronoi.Site(*i) for i in uniqueSet])
        c.triangulate = True
        voronoi.voronoi(sl, c)
        triangles = c.triangles
        feat = QgsFeature()

        total = 100.0 / len(triangles)
        for current, triangle in enumerate(triangles):
            indices = list(triangle)
            indices.append(indices[0])
            polygon = []
            attrs = []
            step = 0
            for index in indices:
                fid, n = ptDict[ids[index]]
                request = QgsFeatureRequest().setFilterFid(fid)
                inFeat = next(layer.getFeatures(request))
                geom = QgsGeometry(inFeat.geometry())
                if geom.isMultipart():
                    point = QgsPoint(geom.asMultiPoint()[n])
                else:
                    point = QgsPoint(geom.asPoint())
                polygon.append(point)
                if step <= 3:
                    attrs.append(ids[index])
                step += 1
            feat.setAttributes(attrs)
            geometry = QgsGeometry().fromPolygon([polygon])
            feat.setGeometry(geometry)
            writer.addFeature(feat)
            feedback.setProgress(int(current * total))

        del writer