Example #1
    def expand_uids_to_download(self, crispin_client, uids, metadata):
        # During Gmail initial sync, we expand threads: given a UID to
        # download, we want to also download other UIDs on the same thread, so
        # that you don't see incomplete thread views for the duration of the
        # sync. Given a 'seed set' of UIDs, this function returns a generator
        # which yields the 'expanded' set of UIDs to download.
        thrids = OrderedDict()
        for uid in sorted(uids, reverse=True):
            g_thrid = metadata[uid].g_thrid
            if g_thrid in thrids:
                thrids[g_thrid].append(uid)
            else:
                thrids[g_thrid] = [uid]

        for g_thrid, uids in thrids.items():
            g_msgid = metadata[uids[0]].g_msgid
            # Because `uids` is ordered newest-to-oldest here, uids[0] is the
            # last UID on the thread. If g_thrid is equal to its g_msgid, that
            # means it's also the first UID on the thread. In that case, we can
            # skip thread expansion for greater sync throughput.
            if g_thrid != g_msgid:
                uids = set(uids).union(crispin_client.expand_thread(g_thrid))
                metadata.update(crispin_client.g_metadata(uids))
            for uid in sorted(uids, reverse=True):
                yield uid
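For illustration, the grouping step above can be reduced to a minimal, self-contained sketch; the GMeta namedtuple and the UID/thread values below are invented stand-ins for the real crispin_client metadata:

from collections import OrderedDict, namedtuple

GMeta = namedtuple('GMeta', ['g_thrid', 'g_msgid'])  # hypothetical stand-in
metadata = {101: GMeta(1, 1), 102: GMeta(1, 2), 201: GMeta(2, 2)}

thrids = OrderedDict()
for uid in sorted(metadata, reverse=True):
    thrids.setdefault(metadata[uid].g_thrid, []).append(uid)

# thrids == OrderedDict([(2, [201]), (1, [102, 101])])
# Thread 2's newest UID has g_thrid == g_msgid, so expansion would be skipped.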
Example #2
    def __init__(self, data, label=None, batch_size=1, shuffle=False,
                 last_batch_handle='pad', data_name='data',
                 label_name='softmax_label'):
        super(NDArrayIter, self).__init__(batch_size)

        self.data = _init_data(data, allow_empty=False, default_name=data_name)
        self.label = _init_data(label, allow_empty=True, default_name=label_name)

        # shuffle data
        if shuffle:
            idx = np.arange(self.data[0][1].shape[0])
            np.random.shuffle(idx)
            self.data = [(k, array(v.asnumpy()[idx], v.context)) for k, v in self.data]
            self.label = [(k, array(v.asnumpy()[idx], v.context)) for k, v in self.label]

        # batching
        if last_batch_handle == 'discard':
            new_n = self.data[0][1].shape[0] - self.data[0][1].shape[0] % batch_size
            data_dict = OrderedDict(self.data)
            label_dict = OrderedDict(self.label)
            for k, _ in self.data:
                data_dict[k] = data_dict[k][:new_n]
            for k, _ in self.label:
                label_dict[k] = label_dict[k][:new_n]
            self.data = data_dict.items()
            self.label = label_dict.items()

        self.data_list = [x[1] for x in self.data] + [x[1] for x in self.label]
        self.num_source = len(self.data_list)
        self.num_data = self.data_list[0].shape[0]
        assert self.num_data >= batch_size, \
            "batch_size needs to be no larger than the data size."
        self.cursor = -batch_size
        self.batch_size = batch_size
        self.last_batch_handle = last_batch_handle
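This constructor matches MXNet's mx.io.NDArrayIter; assuming that library, typical usage looks like the following sketch (the array shapes are arbitrary):

import numpy as np
import mxnet as mx

data = np.random.rand(10, 3)
label = np.arange(10)
# Shuffle once up front, then yield batches of 4; the final short batch
# is padded because last_batch_handle defaults to 'pad'.
it = mx.io.NDArrayIter(data, label, batch_size=4, shuffle=True)
for batch in it:
    print(batch.data[0].shape, batch.label[0].shape)  # (4, 3) (4,)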
Example #3
def combine(**kwargs):
  """Generate combinations based on its keyword arguments.

  Two sets of returned combinations can be concatenated using +.  Their product
  can be computed using `times()`.

  Args:
    **kwargs: keyword arguments of form `option=[possibilities, ...]`
         or `option=the_only_possibility`.

  Returns:
    a list of dictionaries for each combination. Keys in the dictionaries are
    the keyword argument names.  Each key has one value - one of the
    corresponding keyword argument values.
  """
  if not kwargs:
    return [OrderedDict()]

  sort_by_key = lambda k: k[0][0]
  kwargs = OrderedDict(sorted(kwargs.items(), key=sort_by_key))
  first = list(kwargs.items())[0]

  rest = dict(list(kwargs.items())[1:])
  rest_combined = combine(**rest)

  key = first[0]
  values = first[1]
  if not isinstance(values, list):
    values = [values]

  return [
      OrderedDict(sorted(list(combined.items()) + [(key, v)], key=sort_by_key))
      for v in values
      for combined in rest_combined
  ]
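Because the recursion bottoms out at [OrderedDict()], a small call like the following returns one OrderedDict per combination, with keys sorted:

combos = combine(mode=['eager', 'graph'], num_gpus=0)
# combos == [OrderedDict([('mode', 'eager'), ('num_gpus', 0)]),
#            OrderedDict([('mode', 'graph'), ('num_gpus', 0)])]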
Example #4
    def get_error_messages(self, errors, question_descriptor_from="label"):

        question_errors = super(QuestionSummary, self).get_error_messages(
            errors,
            question_descriptor_from=question_descriptor_from,
        )

        boolean_list_questions = self.get('boolean_list_questions')
        boolean_list_values = self.get('value') or []

        if self.id in question_errors and self.type == 'boolean_list' and boolean_list_questions:
            # pad list of values to same length as boolean_list_questions
            boolean_list_values.extend([None] * (len(boolean_list_questions) - len(boolean_list_values)))

            for index, boolean_list_question in enumerate(boolean_list_questions):
                if not isinstance(boolean_list_values[index], bool):
                    # Each non-boolean value is an error
                    boolean_question_id = "{}-{}".format(self.id, index)
                    question_errors[boolean_question_id] = {
                        'input_name': boolean_question_id,
                        'message': question_errors[self.id]['message'],
                        'question': boolean_list_question
                    }

            question_errors[self.id] = True
            question_errors = OrderedDict([
                (k, question_errors[k]) for k in sorted(question_errors.keys())
            ])

        return question_errors
Example #5
def build_new_row(row, val_idx, h_friends, headers, val_h):
    new_row = OrderedDict()
    for h_idx, h in enumerate(headers):
        h_hash = h['hash']
        h_name = h['name']
        if h_hash not in [f['hash'] for f in h_friends]:
            if len(row[h_idx]) == 1:
                # if this column is not correlated to the column we
                # are working on and it's a single value, set
                # it to the same value in the new row
                new_row[h_name] = row[h_idx]
            else:
                # if this column is not correlated to the column we are
                # working on and it is a multi value, set it
                # to "UNRELATED"
                new_row[h_name] = [
                    "UNRELATED TO {}".format(val_h['mod_name'])
                ]
        else:
            # if this column is correlated to the column we are
            # working on, set the value to the indexed value of this
            # value
            new_row[h_name] = [row[h_idx][val_idx]]
    new_row = new_row.values()
    return new_row
Example #6
def uniquify_points_and_return_input_index_to_unique_index_map( pts, threshold = 0 ):
	'''
	Given a sequence of N points 'pts',
	and an optional 'threshold' indicating how many decimal places of accuracy (default: 0)
	returns two items:
	   a sequence of all the unique elements in 'pts'
	   and
	   a list of length N where the i-th item in the list tells you where
	   pts[i] can be found in the unique elements.
	'''
	
	from collections import OrderedDict
	from numpy import asarray
	unique_pts = OrderedDict()
	pts_map = []
	## Add rounded points to a dictionary and set the key to
	## ( the index into the ordered dictionary, the non-rounded point )
	for i, ( pt, rounded_pt ) in enumerate( zip( pts, map( tuple, asarray( pts ).round( threshold ) ) ) ):
		index = unique_pts.setdefault( rounded_pt, ( len( unique_pts ), pt ) )[0]
		## For fancier schemes:
		# index = unique_pts.setdefault( rounded_pt, ( len( unique_pts ), [] ) )[0]
		# unique_pts[ rounded_pt ][1].append( pt )
		pts_map.append( index )
	
	## Return the original resolution points.
	## The average of all points that round:
	# return [ tuple( average( pt, axis = 0 ) ) for i, pt in unique_pts.itervalues() ], pts_map
	## The closest point to the rounded point:
	# return [ tuple( pt[ abs( asarray( pt ).round( threshold ) - asarray( pt ) ).sum(axis=1).argmin() ] ) for i, pt in unique_pts.itervalues() ], pts_map
	## Simplest, the first rounded point:
	return [ tuple( pt ) for i, pt in unique_pts.itervalues() ], pts_map
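A quick usage sketch (the function is Python 2 as written, via itervalues; the points are arbitrary):

pts = [(0.1234, 0.5678), (0.1239, 0.5681), (2.0, 3.0)]
unique, index_map = uniquify_points_and_return_input_index_to_unique_index_map(pts, threshold=2)
# Rounding to 2 decimal places collapses the first two points:
# unique    == [(0.1234, 0.5678), (2.0, 3.0)]   (the first occurrence is kept)
# index_map == [0, 0, 1]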
Example #7
def get_multipartite_form(multi_type, user):
    'It returns a form for the given multipartite'
    form_fields = OrderedDict()

    part_defs = PARTS_TO_ASSEMBLE[multi_type]
    for parts in part_defs:
        features = Feature.objects.filter(type__name=parts[0],
                                          prefix=parts[1],
                                          suffix=parts[2])
        features = filter_feature_by_user_perms(features, user)
        choices = features_to_choices(features)
        name = parts[0]
        form_fields[name] = forms.CharField(max_length=100,
                                            widget=Select(choices=choices))

    # last we need to add the vector to the form
    vector_choices = get_vector_choices(user)
    form_fields[VECTOR_TYPE_NAME] = forms.CharField(max_length=100,
                                                    widget=Select(choices=vector_choices))

    form = type('MultiPartiteForm', (forms.BaseForm,),
                {'base_fields': form_fields})
    for field_name in form_fields.keys():
        setattr(form, 'clean_{0}'.format(field_name),
                create_feature_validator(field_name))
    return form
Example #8
    def post(self, request, pk):
        """ Clean the data and save opening hours in the database.
        Old opening hours are purged before new ones are saved.
        """
        location = self.get_object()
        # open days, disabled widget data won't make it into request.POST
        present_prefixes = [x.split('-')[0] for x in request.POST.keys()]
        day_forms = OrderedDict()
        for day_no, day_name in WEEKDAYS:
            for slot_no in (1, 2):
                prefix = self.form_prefix(day_no, slot_no)
                # skip closed day as it would be invalid form due to no data
                if prefix not in present_prefixes:
                    continue
                day_forms[prefix] = (day_no, Slot(request.POST, prefix=prefix))

        if all([day_form[1].is_valid() for pre, day_form in day_forms.items()]):
            OpeningHours.objects.filter(company=location).delete()
            for prefix, day_form in day_forms.items():
                day, form = day_form
                opens, shuts = [str_to_time(form.cleaned_data[x])
                                for x in ('opens', 'shuts')]
                if opens != shuts:
                    OpeningHours(from_hour=opens, to_hour=shuts,
                                 company=location, weekday=day).save()
        return redirect(request.path_info)
Example #9
def git_list(args, script_path):
    ariane_git_repos = OrderedDict(sorted(json.load(
        open(script_path+"/resources/sources/ariane.community.git.repos-SNAPSHOT.json")).items(),
        key=lambda t: t[0]))
    if args.addon:
        print("\nExisting Ariane addon git repositories :\n")
        print('{:40} {:110}'.format("Ariane git repository name", "Ariane git repository URL"))
        print('{:40} {:110}'.format("--------------------------", "-------------------------"))
        for key in ariane_git_repos.keys():
            git_repo = ariane_git_repos[key]
            if git_repo['type'] == "addon":
                print('{:40} {:110}'.format(key, git_repo['url']))
    elif args.core:
        print("\nExisting Ariane core git repositories :\n")
        print('{:40} {:110}'.format("Ariane git repository name", "Ariane git repository URL"))
        print('{:40} {:110}'.format("--------------------------", "-------------------------"))
        for key in ariane_git_repos.keys():
            git_repo = ariane_git_repos[key]
            if git_repo['type'] == "core":
                print('{:40} {:110}'.format(key, git_repo['url']))
    else:
        print("\nExisting Ariane git repositories :\n")
        print('{:40} {:110} {:25}'.format("Ariane git repository name", "Ariane git repository URL",
                                          "Ariane git repository type"))
        print('{:40} {:110} {:25}'.format("--------------------------", "-------------------------",
                                          "--------------------------"))
        for key in ariane_git_repos.keys():
            git_repo = ariane_git_repos[key]
            print('{:40} {:110} {:25}'.format(key, git_repo['url'], git_repo['type']))
Example #10
def from_files(filenames):
    """Return an iterator that provides a sequence of Histograms for
the histograms defined in filenames.
    """
    all_histograms = OrderedDict()
    for filename in filenames:
        parser = FILENAME_PARSERS[os.path.basename(filename)]
        histograms = parser(filename)

        # OrderedDicts are important, because then the iteration order over
        # the parsed histograms is stable, which makes the insertion into
        # all_histograms stable, which makes ordering in generated files
        # stable, which makes builds more deterministic.
        if not isinstance(histograms, OrderedDict):
            raise BaseException, "histogram parser didn't provide an OrderedDict"

        for (name, definition) in histograms.iteritems():
            if all_histograms.has_key(name):
                raise DefinitionException, "duplicate histogram name %s" % name
            all_histograms[name] = definition

    # We require that all USE_COUNTER2_* histograms be defined in a contiguous
    # block.
    use_counter_indices = filter(lambda x: x[1].startswith("USE_COUNTER2_"),
                                 enumerate(all_histograms.iterkeys()));
    if use_counter_indices:
        lower_bound = use_counter_indices[0][0]
        upper_bound = use_counter_indices[-1][0]
        n_counters = upper_bound - lower_bound + 1
        if n_counters != len(use_counter_indices):
            raise DefinitionException, "use counter histograms must be defined in a contiguous block"

    for (name, definition) in all_histograms.iteritems():
        yield Histogram(name, definition)
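The contiguity check near the end leans on Python 2 semantics (filter returning an indexable list). The same invariant can be sketched in Python 3 with a list comprehension; the histogram names here are invented:

names = ['GC_MS', 'USE_COUNTER2_A', 'USE_COUNTER2_B', 'CYCLE_COLLECTOR']
indices = [i for i, name in enumerate(names) if name.startswith('USE_COUNTER2_')]
assert not indices or indices[-1] - indices[0] + 1 == len(indices)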
Example #11
class LRUCache(object):

    def __init__(self, capacity):
        """
        :type capacity: int
        """
        self.dictionary = OrderedDict()
        self.capacity = capacity

    def get(self, key):
        """
        :rtype: int
        """
        if key in self.dictionary:
            temp = self.dictionary.pop(key)
            self.dictionary[key] = temp
            return temp
        else: return -1

    def set(self, key, value):
        """
        :type key: int
        :type value: int
        :rtype: nothing
        """
        if key in self.dictionary:
            self.dictionary.pop(key)
            self.dictionary[key] = value
        else:
            if len(self.dictionary) >= self.capacity:
                self.dictionary.popitem(last=False)
                self.dictionary[key] = value
            else:
                self.dictionary[key] = value
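A quick sanity check of the eviction order (capacity 2):

cache = LRUCache(2)
cache.set(1, 1)
cache.set(2, 2)
assert cache.get(1) == 1   # key 1 becomes most recently used
cache.set(3, 3)            # evicts key 2, the least recently used
assert cache.get(2) == -1
assert cache.get(3) == 3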
Example #12
    def __init__(self, logger=None):
        self.log = logger or dulog
        self.section  = None
        self.filename = None
        self.compiler_info = OrderedDict((
                ('mpicc'  , None),
                ('mpicxx' , None),
                ('mpifort', None),
                ('mpif77' , None),
                ('mpif90' , None),
                ('mpif08' , None),
                ('mpild'  , None),
                ))
        self.library_info = OrderedDict((
            ('define_macros'        , []),
            ('undef_macros'         , []),
            ('include_dirs'         , []),

            ('libraries'            , []),
            ('library_dirs'         , []),
            ('runtime_library_dirs' , []),

            ('extra_compile_args'   , []),
            ('extra_link_args'      , []),
            ('extra_objects'        , []),
            ))
Example #13
def get_language_config(content_language=None):
    language = get_language()[:2]
    if content_language:
        content_language = content_language[:2]
    else:
        content_language = language

    config = {}
    config['language'] = language

    lang_names = OrderedDict()
    for lang, name in settings.LANGUAGES:
        if lang[:2] not in lang_names: lang_names[lang[:2]] = []
        lang_names[lang[:2]].append(_(name))
    sp_langs = []
    for lang, names in lang_names.items():
        if lang == content_language:
            default = '+'
        else:
            default = ''
        sp_langs.append(u'%s%s=%s' % (default, ' / '.join(names), lang))

    config['spellchecker_languages'] = ','.join(sp_langs)

    if content_language in settings.LANGUAGES_BIDI:
        config['directionality'] = 'rtl'
    else:
        config['directionality'] = 'ltr'

    if tinymce.settings.USE_SPELLCHECKER:
        config['spellchecker_rpc_url'] = reverse('tinymce.views.spell_check')

    return config
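The lang_names loop above groups language variants under their two-letter prefix. A standalone sketch of that merging, with an invented LANGUAGES setting (the real one uses gettext-wrapped names):

from collections import OrderedDict

LANGUAGES = (('en-us', 'American English'), ('en-gb', 'British English'), ('de', 'German'))
lang_names = OrderedDict()
for lang, name in LANGUAGES:
    lang_names.setdefault(lang[:2], []).append(name)

# lang_names == OrderedDict([('en', ['American English', 'British English']),
#                            ('de', ['German'])])
# With content_language 'de', spellchecker_languages becomes:
# 'American English / British English=en,+German=de'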
Example #14
    def test_rename(self):
        data = create_test_data()
        newnames = {'var1': 'renamed_var1', 'dim2': 'renamed_dim2'}
        renamed = data.rename(newnames)

        variables = OrderedDict(data.variables)
        for k, v in newnames.iteritems():
            variables[v] = variables.pop(k)

        for k, v in variables.iteritems():
            dims = list(v.dimensions)
            for name, newname in newnames.iteritems():
                if name in dims:
                    dims[dims.index(name)] = newname

            self.assertVariableEqual(Variable(dims, v.values, v.attrs),
                                     renamed.variables[k])
            self.assertEqual(v.encoding, renamed.variables[k].encoding)
            self.assertEqual(type(v), type(renamed.variables[k]))

        self.assertTrue('var1' not in renamed.variables)
        self.assertTrue('dim2' not in renamed.variables)

        with self.assertRaisesRegexp(ValueError, "cannot rename 'not_a_var'"):
            data.rename({'not_a_var': 'nada'})

        # verify that we can rename a variable without accessing the data
        var1 = data['var1']
        data['var1'] = (var1.dimensions, InaccessibleArray(var1.values))
        renamed = data.rename(newnames)
        with self.assertRaises(UnexpectedDataAccess):
            renamed['renamed_var1'].values
Example #15
    def setUp(self):
        couch_uri = 'http://ovh23.depp.it:5984'
        self.couch_server = couchdb.Server(couch_uri)

        self.norm_couch = self.couch_server['bilanci_voci']
        self.simple_couch = self.couch_server['bilanci_simple']

        self.entrate_sections = OrderedDict([
            ('Accertamenti', 0),
            ('Riscossioni in conto competenza', 1),
            ('Riscossioni in conto residui', 2),
        ])

        self.spese_sections = OrderedDict([
            ('Impegni', 0),
            ('Pagamenti in conto competenza', 1),
            ('Pagamenti in conto residui', 2),
        ])

        (self.year, self.city) = self.code.split("_")
        self.normalized_doc_id = "{}_{}".format(self.year, self.city)
        self.simplified_doc_id = self.city
        self.norm_doc = self.norm_couch[self.normalized_doc_id]
        self.simp_doc = self.simple_couch[self.simplified_doc_id]

        self.logger = logging.getLogger('test')
        self.logger.setLevel(logging.INFO)
Example #16
def force_trace_widths(board):
	microstrip_layers = frozenset(('1_top', '6_bot'))
	stripline_layers = frozenset(('3_inner', '4_inner'))

	se_50_microstrip_width = '0.1778'
	se_50_stripline_width = '0.1651'

	diff_90_microstrip_width = '0.127'
	diff_90_stripline_width = '0.127'

	for element in board:
		if element[0] == 'segment':
			segment = OrderedDict([(v[0], v[1:]) for v in element[1:]])
			assert len(segment['net']) == 1
			net_name = net_by_number[int(segment['net'][0])]
			assert len(segment['layer']) == 1
			layer = segment['layer'][0]

			new_width = None
			if net_name in nets_by_net_class['50_se']:
				if layer in microstrip_layers:
					new_width = [se_50_microstrip_width]
				if layer in stripline_layers:
					new_width = [se_50_stripline_width]
			elif net_name in nets_by_net_class['90_diff']:
				if layer in microstrip_layers:
					new_width = [diff_90_microstrip_width]
				if layer in stripline_layers:
					new_width = [diff_90_stripline_width]

			if new_width:
				segment['width'] = new_width
				new_elements = [[a] + b for a, b in segment.items()]
				element[1:] = new_elements
Example #17
    def parse_records(self, record):
        """
        Parses the record set
        """
        time_series = OrderedDict([
            ("X", {"Original": {}, "SDOF": {}}),
            ("Y", {"Original": {}, "SDOF": {}}),
            ("V", {"Original": {}, "SDOF": {}})])

        target_names = time_series.keys()
        for iloc, ifile in enumerate(self.input_files):
            if not os.path.exists(ifile):
                if iloc < 2:
                    # Expected horizontal component is missing - raise error
                    raise ValueError("Horizontal record %s is expected but "
                        "not found!" % ifile)
                else:
                    print "Vertical record file %s not found" % ifile
                    del time_series["V"]
                    continue
            else:
                time_series[target_names[iloc]]["Original"] = \
                    self._parse_time_history(ifile, record.xrecord.units)
        if iloc < 2:
            del time_series["V"]

        return time_series
Example #18
class akmers :
  def __init__(self) :
    self.mers = OrderedDict()
    self.smers_set = set()
  #*******************************************
  def get_mers(self) :
    return set(self.mers.keys())
  #*******************************************
  def update_smer_set(self) :
    self.smers_set = set(self.mers.keys())
  #*******************************************
  def add_mer(self, mer, count) :
    self.mers[mer] = count
  #*******************************************
  def remove_mer(self, mer) :
    del self.mers[mer]
  #*******************************************
  def has_mers(self) :
    if len(self.mers) > 0 and max(self.mers.values()) > 1 :
      return True
    else :
      return False
  #*******************************************
  def get_count(self, mer) :
    return self.mers[mer]
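A brief usage sketch of the class (the k-mers and counts are arbitrary):

mers = akmers()
mers.add_mer('ACGT', 3)
mers.add_mer('CGTA', 1)
mers.update_smer_set()
assert mers.has_mers()              # non-empty and max count 3 > 1
assert mers.get_count('ACGT') == 3
mers.remove_mer('CGTA')
assert mers.get_mers() == {'ACGT'}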
Example #19
    def test_mqtt_subscribes_topics_on_connect(self):
        """Test subscription to topic on connect."""
        from collections import OrderedDict
        prev_topics = OrderedDict()
        prev_topics['topic/test'] = 1
        prev_topics['home/sensor'] = 2
        prev_topics['still/pending'] = None

        mqtt.MQTT_CLIENT.topics = prev_topics
        mqtt.MQTT_CLIENT.progress = {1: 'still/pending'}
        # Return values for subscribe calls (rc, mid)
        mqtt.MQTT_CLIENT._mqttc.subscribe.side_effect = ((0, 2), (0, 3))
        mqtt.MQTT_CLIENT._mqtt_on_connect(None, None, 0, 0)
        self.assertFalse(mqtt.MQTT_CLIENT._mqttc.disconnect.called)

        expected = [(topic, qos) for topic, qos in prev_topics.items()
                    if qos is not None]
        self.assertEqual(
            expected,
            [call[1] for call in mqtt.MQTT_CLIENT._mqttc.subscribe.mock_calls])
        self.assertEqual({
            1: 'still/pending',
            2: 'topic/test',
            3: 'home/sensor',
        }, mqtt.MQTT_CLIENT.progress)
Example #20
def insert(self,key,value,index=None,akey=None,bkey=None):
    tmp1=OrderedDict()
    tmp2=OrderedDict()
    if ((index is not None) and (isinstance(index, int))):
        if index<len(self.keys()):
            for i in self.iterkeys():
                if self.indexofkey(i)<index:
                    tmp1[i]=self[i]
                elif self.indexofkey(i)>=index:
                    tmp2[i]=self[i]
            self.clear()
            for i in tmp1.items():
                self[i[0]]=i[1]
            self[key]=value
            for i in tmp2.items():
                self[i[0]]=i[1]
            return self
        if index==len(self.keys()):
            self[key]=value
    if akey is not None:
        if akey in self.iterkeys():
            self.insert(key,value,index=self.indexofkey(akey)+1)
        else:
            raise KeyError
    if bkey is not None:
        if bkey in self.iterkeys():
            self.insert(key, value, index=self.indexofkey(bkey))
        else:
            raise KeyError
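This method assumes an OrderedDict subclass that also exposes indexofkey(); the snippet does not include it, but a plausible minimal helper (hypothetical, not from the original source) would be:

from collections import OrderedDict

class IndexedOrderedDict(OrderedDict):
    def indexofkey(self, key):
        # position of a key in insertion order
        return list(self.keys()).index(key)

The insert() above would then be defined on such a class (note it also relies on the Python 2 iterkeys API).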
Example #21
class _SlideCache(object):
    def __init__(self, cache_size, dz_opts):
        self.cache_size = cache_size
        self.dz_opts = dz_opts
        self._lock = Lock()
        self._cache = OrderedDict()

    def get(self, path):
        with self._lock:
            if path in self._cache:
                # Move to end of LRU
                slide = self._cache.pop(path)
                self._cache[path] = slide
                return slide

        osr = OpenSlide(path)
        slide = DeepZoomGenerator(osr, **self.dz_opts)
        try:
            mpp_x = osr.properties[openslide.PROPERTY_NAME_MPP_X]
            mpp_y = osr.properties[openslide.PROPERTY_NAME_MPP_Y]
            slide.mpp = (float(mpp_x) + float(mpp_y)) / 2
        except (KeyError, ValueError):
            slide.mpp = 0

        with self._lock:
            if path not in self._cache:
                if len(self._cache) == self.cache_size:
                    self._cache.popitem(last=False)
                self._cache[path] = slide
        return slide
Example #22
def get_fielddata(model_object, search_tables, field_information_filter=None, extra_properties=[]):
    """
    returns an ordered dict of field_name->{value:value,fieldinformation:fi}
    to be used to display the item in the UI Detail views
    extra_properties are non-standard getters that wouldn't normally be returned (restricted fields)
    """
    # dump(self.dataset)
    # data=model_to_dict(self.dataset)
    property_dict = get_properties(model_object)
    if len(extra_properties) > 0:
        for prop in extra_properties:
            property_dict[prop] = getattr(model_object, prop)
            logger.info(str(("got extra prop", prop, getattr(model_object, prop))))

    logger.debug(str(("property_dict", property_dict)))
    ui_dict = {}
    for field, value in property_dict.iteritems():
        logger.debug(str(("get_field_info", field)))
        details = {}
        try:
            fi = FieldInformation.manager.get_column_fieldinformation_by_priority(field, search_tables)

            if fi and (field_information_filter and field_information_filter(fi) or field_information_filter == None):
                details["fieldinformation"] = fi
                details["value"] = value
                ui_dict[field] = details
                # ui_dict[fi.get_verbose_name()] = value
            else:
                logger.debug(str(("field not shown in this view: ", field, value)))
        except (ObjectDoesNotExist, MultipleObjectsReturned, Exception) as e:
            logger.debug(str(("no field information defined for: ", field, value)))
    ui_dict = OrderedDict(sorted(ui_dict.items(), key=lambda x: x[1]["fieldinformation"].order))
    if logger.isEnabledFor(logging.DEBUG):
        logger.debug(str(("ui_dict", ui_dict)))
    return ui_dict
Example #23
    def getDependenciesRecursive(self,
                 available_components = None,
                            processed = None,
                          search_dirs = None,
                               target = None,
                       available_only = False,
                                 test = False
        ):
        ''' Get available and already installed components, don't check for
            remotely available components. See also
            satisfyDependenciesRecursive()

            Returns {component_name:component}
        '''
        components, errors = self.__getDependenciesRecursiveWithProvider(
           available_components = available_components,
                    search_dirs = search_dirs,
                         target = target,
                 traverse_links = True,
               update_installed = False,
                       provider = self.provideInstalled,
                           test = test
        )
        for error in errors:
            logger.error(error)
        if available_only:
            components = OrderedDict((k, v) for k, v in components.items() if v)
        return components
Example #24
def getDbSeqRecord(db_record, id_field, seq_field, meta_fields=None, 
                   delimiter=default_delimiter):
    """
    Parses a database record into a SeqRecord

    Arguments: 
    db_record = a dictionary containing a database record
    id_field = the field containing identifiers
    seq_field = the field containing sequences
    meta_fields = a list of fields to add to sequence annotations
    delimiter = a tuple of delimiters for (fields, values, value lists) 

    Returns: 
    a SeqRecord
    """
    # Return None if ID or sequence fields are empty
    if not db_record[id_field] or not db_record[seq_field]:
        return None
    
    # Create description string
    desc_dict = OrderedDict([('ID', db_record[id_field])])
    if meta_fields is not None:
        desc_dict.update([(f, db_record[f]) for f in meta_fields if f in db_record]) 
    desc_str = flattenAnnotation(desc_dict, delimiter=delimiter)
    
    # Create SeqRecord
    seq_record = SeqRecord(Seq(db_record[seq_field], IUPAC.ambiguous_dna),
                           id=desc_str, name=desc_str, description='')
        
    return seq_record
Example #25
def __init__(self, raw):
    self._raw = raw
    self.file_version = 0
    self.elements_count = 0
    self._elements_map = OrderedDict()
    self._elements_index = OrderedDict()
    self._jpg_map = OrderedDict()
Example #26
def assert_no_duplicate_frameworks(state):
    """A function which asserts that there are no duplicate frameworks running, where
    frameworks are identified by their name.

    Note the extra spaces in the output strings: this is to account for the extra indentation
    we add, so we can have:

        frameworks:
          framework: marathon count: 1

    :param state: the state info from the Mesos master
    :returns: a tuple containing (output, ok): output is a log of the state of frameworks, ok a boolean
        indicating if there are any duplicate frameworks.
    """
    frameworks = state['frameworks']
    framework_counts = OrderedDict(sorted(Counter([fw['name'] for fw in frameworks]).items()))
    output = ["frameworks:"]
    ok = True

    for framework, count in framework_counts.iteritems():
        if count > 1:
            ok = False
            output.append(PaastaColors.red(
                          "    CRITICAL: Framework %s has %d instances running--expected no more than 1."
                          % (framework, count)))
        else:
            output.append("    framework: %s count: %d" % (framework, count))
    return (("\n").join(output), ok)
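The counting core of this check is easy to exercise on its own; the framework names below are invented:

from collections import Counter, OrderedDict

frameworks = [{'name': 'marathon'}, {'name': 'marathon'}, {'name': 'chronos'}]
framework_counts = OrderedDict(sorted(Counter(fw['name'] for fw in frameworks).items()))
# OrderedDict([('chronos', 1), ('marathon', 2)]) -> marathon would be flagged CRITICAL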
Example #27
def test():
    from collections import OrderedDict as StdlibOrderedDict

    ordered_dict = OrderedDict(((1, 'a'), (2, 'b'), (3, 'c')))
    stdlib_ordered_dict = StdlibOrderedDict(((1, 'a'), (2, 'b'), (3, 'c')))
    
    assert ordered_dict == stdlib_ordered_dict
    assert stdlib_ordered_dict == ordered_dict
    assert ordered_dict.items() == stdlib_ordered_dict.items()
    assert ordered_dict.keys() == stdlib_ordered_dict.keys()
    assert ordered_dict.values() == stdlib_ordered_dict.values()
    
    ordered_dict.move_to_end(1)
    
    assert ordered_dict != stdlib_ordered_dict
    #assert stdlib_ordered_dict != ordered_dict
    assert ordered_dict.items() != stdlib_ordered_dict.items()
    assert ordered_dict.keys() != stdlib_ordered_dict.keys()
    assert ordered_dict.values() != stdlib_ordered_dict.values()
    
    del stdlib_ordered_dict[1]
    stdlib_ordered_dict[1] = 'a'
    
    assert ordered_dict == stdlib_ordered_dict
    assert stdlib_ordered_dict == ordered_dict
    assert ordered_dict.items() == stdlib_ordered_dict.items()
    assert ordered_dict.keys() == stdlib_ordered_dict.keys()
    assert ordered_dict.values() == stdlib_ordered_dict.values()
    
    assert ordered_dict == OrderedDict(stdlib_ordered_dict) == \
                                                            stdlib_ordered_dict
    assert ordered_dict == StdlibOrderedDict(ordered_dict) == \
                                                            stdlib_ordered_dict
    
Example #28
def find_unique_entries(node):
  """
  Find all uniquely named entries, without recursing through inner namespaces.

  Args:
    node: a Section or InnerNamespace instance

  Yields:
    A sequence of MergedEntry nodes representing an entry

  Remarks:
    This collapses multiple entries with the same fully qualified name into
    one entry (e.g. if there are multiple entries in different kinds).
  """
  if not isinstance(node, metadata_model.Section) and    \
     not isinstance(node, metadata_model.InnerNamespace):
      raise TypeError("expected node to be a Section or InnerNamespace")

  d = OrderedDict()
  # remove the 'kinds' from the path between sec and the closest entries
  # then search the immediate children of the search path
  search_path = isinstance(node, metadata_model.Section) and node.kinds \
                or [node]
  for i in search_path:
      for entry in i.entries:
          d[entry.name] = entry

  for k,v in d.iteritems():
      yield v.merge()
Example #29
def list_releases(moniker=None, limit=0):
    """
    List currently available releases.

    Arguments:
    moniker -- kernel release moniker (e.g., mainline, stable, longterm, etc.).
    Defaults to everything
    limit -- maximum number of releases to list per moniker. Defaults to 0,
    which is no limit
    """

    releases = get_releases()

    if moniker is None:
        releases_by_moniker = OrderedDict()

        for release in releases['releases']:
            releases_by_moniker.setdefault(release['moniker'], []).append(release)

        first = True
        for moniker, r in releases_by_moniker.items():
            if not first:
                print()
            first = False

            print('%s:' % moniker)
            for release in r[:limit] if limit > 0 else r:
                print(release['version'])
    else:
        r = get_releases_by_moniker(releases, moniker)
        for release in r[:limit] if limit > 0 else r:
            print(release['version'])
Example #30
def getDependencies(self,
    available_components = None,
             search_dirs = None,
                  target = None,
          available_only = False,
                    test = False,
                warnings = True
    ):
    ''' Returns {component_name:component}
    '''
    if search_dirs is None:
        search_dirs = [self.modulesPath()]
    available_components = self.ensureOrderedDict(available_components)

    components, errors = self.__getDependenciesWithProvider(
        available_components = available_components,
                 search_dirs = search_dirs,
                      target = target,
            update_installed = False,
                    provider = self.provideInstalled,
                        test = test
    )
    if warnings:
        for error in errors:
            logger.warning(error)
    if available_only:
        components = OrderedDict((k, v) for k, v in components.items() if v)
    return components
Example #31
def augment_by_utterance_splitting(dataset, filename, denoise_only=False):
    """Performs utterance splitting and augments the dataset with new pseudo-samples whose utterances are
    one sentence long. The MR of each pseudo-sample contains only slots mentioned in the corresponding sentence.
    Assumes a CSV or JSON file as input.
    """

    if not filename.lower().endswith(('.csv', '.json')):
        raise ValueError('Unexpected file type. Please provide a CSV or JSON file as input.')

    mrs_dicts = []
    data_new = []

    print('Performing utterance splitting on ' + str(filename))

    # Read in the data
    data_cont = data_loader.init_test_data(os.path.join(config.DATA_DIR, dataset, filename))
    mrs, utterances = data_cont['data']
    _, _, slot_sep, val_sep, val_sep_end = data_cont['separators']

    for i, mr in enumerate(mrs):
        mr_dict = OrderedDict()

        # Extract the slot-value pairs into a dictionary
        for slot_value in mr.split(slot_sep):
            slot, _, _, value_orig = data_loader.parse_slot_and_value(slot_value, val_sep, val_sep_end)
            mr_dict[slot] = value_orig

        mrs_dicts.append(mr_dict)

    new_mrs, new_utterances = split_content(mrs_dicts, utterances, filename, permute=False, denoise_only=denoise_only)

    suffix = ' [' + ('denoised' if denoise_only else 'utt. split') + ']'
    filename_out = os.path.splitext(filename)[0] + suffix + os.path.splitext(filename)[1]

    if filename.lower().endswith('.csv'):
        for row, mr in enumerate(new_mrs):
            if len(mr) == 0:
                continue

            mr_str = ', '.join(['{0}[{1}]'.format(slot, value) for slot, value in mr.items()])

            data_new.append([mr_str, new_utterances[row]])

        # Write the augmented dataset to a new file
        pd.DataFrame(data_new).to_csv(os.path.join(config.DATA_DIR, dataset, filename_out),
                                      header=['mr', 'ref'],
                                      index=False,
                                      encoding='utf8')
    elif filename.lower().endswith('.json'):
        for row, mr in enumerate(new_mrs):
            if len(mr) == 0:
                continue

            mr_str = mr.pop('da')
            mr_str += '(' + slot_sep.join(
                ['{0}{1}{2}'.format(key.rstrip(string.digits), val_sep, value) for key, value in mr.items()]
            ) + ')'

            data_new.append([mr_str, new_utterances[row]])

        # Write the augmented dataset to a new file
        with io.open(os.path.join(config.DATA_DIR, dataset, filename_out), 'w', encoding='utf8') as f_data_new:
            json.dump(data_new, f_data_new, indent=4)
Example #32
        for instance in tqdm.tqdm(indexed_instances):
            instance.pad(self.model._get_max_lengths())  # pylint: disable=protected-access
        query_arrays = numpy.asarray([instance.as_training_data()[0][0] for instance in indexed_instances])
        logger.info("Getting query vectors")
        return self.query_encoder_model.predict(query_arrays)

    @overrides
    def encode_passages(self, passages: List[str]) -> List[numpy.array]:
        grouped_passages = util.group_by_count(passages, self.model.num_sentences, '')
        passage_instances = [SentenceSelectionInstance('', passage_group, None)
                             for passage_group in grouped_passages]
        logger.info("Indexing passages")
        indexed_instances = [instance.to_indexed_instance(self.model.data_indexer)
                             for instance in tqdm.tqdm(passage_instances)]
        logger.info("Padding passages")
        for instance in tqdm.tqdm(indexed_instances):
            instance.pad(self.model._get_max_lengths())  # pylint: disable=protected-access
        grouped_passage_arrays = numpy.asarray([instance.as_training_data()[0][1]
                                                for instance in indexed_instances])
        logger.info("Getting passage vectors")
        grouped_passage_vectors = self.passage_encoder_model.predict(grouped_passage_arrays)
        shape = grouped_passage_vectors.shape
        new_shape = (shape[0] * shape[1], shape[2])
        passage_vectors = grouped_passage_vectors.reshape(new_shape)
        return passage_vectors[:len(passages)]


retrieval_encoders = OrderedDict()  # pylint:  disable=invalid-name
retrieval_encoders['bow'] = BagOfWordsRetrievalEncoder
retrieval_encoders['sentence selection'] = SentenceSelectionRetrievalEncoder
Example #33
def train_one_epoch(epoch,
                    model,
                    loader,
                    optimizer,
                    loss_fn,
                    args,
                    lr_scheduler=None,
                    saver=None,
                    output_dir='',
                    amp_autocast=suppress,
                    loss_scaler=None,
                    model_ema=None,
                    mixup_fn=None):

    if args.mixup_off_epoch and epoch >= args.mixup_off_epoch:
        if args.prefetcher and loader.mixup_enabled:
            loader.mixup_enabled = False
        elif mixup_fn is not None:
            mixup_fn.mixup_enabled = False

    second_order = hasattr(optimizer,
                           'is_second_order') and optimizer.is_second_order
    batch_time_m = AverageMeter()
    data_time_m = AverageMeter()
    losses_m = AverageMeter()

    model.train()

    end = time.time()
    last_idx = len(loader) - 1
    num_updates = epoch * len(loader)
    for batch_idx, (input, target) in enumerate(loader):
        last_batch = batch_idx == last_idx
        data_time_m.update(time.time() - end)
        if not args.prefetcher:
            input, target = input.cuda(), target.cuda()
            if mixup_fn is not None:
                input, target = mixup_fn(input, target)
        if args.channels_last:
            input = input.contiguous(memory_format=torch.channels_last)

        with amp_autocast():
            output = model(input)
            loss = loss_fn(output, target)

        if not args.distributed:
            losses_m.update(loss.item(), input.size(0))

        optimizer.zero_grad()
        if loss_scaler is not None:
            loss_scaler(loss,
                        optimizer,
                        clip_grad=args.clip_grad,
                        clip_mode=args.clip_mode,
                        parameters=model_parameters(model,
                                                    exclude_head='agc'
                                                    in args.clip_mode),
                        create_graph=second_order)
        else:
            loss.backward(create_graph=second_order)
            if args.clip_grad is not None:
                dispatch_clip_grad(model_parameters(model,
                                                    exclude_head='agc'
                                                    in args.clip_mode),
                                   value=args.clip_grad,
                                   mode=args.clip_mode)
            optimizer.step()

        if model_ema is not None:
            model_ema.update(model)

        torch.cuda.synchronize()
        num_updates += 1
        batch_time_m.update(time.time() - end)
        if last_batch or batch_idx % args.log_interval == 0:
            lrl = [param_group['lr'] for param_group in optimizer.param_groups]
            lr = sum(lrl) / len(lrl)

            if args.distributed:
                reduced_loss = reduce_tensor(loss.data, args.world_size)
                losses_m.update(reduced_loss.item(), input.size(0))

            if args.local_rank == 0:
                _logger.info(
                    'Train: {} [{:>4d}/{} ({:>3.0f}%)]  '
                    'Loss: {loss.val:>9.6f} ({loss.avg:>6.4f})  '
                    'Time: {batch_time.val:.3f}s, {rate:>7.2f}/s  '
                    '({batch_time.avg:.3f}s, {rate_avg:>7.2f}/s)  '
                    'LR: {lr:.3e}  '
                    'Data: {data_time.val:.3f} ({data_time.avg:.3f})'.format(
                        epoch,
                        batch_idx,
                        len(loader),
                        100. * batch_idx / last_idx,
                        loss=losses_m,
                        batch_time=batch_time_m,
                        rate=input.size(0) * args.world_size /
                        batch_time_m.val,
                        rate_avg=input.size(0) * args.world_size /
                        batch_time_m.avg,
                        lr=lr,
                        data_time=data_time_m))

                if args.save_images and output_dir:
                    torchvision.utils.save_image(
                        input,
                        os.path.join(output_dir,
                                     'train-batch-%d.jpg' % batch_idx),
                        padding=0,
                        normalize=True)

        if saver is not None and args.recovery_interval and (
                last_batch or (batch_idx + 1) % args.recovery_interval == 0):
            saver.save_recovery(epoch, batch_idx=batch_idx)

        if lr_scheduler is not None:
            lr_scheduler.step_update(num_updates=num_updates,
                                     metric=losses_m.avg)

        end = time.time()
        # end for

    if hasattr(optimizer, 'sync_lookahead'):
        optimizer.sync_lookahead()

    return OrderedDict([('loss', losses_m.avg)])
Example #34
from collections import OrderedDict
import pytest
import numpy
import torch
from functools import partial
import traceback
import io

import syft
from syft.serde import protobuf
from test.serde.serde_helpers import *

# Dictionary containing test sample functions
samples = OrderedDict()

# Native
samples[type(None)] = make_none


def test_serde_coverage():
    """Checks all types in serde are tested"""
    for cls, _ in protobuf.serde.bufferizers.items():
        has_sample = cls in samples
        assert has_sample is True, "Serde for %s is not tested" % cls


@pytest.mark.parametrize("cls", samples)
def test_serde_roundtrip_protobuf(cls, workers):
    """Checks that values passed through serialization-deserialization stay same"""
    _samples = samples[cls](workers=workers)
    for sample in _samples:
Example #36
class Polish(QDialog):  # {{{

    def __init__(self, db, book_id_map, parent=None):
        from calibre.ebooks.oeb.polish.main import HELP
        QDialog.__init__(self, parent)
        self.db, self.book_id_map = weakref.ref(db), book_id_map
        self.setWindowIcon(QIcon(I('polish.png')))
        title = _('Polish book')
        if len(book_id_map) > 1:
            title = _('Polish %d books')%len(book_id_map)
        self.setWindowTitle(title)

        self.help_text = {
            'polish': _('<h3>About Polishing books</h3>%s')%HELP['about'].format(
                _('''<p>If you have both EPUB and ORIGINAL_EPUB in your book,
                  then polishing will run on ORIGINAL_EPUB (the same for other
                  ORIGINAL_* formats).  So if you
                  want Polishing to not run on the ORIGINAL_* format, delete the
                  ORIGINAL_* format before running it.</p>''')
            ),

            'embed':_('<h3>Embed referenced fonts</h3>%s')%HELP['embed'],
            'subset':_('<h3>Subsetting fonts</h3>%s')%HELP['subset'],

            'smarten_punctuation':
            _('<h3>Smarten punctuation</h3>%s')%HELP['smarten_punctuation'],

            'metadata':_('<h3>Updating metadata</h3>'
                         '<p>This will update all metadata <i>except</i> the cover in the'
                         ' e-book files to match the current metadata in the'
                         ' calibre library.</p>'
                         ' <p>Note that most e-book'
                         ' formats are not capable of supporting all the'
                         ' metadata in calibre.</p><p>There is a separate option to'
                         ' update the cover.</p>'),
            'do_cover': _('<h3>Update cover</h3><p>Update the covers in the e-book files to match the'
                        ' current cover in the calibre library.</p>'
                        '<p>If the e-book file does not have'
                        ' an identifiable cover, a new cover is inserted.</p>'
                        ),
            'jacket':_('<h3>Book Jacket</h3>%s')%HELP['jacket'],
            'remove_jacket':_('<h3>Remove Book Jacket</h3>%s')%HELP['remove_jacket'],
            'remove_unused_css':_('<h3>Remove unused CSS rules</h3>%s')%HELP['remove_unused_css'],
            'compress_images': _('<h3>Losslessly compress images</h3>%s') % HELP['compress_images'],
        }

        self.l = l = QGridLayout()
        self.setLayout(l)

        self.la = la = QLabel('<b>'+_('Select actions to perform:'))
        l.addWidget(la, 0, 0, 1, 2)

        count = 0
        self.all_actions = OrderedDict([
            ('embed', _('&Embed all referenced fonts')),
            ('subset', _('&Subset all embedded fonts')),
            ('smarten_punctuation', _('Smarten &punctuation')),
            ('metadata', _('Update &metadata in the book files')),
            ('do_cover', _('Update the &cover in the book files')),
            ('jacket', _('Add/replace metadata as a "book &jacket" page')),
            ('remove_jacket', _('&Remove a previously inserted book jacket')),
            ('remove_unused_css', _('Remove &unused CSS rules from the book')),
            ('compress_images', _('Losslessly compress images')),
        ])
        prefs = gprefs.get('polishing_settings', {})
        for name, text in self.all_actions.iteritems():
            count += 1
            x = QCheckBox(text, self)
            x.setChecked(prefs.get(name, False))
            x.stateChanged.connect(partial(self.option_toggled, name))
            l.addWidget(x, count, 0, 1, 1)
            setattr(self, 'opt_'+name, x)
            la = QLabel(' <a href="#%s">%s</a>'%(name, _('About')))
            setattr(self, 'label_'+name, la)
            la.linkActivated.connect(self.help_link_activated)
            l.addWidget(la, count, 1, 1, 1)

        count += 1
        l.addItem(QSpacerItem(10, 10, vPolicy=QSizePolicy.Expanding), count, 1, 1, 2)

        la = self.help_label = QLabel('')
        self.help_link_activated('#polish')
        la.setWordWrap(True)
        la.setTextFormat(Qt.RichText)
        la.setFrameShape(QFrame.StyledPanel)
        la.setAlignment(Qt.AlignLeft|Qt.AlignTop)
        la.setLineWidth(2)
        la.setStyleSheet('QLabel { margin-left: 75px }')
        l.addWidget(la, 0, 2, count+1, 1)
        l.setColumnStretch(2, 1)

        self.show_reports = sr = QCheckBox(_('Show &report'), self)
        sr.setChecked(gprefs.get('polish_show_reports', True))
        sr.setToolTip(textwrap.fill(_('Show a report of all the actions performed'
                        ' after polishing is completed')))
        l.addWidget(sr, count+1, 0, 1, 1)
        self.bb = bb = QDialogButtonBox(QDialogButtonBox.Ok|QDialogButtonBox.Cancel)
        bb.accepted.connect(self.accept)
        bb.rejected.connect(self.reject)
        self.save_button = sb = bb.addButton(_('&Save Settings'), bb.ActionRole)
        sb.clicked.connect(self.save_settings)
        self.load_button = lb = bb.addButton(_('&Load Settings'), bb.ActionRole)
        self.load_menu = QMenu(lb)
        lb.setMenu(self.load_menu)
        self.all_button = b = bb.addButton(_('Select &all'), bb.ActionRole)
        b.clicked.connect(partial(self.select_all, True))
        self.none_button = b = bb.addButton(_('Select &none'), bb.ActionRole)
        b.clicked.connect(partial(self.select_all, False))
        l.addWidget(bb, count+1, 1, 1, -1)
        self.setup_load_button()

        self.resize(QSize(950, 600))

    def select_all(self, enable):
        for action in self.all_actions:
            x = getattr(self, 'opt_'+action)
            x.blockSignals(True)
            x.setChecked(enable)
            x.blockSignals(False)

    def save_settings(self):
        if not self.something_selected:
            return error_dialog(self, _('No actions selected'),
                _('You must select at least one action before saving'),
                                show=True)
        name, ok = QInputDialog.getText(self, _('Choose name'),
                _('Choose a name for these settings'))
        if ok:
            name = unicode(name).strip()
            if name:
                settings = {ac:getattr(self, 'opt_'+ac).isChecked() for ac in
                            self.all_actions}
                saved = gprefs.get('polish_settings', {})
                saved[name] = settings
                gprefs.set('polish_settings', saved)
                self.setup_load_button()

    def setup_load_button(self):
        saved = gprefs.get('polish_settings', {})
        m = self.load_menu
        m.clear()
        self.__actions = []
        a = self.__actions.append
        for name in sorted(saved):
            a(m.addAction(name, partial(self.load_settings, name)))
        m.addSeparator()
        a(m.addAction(_('Remove saved settings'), self.clear_settings))
        self.load_button.setEnabled(bool(saved))

    def clear_settings(self):
        gprefs.set('polish_settings', {})
        self.setup_load_button()

    def load_settings(self, name):
        saved = gprefs.get('polish_settings', {}).get(name, {})
        for action in self.all_actions:
            checked = saved.get(action, False)
            x = getattr(self, 'opt_'+action)
            x.blockSignals(True)
            x.setChecked(checked)
            x.blockSignals(False)

    def option_toggled(self, name, state):
        if state == Qt.Checked:
            self.help_label.setText(self.help_text[name])

    def help_link_activated(self, link):
        link = unicode(link)[1:]
        self.help_label.setText(self.help_text[link])

    @property
    def something_selected(self):
        for action in self.all_actions:
            if getattr(self, 'opt_'+action).isChecked():
                return True
        return False

    def accept(self):
        self.actions = ac = {}
        saved_prefs = {}
        gprefs['polish_show_reports'] = bool(self.show_reports.isChecked())
        something = False
        for action in self.all_actions:
            ac[action] = saved_prefs[action] = bool(getattr(self, 'opt_'+action).isChecked())
            if ac[action]:
                something = True
        if ac['jacket'] and not ac['metadata']:
            if not question_dialog(self, _('Must update metadata'),
                _('You have selected the option to add metadata as '
                  'a "book jacket". For this option to work, you '
                  'must also select the option to update metadata in'
                  ' the book files. Do you want to select it?')):
                return
            ac['metadata'] = saved_prefs['metadata'] = True
            self.opt_metadata.setChecked(True)
        if ac['jacket'] and ac['remove_jacket']:
            if not question_dialog(self, _('Add or remove jacket?'), _(
                    'You have chosen to both add and remove the metadata jacket.'
                    ' This will result in the final book having no jacket. Is this'
                    ' what you want?')):
                return
        if not something:
            return error_dialog(self, _('No actions selected'),
                _('You must select at least one action, or click Cancel.'),
                                show=True)
        gprefs['polishing_settings'] = saved_prefs
        self.queue_files()
        return super(Polish, self).accept()

    def queue_files(self):
        self.tdir = PersistentTemporaryDirectory('_queue_polish')
        self.jobs = []
        if len(self.book_id_map) <= 5:
            for i, (book_id, formats) in enumerate(self.book_id_map.iteritems()):
                self.do_book(i+1, book_id, formats)
        else:
            self.queue = [(i+1, id_) for i, id_ in enumerate(self.book_id_map)]
            self.pd = ProgressDialog(_('Queueing books for polishing'),
                                     max=len(self.queue), parent=self)
            QTimer.singleShot(0, self.do_one)
            self.pd.exec_()

    def do_one(self):
        if not self.queue:
            self.pd.accept()
            return
        if self.pd.canceled:
            self.jobs = []
            self.pd.reject()
            return
        num, book_id = self.queue.pop()
        try:
            self.do_book(num, book_id, self.book_id_map[book_id])
        except:
            self.pd.reject()
            raise
        else:
            self.pd.set_value(num)
            QTimer.singleShot(0, self.do_one)

    def do_book(self, num, book_id, formats):
        base = os.path.join(self.tdir, unicode(book_id))
        os.mkdir(base)
        db = self.db()
        opf = os.path.join(base, 'metadata.opf')
        with open(opf, 'wb') as opf_file:
            mi = create_opf_file(db, book_id, opf_file=opf_file)[0]
        data = {'opf':opf, 'files':[]}
        for action in self.actions:
            data[action] = bool(getattr(self, 'opt_'+action).isChecked())
        cover = os.path.join(base, 'cover.jpg')
        if db.copy_cover_to(book_id, cover, index_is_id=True):
            data['cover'] = cover
        is_orig = {}
        for fmt in formats:
            ext = fmt.replace('ORIGINAL_', '').lower()
            is_orig[ext.upper()] = 'ORIGINAL_' in fmt
            with open(os.path.join(base, '%s.%s'%(book_id, ext)), 'wb') as f:
                db.copy_format_to(book_id, fmt, f, index_is_id=True)
                data['files'].append(f.name)

        desc = ngettext(_('Polish %s')%mi.title,
                        _('Polish book %(nums)s of %(tot)s (%(title)s)')%dict(
                            nums=num, tot=len(self.book_id_map),
                            title=mi.title), len(self.book_id_map))
        if hasattr(self, 'pd'):
            self.pd.set_msg(_('Queueing book %(nums)s of %(tot)s (%(title)s)')%dict(
                            nums=num, tot=len(self.book_id_map), title=mi.title))

        self.jobs.append((desc, data, book_id, base, is_orig))
Example No. 37
    Colormap,
    Tuple[str, VispyColormap],
    Tuple[str, Colormap],
    Dict[str, VispyColormap],
    Dict[str, Colormap],
    Dict,
]


matplotlib_colormaps = _MATPLOTLIB_COLORMAP_NAMES = OrderedDict(
    viridis=trans._p('colormap', 'viridis'),
    magma=trans._p('colormap', 'magma'),
    inferno=trans._p('colormap', 'inferno'),
    plasma=trans._p('colormap', 'plasma'),
    gray=trans._p('colormap', 'gray'),
    gray_r=trans._p('colormap', 'gray r'),
    hsv=trans._p('colormap', 'hsv'),
    turbo=trans._p('colormap', 'turbo'),
    twilight=trans._p('colormap', 'twilight'),
    twilight_shifted=trans._p('colormap', 'twilight shifted'),
    gist_earth=trans._p('colormap', 'gist earth'),
    PiYG=trans._p('colormap', 'PiYG'),
)
_MATPLOTLIB_COLORMAP_NAMES_REVERSE = {
    v: k for k, v in matplotlib_colormaps.items()
}
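# (Sketch) The reverse map recovers the canonical matplotlib name from its
# translated display string, e.g.:
#     _MATPLOTLIB_COLORMAP_NAMES_REVERSE[matplotlib_colormaps['viridis']]
#     # -> 'viridis'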
_VISPY_COLORMAPS_ORIGINAL = _VCO = get_colormaps()
_VISPY_COLORMAPS_TRANSLATIONS = OrderedDict(
    autumn=(trans._p('colormap', 'autumn'), _VCO['autumn']),
    blues=(trans._p('colormap', 'blues'), _VCO['blues']),
    cool=(trans._p('colormap', 'cool'), _VCO['cool']),
    greens=(trans._p('colormap', 'greens'), _VCO['greens']),
Example No. 38
def getEnvPerformance(env_generals, env_windows, env_doors, env_HBs, env_EF_Peris, A_EF, A_A, region):

    results = OrderedDict()
    
    ##### Area-related calculations #####
    
    # Total surface area of the envelope (m2)
    a_env = get_A_env(  [ env_general.A for env_general in env_generals ]
                      + [ env_window.A  for env_window  in env_windows  ]
                      + [ env_door.A    for env_door    in env_doors    ], A_EF )
    results["外皮の面積の合計"] = a_env
    
    # Ratio of the envelope parts to the total floor area
    r_env = get_r_env( a_env, A_A )
    results["床面積の合計に対する外皮の部位の比"] = r_env
    
    ##### UA and Q' value calculations #####

    # Build an array of (area, U-value, temperature difference coefficient) tuples
    # The temperature difference coefficient is determined from the adjacent
    # space type and the regional classification
    U_generals = [ (env_general.A, env_general.U, get_H(env_general.ASType, region) ) for env_general in env_generals ]
    U_windows  = [ (env_window.A,  env_window.U,  get_H(env_window.ASType, region)  ) for env_window  in env_windows  ]
    U_doors    = [ (env_door.A,    env_door.U,    get_H(env_door.ASType, region)    ) for env_door    in env_doors    ]
    results["U_generals"] = U_generals
    results["U_windows"] = U_windows
    results["U_doors"] = U_doors
    
    # Build an array of (length, psi-value, temperature difference coefficient) tuples
    # The temperature difference coefficient is determined from the adjacent
    # space type and the regional classification
    psi_HBs = [ (env_HB.L, env_HB.psi, get_H(env_HB.ASType, region) ) for env_HB in env_HBs ]
    results["psi_HBs"] = psi_HBs
    
    # Build an array of (length, psi-value, temperature difference coefficient) tuples
    # The temperature difference coefficient is determined from the adjacent
    # space type and the regional classification
    psi_EF_Peris = [ (env_EF_Peri.L, env_EF_Peri.psi, get_H(env_EF_Peri.ASType, region) ) for env_EF_Peri in env_EF_Peris ]
    results["psi_EF_Peris"] = psi_EF_Peris

    # UA value (W/m2K)
    UA = get_UA( U_generals + U_windows + U_doors , psi_HBs + psi_EF_Peris , a_env )
    results["UA値"] = UA
    
    # Heat loss coefficient (excluding heat loss from ventilation)
    Q_dash = get_Q_dash( float(UA), r_env )
    results["熱損失係数(換気による熱損失を含まない) "] = Q_dash
    
    ##### eta_A and mu value calculations (heating season) #####

    # Build an array of (area, solar heat gain rate, orientation factor) tuples
    eta_H_generals = [ (env_general.A, env_general.eta_H, get_nu_H(region)[env_general.direction] ) for env_general in env_generals ]
    eta_H_windows  = [ (env_window.A,  env_window.eta_H,  get_nu_H(region)[env_window.direction ] ) for env_window  in env_windows  ]
    eta_H_doors    = [ (env_door.A,    env_door.eta_H,    get_nu_H(region)[env_door.direction   ] ) for env_door    in env_doors    ]
    results["eta_H_generals"] = eta_H_generals
    results["eta_H_windows"] = eta_H_windows
    results["eta_H_doors"] = eta_H_doors

    # Build an array of (length, thermal bridge solar heat gain rate, orientation factor 1, orientation factor 2) tuples
    eta_H_dash_HBs = [ (env_HB.L, env_HB.eta_H_dash, get_nu_H(region)[env_HB.direction1], get_nu_H(region)[env_HB.direction2] ) for env_HB in env_HBs ]
    results["eta_H_dash_HBs"] = eta_H_dash_HBs

    # eta_A_H value (-)
    eta_A_H = get_eta_A_H(eta_H_generals + eta_H_windows + eta_H_doors , eta_H_dash_HBs, a_env)
    results["eta_A_H"] = eta_A_H
    
    # mu_H value (-)
    mu_H = get_Mu_H(float(eta_A_H), r_env)
    results["mu_H"] = mu_H
    
    ##### eta_A and mu value calculations (cooling season) #####

    # Build an array of (area, solar heat gain rate, orientation factor) tuples
    eta_C_generals = [ (env_general.A, env_general.eta_C, get_nu_C(region)[env_general.direction] ) for env_general in env_generals ]
    eta_C_windows  = [ (env_window.A,  env_window.eta_C,  get_nu_C(region)[env_window.direction]  ) for env_window  in env_windows  ]
    eta_C_doors    = [ (env_door.A,    env_door.eta_C,    get_nu_C(region)[env_door.direction]    ) for env_door    in env_doors    ]
    results["eta_C_generals"] = eta_C_generals
    results["eta_C_windows"] = eta_C_windows
    results["eta_C_doors"] = eta_C_doors
    
    # Build an array of (length, thermal bridge solar heat gain rate, orientation factor 1, orientation factor 2) tuples
    eta_C_dash_HBs = [ (env_HB.L, env_HB.eta_C_dash, get_nu_C(region)[env_HB.direction1], get_nu_C(region)[env_HB.direction2] ) for env_HB in env_HBs ]
    results["eta_C_dash_HBs"] = eta_C_dash_HBs    

    # eta_A_C value (-)
    eta_A_C = get_eta_A_C(eta_C_generals + eta_C_windows + eta_C_doors , eta_C_dash_HBs, a_env)
    results["eta_A_C"] = eta_A_C

    # mu_C value (-)
    mu_C = get_Mu_C(float(eta_A_C), r_env)
    results["mu_C"] = mu_C
    
    return results
Example No. 39
from collections import OrderedDict

import torch.nn as nn
import torch.optim as optim
from torchvision import models

model = models.alexnet(pretrained=True)
print(model)

# update model for our uses

# freezing parameters so they aren't updated
for param in model.parameters():
    param.requires_grad = False

# building new classifier

# layer names must be unique: duplicate OrderedDict keys would silently
# collapse into a single entry and drop the repeated ReLU/Dropout layers
classifier = nn.Sequential(
    OrderedDict([('fc1', nn.Linear(9216, 2048)), ('relu1', nn.ReLU()),
                 ('dropout1', nn.Dropout(p=0.5)), ('fc2', nn.Linear(2048, 512)),
                 ('relu2', nn.ReLU()), ('dropout2', nn.Dropout(p=0.2)),
                 ('fc3', nn.Linear(512, 128)), ('relu3', nn.ReLU()),
                 ('dropout3', nn.Dropout(p=0.2)), ('fc4', nn.Linear(128, 26)),
                 ('output', nn.LogSoftmax(dim=1))]))

model.classifier = classifier

if train_on_gpu:
    model.cuda()

# loss function and optimizer

# the classifier ends in LogSoftmax, so NLLLoss is the matching criterion
# (CrossEntropyLoss would apply log-softmax a second time)
criterion = nn.NLLLoss()
optimizer = optim.SGD(model.parameters(), lr=0.001)

# train the model
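# (Sketch) A minimal training loop to go with the setup above. `train_loader`
# is assumed to be a DataLoader yielding (images, labels) batches; it is not
# part of the original snippet.
for epoch in range(5):
    running_loss = 0.0
    for images, labels in train_loader:
        if train_on_gpu:
            images, labels = images.cuda(), labels.cuda()
        optimizer.zero_grad()                 # reset gradients from the last step
        log_probs = model(images)             # frozen features + new classifier
        loss = criterion(log_probs, labels)
        loss.backward()                       # gradients flow only into the classifier
        optimizer.step()
        running_loss += loss.item()
    print('epoch {}: loss {:.4f}'.format(epoch + 1, running_loss / len(train_loader)))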
Example No. 40
def simpleEnvPerformanceSelected(house_type, region,
                                 U_roof, U_wall, U_door, U_wnd, U_floor, U_base, U_base_d, psi_prm, psi_prm_d,
                                 eta_d_H, eta_d_C, f_H, f_C, is_f_value_default, default_f_H, default_f_C):
    # house_type: type of house (floor-insulated, base-insulated, or combined floor/base insulation)
    # region: regional classification
    # U_roof, U_wall, U_door, U_wnd, U_floor, U_base, U_base_d: heat loss coefficients (W/m2K)
    #     (roof, wall, door, window, floor, base, entrance base)
    # psi_prm, psi_prm_d: linear heat loss coefficients (W/mK) (earth floor perimeter, entrance earth floor perimeter)
    # eta_d_H, eta_d_C: window eta_d values (heating/cooling season) (-)
    # f_H, f_C: window f values (heating/cooling season) (-) (the most conservative value selected for
    #     each window; not used when the default f value is chosen)
    # is_f_value_default: whether to use the default f value [bool]
    # default_f_H, default_f_C: f values used when the defaults are chosen (heating/cooling season) (-)

    results = OrderedDict()

    def get_U_A_provisional(house_type, U_roof, U_wall, U_door, U_wnd, U_floor, U_base, U_base_d, psi_prm, psi_prm_d):
        al = simple_Area_and_Length(house_type)
        return get_simple_U_A (al.roof, al.wall, al.door, al.wnd, al.floor, al.base, al.base_IS, al.base_d, al.base_d_IS, al.prm, al.prm_IS, al.prm_d, al.prm_d_IS,
                               simple_HValue.roof, simple_HValue.wall, simple_HValue.door, simple_HValue.wnd, simple_HValue.floor, simple_HValue.base_OS, simple_HValue.base_IS, simple_HValue.prm_OS, simple_HValue.prm_IS,
                               U_roof, U_wall, U_door, U_wnd, U_floor, U_base, U_base_d, psi_prm, psi_prm_d,
                               al.env )
    
    U_A_floor_ins = get_U_A_provisional(simple_HouseType.floor_ins,U_roof, U_wall, U_door, U_wnd, U_floor, U_base, U_base_d, psi_prm, psi_prm_d)
    U_A_base_ins  = get_U_A_provisional(simple_HouseType.base_ins ,U_roof, U_wall, U_door, U_wnd, U_floor, U_base, U_base_d, psi_prm, psi_prm_d)
    
    if house_type == simple_HouseType.floor_ins:
        house_type_on_calc = house_type
        U_A = U_A_floor_ins
    elif house_type == simple_HouseType.base_ins:
        house_type_on_calc = house_type
        U_A = U_A_base_ins
    else:
        U_A, house_type_on_calc = judge_simple_house_type(U_A_floor_ins, U_A_base_ins)
    
    results["Judged House Type"] = house_type_on_calc
    results["UA"] = U_A
    
    al = simple_Area_and_Length(house_type_on_calc)

    eta_H_roof   = get_simple_eta_H_roof(U_roof)
    eta_H_wall   = get_simple_eta_H_wall(U_wall)
    eta_H_door   = get_simple_eta_H_door(U_door)
    eta_H_wnd    = get_simple_eta_H_window(eta_d_H, f_H, is_f_value_default, default_f_H)
    eta_H_base   = get_simple_eta_H_base(U_base)
    eta_H_base_d = get_simple_eta_H_base_d(U_base_d)
    eta_C_roof   = get_simple_eta_C_roof(U_roof)
    eta_C_wall   = get_simple_eta_C_wall(U_wall)
    eta_C_door   = get_simple_eta_C_door(U_door)
    eta_C_wnd    = get_simple_eta_C_window(eta_d_C, f_C, is_f_value_default, default_f_C)
    eta_C_base   = get_simple_eta_C_base(U_base)
    eta_C_base_d = get_simple_eta_C_base_d(U_base_d)
    
    results["eta_H_wnd"]=eta_H_wnd
    results["eta_C_wnd"]=eta_C_wnd

    eta_A_H = get_simple_eta_A_H(al.roof, al.wall, al.door, al.wnd, al.base, al.base_d,
                                 get_nu_H(region)[Direction.top], get_simple_Orientation_value_from_Direction(get_nu_H(region)),
                                 eta_H_roof, eta_H_wall, eta_H_door, eta_H_wnd, eta_H_base, eta_H_base_d,
                                 al.env)
    eta_A_C = get_simple_eta_A_C(al.roof, al.wall, al.door, al.wnd, al.base, al.base_d,
                                 get_nu_C(region)[Direction.top], get_simple_Orientation_value_from_Direction(get_nu_C(region)),
                                 eta_C_roof, eta_C_wall, eta_C_door, eta_C_wnd, eta_C_base, eta_C_base_d,
                                 al.env)

    results["eta_A_H"] = eta_A_H
    results["eta_A_C"] = eta_A_C
    
    r_env = get_r_env(al.env, al.A)

    Q_dash = get_Q_dash( float(U_A), r_env )
    results["Q_dash"] = Q_dash
    
    Mu_H = get_Mu_H(float(eta_A_H), r_env)
    Mu_C = get_Mu_C(float(eta_A_C), r_env)
    results["Mu_H"] = Mu_H
    results["Mu_C"] = Mu_C
    return results
Example No. 41
    def __init__(self, fname):
        """Initializes from file @fname."""
        super().__init__(fname)
        # key: gripper name and value: gripper model
        self.grippers = OrderedDict()
Example No. 42
class aggregate_sid_counter(PybindBase):
    """
  This class was auto-generated by the PythonClass plugin for PYANG
  from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/mpls/signaling-protocols/segment-routing/aggregate-sid-counters/aggregate-sid-counter. Each member element of
  the container is represented as a class variable - with a specific
  YANG type.

  YANG Description: Counters aggregated across all of the interfaces of the local
system corresponding to traffic received or forwarded with a
particular SID
  """
    __slots__ = ("_path_helper", "_extmethods", "__mpls_label", "__state")

    _yang_name = "aggregate-sid-counter"

    _pybind_generated_by = "container"

    def __init__(self, *args, **kwargs):

        self._path_helper = False

        self._extmethods = False
        self.__mpls_label = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="mpls-label",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            is_keyval=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="leafref",
            is_config=False,
        )
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "mpls",
                "signaling-protocols",
                "segment-routing",
                "aggregate-sid-counters",
                "aggregate-sid-counter",
            ]

    def _get_mpls_label(self):
        """
    Getter method for mpls_label, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/segment_routing/aggregate_sid_counters/aggregate_sid_counter/mpls_label (leafref)

    YANG Description: The MPLS label representing the segment identifier
    """
        return self.__mpls_label

    def _set_mpls_label(self, v, load=False):
        """
    Setter method for mpls_label, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/segment_routing/aggregate_sid_counters/aggregate_sid_counter/mpls_label (leafref)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_mpls_label is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_mpls_label() directly.

    YANG Description: The MPLS label representing the segment identifier
    """
        parent = getattr(self, "_parent", None)
        if parent is not None and load is False:
            raise AttributeError(
                "Cannot set keys directly when" + " within an instantiated list"
            )

        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=six.text_type,
                is_leaf=True,
                yang_name="mpls-label",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                is_keyval=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="leafref",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """mpls_label must be of a type compatible with leafref""",
                    "defined-type": "leafref",
                    "generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="mpls-label", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='leafref', is_config=False)""",
                }
            )

        self.__mpls_label = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_mpls_label(self):
        self.__mpls_label = YANGDynClass(
            base=six.text_type,
            is_leaf=True,
            yang_name="mpls-label",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            is_keyval=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="leafref",
            is_config=False,
        )

    def _get_state(self):
        """
    Getter method for state, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/segment_routing/aggregate_sid_counters/aggregate_sid_counter/state (container)

    YANG Description: State parameters for per-SID statistics
    """
        return self.__state

    def _set_state(self, v, load=False):
        """
    Setter method for state, mapped from YANG variable /network_instances/network_instance/mpls/signaling_protocols/segment_routing/aggregate_sid_counters/aggregate_sid_counter/state (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_state is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_state() directly.

    YANG Description: State parameters for per-SID statistics
    """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=state.state,
                is_container="container",
                yang_name="state",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """state must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )

        self.__state = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_state(self):
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

    mpls_label = __builtin__.property(_get_mpls_label)
    state = __builtin__.property(_get_state)

    _pyangbind_elements = OrderedDict([("mpls_label", mpls_label), ("state", state)])
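# (Sketch) Typical read-only use of this auto-generated binding: instantiate
# the container and read its YANG path and key leaf through the generated
# properties (values stay empty until a backend populates them).
counter = aggregate_sid_counter()
print(counter._path())      # ['network-instances', ..., 'aggregate-sid-counter']
print(counter.mpls_label)   # leafref key; read-only from user code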
Example No. 43
class IsoAreaAsPointcloudFromPoint(QgisAlgorithm):

    INPUT = 'INPUT'
    START_POINT = 'START_POINT'
    MAX_DIST = "MAX_DIST"
    STRATEGY = 'STRATEGY'
    DIRECTION_FIELD = 'DIRECTION_FIELD'
    VALUE_FORWARD = 'VALUE_FORWARD'
    VALUE_BACKWARD = 'VALUE_BACKWARD'
    VALUE_BOTH = 'VALUE_BOTH'
    DEFAULT_DIRECTION = 'DEFAULT_DIRECTION'
    SPEED_FIELD = 'SPEED_FIELD'
    DEFAULT_SPEED = 'DEFAULT_SPEED'
    TOLERANCE = 'TOLERANCE'
    OUTPUT = 'OUTPUT'

    def icon(self):
        return QIcon(os.path.join(pluginPath, 'QNEAT3', 'icons', 'icon_servicearea_points.svg'))

    def group(self):
        return self.tr('Iso-Areas')

    def groupId(self):
        return 'isoareas'
    
    def name(self):
        return 'isoareaaspointcloudfrompoint'

    def displayName(self):
        return self.tr('Iso-Area as Pointcloud (from Point)')
    
    def shortHelpString(self):
        return  "<b>General:</b><br>"\
                "This algorithm implements iso-pointcloud analysis to return all <b>network nodes reachable within a maximum cost level as pointcloud</b> on a given <b>network dataset for a manually chosen point</b>.<br>"\
                "It accounts for <b>points outside of the network</b> (eg. <i>non-network-elements</i>) and increments the iso-areas cost regarding to distance/default speed value. Distances are measured accounting for <b>ellipsoids</b>.<br>Please, <b>only use a projected coordinate system (eg. no WGS84)</b> for this kind of analysis.<br><br>"\
                "<b>Parameters (required):</b><br>"\
                "Following Parameters must be set to run the algorithm:"\
                "<ul><li>Network Layer</li><li>Startpoint</li><li>Unique Point ID Field (numerical)</li><li>Maximum cost level for Iso-Area</li><li>Cost Strategy</li></ul><br>"\
                "<b>Parameters (optional):</b><br>"\
                "There are also a number of <i>optional parameters</i> to implement <b>direction dependent</b> shortest paths and provide information on <b>speeds</b> on the networks edges."\
                "<ul><li>Direction Field</li><li>Value for forward direction</li><li>Value for backward direction</li><li>Value for both directions</li><li>Default direction</li><li>Speed Field</li><li>Default Speed (affects entry/exit costs)</li><li>Topology tolerance</li></ul><br>"\
                "<b>Output:</b><br>"\
                "The output of the algorithm is one layer:"\
                "<ul><li>Point layer of reachable network nodes</li></ul><br>"\
                "You may use the output pointcloud as input for further analyses."
    
    def msg(self, var):
        return "Type:"+str(type(var))+" repr: "+var.__str__()

    def __init__(self):
        super().__init__()

    def initAlgorithm(self, config=None):
        self.DIRECTIONS = OrderedDict([
            (self.tr('Forward direction'), QgsVectorLayerDirector.DirectionForward),
            (self.tr('Backward direction'), QgsVectorLayerDirector.DirectionBackward),
            (self.tr('Both directions'), QgsVectorLayerDirector.DirectionBoth)])

        self.STRATEGIES = [self.tr('Shortest'),
                           self.tr('Fastest')
                           ]

        self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT,
                                                              self.tr('Network Layer'),
                                                              [QgsProcessing.TypeVectorLine]))
        self.addParameter(QgsProcessingParameterPoint(self.START_POINT,
                                                      self.tr('Start Point')))
        self.addParameter(QgsProcessingParameterNumber(self.MAX_DIST,
                                                   self.tr('Size of Iso-Area (distance or seconds depending on strategy)'),
                                                   QgsProcessingParameterNumber.Double,
                                                   2500.0, False, 0, 99999999.99))
        self.addParameter(QgsProcessingParameterEnum(self.STRATEGY,
                                                     self.tr('Optimization Criterion'),
                                                     self.STRATEGIES,
                                                     defaultValue=0))

        params = []
        params.append(QgsProcessingParameterField(self.DIRECTION_FIELD,
                                                  self.tr('Direction field'),
                                                  None,
                                                  self.INPUT,
                                                  optional=True))
        params.append(QgsProcessingParameterString(self.VALUE_FORWARD,
                                                   self.tr('Value for forward direction'),
                                                   optional=True))
        params.append(QgsProcessingParameterString(self.VALUE_BACKWARD,
                                                   self.tr('Value for backward direction'),
                                                   optional=True))
        params.append(QgsProcessingParameterString(self.VALUE_BOTH,
                                                   self.tr('Value for both directions'),
                                                   optional=True))
        params.append(QgsProcessingParameterEnum(self.DEFAULT_DIRECTION,
                                                 self.tr('Default direction'),
                                                 list(self.DIRECTIONS.keys()),
                                                 defaultValue=2))
        params.append(QgsProcessingParameterField(self.SPEED_FIELD,
                                                  self.tr('Speed field'),
                                                  None,
                                                  self.INPUT,
                                                  optional=True))
        params.append(QgsProcessingParameterNumber(self.DEFAULT_SPEED,
                                                   self.tr('Default speed (km/h)'),
                                                   QgsProcessingParameterNumber.Double,
                                                   5.0, False, 0, 99999999.99))
        params.append(QgsProcessingParameterNumber(self.TOLERANCE,
                                                   self.tr('Topology tolerance'),
                                                   QgsProcessingParameterNumber.Double,
                                                   0.0, False, 0, 99999999.99))

        for p in params:
            p.setFlags(p.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
            self.addParameter(p)
        
        self.addParameter(QgsProcessingParameterFeatureSink(self.OUTPUT,
                                                            self.tr('Output Pointcloud'),
                                                            QgsProcessing.TypeVectorPoint))

    def processAlgorithm(self, parameters, context, feedback):
        feedback.pushInfo(self.tr("[QNEAT3Algorithm] This is a QNEAT3 Algorithm: '{}'".format(self.displayName())))
        network = self.parameterAsSource(parameters, self.INPUT, context) #QgsProcessingFeatureSource
        startPoint = self.parameterAsPoint(parameters, self.START_POINT, context, network.sourceCrs()) #QgsPointXY
        max_dist = self.parameterAsDouble(parameters, self.MAX_DIST, context)#float
        strategy = self.parameterAsEnum(parameters, self.STRATEGY, context) #int

        directionFieldName = self.parameterAsString(parameters, self.DIRECTION_FIELD, context) #str (empty if no field given)
        forwardValue = self.parameterAsString(parameters, self.VALUE_FORWARD, context) #str
        backwardValue = self.parameterAsString(parameters, self.VALUE_BACKWARD, context) #str
        bothValue = self.parameterAsString(parameters, self.VALUE_BOTH, context) #str
        defaultDirection = self.parameterAsEnum(parameters, self.DEFAULT_DIRECTION, context) #int
        speedFieldName = self.parameterAsString(parameters, self.SPEED_FIELD, context) #str
        defaultSpeed = self.parameterAsDouble(parameters, self.DEFAULT_SPEED, context) #float
        tolerance = self.parameterAsDouble(parameters, self.TOLERANCE, context) #float

        analysisCrs = network.sourceCrs()
        input_coordinates = [startPoint]
        input_point = getFeatureFromPointParameter(startPoint)
        
        feedback.pushInfo("[QNEAT3Algorithm] Building Graph...")
        feedback.setProgress(10)  
        net = Qneat3Network(network, input_coordinates, strategy, directionFieldName, forwardValue, backwardValue, bothValue, defaultDirection, analysisCrs, speedFieldName, defaultSpeed, tolerance, feedback)
        feedback.setProgress(40)

        analysis_point = Qneat3AnalysisPoint("point", input_point, "point_id", net, net.list_tiedPoints[0], feedback)
        
        fields = QgsFields()
        fields.append(QgsField('vertex_id', QVariant.Int, '', 254, 0))
        fields.append(QgsField('cost', QVariant.Double, '', 254, 7))
        
        (sink, dest_id) = self.parameterAsSink(parameters, self.OUTPUT, context, fields, QgsWkbTypes.Point, network.sourceCrs())
        
        feedback.pushInfo("[QNEAT3Algorithm] Calculating Iso-Pointcloud...")
        iso_pointcloud = net.calcIsoPoints([analysis_point], max_dist)
        feedback.setProgress(90)
        
        sink.addFeatures(iso_pointcloud, QgsFeatureSink.FastInsert)
        
        feedback.pushInfo("[QNEAT3Algorithm] Ending Algorithm")
        feedback.setProgress(100)        
        
        results = {}
        results[self.OUTPUT] = dest_id
        return results
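# (Sketch) How an algorithm like this is typically invoked from the QGIS
# Python console. The 'qneat3' provider prefix, layer paths and coordinates
# are illustrative assumptions; only the algorithm name and parameter keys
# come from the class above.
import processing

result = processing.run('qneat3:isoareaaspointcloudfrompoint', {
    'INPUT': 'roads.gpkg|layername=roads',         # line network layer
    'START_POINT': '389000,5702000 [EPSG:32633]',  # start point in a projected CRS
    'MAX_DIST': 2500.0,                            # cost level (distance or seconds)
    'STRATEGY': 0,                                 # 0 = Shortest, 1 = Fastest
    'OUTPUT': 'memory:iso_points',
})
iso_layer = result['OUTPUT']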
Example No. 44
class BacktestingEngine(object):
    """组合类CTA策略回测引擎"""

    #----------------------------------------------------------------------
    def __init__(self):
        """Constructor"""
        self.portfolio = None
        
        # Contract configuration
        self.vtSymbolList = []
        self.sizeDict = {}                  # contract size per symbol
        self.priceTickDict = {}             # minimum price tick per symbol
        self.variableCommissionDict = {}    # variable commission per symbol
        self.fixedCommissionDict = {}       # fixed commission per symbol
        self.slippageDict = {}              # slippage cost per symbol
        
        self.portfolioValue = 0
        self.startDt = None
        self.endDt = None
        self.currentDt = None
        
        self.dataDict = OrderedDict()
        self.tradeDict = OrderedDict()
        
        self.result = None
        self.resultList = []
    
    #----------------------------------------------------------------------
    def setPeriod(self, startDt, endDt):
        """设置回测周期"""
        self.startDt = startDt
        self.endDt = endDt
    
    #----------------------------------------------------------------------
    def initPortfolio(self, filename, portfolioValue=10000000):
        """初始化投资组合"""
        self.portfolioValue = portfolioValue
        
        with open(filename) as f:
            r = DictReader(f)
            for d in r:
                self.vtSymbolList.append(d['vtSymbol'])
                
                SIZE_DICT[d['vtSymbol']] = int(d['size'])
                PRICETICK_DICT[d['vtSymbol']] = float(d['priceTick'])
                VARIABLE_COMMISSION_DICT[d['vtSymbol']] = float(d['variableCommission'])
                FIXED_COMMISSION_DICT[d['vtSymbol']] = float(d['fixedCommission'])
                SLIPPAGE_DICT[d['vtSymbol']] = float(d['slippage'])
            
        self.portfolio = TurtlePortfolio(self)
        self.portfolio.init(portfolioValue, self.vtSymbolList, SIZE_DICT)
        
        self.output(u'Portfolio contract symbols: %s' %(self.vtSymbolList))
        self.output(u'Initial portfolio value: %s' %(portfolioValue))
    
    #----------------------------------------------------------------------
    def loadData(self):
        """加载数据"""
        mc = MongoClient()
        db = mc[DAILY_DB_NAME]
        
        for vtSymbol in self.vtSymbolList:
            flt = {'datetime':{'$gte':self.startDt,
                               '$lte':self.endDt}} 
            
            collection = db[vtSymbol]
            cursor = collection.find(flt).sort('datetime')
            
            for d in cursor:
                bar = VtBarData()
                bar.__dict__ = d
                
                barDict = self.dataDict.setdefault(bar.datetime, OrderedDict())
                barDict[bar.vtSymbol] = bar
            
            self.output(u'%s data loaded, total bars: %s' %(vtSymbol, cursor.count()))
        
        self.output(u'All data loaded')
    
    #----------------------------------------------------------------------
    def runBacktesting(self):
        """运行回测"""
        self.output(u'开始回放K线数据')
        
        for dt, barDict in self.dataDict.items():
            self.currentDt = dt
            
            previousResult = self.result
            
            self.result = DailyResult(dt)
            self.result.updatePos(self.portfolio.posDict)
            self.resultList.append(self.result)
            
            if previousResult:
                self.result.updatePreviousClose(previousResult.closeDict)
            
            for bar in barDict.values():
                self.portfolio.onBar(bar)
                self.result.updateBar(bar)
        
        self.output(u'Bar data replay finished')
    
    #----------------------------------------------------------------------
    def calculateResult(self, annualDays=240):
        """计算结果"""
        self.output(u'开始统计回测结果')
        
        for result in self.resultList:
            result.calculatePnl()
        
        resultList = self.resultList
        dateList = [result.date for result in resultList]
        
        startDate = dateList[0]
        endDate = dateList[-1]  
        totalDays = len(dateList)
        
        profitDays = 0
        lossDays = 0
        endBalance = self.portfolioValue
        highlevel = self.portfolioValue
        totalNetPnl = 0
        totalCommission = 0
        totalSlippage = 0
        totalTradeCount = 0
        
        netPnlList = []
        balanceList = []
        highlevelList = []
        drawdownList = []
        ddPercentList = []
        returnList = []
        
        for result in resultList:
            if result.netPnl > 0:
                profitDays += 1
            elif result.netPnl < 0:
                lossDays += 1
            netPnlList.append(result.netPnl)
            
            prevBalance = endBalance
            endBalance += result.netPnl
            balanceList.append(endBalance)
            returnList.append(endBalance/prevBalance - 1)
            
            highlevel = max(highlevel, endBalance)
            highlevelList.append(highlevel)
            
            drawdown = endBalance - highlevel
            drawdownList.append(drawdown)
            ddPercentList.append(drawdown/highlevel*100)
            
            totalCommission += result.commission
            totalSlippage += result.slippage
            totalTradeCount += result.tradeCount
            totalNetPnl += result.netPnl
        
        maxDrawdown = min(drawdownList)
        maxDdPercent = min(ddPercentList)
        totalReturn = (endBalance / self.portfolioValue - 1) * 100
        dailyReturn = np.mean(returnList) * 100
        annualizedReturn = dailyReturn * annualDays
        returnStd = np.std(returnList) * 100
        
        if returnStd:
            sharpeRatio = dailyReturn / returnStd * np.sqrt(annualDays)
        else:
            sharpeRatio = 0
        
        # assemble the results
        result = {
            'startDate': startDate,
            'endDate': endDate,
            'totalDays': totalDays,
            'profitDays': profitDays,
            'lossDays': lossDays,
            'endBalance': endBalance,
            'maxDrawdown': maxDrawdown,
            'maxDdPercent': maxDdPercent,
            'totalNetPnl': totalNetPnl,
            'dailyNetPnl': totalNetPnl/totalDays,
            'totalCommission': totalCommission,
            'dailyCommission': totalCommission/totalDays,
            'totalSlippage': totalSlippage,
            'dailySlippage': totalSlippage/totalDays,
            'totalTradeCount': totalTradeCount,
            'dailyTradeCount': totalTradeCount/totalDays,
            'totalReturn': totalReturn,
            'annualizedReturn': annualizedReturn,
            'dailyReturn': dailyReturn,
            'returnStd': returnStd,
            'sharpeRatio': sharpeRatio
            }
        
        timeseries = {
            'balance': balanceList,
            'return': returnList,
            'highLevel': highlevelList,
            'drawdown': drawdownList,
            'ddPercent': ddPercentList,
            'date': dateList,
            'netPnl': netPnlList
        }
        
        return timeseries, result
    
    #----------------------------------------------------------------------
    def showResult(self):
        """显示回测结果"""
        timeseries, result = self.calculateResult()
        
        # 输出统计结果
        self.output('-' * 30)
        self.output(u'首个交易日:\t%s' % result['startDate'])
        self.output(u'最后交易日:\t%s' % result['endDate'])
        
        self.output(u'总交易日:\t%s' % result['totalDays'])
        self.output(u'盈利交易日\t%s' % result['profitDays'])
        self.output(u'亏损交易日:\t%s' % result['lossDays'])
        
        self.output(u'起始资金:\t%s' % self.portfolioValue)
        self.output(u'结束资金:\t%s' % formatNumber(result['endBalance']))
    
        self.output(u'总收益率:\t%s%%' % formatNumber(result['totalReturn']))
        self.output(u'年化收益:\t%s%%' % formatNumber(result['annualizedReturn']))
        self.output(u'总盈亏:\t%s' % formatNumber(result['totalNetPnl']))
        self.output(u'最大回撤: \t%s' % formatNumber(result['maxDrawdown']))   
        self.output(u'百分比最大回撤: %s%%' % formatNumber(result['maxDdPercent']))   
        
        self.output(u'总手续费:\t%s' % formatNumber(result['totalCommission']))
        self.output(u'总滑点:\t%s' % formatNumber(result['totalSlippage']))
        self.output(u'总成交笔数:\t%s' % formatNumber(result['totalTradeCount']))
        
        self.output(u'日均盈亏:\t%s' % formatNumber(result['dailyNetPnl']))
        self.output(u'日均手续费:\t%s' % formatNumber(result['dailyCommission']))
        self.output(u'日均滑点:\t%s' % formatNumber(result['dailySlippage']))
        self.output(u'日均成交笔数:\t%s' % formatNumber(result['dailyTradeCount']))
        
        self.output(u'日均收益率:\t%s%%' % formatNumber(result['dailyReturn']))
        self.output(u'收益标准差:\t%s%%' % formatNumber(result['returnStd']))
        self.output(u'Sharpe Ratio:\t%s' % formatNumber(result['sharpeRatio']))
        
        # plotting
        fig = plt.figure(figsize=(10, 16))
        
        pBalance = plt.subplot(4, 1, 1)
        pBalance.set_title('Balance')
        plt.plot(timeseries['date'], timeseries['balance'])
        
        pDrawdown = plt.subplot(4, 1, 2)
        pDrawdown.set_title('Drawdown')
        pDrawdown.fill_between(range(len(timeseries['drawdown'])), timeseries['drawdown'])
        
        pPnl = plt.subplot(4, 1, 3)
        pPnl.set_title('Daily Pnl') 
        plt.bar(range(len(timeseries['drawdown'])), timeseries['netPnl'])

        pKDE = plt.subplot(4, 1, 4)
        pKDE.set_title('Daily Pnl Distribution')
        plt.hist(timeseries['netPnl'], bins=50)
        
        plt.show()        
    
    #----------------------------------------------------------------------
    def sendOrder(self, vtSymbol, direction, offset, price, volume):
        """记录交易数据(由portfolio调用)"""
        # 对价格四舍五入
        priceTick = PRICETICK_DICT[vtSymbol]
        price = int(round(price/priceTick, 0)) * priceTick
        
        # record the trade
        trade = TradeData(vtSymbol, direction, offset, price, volume)
        l = self.tradeDict.setdefault(self.currentDt, [])        
        l.append(trade)
        self.result.updateTrade(trade)

    #----------------------------------------------------------------------
    def output(self, content):
        """输出信息"""
        print content
    
    #----------------------------------------------------------------------
    def getTradeData(self, vtSymbol=''):
        """获取交易数据"""
        tradeList = []
        
        for k, l in self.tradeDict.items():
            for trade in l:
                trade.dt = k
                if not vtSymbol:
                    tradeList.append(trade)
                elif trade.vtSymbol == vtSymbol:
                    tradeList.append(trade)
        
        return tradeList
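# (Sketch) A typical driver script for this engine; 'setting.csv' and the
# dates are placeholders, and a populated daily-bar MongoDB (see loadData)
# is assumed.
from datetime import datetime

engine = BacktestingEngine()
engine.setPeriod(datetime(2017, 1, 1), datetime(2018, 1, 1))
engine.initPortfolio('setting.csv')   # CSV columns: vtSymbol, size, priceTick, ...
engine.loadData()
engine.runBacktesting()
engine.showResult()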
Example No. 45
class StepKwMultiClassifications(WizardStep, FORM_CLASS):
    """InaSAFE Wizard Step Multi Classifications."""
    def __init__(self, parent=None):
        """Constructor for the tab.

        :param parent: widget to use as parent (Wizard Dialog).
        :type parent: QWidget
        """
        WizardStep.__init__(self, parent)
        self.exposures = []
        self.exposure_labels = []
        self.exposure_combo_boxes = []
        self.exposure_edit_buttons = []
        self.mode = CHOOSE_MODE

        self.layer_purpose = None
        self.layer_mode = None

        # Store the current representative state of the UI.
        # self.classifications = {}
        self.value_maps = {}
        self.thresholds = {}

        # Temporary attributes
        self.threshold_classes = OrderedDict()
        self.active_exposure = None

        self.list_unique_values = None
        self.tree_mapping_widget = None

        # GUI, good for testing
        self.save_button = None
        self.restore_default_button = None

        # Has default threshold
        # Trick for EQ raster for population #3853
        self.use_default_thresholds = False
        # Index of the special case exposure classification
        self.special_case_index = None

    def is_ready_to_next_step(self):
        """Check if the step is complete.

        :returns: True if new step may be enabled.
        :rtype: bool
        """
        # Still editing
        if self.mode == EDIT_MODE:
            return False
        for combo_box in self.exposure_combo_boxes:
            # Enable if there is one that has classification
            if combo_box.currentIndex() > 0:
                return True
        # Trick for EQ raster for population #3853
        if self.use_default_thresholds:
            return True
        return False

    def get_next_step(self):
        """Find the proper step when user clicks the Next button.

        :returns: The step to be switched to.
        :rtype: WizardStep instance or None
        """
        if self.layer_purpose != layer_purpose_aggregation:
            subcategory = self.parent.step_kw_subcategory.\
                selected_subcategory()
        else:
            subcategory = {'key': None}

        if is_raster_layer(self.parent.layer):
            return self.parent.step_kw_source

        # Check if it can go to inasafe field step
        inasafe_fields = get_non_compulsory_fields(self.layer_purpose['key'],
                                                   subcategory['key'])

        if not skip_inasafe_field(self.parent.layer, inasafe_fields):
            return self.parent.step_kw_inasafe_fields

        # Check if it can go to inasafe default field step
        default_inasafe_fields = get_fields(self.layer_purpose['key'],
                                            subcategory['key'],
                                            replace_null=True,
                                            in_group=False)
        if default_inasafe_fields:
            return self.parent.step_kw_default_inasafe_fields

        # Any other case
        return self.parent.step_kw_source

    def set_wizard_step_description(self):
        """Set the text for description."""
        subcategory = self.parent.step_kw_subcategory.selected_subcategory()
        field = self.parent.step_kw_field.selected_fields()
        is_raster = is_raster_layer(self.parent.layer)

        if is_raster:
            if self.layer_mode == layer_mode_continuous:
                text_label = multiple_continuous_hazard_classifications_raster
            else:
                text_label = multiple_classified_hazard_classifications_raster
            # noinspection PyAugmentAssignment
            text_label = text_label % (subcategory['name'],
                                       self.layer_purpose['name'])
        else:
            if self.layer_mode == layer_mode_continuous:
                text_label = multiple_continuous_hazard_classifications_vector
            else:
                text_label = multiple_classified_hazard_classifications_vector
            # noinspection PyAugmentAssignment
            text_label = text_label % (subcategory['name'],
                                       self.layer_purpose['name'], field)

        self.multi_classifications_label.setText(text_label)

    def setup_left_panel(self):
        """Setup the UI for left panel.

        Generate all exposure, combobox, and edit button.
        """
        hazard = self.parent.step_kw_subcategory.selected_subcategory()
        left_panel_heading = QLabel(tr('Classifications'))
        left_panel_heading.setFont(big_font)
        self.left_layout.addWidget(left_panel_heading)

        inner_left_layout = QGridLayout()

        row = 0
        for exposure in exposure_all:
            special_case = False
            if not setting('developer_mode'):
                # Filter out unsupported exposure for the hazard
                if exposure in hazard['disabled_exposures']:
                    # Remove from the storage if the exposure is disabled
                    if self.layer_mode == layer_mode_continuous:
                        if exposure['key'] in self.thresholds:
                            self.thresholds.pop(exposure['key'])
                    else:
                        if exposure['key'] in self.value_maps:
                            self.value_maps.pop(exposure['key'])
                    continue
            # Trick for EQ raster for population #3853
            if exposure == exposure_population and hazard == hazard_earthquake:
                if is_raster_layer(self.parent.layer):
                    if self.layer_mode == layer_mode_continuous:
                        self.use_default_thresholds = True
                        special_case = True
                        # Set classification for EQ Raster for Population
                        self.thresholds[exposure_population['key']] = {
                            earthquake_mmi_scale['key']: {
                                'classes':
                                default_classification_thresholds(
                                    earthquake_mmi_scale),
                                'active':
                                True
                            }
                        }

            # Add label
            # Hazard on Exposure Classifications
            label = tr(
                '{hazard_name} on {exposure_name} Classifications').format(
                    hazard_name=hazard['name'], exposure_name=exposure['name'])
            exposure_label = QLabel(label)

            # Add combo box
            exposure_combo_box = QComboBox()
            hazard_classifications = hazard.get('classifications')
            exposure_combo_box.addItem(tr('No classifications'))
            exposure_combo_box.setItemData(0, None, Qt.UserRole)

            current_index = 0
            i = 0
            # Iterate through all available hazard classifications
            for hazard_classification in hazard_classifications:
                # Skip if the classification is not for the exposure
                if 'exposures' in hazard_classification:
                    if exposure not in hazard_classification['exposures']:
                        continue
                exposure_combo_box.addItem(hazard_classification['name'])
                exposure_combo_box.setItemData(i + 1, hazard_classification,
                                               Qt.UserRole)
                if self.layer_mode == layer_mode_continuous:
                    current_hazard_classifications = self.thresholds.get(
                        exposure['key'])
                else:
                    current_hazard_classifications = self.value_maps.get(
                        exposure['key'])
                if current_hazard_classifications:
                    current_hazard_classification = \
                        current_hazard_classifications.get(
                            hazard_classification['key'])
                    if current_hazard_classification:
                        is_active = current_hazard_classification.get('active')
                        if is_active:
                            current_index = i + 1
                i += 1
            # Set current classification
            exposure_combo_box.setCurrentIndex(current_index)

            # Add edit button
            exposure_edit_button = QPushButton(tr('Edit'))

            # For special case. Raster EQ on Population.
            if special_case:
                mmi_index = exposure_combo_box.findText(
                    earthquake_mmi_scale['name'])
                exposure_combo_box.setCurrentIndex(mmi_index)
                exposure_combo_box.setEnabled(False)
                exposure_edit_button.setEnabled(False)
                tool_tip_message = tr(
                    'InaSAFE use default classification for Raster Earthquake '
                    'hazard on population.')
                exposure_label.setToolTip(tool_tip_message)
                exposure_combo_box.setToolTip(tool_tip_message)
                exposure_edit_button.setToolTip(tool_tip_message)

            else:
                if current_index == 0:
                    # Disable if there is no classification chosen.
                    exposure_edit_button.setEnabled(False)
                exposure_edit_button.clicked.connect(
                    partial(self.edit_button_clicked,
                            edit_button=exposure_edit_button,
                            exposure_combo_box=exposure_combo_box,
                            exposure=exposure))
                exposure_combo_box.currentIndexChanged.connect(
                    partial(self.classifications_combo_box_changed,
                            exposure=exposure,
                            exposure_combo_box=exposure_combo_box,
                            edit_button=exposure_edit_button))

            # Arrange in layout
            inner_left_layout.addWidget(exposure_label, row, 0)
            inner_left_layout.addWidget(exposure_combo_box, row, 1)
            inner_left_layout.addWidget(exposure_edit_button, row, 2)

            # Adding to step's attribute
            self.exposures.append(exposure)
            self.exposure_combo_boxes.append(exposure_combo_box)
            self.exposure_edit_buttons.append(exposure_edit_button)
            self.exposure_labels.append(label)
            if special_case:
                self.special_case_index = len(self.exposures) - 1

            row += 1

        self.left_layout.addLayout(inner_left_layout)
        # To push the inner_left_layout up
        self.left_layout.addStretch(1)

    # noinspection PyUnusedLocal
    def edit_button_clicked(self, edit_button, exposure_combo_box, exposure):
        """Method to handle when an edit button is clicked.

        :param edit_button: The edit button.
        :type edit_button: QPushButton

        :param exposure_combo_box: The combo box of the exposure, contains
            list of classifications.
        :type exposure_combo_box: QComboBox

        :param exposure: Exposure definition.
        :type exposure: dict
        """
        # Note(IS): Do not change the text of edit button for now until we
        # have better behaviour.
        classification = self.get_classification(exposure_combo_box)

        if self.mode == CHOOSE_MODE:
            # Change mode
            self.mode = EDIT_MODE
            # Set active exposure
            self.active_exposure = exposure
            # Disable all edit buttons
            for exposure_edit_button in self.exposure_edit_buttons:
                exposure_edit_button.setEnabled(False)
            # Except the one that was clicked
            # edit_button.setEnabled(True)
            # Disable all combo boxes
            for exposure_combo_box in self.exposure_combo_boxes:
                exposure_combo_box.setEnabled(False)
            # Change the edit button to cancel
            # edit_button.setText(tr('Cancel'))

            # Clear right panel
            clear_layout(self.right_layout)
            # Show edit threshold or value mapping
            if self.layer_mode == layer_mode_continuous:
                self.setup_thresholds_panel(classification)
            else:
                self.setup_value_mapping_panels(classification)
            self.add_buttons(classification)

        elif self.mode == EDIT_MODE:
            # Behave the same as cancel button clicked.
            self.cancel_button_clicked()

        self.parent.pbnNext.setEnabled(self.is_ready_to_next_step())

    def show_current_state(self):
        """Setup the UI for QTextEdit to show the current state."""
        right_panel_heading = QLabel(tr('Status'))
        right_panel_heading.setFont(big_font)
        right_panel_heading.setSizePolicy(QSizePolicy.Maximum,
                                          QSizePolicy.Maximum)
        self.right_layout.addWidget(right_panel_heading)

        message = m.Message()
        if self.layer_mode == layer_mode_continuous:
            title = tr('Thresholds')
        else:
            title = tr('Value maps')

        message.add(m.Heading(title, **INFO_STYLE))

        for i in range(len(self.exposures)):
            message.add(m.Text(self.exposure_labels[i]))

            classification = self.get_classification(
                self.exposure_combo_boxes[i])
            if self.layer_mode == layer_mode_continuous:
                thresholds = self.thresholds.get(self.exposures[i]['key'])
                if not thresholds or not classification:
                    message.add(m.Paragraph(tr('No classifications set.')))
                    continue
                table = m.Table(
                    style_class='table table-condensed table-striped')
                header = m.Row()
                header.add(m.Cell(tr('Class name')))
                header.add(m.Cell(tr('Minimum')))
                header.add(m.Cell(tr('Maximum')))
                table.add(header)
                classes = classification.get('classes')
                # Sort by value, put the lowest first
                classes = sorted(classes, key=lambda k: k['value'])
                for the_class in classes:
                    threshold = thresholds[classification['key']]['classes'][
                        the_class['key']]
                    row = m.Row()
                    row.add(m.Cell(the_class['name']))
                    row.add(m.Cell(threshold[0]))
                    row.add(m.Cell(threshold[1]))
                    table.add(row)
            else:
                value_maps = self.value_maps.get(self.exposures[i]['key'])
                if not value_maps or not classification:
                    message.add(m.Paragraph(tr('No classifications set.')))
                    continue
                table = m.Table(
                    style_class='table table-condensed table-striped')
                header = m.Row()
                header.add(m.Cell(tr('Class name')))
                header.add(m.Cell(tr('Value')))
                table.add(header)
                classes = classification.get('classes')
                # Sort by value, put the lowest first
                classes = sorted(classes, key=lambda k: k['value'])
                for the_class in classes:
                    value_map = value_maps[
                        classification['key']]['classes'].get(
                            the_class['key'], [])
                    row = m.Row()
                    row.add(m.Cell(the_class['name']))
                    row.add(m.Cell(', '.join([str(v) for v in value_map])))
                    table.add(row)
            message.add(table)

        # status_text_edit = QTextBrowser(None)
        status_text_edit = QWebView(None)
        status_text_edit.setSizePolicy(QSizePolicy.Ignored,
                                       QSizePolicy.Ignored)

        status_text_edit.page().mainFrame().setScrollBarPolicy(
            Qt.Horizontal, Qt.ScrollBarAlwaysOff)
        html_string = html_header() + message.to_html() + html_footer()
        status_text_edit.setHtml(html_string)
        self.right_layout.addWidget(status_text_edit)

    def set_widgets(self):
        """Set widgets on the Multi classification step."""
        self.clear()
        self.layer_mode = self.parent.step_kw_layermode.selected_layermode()
        self.layer_purpose = self.parent.step_kw_purpose.selected_purpose()
        self.set_current_state()

        # Set the step description
        self.set_wizard_step_description()

        # Set the left panel
        self.setup_left_panel()

        # Set the right panel, for the beginning show the viewer
        self.show_current_state()

    def clear(self):
        """Clear current state."""
        self.exposures = []
        self.exposure_labels = []
        self.exposure_combo_boxes = []
        self.exposure_edit_buttons = []
        self.mode = CHOOSE_MODE

        self.layer_purpose = None
        self.layer_mode = None
        self.special_case_index = None

        self.value_maps = {}
        self.thresholds = {}

        # Temporary attributes
        self.threshold_classes = OrderedDict()
        self.active_exposure = None

        self.list_unique_values = None
        self.tree_mapping_widget = None

        clear_layout(self.left_layout)
        clear_layout(self.right_layout)

    def get_current_state(self):
        """Obtain current classification and value map / threshold."""
        def clean_state(dictionary):
            """Clean dictionary from bad value.

            :param dictionary: Dictionary of value maps or thresholds.
            :type dictionary: dict

            :returns: Clean state.
            :rtype: dict
            """
            clean_dictionary = {
                k: v
                for k, v in list(dictionary.items()) if isinstance(v, dict)
            }

            return clean_dictionary

        if self.layer_mode == layer_mode_continuous:
            output = {'thresholds': clean_state(self.thresholds)}
            key = 'thresholds'
        else:
            output = {'value_maps': clean_state(self.value_maps)}
            key = 'value_maps'
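
        # output[key] maps exposure key -> {classification key -> state};
        # clean_state() above has already dropped any non-dict entries.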

        # Remove hazard classification keys that no longer exist
        empty_exposure_classifications = []
        for the_exposure, the_hazard_classifications in list(
                output[key].items()):
            for the_hazard_classification in list(
                    the_hazard_classifications.keys()):
                invalid_classifications = []
                if not definition(the_hazard_classification):
                    invalid_classifications.append(the_hazard_classification)
                for invalid_classification in invalid_classifications:
                    the_hazard_classifications.pop(invalid_classification)
            if not the_hazard_classifications:
                empty_exposure_classifications.append(the_exposure)

        for empty_exposure_classification in empty_exposure_classifications:
            output[key].pop(empty_exposure_classification)

        return output

    @staticmethod
    def get_classification(combo_box):
        """Helper to obtain the classification from a combo box.

        :param combo_box: A classification combo box.
        :type combo_box: QComboBox.

        :returns: Classification definitions.
        :rtype: dict
        """
        return combo_box.itemData(combo_box.currentIndex(), Qt.UserRole)

    def setup_thresholds_panel(self, classification):
        """Setup threshold panel in the right panel.

        :param classification: Classification definition.
        :type classification: dict
        """
        # Set text in the label
        layer_purpose = self.parent.step_kw_purpose.selected_purpose()
        layer_subcategory = self.parent.step_kw_subcategory.\
            selected_subcategory()

        if is_raster_layer(self.parent.layer):
            active_band = self.parent.step_kw_band_selector.selected_band()
            layer_extent = self.parent.layer.extent()
            statistics = self.parent.layer.dataProvider().bandStatistics(
                active_band, QgsRasterBandStats.All, layer_extent, 0)
            description_text = continuous_raster_question % (
                layer_purpose['name'], layer_subcategory['name'],
                classification['name'], statistics.minimumValue,
                statistics.maximumValue)
        else:
            field_name = self.parent.step_kw_field.selected_fields()
            field_index = self.parent.layer.fields().lookupField(field_name)
            min_value_layer = self.parent.layer.minimumValue(field_index)
            max_value_layer = self.parent.layer.maximumValue(field_index)
            description_text = continuous_vector_question % (
                layer_purpose['name'], layer_subcategory['name'], field_name,
                classification['name'], min_value_layer, max_value_layer)

        # Set description
        description_label = QLabel(description_text)
        description_label.setWordWrap(True)
        self.right_layout.addWidget(description_label)

        if self.thresholds:
            thresholds = self.thresholds
        else:
            thresholds = self.parent.get_existing_keyword('thresholds')
        selected_unit = self.parent.step_kw_unit.selected_unit()['key']

        self.threshold_classes = OrderedDict()
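        # Maps each class key to its [min_spin_box, max_spin_box] pair so the
        # values can be read back later by get_threshold().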
        classes = classification.get('classes')
        # Sort by value, put the lowest first
        classes = sorted(classes, key=lambda the_key: the_key['value'])

        grid_layout_thresholds = QGridLayout()

        for i, the_class in enumerate(classes):
            class_layout = QHBoxLayout()

            # Class label
            class_label = QLabel(the_class['name'])

            # Min label
            min_label = QLabel(tr('Min >'))

            # Min value as double spin
            min_value_input = QDoubleSpinBox()
            # TODO(IS) We can set the min and max depending on the unit, later
            min_value_input.setMinimum(0)
            min_value_input.setMaximum(999999)

            if thresholds.get(self.active_exposure['key']):
                exposure_thresholds = thresholds.get(
                    self.active_exposure['key'])
                if exposure_thresholds.get(classification['key']):
                    exposure_thresholds_classifications = exposure_thresholds\
                        .get(classification['key'])
                    min_value_input.setValue(
                        exposure_thresholds_classifications['classes'][
                            the_class['key']][0])
                else:
                    default_min = the_class['numeric_default_min']
                    if isinstance(default_min, dict):
                        default_min = the_class['numeric_default_min'][
                            selected_unit]
                    min_value_input.setValue(default_min)
            else:
                default_min = the_class['numeric_default_min']
                if isinstance(default_min, dict):
                    default_min = the_class['numeric_default_min'][
                        selected_unit]
                min_value_input.setValue(default_min)
            min_value_input.setSingleStep(0.1)

            # Max label
            max_label = QLabel(tr('Max <='))

            # Max value as double spin
            max_value_input = QDoubleSpinBox()
            # TODO(IS) We can set the min and max depending on the unit, later
            max_value_input.setMinimum(0)
            max_value_input.setMaximum(999999)
            if thresholds.get(self.active_exposure['key']):
                exposure_thresholds = thresholds.get(
                    self.active_exposure['key'])
                if exposure_thresholds.get(classification['key']):
                    exposure_thresholds_classifications = exposure_thresholds \
                        .get(classification['key'])
                    max_value_input.setValue(
                        exposure_thresholds_classifications['classes'][
                            the_class['key']][1])
                else:
                    default_max = the_class['numeric_default_max']
                    if isinstance(default_max, dict):
                        default_max = the_class['numeric_default_max'][
                            selected_unit]
                    max_value_input.setValue(default_max)
            else:
                default_max = the_class['numeric_default_max']
                if isinstance(default_max, dict):
                    default_max = the_class['numeric_default_max'][
                        selected_unit]
                max_value_input.setValue(default_max)
            max_value_input.setSingleStep(0.1)

            # Add to class_layout
            class_layout.addWidget(min_label)
            class_layout.addWidget(min_value_input)
            class_layout.addWidget(max_label)
            class_layout.addWidget(max_value_input)

            class_layout.setStretch(0, 1)
            class_layout.setStretch(1, 2)
            class_layout.setStretch(2, 1)
            class_layout.setStretch(3, 2)

            # Add to grid_layout
            grid_layout_thresholds.addWidget(class_label, i, 0)
            grid_layout_thresholds.addLayout(class_layout, i, 1)

            self.threshold_classes[the_class['key']] = [
                min_value_input, max_value_input
            ]

        grid_layout_thresholds.setColumnStretch(0, 1)
        grid_layout_thresholds.setColumnStretch(1, 2)

        def min_max_changed(double_spin_index, mode):
            """Slot when min or max value change.

            :param double_spin_index: The index of the double spin.
            :type double_spin_index: int

            :param mode: The flag to indicate the min or max value.
            :type mode: int
            """
            if mode == MAX_VALUE_MODE:
                current_max_value = list(
                    self.threshold_classes.values())[double_spin_index][1]
                target_min_value = list(
                    self.threshold_classes.values())[double_spin_index + 1][0]
                if current_max_value.value() != target_min_value.value():
                    target_min_value.setValue(current_max_value.value())
            elif mode == MIN_VALUE_MODE:
                current_min_value = list(
                    self.threshold_classes.values())[double_spin_index][0]
                target_max_value = list(
                    self.threshold_classes.values())[double_spin_index - 1][1]
                if current_min_value.value() != target_max_value.value():
                    target_max_value.setValue(current_min_value.value())

        # Set behaviour
        for k, v in list(self.threshold_classes.items()):
            index = list(self.threshold_classes.keys()).index(k)
            if index < len(self.threshold_classes) - 1:
                # Max value changed
                v[1].valueChanged.connect(
                    partial(min_max_changed,
                            double_spin_index=index,
                            mode=MAX_VALUE_MODE))
            if index > 0:
                # Min value
                v[0].valueChanged.connect(
                    partial(min_max_changed,
                            double_spin_index=index,
                            mode=MIN_VALUE_MODE))

        grid_layout_thresholds.setSpacing(0)

        self.right_layout.addLayout(grid_layout_thresholds)

    def add_buttons(self, classification):
        """Helper to setup 3 buttons.

        :param classification: The current classification.
        :type classification: dict
        """
        # Note(IS): Until we have good behaviour, we will disable cancel
        # button.
        # Add 3 buttons: Restore default, Cancel, Save

        # Restore default button, only for continuous layer (with threshold)
        if self.layer_mode == layer_mode_continuous:
            self.restore_default_button = QPushButton(tr('Restore Default'))
            self.restore_default_button.clicked.connect(
                partial(self.restore_default_button_clicked,
                        classification=classification))

        # Cancel button
        # cancel_button = QPushButton(tr('Cancel'))
        # cancel_button.clicked.connect(self.cancel_button_clicked)

        # Save button
        self.save_button = QPushButton(tr('Save'))
        self.save_button.clicked.connect(
            partial(self.save_button_clicked, classification=classification))

        button_layout = QHBoxLayout()
        button_layout.addStretch(1)
        if self.layer_mode == layer_mode_continuous:
            button_layout.addWidget(self.restore_default_button)
        button_layout.addWidget(self.save_button)

        button_layout.setStretch(0, 3)
        button_layout.setStretch(1, 1)
        button_layout.setStretch(2, 1)
        # button_layout.setStretch(3, 1)

        self.right_layout.addLayout(button_layout)

    def setup_value_mapping_panels(self, classification):
        """Setup value mapping panel in the right panel.

        :param classification: Classification definition.
        :type classification: dict
        """
        # Set text in the label
        layer_purpose = self.parent.step_kw_purpose.selected_purpose()
        layer_subcategory = self.parent.step_kw_subcategory. \
            selected_subcategory()

        if is_raster_layer(self.parent.layer):
            description_text = classify_raster_question % (
                layer_subcategory['name'], layer_purpose['name'],
                classification['name'])

            dataset = gdal.Open(self.parent.layer.source(), GA_ReadOnly)
            active_band = self.parent.step_kw_band_selector.selected_band()
            unique_values = numpy.unique(
                numpy.array(dataset.GetRasterBand(active_band).ReadAsArray()))
            field_type = 0
            # Convert datatype to a json serializable type
            if numpy.issubdtype(unique_values.dtype, numpy.floating):
                unique_values = [float(i) for i in unique_values]
            else:
                unique_values = [int(i) for i in unique_values]
        else:
            field = self.parent.step_kw_field.selected_fields()
            field_index = self.parent.layer.fields().indexFromName(field)
            field_type = self.parent.layer.fields()[field_index].type()
            description_text = classify_vector_question % (
                layer_subcategory['name'], layer_purpose['name'],
                classification['name'], field.upper())
            unique_values = list(self.parent.layer.uniqueValues(field_index))

        # Set description
        description_label = QLabel(description_text)
        description_label.setWordWrap(True)
        self.right_layout.addWidget(description_label)

        self.list_unique_values = QListWidget()
        self.list_unique_values.setDragDropMode(QAbstractItemView.DragDrop)
        self.list_unique_values.setDefaultDropAction(Qt.MoveAction)

        self.tree_mapping_widget = QTreeWidget()
        self.tree_mapping_widget.setDragDropMode(QAbstractItemView.DragDrop)
        self.tree_mapping_widget.setDefaultDropAction(Qt.MoveAction)
        self.tree_mapping_widget.header().hide()

        self.tree_mapping_widget.itemChanged.connect(
            self.update_dragged_item_flags)

        value_mapping_layout = QHBoxLayout()
        value_mapping_layout.addWidget(self.list_unique_values)
        value_mapping_layout.addWidget(self.tree_mapping_widget)

        self.right_layout.addLayout(value_mapping_layout)

        default_classes = classification['classes']

        # Assign unique values to classes (according to default)
        unassigned_values = list()
        assigned_values = dict()
        for default_class in default_classes:
            assigned_values[default_class['key']] = list()
        for unique_value in unique_values:
            if (unique_value is None or
                (hasattr(unique_value, 'isNull') and unique_value.isNull())):
                # Don't classify features with NULL value
                continue
            # Capitalization of the value and removing '_' (raw OSM data).
            value_as_string = str(unique_value).upper().replace('_', ' ')
            assigned = False
            for default_class in default_classes:
                if 'string_defaults' in default_class:
                    # To make it case insensitive
                    upper_string_defaults = [
                        c.upper() for c in default_class['string_defaults']
                    ]
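                    # QVariant type ids below 10 are numeric; QVariant.String
                    # is 10, so field_type > 9 means a string-like field.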
                    in_string_default = (value_as_string
                                         in upper_string_defaults)
                    condition_1 = field_type > 9 and in_string_default
                else:
                    condition_1 = False
                condition_2 = (
                    field_type < 10 and 'numeric_default_min' in default_class
                    and 'numeric_default_max' in default_class
                    and (default_class['numeric_default_min'] <= unique_value <
                         default_class['numeric_default_max']))
                if condition_1 or condition_2:
                    assigned_values[default_class['key']] += [unique_value]
                    assigned = True
                    break
            if not assigned:
                # add to unassigned values list otherwise
                unassigned_values += [unique_value]
        self.populate_classified_values(unassigned_values, assigned_values,
                                        default_classes,
                                        self.list_unique_values,
                                        self.tree_mapping_widget)

        # Current value map for exposure and classification
        available_classifications = self.value_maps.get(
            self.active_exposure['key'])
        if not available_classifications:
            return
        # Get active one
        current_classification = available_classifications.get(
            classification['key'])
        if not current_classification:
            return
        # Should come from metadata
        current_value_map = current_classification.get('classes')
        if not current_value_map:
            return

        unassigned_values = list()
        assigned_values = dict()
        for default_class in default_classes:
            assigned_values[default_class['key']] = list()
        for unique_value in unique_values:
            if (unique_value is None or
                (hasattr(unique_value, 'isNull') and unique_value.isNull())):
                # Don't classify features with NULL value
                continue
            # check in value map
            assigned = False
            for key, value_list in list(current_value_map.items()):
                if unique_value in value_list and key in assigned_values:
                    assigned_values[key] += [unique_value]
                    assigned = True
            if not assigned:
                unassigned_values += [unique_value]
        self.populate_classified_values(unassigned_values, assigned_values,
                                        default_classes,
                                        self.list_unique_values,
                                        self.tree_mapping_widget)

    # noinspection PyMethodMayBeStatic
    def update_dragged_item_flags(self, item):
        """Fix the drop flag after the item is dropped.

        Check if it looks like an item dragged from QListWidget
        to QTreeWidget and disable the drop flag.
        For some reason the flag is set while dragging.

        :param item: Item which is dragged.
        :type item: QTreeWidgetItem

        .. note:: This is a slot executed when the item change.
        """
        if int(item.flags() & Qt.ItemIsDropEnabled) \
                and int(item.flags() & Qt.ItemIsDragEnabled):
            item.setFlags(item.flags() & ~Qt.ItemIsDropEnabled)

    @staticmethod
    def populate_classified_values(unassigned_values, assigned_values,
                                   default_classes, list_unique_values,
                                   tree_mapping_widget):
        """Populate lstUniqueValues and treeClasses.from the parameters.

        :param unassigned_values: List of values that haven't been assigned
            to a class. It will be put in list_unique_values.
        :type unassigned_values: list

        :param assigned_values: Dictionary with class as the key and list of
            value as the value of the dictionary. It will be put in
            tree_mapping_widget.
        :type assigned_values: dict

        :param default_classes: Default classes from unit.
        :type default_classes: list

        :param list_unique_values: List Widget for unique values
        :type list_unique_values: QListWidget

        :param tree_mapping_widget: Tree Widget for classifying.
        :type tree_mapping_widget: QTreeWidget
        """
        # Populate the unique values list
        list_unique_values.clear()
        list_unique_values.setSelectionMode(
            QAbstractItemView.ExtendedSelection)
        for value in unassigned_values:
            value_as_string = str(value) if value is not None else 'NULL'
            list_item = QListWidgetItem(list_unique_values)
            list_item.setFlags(Qt.ItemIsEnabled
                               | Qt.ItemIsSelectable
                               | Qt.ItemIsDragEnabled)
            list_item.setData(Qt.UserRole, value)
            list_item.setText(value_as_string)
            list_unique_values.addItem(list_item)
        # Populate assigned values tree
        tree_mapping_widget.clear()
        bold_font = QFont()
        bold_font.setItalic(True)
        bold_font.setBold(True)
        bold_font.setWeight(75)
        tree_mapping_widget.invisibleRootItem().setFlags(Qt.ItemIsEnabled)
        for default_class in default_classes:
            # Create branch for class
            tree_branch = QTreeWidgetItem(tree_mapping_widget)
            tree_branch.setFlags(Qt.ItemIsDropEnabled | Qt.ItemIsEnabled)
            tree_branch.setExpanded(True)
            tree_branch.setFont(0, bold_font)
            if 'name' in default_class:
                default_class_name = default_class['name']
            else:
                default_class_name = default_class['key']
            tree_branch.setText(0, default_class_name)
            tree_branch.setData(0, Qt.UserRole, default_class['key'])
            if 'description' in default_class:
                tree_branch.setToolTip(0, default_class['description'])
            # Assign known values
            for value in assigned_values[default_class['key']]:
                string_value = str(value) if value is not None else 'NULL'
                tree_leaf = QTreeWidgetItem(tree_branch)
                tree_leaf.setFlags(Qt.ItemIsEnabled
                                   | Qt.ItemIsSelectable
                                   | Qt.ItemIsDragEnabled)
                tree_leaf.setData(0, Qt.UserRole, value)
                tree_leaf.setText(0, string_value)

    def cancel_button_clicked(self):
        """Action for cancel button clicked."""
        # Change mode
        self.mode = CHOOSE_MODE
        # Enable all edit buttons and combo boxes
        for i in range(len(self.exposures)):
            if i == self.special_case_index:
                self.exposure_edit_buttons[i].setEnabled(False)
                self.exposure_combo_boxes[i].setEnabled(False)
                continue
            if self.get_classification(self.exposure_combo_boxes[i]):
                self.exposure_edit_buttons[i].setEnabled(True)
            else:
                self.exposure_edit_buttons[i].setEnabled(False)
            # self.exposure_edit_buttons[i].setText(tr('Edit'))
            self.exposure_combo_boxes[i].setEnabled(True)

        # Clear right panel
        clear_layout(self.right_layout)
        # Show current state
        self.show_current_state()
        # Unset active exposure
        self.active_exposure = None

        self.parent.pbnNext.setEnabled(self.is_ready_to_next_step())

    def save_button_clicked(self, classification):
        """Action for save button clicked.

        :param classification: The classification being edited.
        :type classification: dict
        """
        # Save current edit
        if self.layer_mode == layer_mode_continuous:
            thresholds = self.get_threshold()
            classification_class = {'classes': thresholds, 'active': True}
            if self.thresholds.get(self.active_exposure['key']):
                # Set other class to not active
                for current_classification in list(
                        self.thresholds.get(
                            self.active_exposure['key']).values()):
                    current_classification['active'] = False
            else:
                self.thresholds[self.active_exposure['key']] = {}

            self.thresholds[self.active_exposure['key']][
                classification['key']] = classification_class
        else:
            value_maps = self.get_value_map()
            classification_class = {'classes': value_maps, 'active': True}
            if self.value_maps.get(self.active_exposure['key']):
                # Set other class to not active
                for current_classification in list(
                        self.value_maps.get(
                            self.active_exposure['key']).values()):
                    current_classification['active'] = False
            else:
                self.value_maps[self.active_exposure['key']] = {}

            self.value_maps[self.active_exposure['key']][
                classification['key']] = classification_class
        # Back to choose mode
        self.cancel_button_clicked()

    def restore_default_button_clicked(self, classification):
        """Action for restore default button clicked.

        It will set the threshold with default value.

        :param classification: The classification being edited.
        :type classification: dict
        """
        # Obtain default value
        class_dict = {}
        for the_class in classification.get('classes'):
            class_dict[the_class['key']] = {
                'numeric_default_min': the_class['numeric_default_min'],
                'numeric_default_max': the_class['numeric_default_max'],
            }
        # Set for all threshold
        for key, value in list(self.threshold_classes.items()):
            value[0].setValue(class_dict[key]['numeric_default_min'])
            value[1].setValue(class_dict[key]['numeric_default_max'])

    def get_threshold(self):
        """Return threshold based on current state."""
        value_map = dict()
        for key, value in list(self.threshold_classes.items()):
            value_map[key] = [
                value[0].value(),
                value[1].value(),
            ]
        return value_map

    def get_value_map(self):
        """Obtain the value-to-class mapping set by user.

        :returns: The complete mapping as a dict of lists.
        :rtype: dict
        """
        value_map = {}
        tree_clone = self.tree_mapping_widget.invisibleRootItem().clone()
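        # Work on a clone so takeChildren() does not strip the rows from the
        # widget that is still on screen.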
        for tree_branch in tree_clone.takeChildren():
            value_list = []
            for tree_leaf in tree_branch.takeChildren():
                value_list += [tree_leaf.data(0, Qt.UserRole)]
            if value_list:
                value_map[tree_branch.data(0, Qt.UserRole)] = value_list
        return value_map

    def set_current_state(self):
        """"Helper to set the state of the step from current keywords."""
        if not self.thresholds:
            self.thresholds = self.parent.get_existing_keyword('thresholds')
        if not self.value_maps:
            self.value_maps = self.parent.get_existing_keyword('value_maps')

    def classifications_combo_box_changed(self, index, exposure,
                                          exposure_combo_box, edit_button):
        """Action when classification combo box changed.

        :param index: The index of the combo box.
        :type index: int

        :param exposure: The exposure associated with the combo box.
        :type exposure: dict

        :param exposure_combo_box: Combo box for the classification.
        :type exposure_combo_box: QComboBox

        :param edit_button: The edit button associate with combo box.
        :type edit_button: QPushButton
        """
        # Disable the edit button when no classification is selected
        edit_button.setEnabled(bool(index))

        classification = self.get_classification(exposure_combo_box)
        self.activate_classification(exposure, classification)
        clear_layout(self.right_layout)
        self.show_current_state()

        self.parent.pbnNext.setEnabled(self.is_ready_to_next_step())

        # Open edit panel directly
        edit_button.click()

    def activate_classification(self, exposure, classification=None):
        """Set active to True for classification for the exposure.

        If classification is None, all classifications are set to active = False.

        :param exposure: Exposure definition.
        :type exposure: dict

        :param classification: Classification definition.
        :type classification: dict
        """
        if self.layer_mode == layer_mode_continuous:
            selected_unit = self.parent.step_kw_unit.selected_unit()['key']
            target = self.thresholds.get(exposure['key'])
            if target is None:
                self.thresholds[exposure['key']] = {}
            target = self.thresholds.get(exposure['key'])
        else:
            selected_unit = None
            target = self.value_maps.get(exposure['key'])
            if target is None:
                self.value_maps[exposure['key']] = {}
            target = self.value_maps.get(exposure['key'])

        if classification is not None:
            if classification['key'] not in target:
                if self.layer_mode == layer_mode_continuous:
                    default_classes = default_classification_thresholds(
                        classification, selected_unit)
                    target[classification['key']] = {
                        'classes': default_classes,
                        'active': True
                    }
                else:
                    # Set classes to empty, since we haven't mapped anything
                    target[classification['key']] = {
                        'classes': {},
                        'active': True
                    }
                return

        for classification_key, value in list(target.items()):
            if classification is None:
                value['active'] = False
                continue

            if classification_key == classification['key']:
                value['active'] = True
            else:
                value['active'] = False

    @property
    def step_name(self):
        """Get the human friendly name for the wizard step.

        :returns: The name of the wizard step.
        :rtype: str
        """
        return tr('Multi Classification Step')

    def help_content(self):
        """Return the content of help for this step wizard.

            We only need to re-implement this method in each wizard step.

        :returns: A message object contains help.
        :rtype: m.Message
        """
        message = m.Message()
        message.add(
            m.Paragraph(
                tr('In this wizard step: {step_name}, you will be able to set the '
                   'classification that you will use per exposure type. You can also '
                   'set the threshold or value map for each classification.').
                format(step_name=self.step_name)))
        return message
Ejemplo n.º 46
0
device = get_device(args.gpu)
model = model_selection(args.model)

## variables
classifier_features_num = get_features(model)
hidden_layers = args.hidden_layers
flower_outputs = 102
learn_rate = args.learning_rate
epochs = args.epochs
print('Hidden layers: ', hidden_layers, '\nLearning rate: ', learn_rate, '\nepochs: ', epochs)


## classifier
classifier = nn.Sequential(OrderedDict([
                          ('fc1', nn.Linear(classifier_features_num, hidden_layers)),
                          ('relu', nn.ReLU()),
                          ('fc2', nn.Linear(hidden_layers, flower_outputs)),
                          ('output', nn.LogSoftmax(dim=1))
                          ]))
model.classifier = classifier

## criterion and optimizer, with frozen parameters
criterion = nn.NLLLoss()
optimizer = optim.Adam(model.classifier.parameters(), lr=learn_rate)



##### train and validate model #####

## print every 50 images
image_print = 50
steps = 0
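
## A minimal sketch of the training loop this setup leads into; it is an
## illustration, not part of the original script. `train_loader` is an
## assumed DataLoader name that is not defined in the snippet above.
model.to(device)
for epoch in range(epochs):
    running_loss = 0
    for images, labels in train_loader:
        steps += 1
        images, labels = images.to(device), labels.to(device)
        optimizer.zero_grad()
        log_probabilities = model(images)
        loss = criterion(log_probabilities, labels)
        loss.backward()
        optimizer.step()
        running_loss += loss.item()
        if steps % image_print == 0:
            print('Epoch {}/{}.. Train loss: {:.3f}'.format(
                epoch + 1, epochs, running_loss / image_print))
            running_loss = 0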
Ejemplo n.º 47
0
    def enum_valid_dist_attr_for_program(program,
                                         process_mesh_topology,
                                         is_pipeline=False):
        """Enumerate valid distributed attributes for all ops in program."""
        valid_dist_attr_dict = OrderedDict()
        ops = program.global_block().ops
        vars = program.global_block().vars

        processes = reduce(lambda x, y: x * y, process_mesh_topology)
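        # The total number of processes is the product of the mesh dimensions.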
        global_group = [i for i in range(processes)]
        global_process_mesh = None
        pipeline_process_meshes = None

        # In pipeline mode, each pipeline stage gets its own process mesh.
        if is_pipeline:
            pipeline_stages = process_mesh_topology[-1]
            op_count_per_stage = len(ops) // pipeline_stages
            if len(process_mesh_topology) > 1:
                process_mesh_shape = process_mesh_topology[:-1]
                per_process_mesh_group = processes // pipeline_stages
                pipeline_process_meshes = [
                    auto.ProcessMesh(mesh=np.array(
                        global_group[i * per_process_mesh_group:
                                     (i + 1) * per_process_mesh_group]
                    ).reshape(process_mesh_shape).tolist())
                    for i in range(pipeline_stages)
                ]
            elif len(process_mesh_topology) == 1:
                pipeline_process_meshes = [
                    auto.ProcessMesh(mesh=[i]) for i in range(pipeline_stages)
                ]
        else:
            if len(process_mesh_topology) > 1:
                global_process_mesh = auto.ProcessMesh(mesh=np.array(
                    global_group).reshape(process_mesh_topology).tolist())
            else:
                global_process_mesh = auto.ProcessMesh(mesh=global_group)

        # enumerate valid distributed attribute for each op in the program
        for idx, op in enumerate(ops):
            op_valid_dist_attrs = None
            op_process_mesh = global_process_mesh
            pipeline_stage = -1
            if pipeline_process_meshes is not None:
                # Clamp the stage index so trailing ops fall into the last
                # pipeline stage.
                pipeline_stage = min(idx // op_count_per_stage,
                                     len(pipeline_process_meshes) - 1)
                op_process_mesh = pipeline_process_meshes[pipeline_stage]

            if op.type in PlanSpace.not_enum_ops:
                op_dist_attr = OperatorDistributedAttribute()
                op_dist_attr.process_mesh = op_process_mesh
                for var_name in op.input_arg_names:
                    if var_name in PlanSpace.special_vars:
                        op_dist_attr.set_input_dims_mapping(var_name, [])
                    else:
                        dims_mapping = [-1 for i in vars[var_name].shape]
                        op_dist_attr.set_input_dims_mapping(
                            var_name, dims_mapping)

                for var_name in op.output_arg_names:
                    if var_name in PlanSpace.special_vars:
                        op_dist_attr.set_output_dims_mapping(var_name, [])
                    else:
                        dims_mapping = [-1 for i in vars[var_name].shape]
                        op_dist_attr.set_output_dims_mapping(
                            var_name, dims_mapping)
                op_valid_dist_attrs = [op_dist_attr]
                pipeline_stage = 0 if pipeline_stage != -1 else pipeline_stage
            else:
                op_valid_dist_attrs = PlanSpace._enum_valid_dist_attr_for_op(
                    program, op, op_process_mesh)

            assert op_valid_dist_attrs is not None, \
                "Failed to enumerate valid distributed attributes for op {}.".format(op)
            valid_dist_attr_dict[op.desc.id()] = [
                op_valid_dist_attrs, pipeline_stage
            ]

        return valid_dist_attr_dict, pipeline_process_meshes, global_process_mesh
Ejemplo n.º 48
0
    def initAlgorithm(self, config=None):
        self.DIRECTIONS = OrderedDict([
            (self.tr('Forward direction'), QgsVectorLayerDirector.DirectionForward),
            (self.tr('Backward direction'), QgsVectorLayerDirector.DirectionBackward),
            (self.tr('Both directions'), QgsVectorLayerDirector.DirectionBoth)])
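        # OrderedDict keeps the combo-box entries in a stable, user-facing
        # order while mapping each label to its director constant.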

        self.STRATEGIES = [self.tr('Shortest'),
                           self.tr('Fastest')
                           ]

        self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT,
                                                              self.tr('Network Layer'),
                                                              [QgsProcessing.TypeVectorLine]))
        self.addParameter(QgsProcessingParameterPoint(self.START_POINT,
                                                      self.tr('Start Point')))
        self.addParameter(QgsProcessingParameterNumber(self.MAX_DIST,
                                                   self.tr('Size of Iso-Area (distance or seconds depending on strategy)'),
                                                   QgsProcessingParameterNumber.Double,
                                                   2500.0, False, 0, 99999999.99))
        self.addParameter(QgsProcessingParameterEnum(self.STRATEGY,
                                                     self.tr('Optimization Criterion'),
                                                     self.STRATEGIES,
                                                     defaultValue=0))

        params = []
        params.append(QgsProcessingParameterField(self.DIRECTION_FIELD,
                                                  self.tr('Direction field'),
                                                  None,
                                                  self.INPUT,
                                                  optional=True))
        params.append(QgsProcessingParameterString(self.VALUE_FORWARD,
                                                   self.tr('Value for forward direction'),
                                                   optional=True))
        params.append(QgsProcessingParameterString(self.VALUE_BACKWARD,
                                                   self.tr('Value for backward direction'),
                                                   optional=True))
        params.append(QgsProcessingParameterString(self.VALUE_BOTH,
                                                   self.tr('Value for both directions'),
                                                   optional=True))
        params.append(QgsProcessingParameterEnum(self.DEFAULT_DIRECTION,
                                                 self.tr('Default direction'),
                                                 list(self.DIRECTIONS.keys()),
                                                 defaultValue=2))
        params.append(QgsProcessingParameterField(self.SPEED_FIELD,
                                                  self.tr('Speed field'),
                                                  None,
                                                  self.INPUT,
                                                  optional=True))
        params.append(QgsProcessingParameterNumber(self.DEFAULT_SPEED,
                                                   self.tr('Default speed (km/h)'),
                                                   QgsProcessingParameterNumber.Double,
                                                   5.0, False, 0, 99999999.99))
        params.append(QgsProcessingParameterNumber(self.TOLERANCE,
                                                   self.tr('Topology tolerance'),
                                                   QgsProcessingParameterNumber.Double,
                                                   0.0, False, 0, 99999999.99))

        for p in params:
            p.setFlags(p.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
            self.addParameter(p)
        
        self.addParameter(QgsProcessingParameterFeatureSink(self.OUTPUT,
                                                            self.tr('Output Pointcloud'),
                                                            QgsProcessing.TypeVectorPoint))
Ejemplo n.º 49
0
def test_groupby_transform(setup):
    df1 = pd.DataFrame({
        'a': [3, 4, 5, 3, 5, 4, 1, 2, 3],
        'b': [1, 3, 4, 5, 6, 5, 4, 4, 4],
        'c': list('aabaaddce'),
        'd': [3, 4, 5, 3, 5, 4, 1, 2, 3],
        'e': [1, 3, 4, 5, 6, 5, 4, 4, 4],
        'f': list('aabaaddce'),
    })

    def transform_series(s, truncate=True):
        s = s.sort_index()
        if truncate and len(s.index) > 1:
            s = s.iloc[:-1].reset_index(drop=True)
        return s
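
    # transform_series sorts each group by index and, when truncating, drops
    # the group's last row; the distributed results are checked against
    # plain pandas below.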

    mdf = md.DataFrame(df1, chunk_size=3)

    r = mdf.groupby('b').transform(transform_series, truncate=False)
    pd.testing.assert_frame_equal(
        r.execute().fetch().sort_index(),
        df1.groupby('b').transform(transform_series,
                                   truncate=False).sort_index())

    if pd.__version__ != '1.1.0':
        r = mdf.groupby('b').transform(['cummax', 'cumsum'], _call_agg=True)
        pd.testing.assert_frame_equal(
            r.execute().fetch().sort_index(),
            df1.groupby('b').agg(['cummax', 'cumsum']).sort_index())

        agg_list = ['cummax', 'cumsum']
        r = mdf.groupby('b').transform(agg_list, _call_agg=True)
        pd.testing.assert_frame_equal(
            r.execute().fetch().sort_index(),
            df1.groupby('b').agg(agg_list).sort_index())

        agg_dict = OrderedDict([('d', 'cummax'), ('b', 'cumsum')])
        r = mdf.groupby('b').transform(agg_dict, _call_agg=True)
        pd.testing.assert_frame_equal(
            r.execute().fetch().sort_index(),
            df1.groupby('b').agg(agg_dict).sort_index())

    agg_list = ['sum', lambda s: s.sum()]
    r = mdf.groupby('b').transform(agg_list, _call_agg=True)
    pd.testing.assert_frame_equal(r.execute().fetch().sort_index(),
                                  df1.groupby('b').agg(agg_list).sort_index())

    series1 = pd.Series([3, 4, 5, 3, 5, 4, 1, 2, 3])
    ms1 = md.Series(series1, chunk_size=3)

    r = ms1.groupby(lambda x: x % 3).transform(lambda x: x + 1)
    pd.testing.assert_series_equal(
        r.execute().fetch().sort_index(),
        series1.groupby(lambda x: x % 3).transform(
            lambda x: x + 1).sort_index())

    r = ms1.groupby(lambda x: x % 3).transform('cummax', _call_agg=True)
    pd.testing.assert_series_equal(
        r.execute().fetch().sort_index(),
        series1.groupby(lambda x: x % 3).agg('cummax').sort_index())

    agg_list = ['cummax', 'cumcount']
    r = ms1.groupby(lambda x: x % 3).transform(agg_list, _call_agg=True)
    pd.testing.assert_frame_equal(
        r.execute().fetch().sort_index(),
        series1.groupby(lambda x: x % 3).agg(agg_list).sort_index())
    def setup_thresholds_panel(self, classification):
        """Setup threshold panel in the right panel.

        :param classification: Classification definition.
        :type classification: dict
        """
        # Set text in the label
        layer_purpose = self.parent.step_kw_purpose.selected_purpose()
        layer_subcategory = self.parent.step_kw_subcategory.\
            selected_subcategory()

        if is_raster_layer(self.parent.layer):
            active_band = self.parent.step_kw_band_selector.selected_band()
            layer_extent = self.parent.layer.extent()
            statistics = self.parent.layer.dataProvider().bandStatistics(
                active_band, QgsRasterBandStats.All, layer_extent, 0)
            description_text = continuous_raster_question % (
                layer_purpose['name'], layer_subcategory['name'],
                classification['name'], statistics.minimumValue,
                statistics.maximumValue)
        else:
            field_name = self.parent.step_kw_field.selected_fields()
            field_index = self.parent.layer.fields().lookupField(field_name)
            min_value_layer = self.parent.layer.minimumValue(field_index)
            max_value_layer = self.parent.layer.maximumValue(field_index)
            description_text = continuous_vector_question % (
                layer_purpose['name'], layer_subcategory['name'], field_name,
                classification['name'], min_value_layer, max_value_layer)

        # Set description
        description_label = QLabel(description_text)
        description_label.setWordWrap(True)
        self.right_layout.addWidget(description_label)

        if self.thresholds:
            thresholds = self.thresholds
        else:
            thresholds = self.parent.get_existing_keyword('thresholds')
        selected_unit = self.parent.step_kw_unit.selected_unit()['key']

        self.threshold_classes = OrderedDict()
        classes = classification.get('classes')
        # Sort by value, put the lowest first
        classes = sorted(classes, key=lambda the_key: the_key['value'])

        grid_layout_thresholds = QGridLayout()

        for i, the_class in enumerate(classes):
            class_layout = QHBoxLayout()

            # Class label
            class_label = QLabel(the_class['name'])

            # Min label
            min_label = QLabel(tr('Min >'))

            # Min value as double spin
            min_value_input = QDoubleSpinBox()
            # TODO(IS) We can set the min and max depends on the unit, later
            min_value_input.setMinimum(0)
            min_value_input.setMaximum(999999)

            if thresholds.get(self.active_exposure['key']):
                exposure_thresholds = thresholds.get(
                    self.active_exposure['key'])
                if exposure_thresholds.get(classification['key']):
                    exposure_thresholds_classifications = exposure_thresholds\
                        .get(classification['key'])
                    min_value_input.setValue(
                        exposure_thresholds_classifications['classes'][
                            the_class['key']][0])
                else:
                    default_min = the_class['numeric_default_min']
                    if isinstance(default_min, dict):
                        default_min = the_class['numeric_default_min'][
                            selected_unit]
                    min_value_input.setValue(default_min)
            else:
                default_min = the_class['numeric_default_min']
                if isinstance(default_min, dict):
                    default_min = the_class['numeric_default_min'][
                        selected_unit]
                min_value_input.setValue(default_min)
            min_value_input.setSingleStep(0.1)

            # Max label
            max_label = QLabel(tr('Max <='))

            # Max value as double spin
            max_value_input = QDoubleSpinBox()
            # TODO(IS) We can set the min and max depends on the unit, later
            max_value_input.setMinimum(0)
            max_value_input.setMaximum(999999)
            if thresholds.get(self.active_exposure['key']):
                exposure_thresholds = thresholds.get(
                    self.active_exposure['key'])
                if exposure_thresholds.get(classification['key']):
                    exposure_thresholds_classifications = exposure_thresholds \
                        .get(classification['key'])
                    max_value_input.setValue(
                        exposure_thresholds_classifications['classes'][
                            the_class['key']][1])
                else:
                    default_max = the_class['numeric_default_max']
                    if isinstance(default_max, dict):
                        default_max = the_class['numeric_default_max'][
                            selected_unit]
                    max_value_input.setValue(default_max)
            else:
                default_max = the_class['numeric_default_max']
                if isinstance(default_max, dict):
                    default_max = the_class['numeric_default_max'][
                        selected_unit]
                max_value_input.setValue(default_max)
            max_value_input.setSingleStep(0.1)

            # Add to class_layout
            class_layout.addWidget(min_label)
            class_layout.addWidget(min_value_input)
            class_layout.addWidget(max_label)
            class_layout.addWidget(max_value_input)

            class_layout.setStretch(0, 1)
            class_layout.setStretch(1, 2)
            class_layout.setStretch(2, 1)
            class_layout.setStretch(3, 2)

            # Add to grid_layout
            grid_layout_thresholds.addWidget(class_label, i, 0)
            grid_layout_thresholds.addLayout(class_layout, i, 1)

            self.threshold_classes[the_class['key']] = [
                min_value_input, max_value_input
            ]

        grid_layout_thresholds.setColumnStretch(0, 1)
        grid_layout_thresholds.setColumnStretch(0, 2)

        def min_max_changed(double_spin_index, mode):
            """Slot when min or max value change.

            :param double_spin_index: The index of the double spin.
            :type double_spin_index: int

            :param mode: The flag to indicate the min or max value.
            :type mode: int
            """
            if mode == MAX_VALUE_MODE:
                current_max_value = list(
                    self.threshold_classes.values())[double_spin_index][1]
                target_min_value = list(
                    self.threshold_classes.values())[double_spin_index + 1][0]
                if current_max_value.value() != target_min_value.value():
                    target_min_value.setValue(current_max_value.value())
            elif mode == MIN_VALUE_MODE:
                current_min_value = list(
                    self.threshold_classes.values())[double_spin_index][0]
                target_max_value = list(
                    self.threshold_classes.values())[double_spin_index - 1][1]
                if current_min_value.value() != target_max_value.value():
                    target_max_value.setValue(current_min_value.value())

        # Set behaviour
        for k, v in list(self.threshold_classes.items()):
            index = list(self.threshold_classes.keys()).index(k)
            if index < len(self.threshold_classes) - 1:
                # Max value changed
                v[1].valueChanged.connect(
                    partial(min_max_changed,
                            double_spin_index=index,
                            mode=MAX_VALUE_MODE))
            if index > 0:
                # Min value
                v[0].valueChanged.connect(
                    partial(min_max_changed,
                            double_spin_index=index,
                            mode=MIN_VALUE_MODE))

        grid_layout_thresholds.setSpacing(0)

        self.right_layout.addLayout(grid_layout_thresholds)
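
For reference, a minimal standalone sketch of the adjacent-spinbox linking behaviour wired up above, assuming PyQt5 is available (it uses a default-argument lambda in place of functools.partial, and the mode constants are assumptions, not the snippet's real imports):

from PyQt5.QtWidgets import QApplication, QDoubleSpinBox

MIN_VALUE_MODE = 0  # assumed values; the snippet imports its own constants
MAX_VALUE_MODE = 1

app = QApplication([])
# one (min, max) spin box pair per threshold class
spins = [(QDoubleSpinBox(), QDoubleSpinBox()) for _ in range(3)]

def min_max_changed(double_spin_index, mode):
    # Keep class N's max glued to class N+1's min, in both directions.
    if mode == MAX_VALUE_MODE:
        source = spins[double_spin_index][1]
        target = spins[double_spin_index + 1][0]
    else:
        source = spins[double_spin_index][0]
        target = spins[double_spin_index - 1][1]
    if source.value() != target.value():
        target.setValue(source.value())

for index, (min_spin, max_spin) in enumerate(spins):
    if index < len(spins) - 1:
        max_spin.valueChanged.connect(
            lambda _v, i=index: min_max_changed(i, MAX_VALUE_MODE))
    if index > 0:
        min_spin.valueChanged.connect(
            lambda _v, i=index: min_max_changed(i, MIN_VALUE_MODE))

spins[0][1].setValue(2.5)           # raising class 0's max...
assert spins[1][0].value() == 2.5   # ...drags class 1's min along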
Ejemplo n.º 51
0
try:
    import configparser
except ImportError:
    # Python 2 fallback
    import ConfigParser as configparser

_start_string = '.. towncrier release notes start\n'
_title_format = '{name} {version}\n==========\n'
_template_fname = None
_default_types = OrderedDict([
    (u"feature", {
        "name": u"Features",
        "showcontent": True
    }),
    (u"bugfix", {
        "name": u"Bugfixes",
        "showcontent": True
    }),
    (u"doc", {
        "name": u"Improved Documentation",
        "showcontent": True
    }),
    (u"removal", {
        "name": u"Deprecations and Removals",
        "showcontent": True
    }),
    (u"misc", {
        "name": u"Misc",
        "showcontent": False
    }),
])


def load_config_ini(from_dir):

    config = configparser.ConfigParser({
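
The snippet is cut off above, but the shape of the call is recognizable: ConfigParser takes a defaults dict. A small self-contained sketch of that pattern (the file path, section, and option names here are assumptions, not towncrier's actual config schema):

try:
    import configparser
except ImportError:
    import ConfigParser as configparser  # Python 2

config = configparser.ConfigParser({'package': '', 'filename': 'NEWS.rst'})
config.read('towncrier.ini')  # hypothetical path
if config.has_section('tool:towncrier'):
    # falls back to the defaults dict when the option is absent
    print(config.get('tool:towncrier', 'filename'))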
Ejemplo n.º 52
0
    def _enum_valid_dist_attr_for_op(program, op, process_mesh):
        """Enumerate the valid distributed attribute for op based on the given process mesh."""
        vars = program.global_block().vars
        dims_mapping_dict = OrderedDict()
        op_valid_dist_attrs = []
        dist_op_impl_container = get_distributed_operator_impl_container(
            op.type)

        # enumerate all valid dims mappings of each tensor for the given process mesh
        for var_name in chain(op.input_arg_names, op.output_arg_names):
            visited = [False] * (len(process_mesh.topology) + 1)
            depth = 0
            path = []
            dims_mapping_list = []
            PlanSpace._enum_dims_mapping(process_mesh.topology, visited, path,
                                         depth, dims_mapping_list,
                                         vars[var_name].shape)
            dims_mapping_dict[var_name] = copy.deepcopy(dims_mapping_list)

        # compose dims mapping
        composed_dims_mapping_list = list(
            product(*dims_mapping_dict.values()))
        for composed_dims_mapping in composed_dims_mapping_list:
            op_dist_attr = OperatorDistributedAttribute()
            op_dist_attr.process_mesh = process_mesh
            var_names = list(dims_mapping_dict.keys())

            for idx, dims_mapping in enumerate(composed_dims_mapping):
                if var_names[idx] in op.input_arg_names:
                    op_dist_attr.set_input_dims_mapping(
                        var_names[idx], dims_mapping)
                elif var_names[idx] in op.output_arg_names:
                    op_dist_attr.set_output_dims_mapping(
                        var_names[idx], dims_mapping)
                else:
                    raise ValueError(
                        "The {varname} is not input or output of op {op}.".
                        format(varname=var_names[idx], op=op.type))

            dist_op = DistributedOperator(op, op_dist_attr)
            if dist_op_impl_container is None:
                if is_elementwise_op(op.type):
                    changed = True
                    valid = True
                    try:
                        changed = update_op_dims_mapping_by_elementwise_like_dist_impl(
                            dist_op)
                    except Exception:
                        valid = False
                    if valid and not changed:
                        if PlanFilter.check_dims_mapping_for_op(
                                op, dist_op.dist_attr, vars
                        ) and PlanFilter.check_dims_mapping_for_special_op(
                                op, dist_op.dist_attr, vars):
                            dist_op.dist_attr.impl_type = "elementwise"
                            dist_op.dist_attr.impl_idx = 0
                            op_valid_dist_attrs.append(dist_op.dist_attr)
                    continue
                else:
                    changed = True
                    valid = True
                    try:
                        changed = update_op_dims_mapping_by_default_dist_impl(
                            dist_op)
                    except Exception:
                        valid = False
                    if valid and not changed:
                        if PlanFilter.check_dims_mapping_for_op(
                                op, dist_op.dist_attr, vars
                        ) and PlanFilter.check_dims_mapping_for_special_op(
                                op, dist_op.dist_attr, vars):
                            dist_op.dist_attr.impl_type = "default"
                            dist_op.dist_attr.impl_idx = 0
                            op_valid_dist_attrs.append(dist_op.dist_attr)
                    continue

            # if the op has distributed implementations, find all its valid dist attrs
            impls = dist_op_impl_container.impls
            for idx, impl in enumerate(impls):
                if impl.is_auto_compatible(dist_op):
                    if PlanFilter.check_dims_mapping_for_op(
                            op, dist_op.dist_attr, vars):
                        dist_op.dist_attr.impl_type = dist_op.serial_op.type
                        dist_op.dist_attr.impl_idx = idx
                        op_valid_dist_attrs.append(dist_op.dist_attr)

        # set default dist attr for some special ops whose distributed attributes cannot be enumerated
        if not op_valid_dist_attrs:
            op_dist_attr = OperatorDistributedAttribute()
            op_dist_attr.process_mesh = process_mesh
            dist_op = DistributedOperator(op, op_dist_attr)
            for var_name in op.input_arg_names:
                op_dist_attr.set_input_dims_mapping(
                    var_name, [-1 for _ in vars[var_name].shape])
            for var_name in op.output_arg_names:
                op_dist_attr.set_output_dims_mapping(
                    var_name, [-1 for _ in vars[var_name].shape])
            dist_op.dist_attr.impl_type = "default"
            dist_op.dist_attr.impl_idx = 0
            op_valid_dist_attrs.append(dist_op.dist_attr)

        return op_valid_dist_attrs
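
The composition step above is just a cartesian product over per-tensor candidate lists; a toy illustration of that idea (the candidate mappings here are made up):

from collections import OrderedDict
from itertools import product

dims_mapping_dict = OrderedDict([
    ('x', [[-1, -1], [0, -1], [-1, 0]]),  # candidates for tensor 'x'
    ('y', [[-1], [0]]),                   # candidates for tensor 'y'
])
for composed in product(*dims_mapping_dict.values()):
    print(dict(zip(dims_mapping_dict, composed)))
# 3 x 2 = 6 combinations, e.g. {'x': [-1, -1], 'y': [-1]}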
Ejemplo n.º 53
0
df_ls = [pd.read_csv(p, skiprows=[1], encoding="latin") for p in df_paths[1:]]

df_all = reduce(lambda x, y: pd.merge(x, y, on='Delta'), df_ls)

# Filter out disproportionately large L_b values
df_all['l_a'].where((df_all['L_b'] < 500.), other=np.nan, inplace=True)
df_all['l_a'].where((df_all['Delta'] != "Nakdong"), other=np.nan, inplace=True)

#%%Select variables to plot

#Ranges of hydrogeological parameters
hdrglgy2plot = OrderedDict((
            ("logKaqf",  r"$\log(K_{h,aqf} \; [m/d])$"),
            ("logKaqt",  r"$\log(K_{v,aqt} \; [m/d])$"),
            ("Recharge", r"$R \; [m/d]$"),
            ("logAnisotropy", r"$\log(K_h/K_v \; [-])$"),
#            ("logSs", r"$\log(S_s)$")
            ))

#Histograms for other parameters
var2plot = OrderedDict((("l_a",         "$l_a$ [-]"), 
                        ("beta",        r"$\beta$ [rad]"),
                        (r"H_a/H_b",    "$f_H$ [-]"),
                        ('H_b',         "$H_b$ [m]"),
                        (r'Mud/Total',  "$f_{aqt}$ [-]"),
                        ('l_conf',      "$l_{conf}$ [-]"),
                        ('N_aqt',       "$N_{aqt}$ [-]"),
                        ('N_pal',       "$N_{pal}$ [-]"),
                        ('l_tra',       "$l_{tra}$ [-]"),
                        ('N_chan',      "$N_{chan}$ [-]")
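
A toy illustration of the Series.where masking used near the top of this snippet; assigning the result is the non-inplace equivalent:

import numpy as np
import pandas as pd

s = pd.Series([1.0, 2.0, 3.0])
flag = pd.Series([100.0, 600.0, 200.0])
s = s.where(flag < 500., other=np.nan)  # keeps rows where the condition holds
print(s.tolist())  # [1.0, nan, 3.0]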
Ejemplo n.º 54
0
def test_dataframe_groupby_agg(setup):
    agg_funs = [
        'std', 'mean', 'var', 'max', 'count', 'size', 'all', 'any', 'skew',
        'kurt', 'sem'
    ]

    rs = np.random.RandomState(0)
    raw = pd.DataFrame({
        'c1': np.arange(100).astype(np.int64),
        'c2': rs.choice(['a', 'b', 'c'], (100, )),
        'c3': rs.rand(100)
    })
    mdf = md.DataFrame(raw, chunk_size=13)

    for method in ['tree', 'shuffle']:
        r = mdf.groupby('c2').agg('size', method=method)
        pd.testing.assert_series_equal(
            r.execute().fetch().sort_index(),
            raw.groupby('c2').agg('size').sort_index())

        for agg_fun in agg_funs:
            if agg_fun == 'size':
                continue
            r = mdf.groupby('c2').agg(agg_fun, method=method)
            pd.testing.assert_frame_equal(
                r.execute().fetch().sort_index(),
                raw.groupby('c2').agg(agg_fun).sort_index())

        r = mdf.groupby('c2').agg(agg_funs, method=method)
        pd.testing.assert_frame_equal(
            r.execute().fetch().sort_index(),
            raw.groupby('c2').agg(agg_funs).sort_index())

        agg = OrderedDict([('c1', ['min', 'mean']), ('c3', 'std')])
        r = mdf.groupby('c2').agg(agg, method=method)
        pd.testing.assert_frame_equal(r.execute().fetch().sort_index(),
                                      raw.groupby('c2').agg(agg).sort_index())

        agg = OrderedDict([('c1', 'min'), ('c3', 'sum')])
        r = mdf.groupby('c2').agg(agg, method=method)
        pd.testing.assert_frame_equal(r.execute().fetch().sort_index(),
                                      raw.groupby('c2').agg(agg).sort_index())

        r = mdf.groupby('c2').agg({'c1': 'min'}, method=method)
        pd.testing.assert_frame_equal(
            r.execute().fetch().sort_index(),
            raw.groupby('c2').agg({
                'c1': 'min'
            }).sort_index())

        # test groupby series
        r = mdf.groupby(mdf['c2']).sum(method=method)
        pd.testing.assert_frame_equal(
            r.execute().fetch().sort_index(),
            raw.groupby(raw['c2']).sum().sort_index())

    r = mdf.groupby('c2').size(method='tree')
    pd.testing.assert_series_equal(r.execute().fetch(),
                                   raw.groupby('c2').size())

    # test inserted kurt method
    r = mdf.groupby('c2').kurtosis(method='tree')
    pd.testing.assert_frame_equal(r.execute().fetch(),
                                  raw.groupby('c2').kurtosis())

    for agg_fun in agg_funs:
        if agg_fun == 'size' or callable(agg_fun):
            continue
        r = getattr(mdf.groupby('c2'), agg_fun)(method='tree')
        pd.testing.assert_frame_equal(r.execute().fetch(),
                                      getattr(raw.groupby('c2'), agg_fun)())

    for method in ['tree', 'shuffle']:
        # test as_index=False
        r = mdf.groupby('c2', as_index=False).agg('mean', method=method)
        pd.testing.assert_frame_equal(
            r.execute().fetch().sort_values('c2', ignore_index=True),
            raw.groupby('c2', as_index=False).agg('mean').sort_values(
                'c2', ignore_index=True))
        assert r.op.groupby_params['as_index'] is False

    # test as_index=False takes no effect
    r = mdf.groupby(['c1', 'c2'], as_index=False).agg(['mean', 'count'],
                                                      method='tree')
    pd.testing.assert_frame_equal(
        r.execute().fetch(),
        raw.groupby(['c1', 'c2'], as_index=False).agg(['mean', 'count']))
    assert r.op.groupby_params['as_index'] is True

    r = mdf.groupby('c2').agg(['cumsum', 'cumcount'], method='tree')
    pd.testing.assert_frame_equal(
        r.execute().fetch().sort_index(),
        raw.groupby('c2').agg(['cumsum', 'cumcount']).sort_index())

    r = mdf[['c1', 'c3']].groupby(mdf['c2']).agg(MockReduction2())
    pd.testing.assert_frame_equal(
        r.execute().fetch(),
        raw[['c1', 'c3']].groupby(raw['c2']).agg(MockReduction2()))

    r = mdf.groupby('c2').agg(sum_c1=md.NamedAgg('c1', 'sum'),
                              min_c1=md.NamedAgg('c1', 'min'),
                              mean_c3=md.NamedAgg('c3', 'mean'))
    pd.testing.assert_frame_equal(
        r.execute().fetch(),
        raw.groupby('c2').agg(sum_c1=md.NamedAgg('c1', 'sum'),
                              min_c1=md.NamedAgg('c1', 'min'),
                              mean_c3=md.NamedAgg('c3', 'mean')))
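
The per-column agg specs exercised above work the same way in plain pandas; a quick sketch:

from collections import OrderedDict
import pandas as pd

df = pd.DataFrame({'c1': [1, 2, 3, 4],
                   'c2': ['a', 'b', 'a', 'b'],
                   'c3': [0.1, 0.2, 0.3, 0.4]})
agg = OrderedDict([('c1', ['min', 'mean']), ('c3', 'std')])
# MultiIndex columns: (c1, min), (c1, mean), (c3, std)
print(df.groupby('c2').agg(agg))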
Ejemplo n.º 55
0
def augment_with_aux_indicators(dataset, filename, indicators, mode='all', alt_contrast_mode=False):
    """Augment MRs in a dataset with auxiliary tokens indicating desired discourse phenomena in the corresponding
    utterances. Depending on the mode, the augmented dataset will only contain samples which exhibit 1.) at most one
    of the desired indicators ('single'), 2.) the one selected indicator only ('only'), or 3.) all the desired
    indicators at once ('combo'). The default mode ('all') keeps all samples in the dataset.
    """

    if indicators is None or len(indicators) == 0:
        return

    mrs_augm = []
    mrs_single = []
    utterances_single = []
    mrs_emph_only = []
    utterances_emph_only = []
    mrs_contrast_only = []
    utterances_contrast_only = []
    mrs_combo = []
    utterances_combo = []

    emph_only_ctr = 0
    contrast_only_ctr = 0
    combo_ctr = 0

    print('Augmenting MRs with ' + ' + '.join(indicators) + ' in ' + str(filename))

    # Read in the data
    data_cont = data_loader.init_test_data(os.path.join(config.DATA_DIR, dataset, filename))
    mrs, utterances = data_cont['data']
    _, _, slot_sep, val_sep, val_sep_end = data_cont['separators']

    for mr, utt in zip(mrs, utterances):
        mr_dict = OrderedDict()
        mr_list_augm = []

        # Extract the slot-value pairs into a dictionary
        for slot_value in mr.split(slot_sep):
            slot, value, slot_orig, value_orig = data_loader.parse_slot_and_value(slot_value, val_sep, val_sep_end)
            mr_dict[slot] = value
            mr_list_augm.append((slot, value_orig))
            # mrs[i] = mrs[i].replace(slot_orig, slot)

        # Find the slot alignment
        alignment = find_alignment(utt, mr_dict)

        # Augment the MR with auxiliary tokens
        if 'emphasis' in indicators:
            __add_emphasis_tokens(mr_list_augm, alignment)
        if 'contrast' in indicators:
            __add_contrast_tokens(mr_list_augm, utt, alignment, alt_mode=alt_contrast_mode)

        # Convert augmented MR from list to string representation
        mr_augm = (slot_sep + ' ').join(
            s + val_sep + v + (val_sep_end if val_sep_end is not None else '')
            for s, v in mr_list_augm)

        mrs_augm.append(mr_augm)

        # Count and separate the different augmentation instances
        slots_augm = set([s for s, v in mr_list_augm])
        if config.EMPH_TOKEN in slots_augm and (config.CONTRAST_TOKEN in slots_augm or config.CONCESSION_TOKEN in slots_augm):
            mrs_combo.append(mr_augm)
            utterances_combo.append(utt)
            combo_ctr += 1
        else:
            mrs_single.append(mr_augm)
            utterances_single.append(utt)
            if config.EMPH_TOKEN in slots_augm:
                mrs_emph_only.append(mr_augm)
                utterances_emph_only.append(utt)
                emph_only_ctr += 1
            elif config.CONTRAST_TOKEN in slots_augm or config.CONCESSION_TOKEN in slots_augm:
                mrs_contrast_only.append(mr_augm)
                utterances_contrast_only.append(utt)
                contrast_only_ctr += 1

    print('# of MRs with emphasis only:', emph_only_ctr)
    print('# of MRs with contrast/concession only:', contrast_only_ctr)
    print('# of MRs with emphasis & contrast/concession:', combo_ctr)

    new_df = pd.DataFrame(columns=['mr', 'ref'])
    if mode == 'single':
        new_df['mr'] = mrs_single
        new_df['ref'] = utterances_single
    elif mode == 'only':
        if 'emphasis' in indicators:
            new_df['mr'] = mrs_emph_only
            new_df['ref'] = utterances_emph_only
        elif 'contrast' in indicators:
            new_df['mr'] = mrs_contrast_only
            new_df['ref'] = utterances_contrast_only
    elif mode == 'combo':
        new_df['mr'] = mrs_combo
        new_df['ref'] = utterances_combo
    else:
        new_df['mr'] = mrs_augm
        new_df['ref'] = utterances

    # Store augmented dataset to a new file
    filename_out = os.path.splitext(filename)[0] + '_augm_' + '_'.join(indicators) \
        + (('_' + mode) if mode != 'all' else '') + ('_alt' if alt_contrast_mode else '') + '.csv'
    new_df.to_csv(os.path.join(config.DATA_DIR, dataset, filename_out), index=False, encoding='utf8')
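
A toy round-trip of the slot/value string handling above, with E2E-style separators assumed (the real ones come from data_loader):

from collections import OrderedDict

slot_sep, val_sep, val_sep_end = ',', '[', ']'
mr = 'name[Alimentum],food[Italian]'

mr_dict = OrderedDict()
for slot_value in mr.split(slot_sep):
    slot, _, rest = slot_value.partition(val_sep)
    mr_dict[slot.strip()] = rest.rstrip(val_sep_end)
# ordered slots: name=Alimentum, food=Italian

rebuilt = (slot_sep + ' ').join(
    s + val_sep + v + val_sep_end for s, v in mr_dict.items())
print(rebuilt)  # name[Alimentum], food[Italian]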
Ejemplo n.º 56
0

def test_overrides_ordered_dict(model):
    pytest_raises_wrapper(TypeError, 'Expecting TypeError when overrides is not an OrderedDict',
                          DummyQuantizer, model, overrides={'testing': {'testing': '123'}})


acts_key = 'bits_activations'
wts_key = 'bits_weights'
bias_key = 'bits_bias'


@pytest.mark.parametrize(
    "qbits, overrides, explicit_expected_overrides",
    [
        (QBits(8, 4, 32), OrderedDict(), {}),
        (QBits(8, 4, 32),
         OrderedDict([('conv1', {acts_key: None, wts_key: None, bias_key: None}),
                      ('relu1', {acts_key: None, wts_key: None, bias_key: None})]),
         {'conv1': QBits(None, None, None), 'relu1': QBits(None, None, None)}),
        (QBits(8, 8, 32),
         OrderedDict([('sub.*conv1', {wts_key: 4}), ('sub.*conv2', {acts_key: 4, wts_key: 4})]),
         {'sub1.conv1': QBits(8, 4, 32), 'sub1.conv2': QBits(4, 4, 32), 'sub2.conv1': QBits(8, 4, 32), 'sub2.conv2': QBits(4, 4, 32)}),
        (QBits(4, 4, 32),
         OrderedDict([(r'sub1\..*1', {acts_key: 16, wts_key: 16}), (r'sub1\..*', {acts_key: 8, wts_key: 8})]),
         {'sub1.conv1': QBits(16, 16, 32), 'sub1.bn1': QBits(16, None, None),
          'sub1.relu1': QBits(16, None, None), 'sub1.pool1': QBits(16, None, None),
          'sub1.conv2': QBits(8, 8, 32), 'sub1.bn2': QBits(8, None, None),
          'sub1.relu2': QBits(8, None, None), 'sub1.pool2': QBits(8, None, None)}),
        (QBits(4, 4, 32),
         OrderedDict([(r'sub1\..*', {acts_key: 8, wts_key: 8}), (r'sub1\..*1', {acts_key: 16, wts_key: 16})]),
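
The last two cases above differ only in pattern order, which is why overrides must be an OrderedDict: the first matching pattern wins. A sketch of that lookup rule (the real logic lives in distiller's Quantizer; this is just the idea):

import re
from collections import OrderedDict

overrides = OrderedDict([
    (r'sub1\..*1', {'bits': 16}),  # more specific pattern listed first
    (r'sub1\..*',  {'bits': 8}),
])

def lookup(name):
    for pattern, cfg in overrides.items():
        if re.match(pattern, name):
            return cfg
    return None

print(lookup('sub1.conv1'))  # {'bits': 16}
print(lookup('sub1.conv2'))  # {'bits': 8}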
Ejemplo n.º 57
0
class Command(Parser):
    """The central type in the face framework. Instantiate a Command,
    populate it with flags and subcommands, and then call
    command.run() to execute your CLI.

    Note that only the first three constructor arguments are
    positional, the rest are keyword-only.

    Args:
       func (callable): The function called when this command is
          run with an argv that contains no subcommands.
       name (str): The name of this command, used when this
          command is included as a subcommand. (Defaults to name
          of function)
       doc (str): A description or message that appears in various
           help outputs.
       flags (list): A list of Flag instances to initialize the
          Command with. Flags can always be added later with the
          .add() method.
       posargs (bool): Pass True if the command takes positional
          arguments. Defaults to False. Can also pass a PosArgSpec
          instance.
       post_posargs (bool): Pass True if the command takes
          additional positional arguments after a conventional '--'
          specifier.
       help (bool): Pass False to disable the automatically added
          --help flag. Defaults to True. Also accepts a HelpHandler
          instance, see those docs for more details.
       middlewares (list): A list of @face_middleware decorated
          callables which participate in dispatch. Also addable
          via the .add() method. See Middleware docs for more
          details.

    """
    def __init__(self, func, name=None, doc=None, **kwargs):
        name = name if name is not None else _get_default_name(func)

        if doc is None:
            doc = _docstring_to_doc(func)

        # TODO: default posargs if none by inspecting func
        super(Command, self).__init__(
            name, doc,
            flags=kwargs.pop('flags', None),
            posargs=kwargs.pop('posargs', None),
            post_posargs=kwargs.pop('post_posargs', None),
            flagfile=kwargs.pop('flagfile', True))

        _help = kwargs.pop('help', DEFAULT_HELP_HANDLER)
        self.help_handler = _help

        # TODO: if func is callable, check that "next_" isn't taken
        self._path_func_map = OrderedDict()
        self._path_func_map[()] = func

        middlewares = list(kwargs.pop('middlewares', None) or [])
        self._path_mw_map = OrderedDict()
        self._path_mw_map[()] = []
        self._path_wrapped_map = OrderedDict()
        self._path_wrapped_map[()] = func
        for mw in middlewares:
            self.add_middleware(mw)

        if kwargs:
            raise TypeError('unexpected keyword arguments: %r' %
                            sorted(kwargs.keys()))

        if _help:
            if _help.flag:
                self.add(_help.flag)
            if _help.subcmd:
                self.add(_help.func, _help.subcmd)  # for 'help' as a subcmd

        if not func and not _help:
            raise ValueError(
                'Command requires a handler function or help handler'
                ' to be set, not: %r' % func)

        return

    @property
    def func(self):
        return self._path_func_map[()]

    def add(self, *a, **kw):
        """Add a flag, subcommand, or middleware to this Command.

        If the first argument is a callable, this method constructs a
        Command from it and the remaining arguments, all of which are
        optional. See the Command docs for full details on names
        and defaults.

        If the first argument is a string, this method constructs a
        Flag from that flag string and the rest of the method
        arguments, all of which are optional. See the Flag docs for
        more options.

        If the argument is already an instance of Flag or Command, an
        exception is only raised on conflicting subcommands and
        flags. See add_command for details.

        Middleware is only added if it is already decorated with
        @face_middleware. Use .add_middleware() for automatic wrapping
        of callables.

        """
        # TODO: need to check for middleware provides names + flag names
        # conflict

        target = a[0]

        if is_middleware(target):
            return self.add_middleware(target)

        subcmd = a[0]
        if ((not isinstance(subcmd, Command) and callable(subcmd))
                or subcmd is None):
            subcmd = Command(*a, **kw)  # attempt to construct a new subcmd

        if isinstance(subcmd, Command):
            self.add_command(subcmd)
            return subcmd

        flag = a[0]
        if not isinstance(flag, Flag):
            flag = Flag(*a, **kw)  # attempt to construct a Flag from arguments
        super(Command, self).add(flag)

        return flag

    def add_command(self, subcmd):
        """Add a Command, and all of its subcommands, as a subcommand of this
        Command.

        Middleware from the current command is layered on top of the
        subcommand's. An exception may be raised if there are
        conflicting middlewares or subcommand names.
        """
        if not isinstance(subcmd, Command):
            raise TypeError('expected Command instance, not: %r' % subcmd)
        self_mw = self._path_mw_map[()]
        super(Command, self).add(subcmd)
        # map in new functions
        for path in self.subprs_map:
            if path not in self._path_func_map:
                self._path_func_map[path] = subcmd._path_func_map[path[1:]]
                sub_mw = subcmd._path_mw_map[path[1:]]
                self._path_mw_map[
                    path] = self_mw + sub_mw  # TODO: check for conflicts
        return

    def add_middleware(self, mw):
        """Add a single middleware to this command. Outermost middleware
        should be added first. Remember: first added, first called.

        """
        if not is_middleware(mw):
            mw = face_middleware(mw)
        check_middleware(mw)

        for flag in mw._face_flags:
            self.add(flag)

        for path, mws in self._path_mw_map.items():
            self._path_mw_map[path] = [mw] + mws  # TODO: check for conflicts

        return

    # TODO: add_flag()

    def get_flag_map(self, path=(), with_hidden=True):
        """Command's get_flag_map differs from Parser's in that it filters
        the flag map to just the flags used by the endpoint at the
        associated subcommand *path*.
        """
        flag_map = super(Command, self).get_flag_map(path=path,
                                                     with_hidden=with_hidden)
        dep_names = self.get_dep_names(path)
        if 'args_' in dep_names or 'flags_' in dep_names:
            # the argument parse result and flag dict both capture
            # _all_ the flags, so for functions accepting these
            # arguments we bypass filtering.

            # Also note that by setting an argument default in the
            # function definition, the dependency becomes "weak", and
            # this bypassing of filtering will not trigger, unless
            # another function in the chain has a non-default,
            # "strong" dependency. This behavior is especially useful
            # for middleware.

            # TODO: add decorator for the corner case where a function
            # accepts these arguments and doesn't use them all.
            return OrderedDict(flag_map)

        return OrderedDict([(k, f) for k, f in flag_map.items()
                            if f.name in dep_names or f is self.flagfile_flag
                            or f is self.help_handler.flag])

    def get_dep_names(self, path=()):
        """Get a list of the names of all required arguments of a command (and
        any associated middleware).

        By specifying *path*, the same can be done for any subcommand.
        """
        func = self._path_func_map[path]
        if not func:
            return []  # for when no handler is specified

        mws = self._path_mw_map[path]

        # start out with all args of handler function, which gets stronger dependencies
        required_args = set(get_arg_names(func, only_required=False))
        dep_map = {func: set(required_args)}
        for mw in mws:
            arg_names = set(get_arg_names(mw, only_required=True))
            for provide in mw._face_provides:
                dep_map[provide] = arg_names
            if not mw._face_optional:
                # all non-optional middlewares get their args required, too.
                required_args.update(arg_names)

        rdep_map = get_rdep_map(dep_map)

        recursive_required_args = rdep_map[func].union(required_args)

        return sorted(recursive_required_args)

    def prepare(self, paths=None):
        """Compile and validate one or more subcommands to ensure all
        dependencies are met. Call this once all flags, subcommands,
        and middlewares have been added (using .add()).

        This method is automatically called by .run() method, but it
        only does so for the specific subcommand being invoked. More
        conscientious users may want to call this method with no
        arguments to validate that all subcommands are ready for
        execution.
        """
        # TODO: also pre-execute help formatting to make sure all
        # values are sane there, too
        if paths is None:
            paths = self._path_func_map.keys()

        for path in paths:
            func = self._path_func_map[path]
            if func is None:
                continue  # handled by run()

            prs = self.subprs_map[path] if path else self
            provides = []
            if prs.posargs.provides:
                provides += [prs.posargs.provides]
            if prs.post_posargs.provides:
                provides += [prs.post_posargs.provides]

            deps = self.get_dep_names(path)
            flag_names = [f.name for f in self.get_flags(path=path)]
            all_mws = self._path_mw_map[path]

            # filter out unused middlewares
            mws = [
                mw for mw in all_mws if not mw._face_optional
                or [p for p in mw._face_provides if p in deps]
            ]
            provides += _BUILTIN_PROVIDES + flag_names
            try:
                wrapped = get_middleware_chain(mws, func, provides)
            except NameError as ne:
                ne.args = (ne.args[0] + ' (in path: %r)' % (path, ), )
                raise

            self._path_wrapped_map[path] = wrapped

        return

    def run(self, argv=None, extras=None, print_error=None):
        """Parses arguments and dispatches to the appropriate subcommand
        handler. If there is a parse error due to invalid user input,
        an error is printed and a CommandLineError is raised. If not
        caught, a CommandLineError will exit the process, typically
        with status code 1. Also handles dispatching to the
        appropriate HelpHandler, if configured.

        Defaults to handling the arguments on the command line
        (``sys.argv``), but can also be explicitly passed arguments
        via the *argv* parameter.

        Args:
           argv (list): A sequence of strings representing the
              command-line arguments. Defaults to ``sys.argv``.
           extras (dict): A map of additional arguments to be made
              available to the subcommand's handler function.
           print_error (callable): The function that formats/prints
               error messages before program exit on CLI errors.

        .. note::

           For efficiency, :meth:`run()` only checks the subcommand
           invoked by *argv*. To ensure that all subcommands are
           configured properly, call :meth:`prepare()`.

        """
        if print_error is None or print_error is True:
            print_error = default_print_error
        elif print_error and not callable(print_error):
            raise TypeError('expected callable for print_error, not %r' %
                            print_error)

        kwargs = dict(extras) if extras else {}
        # TODO: print_error_ in builtin provides?
        kwargs['print_error_'] = print_error

        try:
            prs_res = self.parse(argv=argv)
        except ArgumentParseError as ape:
            prs_res = ape.prs_res

            # even if parsing failed, check if the caller was trying to access the help flag
            cmd = prs_res.to_cmd_scope()['subcommand_']
            if cmd.help_handler and prs_res.flags and prs_res.flags.get(
                    cmd.help_handler.flag.name):
                kwargs.update(prs_res.to_cmd_scope())
                return inject(cmd.help_handler.func, kwargs)

            msg = 'error: ' + (prs_res.name or self.name)
            if prs_res.subcmds:
                msg += ' ' + ' '.join(prs_res.subcmds or ())

            # the `args` attribute here is the standard-issue Exception.args;
            # it has nothing to do with command-line arguments
            e_msg = ape.args[0]
            if e_msg:
                msg += ': ' + e_msg
            cle = CommandLineError(msg)
            if print_error:
                print_error(msg)
            raise cle

        kwargs.update(prs_res.to_cmd_scope())

        # default in case no middlewares have been installed
        func = self._path_func_map[prs_res.subcmds]

        cmd = kwargs['subcommand_']
        if cmd.help_handler and (
                not func or
                (prs_res.flags and
                 prs_res.flags.get(cmd.help_handler.flag.name))):
            return inject(cmd.help_handler.func, kwargs)
        elif not func:  # pragma: no cover
            raise RuntimeError(
                'expected command handler or help handler to be set')

        self.prepare(paths=[prs_res.subcmds])
        wrapped = self._path_wrapped_map.get(prs_res.subcmds, func)

        try:
            ret = inject(wrapped, kwargs)
        except UsageError as ue:
            if print_error:
                print_error(ue.format_message())
            raise
        return ret
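
The dependency-name machinery in get_dep_names() boils down to argument-name introspection; a stdlib-only sketch of that core idea (not face's actual implementation):

import inspect

def handler(name, flags_, verbose=False):
    pass

sig = inspect.signature(handler)
required = sorted(p.name for p in sig.parameters.values()
                  if p.default is inspect.Parameter.empty)
print(required)  # ['flags_', 'name'] -- 'verbose' is a weak (defaulted) dep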
Ejemplo n.º 58
0
  def ComputeMetrics(self,
                     include_metrics_per_category=False,
                     all_metrics_per_category=False):
    """Computes detection metrics.

    Args:
      include_metrics_per_category: If True, will include metrics per category.
      all_metrics_per_category: If True, include all the summary metrics for
        each category in per_category_ap. Be careful with setting it to True if
        you have more than a handful of categories, because it will pollute
        your mldash.

    Returns:
      1. summary_metrics: a dictionary holding:
        'Precision/mAP': mean average precision over classes averaged over IOU
          thresholds ranging from .5 to .95 with .05 increments
        'Precision/mAP@.50IOU': mean average precision at 50% IOU
        'Precision/mAP@.75IOU': mean average precision at 75% IOU
        'Precision/mAP (small)': mean average precision for small objects
                        (area < 32^2 pixels)
        'Precision/mAP (medium)': mean average precision for medium sized
                        objects (32^2 pixels < area < 96^2 pixels)
        'Precision/mAP (large)': mean average precision for large objects
                        (96^2 pixels < area < 10000^2 pixels)
        'Recall/AR@1': average recall with 1 detection
        'Recall/AR@10': average recall with 10 detections
        'Recall/AR@100': average recall with 100 detections
        'Recall/AR@100 (small)': average recall for small objects with 100
          detections
        'Recall/AR@100 (medium)': average recall for medium objects with 100
          detections
        'Recall/AR@100 (large)': average recall for large objects with 100
          detections
      2. per_category_ap: a dictionary holding category specific results with
        keys of the form: 'Precision mAP ByCategory/category'
        (without the supercategory part if no supercategories exist).
        For backward compatibility 'PerformanceByCategory' is included in the
        output regardless of all_metrics_per_category.
        If evaluating class-agnostic mode, per_category_ap is an empty
        dictionary.

    Raises:
      ValueError: If category_stats does not exist.
    """
    self.evaluate()
    self.accumulate()
    self.summarize()

    summary_metrics = OrderedDict([
        ('Precision/mAP', self.stats[0]),
        ('Precision/mAP@.50IOU', self.stats[1]),
        ('Precision/mAP@.75IOU', self.stats[2]),
        ('Precision/mAP (small)', self.stats[3]),
        ('Precision/mAP (medium)', self.stats[4]),
        ('Precision/mAP (large)', self.stats[5]),
        ('Recall/AR@1', self.stats[6]),
        ('Recall/AR@10', self.stats[7]),
        ('Recall/AR@100', self.stats[8]),
        ('Recall/AR@100 (small)', self.stats[9]),
        ('Recall/AR@100 (medium)', self.stats[10]),
        ('Recall/AR@100 (large)', self.stats[11])
    ])
    if not include_metrics_per_category:
      return summary_metrics, {}
    if not hasattr(self, 'category_stats'):
      raise ValueError('Category stats do not exist')
    per_category_ap = OrderedDict([])
    if self.GetAgnosticMode():
      return summary_metrics, per_category_ap
    for category_index, category_id in enumerate(self.GetCategoryIdList()):
      category = self.GetCategory(category_id)['name']
      # Kept for backward compatibility
      per_category_ap['PerformanceByCategory/mAP/{}'.format(
          category)] = self.category_stats[0][category_index]
      if all_metrics_per_category:
        per_category_ap['Precision mAP ByCategory/{}'.format(
            category)] = self.category_stats[0][category_index]
        per_category_ap['Precision mAP@.50IOU ByCategory/{}'.format(
            category)] = self.category_stats[1][category_index]
        per_category_ap['Precision mAP@.75IOU ByCategory/{}'.format(
            category)] = self.category_stats[2][category_index]
        per_category_ap['Precision mAP (small) ByCategory/{}'.format(
            category)] = self.category_stats[3][category_index]
        per_category_ap['Precision mAP (medium) ByCategory/{}'.format(
            category)] = self.category_stats[4][category_index]
        per_category_ap['Precision mAP (large) ByCategory/{}'.format(
            category)] = self.category_stats[5][category_index]
        per_category_ap['Recall AR@1 ByCategory/{}'.format(
            category)] = self.category_stats[6][category_index]
        per_category_ap['Recall AR@10 ByCategory/{}'.format(
            category)] = self.category_stats[7][category_index]
        per_category_ap['Recall AR@100 ByCategory/{}'.format(
            category)] = self.category_stats[8][category_index]
        per_category_ap['Recall AR@100 (small) ByCategory/{}'.format(
            category)] = self.category_stats[9][category_index]
        per_category_ap['Recall AR@100 (medium) ByCategory/{}'.format(
            category)] = self.category_stats[10][category_index]
        per_category_ap['Recall AR@100 (large) ByCategory/{}'.format(
            category)] = self.category_stats[11][category_index]

    return summary_metrics, per_category_ap
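
The packing of pycocotools' positional stats vector into named metrics is simple zip-into-OrderedDict work; a toy version with made-up numbers:

from collections import OrderedDict

stats = [0.41, 0.62, 0.44]  # pretend (truncated) COCOeval.stats output
names = ['Precision/mAP', 'Precision/mAP@.50IOU', 'Precision/mAP@.75IOU']
summary_metrics = OrderedDict(zip(names, stats))
print(summary_metrics['Precision/mAP@.50IOU'])  # 0.62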
Ejemplo n.º 59
0
class TableMetadata(object):
    """
    A representation of the schema for a single table.
    """

    keyspace = None
    """ An instance of :class:`~.KeyspaceMetadata`. """

    name = None
    """ The string name of the table. """

    partition_key = None
    """
    A list of :class:`.ColumnMetadata` instances representing the columns in
    the partition key for this table.  This will always hold at least one
    column.
    """

    clustering_key = None
    """
    A list of :class:`.ColumnMetadata` instances representing the columns
    in the clustering key for this table.  These are all of the
    :attr:`.primary_key` columns that are not in the :attr:`.partition_key`.

    Note that a table may have no clustering keys, in which case this will
    be an empty list.
    """
    @property
    def primary_key(self):
        """
        A list of :class:`.ColumnMetadata` representing the components of
        the primary key for this table.
        """
        return self.partition_key + self.clustering_key

    columns = None
    """
    A dict mapping column names to :class:`.ColumnMetadata` instances.
    """

    is_compact_storage = False

    options = None
    """
    A dict mapping table option names to their specific settings for this
    table.
    """

    recognized_options = (
        "comment", "read_repair_chance", "dclocal_read_repair_chance",
        "replicate_on_write", "gc_grace_seconds", "bloom_filter_fp_chance",
        "caching", "compaction_strategy_class", "compaction_strategy_options",
        "min_compaction_threshold", "max_compaction_threshold",
        "compression_parameters", "min_index_interval", "max_index_interval",
        "index_interval", "speculative_retry", "rows_per_partition_to_cache",
        "memtable_flush_period_in_ms", "populate_io_cache_on_flush",
        "compaction", "compression", "default_time_to_live")

    compaction_options = {
        "min_compaction_threshold": "min_threshold",
        "max_compaction_threshold": "max_threshold",
        "compaction_strategy_class": "class"
    }

    def __init__(self,
                 keyspace_metadata,
                 name,
                 partition_key=None,
                 clustering_key=None,
                 columns=None,
                 options=None):
        self.keyspace = keyspace_metadata
        self.name = name
        self.partition_key = [] if partition_key is None else partition_key
        self.clustering_key = [] if clustering_key is None else clustering_key
        self.columns = OrderedDict() if columns is None else columns
        self.options = options
        self.comparator = None

    def export_as_string(self):
        """
        Returns a string of CQL queries that can be used to recreate this table
        along with all indexes on it.  The returned string is formatted to
        be human readable.
        """
        ret = self.as_cql_query(formatted=True)
        ret += ";"

        for col_meta in self.columns.values():
            if col_meta.index:
                ret += "\n%s;" % (col_meta.index.as_cql_query(), )

        return ret

    def as_cql_query(self, formatted=False):
        """
        Returns a CQL query that can be used to recreate this table (index
        creations are not included).  If `formatted` is set to :const:`True`,
        extra whitespace will be added to make the query human readable.
        """
        ret = "CREATE TABLE %s.%s (%s" % (protect_name(
            self.keyspace.name), protect_name(
                self.name), "\n" if formatted else "")

        if formatted:
            column_join = ",\n"
            padding = "    "
        else:
            column_join = ", "
            padding = ""

        columns = []
        for col in self.columns.values():
            columns.append("%s %s%s" % (
                protect_name(col.name), col.typestring,
                ' static' if col.is_static else ''))

        if len(self.partition_key) == 1 and not self.clustering_key:
            columns[0] += " PRIMARY KEY"

        ret += column_join.join("%s%s" % (padding, col) for col in columns)

        # primary key
        if len(self.partition_key) > 1 or self.clustering_key:
            ret += "%s%sPRIMARY KEY (" % (column_join, padding)

            if len(self.partition_key) > 1:
                ret += "(%s)" % ", ".join(
                    protect_name(col.name) for col in self.partition_key)
            else:
                ret += protect_name(self.partition_key[0].name)

            if self.clustering_key:
                ret += ", %s" % ", ".join(
                    protect_name(col.name) for col in self.clustering_key)

            ret += ")"

        # options
        ret += "%s) WITH " % ("\n" if formatted else "")

        option_strings = []
        if self.is_compact_storage:
            option_strings.append("COMPACT STORAGE")

        if self.clustering_key:
            cluster_str = "CLUSTERING ORDER BY "

            clustering_names = protect_names(
                [c.name for c in self.clustering_key])

            if self.is_compact_storage and \
                    not issubclass(self.comparator, types.CompositeType):
                subtypes = [self.comparator]
            else:
                subtypes = self.comparator.subtypes

            inner = []
            for colname, coltype in zip(clustering_names, subtypes):
                ordering = "DESC" if issubclass(coltype,
                                                types.ReversedType) else "ASC"
                inner.append("%s %s" % (colname, ordering))

            cluster_str += "(%s)" % ", ".join(inner)
            option_strings.append(cluster_str)

        option_strings.extend(self._make_option_strings())

        join_str = "\n    AND " if formatted else " AND "
        ret += join_str.join(option_strings)

        return ret

    def _make_option_strings(self):
        ret = []
        options_copy = dict(self.options.items())
        if not options_copy.get('compaction'):
            options_copy.pop('compaction', None)

            actual_options = json.loads(
                options_copy.pop('compaction_strategy_options', '{}'))
            for (system_table_name,
                 compact_option_name) in self.compaction_options.items():
                value = options_copy.pop(system_table_name, None)
                if value:
                    actual_options.setdefault(compact_option_name, value)

            compaction_option_strings = [
                "'%s': '%s'" % (k, v) for k, v in actual_options.items()
            ]
            ret.append('compaction = {%s}' %
                       ', '.join(compaction_option_strings))

        for system_table_name in self.compaction_options.keys():
            options_copy.pop(system_table_name, None)  # delete if present
        options_copy.pop('compaction_strategy_option', None)

        if not options_copy.get('compression'):
            params = json.loads(
                options_copy.pop('compression_parameters', '{}'))
            if params:
                param_strings = [
                    "'%s': '%s'" % (k, v) for k, v in params.items()
                ]
                ret.append('compression = {%s}' % ', '.join(param_strings))

        for name, value in options_copy.items():
            if value is not None:
                if name == "comment":
                    value = value or ""
                ret.append("%s = %s" % (name, protect_value(value)))

        return sorted(ret)
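
A toy version of the name = value option formatting that _make_option_strings() produces (protect_value is replaced by %r here, so quoting is only approximate):

options = {'gc_grace_seconds': 864000, 'comment': 'users table'}
option_strings = sorted('%s = %r' % (name, value)
                        for name, value in options.items()
                        if value is not None)
print('\n    AND '.join(option_strings))
# comment = 'users table'
#     AND gc_grace_seconds = 864000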
Ejemplo n.º 60
0
def augment_with_contrast_tgen(dataset, filename):
    """Augments the MRs with auxiliary tokens indicating a pair of slots that should be contrasted in the
    corresponding generated utterance. The output is in the format accepted by TGen.
    """

    contrast_connectors = ['but', 'however', 'yet']
    scalar_slots = get_scalar_slots()

    alignments = []
    contrasts = []

    print('Augmenting MRs with contrast in ' + str(filename))

    # Read in the data
    data_cont = data_loader.init_test_data(os.path.join(config.DATA_DIR, dataset, filename))
    mrs, utterances = data_cont['data']
    _, _, slot_sep, val_sep, val_sep_end = data_cont['separators']

    for i, mr in enumerate(mrs):
        mr_dict = OrderedDict()

        # Extract the slot-value pairs into a dictionary
        for slot_value in mr.split(slot_sep):
            slot, value, slot_orig, _ = data_loader.parse_slot_and_value(slot_value, val_sep, val_sep_end)
            mr_dict[slot] = value
            mrs[i] = mrs[i].replace(slot_orig, slot)

        alignments.append(find_alignment(utterances[i], mr_dict))

    for i in range(len(utterances)):
        contrasts.append(['none', 'none', 'none', 'none'])
        for contrast_conn in contrast_connectors:
            contrast_pos = utterances[i].find(contrast_conn)
            if contrast_pos >= 0:
                slot_before = None
                value_before = None
                slot_after = None
                value_after = None

                for pos, slot, value in alignments[i]:
                    if pos > contrast_pos:
                        if not slot_before:
                            break
                        if slot in scalar_slots:
                            slot_after = slot
                            value_after = value
                            break
                    else:
                        if slot in scalar_slots:
                            slot_before = slot
                            value_before = value

                if slot_before and slot_after:
                if (scalar_slots[slot_before][value_before] ==
                        scalar_slots[slot_after][value_after]):
                        contrasts[i][2] = slot_before
                        contrasts[i][3] = slot_after
                    else:
                        contrasts[i][0] = slot_before
                        contrasts[i][1] = slot_after

                break

    new_df = pd.DataFrame(columns=['mr', 'ref', 'contrast1', 'contrast2', 'concession1', 'concession2'])
    new_df['mr'] = mrs
    new_df['ref'] = utterances
    new_df['contrast1'] = [tup[0] for tup in contrasts]
    new_df['contrast2'] = [tup[1] for tup in contrasts]
    new_df['concession1'] = [tup[2] for tup in contrasts]
    new_df['concession2'] = [tup[3] for tup in contrasts]

    filename_out = os.path.splitext(filename)[0] + '_augm_contrast_tgen.csv'
    new_df.to_csv(os.path.join(config.DATA_DIR, dataset, filename_out), index=False, encoding='utf8')
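
The connector scan at the heart of the loop above is just str.find over a small list; a toy version:

contrast_connectors = ['but', 'however', 'yet']
utt = 'The food is cheap, but the service is poor.'
for contrast_conn in contrast_connectors:
    contrast_pos = utt.find(contrast_conn)
    if contrast_pos >= 0:
        print(contrast_conn, contrast_pos)  # but 19
        break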