def canonicalize(self, solver):
    """Computes the graph implementation of the problem.

    Parameters
    ----------
    solver : str
        The solver being targeted.

    Returns
    -------
    tuple
        (affine objective, constraints map, cone dimensions, solver chosen)
    """
    constraints = []
    obj, constr = self.objective.canonical_form
    constraints += constr
    unique_constraints = list(unique(self.constraints, key=lambda c: c.id))
    for constr in unique_constraints:
        constraints += constr.canonical_form[1]
    constr_map = self._filter_constraints(constraints)
    solver = self._choose_solver(constr_map, solver)

    dims = {}
    dims["f"] = sum(c.size[0] * c.size[1] for c in constr_map[s.EQ])
    dims["l"] = sum(c.size[0] * c.size[1] for c in constr_map[s.LEQ])
    # Format SOC, SOC_EW, and SDP constraints for the solver.
    nonlin = constr_map[s.SOC] + constr_map[s.SOC_EW] + constr_map[s.SDP]
    for constr in nonlin:
        for ineq_constr in constr.format():
            constr_map[s.LEQ].append(ineq_constr)
    dims["q"] = [c.size[0] for c in constr_map[s.SOC]]
    # Elementwise SOC constraints have an SOC constraint
    # for each element in their arguments.
    for constr in constr_map[s.SOC_EW]:
        for cone_size in constr.size:
            dims["q"].append(cone_size[0])
    dims["s"] = [c.size[0] for c in constr_map[s.SDP]]

    # Format exponential cone constraints.
    if solver == s.CVXOPT:
        for constr in constr_map[s.EXP]:
            constr_map[s.EQ] += constr.format(s.CVXOPT)
    elif solver == s.SCS:
        for constr in constr_map[s.EXP]:
            constr_map[s.LEQ] += constr.format(s.SCS)
    dims["ep"] = sum(c.size[0] * c.size[1] for c in constr_map[s.EXP])

    # Remove redundant constraints.
    for key in [s.EQ, s.LEQ]:
        constraints = unique(constr_map[key], key=lambda c: c.constr_id)
        constr_map[key] = list(constraints)

    return (obj, constr_map, dims, solver)
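# A minimal, standalone illustration of the dedup pattern used above
# (the namedtuple and values are made up): toolz.unique keeps the first
# item seen for each key, so repeated constraint ids collapse to one.
from collections import namedtuple

from toolz import unique

C = namedtuple("C", "constr_id expr")
constrs = [C(1, "x <= 0"), C(2, "y == 0"), C(1, "x <= 0")]
assert [c.constr_id for c in unique(constrs, key=lambda c: c.constr_id)] == [1, 2]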
from collections import Counter
from typing import Iterable, Tuple, TypeVar

from toolz import unique

T = TypeVar("T")


def matchingfrequencies(*seqs: Iterable[T], key=None) -> Iterable[Tuple[T, int]]:
    # Count each element at most once per sequence, across all sequences.
    c: Counter = Counter()
    for seq in seqs:
        c.update(unique(seq, key=key))
    return c.items()
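# A quick usage check for the Counter-based matchingfrequencies above
# (inputs are made up): each element counts at most once per sequence,
# so the result reports how many of the given sequences contain it.
assert dict(matchingfrequencies([1, 1, 2], [2, 3], [2, 4])) == {1: 1, 2: 3, 3: 1, 4: 1}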
def test_print_table(labels, title):
    table = cli_inference.print_table(labels, title, print=False)
    assert isinstance(table, rich.table.Table)
    assert table.title == title
    # One row per unique label, plus one extra row.
    n_unique = itertoolz.count(itertoolz.unique(labels))
    assert table.row_count == n_unique + 1
    assert all(
        label in getattr(itertoolz.first(table.columns), "_cells")
        for label in labels
    )
    table = cli_inference.print_table(labels, title, print=True)
def presolve(objective, constr_map, check_params=False):
    """Eliminates unnecessary constraints and short-circuits the solver
    if possible.

    Parameters
    ----------
    objective : LinOp
        The canonicalized objective.
    constr_map : dict
        A map of constraint type to a list of constraints.
    check_params : bool, optional
        Should constraints with parameters be evaluated?

    Returns
    -------
    str or None
        s.INFEASIBLE if the problem is trivially infeasible, else None.
    """
    # Remove redundant constraints.
    for key, constraints in constr_map.items():
        uniq_constr = unique(constraints, key=lambda c: c.constr_id)
        constr_map[key] = list(uniq_constr)

    # If there are no constraints, the problem is unbounded
    # if any of the coefficients are non-zero.
    # If all the coefficients are zero then return the constant term
    # and set all variables to 0.
    if not any(constr_map.values()):
        str(objective)  # TODO

    # Remove constraints with no variables or parameters.
    for key in [s.EQ, s.LEQ]:
        new_constraints = []
        for constr in constr_map[key]:
            vars_ = lu.get_expr_vars(constr.expr)
            if len(vars_) == 0 and not lu.get_expr_params(constr.expr):
                coeff = op2mat.get_constant_coeff(constr.expr)
                sign = intf.sign(coeff)
                # For an equality constraint, coeff must be zero.
                # For an inequality (i.e. <= 0) constraint,
                # coeff must be non-positive.
                if key is s.EQ and not sign.is_zero() or \
                        key is s.LEQ and not sign.is_negative():
                    return s.INFEASIBLE
            else:
                new_constraints.append(constr)
        constr_map[key] = new_constraints
    return None
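# The infeasibility test at the heart of presolve, distilled into plain
# Python (a sketch, not the cvxpy API): a constraint with no variables or
# parameters reduces to a constant, and that constant alone decides
# feasibility.
def _trivially_infeasible(constant, kind):
    """kind is "EQ" for expr == 0 or "LEQ" for expr <= 0."""
    if kind == "EQ":
        return constant != 0  # e.g. 3 == 0 can never hold
    return constant > 0       # e.g. 3 <= 0 can never hold

assert _trivially_infeasible(3, "LEQ")
assert not _trivially_infeasible(-1, "LEQ")
assert _trivially_infeasible(2, "EQ")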
def _format_for_solver(self, constr_map, solver):
    """Formats the problem for the solver.

    Parameters
    ----------
    constr_map : dict
        A map of constraint type to a list of constraints.
    solver : str
        The solver being targeted.

    Returns
    -------
    dict
        The dimensions of the cones.
    """
    dims = {}
    dims["f"] = sum(c.size[0] * c.size[1] for c in constr_map[s.EQ])
    dims["l"] = sum(c.size[0] * c.size[1] for c in constr_map[s.LEQ])
    # Format SOC and SDP constraints for the solver.
    nonlin = constr_map[s.SOC] + constr_map[s.SDP]
    for constr in nonlin:
        for ineq_constr in constr.format():
            constr_map[s.LEQ].append(ineq_constr)
    # Elementwise SOC constraints have an SOC constraint
    # for each element in their arguments.
    dims["q"] = []
    for constr in constr_map[s.SOC]:
        for cone_size in constr.size:
            dims["q"].append(cone_size[0])
    dims["s"] = [c.size[0] for c in constr_map[s.SDP]]

    # Format exponential cone constraints.
    if solver == s.CVXOPT:
        for constr in constr_map[s.EXP]:
            constr_map[s.EQ] += constr.format(s.CVXOPT)
    elif solver == s.SCS:
        for constr in constr_map[s.EXP]:
            constr_map[s.LEQ] += constr.format(s.SCS)
    dims["ep"] = sum(c.size[0] * c.size[1] for c in constr_map[s.EXP])

    # Remove redundant constraints.
    for key in [s.EQ, s.LEQ]:
        constraints = unique(constr_map[key], key=lambda c: c.constr_id)
        constr_map[key] = list(constraints)

    return dims
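# For reference, the cone-dimension dict built above follows the usual
# conic-solver convention (illustrative values only):
#
#   dims = {"f": 3,       # total rows of equality constraints
#           "l": 5,       # total rows of scalar inequalities
#           "q": [4, 4],  # one size per second-order cone
#           "s": [3],     # one matrix order per semidefinite cone
#           "ep": 0}      # total size of exponential cone constraints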
from pathlib import Path
from typing import Iterable, List

from toolz import itertoolz


def filter_to_preferred_ext(files: Iterable[Path], exts: List[str]) -> Iterable[Path]:
    """Yield each file once; when several files share a stem, prefer the
    sibling whose extension comes first in ``exts`` (if it exists)."""
    files = list(files)
    # Collect the stems of all non-hidden files.
    files_without_ext = (
        file.with_suffix("") for file in files if not file.name.startswith(".")
    )
    file_names_without_ext = (file.name for file in files_without_ext)
    unseen_stems = set(itertoolz.unique(file_names_without_ext))
    for file in files:
        stem = file.with_suffix("").name
        if stem not in unseen_stems:
            continue  # A file with this stem was already yielded.
        # Prefer the first extension whose sibling exists on disk.
        for ext in exts:
            if file.with_suffix(ext).is_file():
                yield file.with_suffix(ext)
                break
        else:
            yield file
        unseen_stems.discard(stem)
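# A runnable sketch of filter_to_preferred_ext (file names are made up):
# data.csv and data.json share a stem, so preferring ".json" yields the
# JSON file once instead of both siblings.
import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    root = Path(tmp)
    for name in ("data.csv", "data.json", "notes.txt"):
        (root / name).touch()
    result = sorted(p.name for p in filter_to_preferred_ext(root.iterdir(), [".json"]))
    assert result == ["data.json", "notes.txt"]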
def canonicalize(self):
    """Computes the graph implementation of the problem.

    Returns
    -------
    tuple
        (affine objective, constraints dict)
    """
    constraints = []
    obj, constr = self.objective.canonical_form
    constraints += constr
    unique_constraints = list(unique(self.constraints, key=lambda c: c.id))
    for constr in unique_constraints:
        constraints += constr.canonical_form[1]
    constr_map = self._filter_constraints(constraints)
    return (obj, constr_map)
def add_extra_content_to_mapping_info(self, mapping):
    # This will append the field names.
    super().add_extra_content_to_mapping_info(mapping)
    ndar_cols = self._get_ndar_cols_with_def_()
    fields_for_tables = [
        self._get_table_fields_(f) for f in self.source.data_table_names
    ]  # type: List[List[str]]
    all_fields_with_no_duplicate = list(unique(chain.from_iterable(fields_for_tables)))

    # This will insert each field's information.
    mapping_header = self._add_mapping_headers_from_src_sink()
    wtp_col_id_index = mapping_header.index(self.source.template_fields["id"])
    wtp_col_name_index = mapping_header.index(self.source.template_fields["col_name"])
    wtp_missing_value_index = mapping_header.index(self.source.template_fields["missing_vals"])
    ndar_col_id_index = mapping_header.index(self.sink.template_fields["id"])
    ndar_col_name_index = mapping_header.index(self.sink.template_fields["col_name"])
    ndar_default_index = mapping_header.index(self.sink.template_fields["default"])

    longest = max(len(all_fields_with_no_duplicate), len(ndar_cols))
    # The index starts from 1, for user convenience.
    for index in range(1, longest + 1):
        # Fill in each section.
        row = [""] * len(mapping_header)  # type: List[str]
        if index <= len(all_fields_with_no_duplicate):
            row[wtp_col_id_index] = index
            row[wtp_col_name_index] = all_fields_with_no_duplicate[index - 1]
            row[wtp_missing_value_index] = "9998, 9999, "
        if index <= len(ndar_cols):
            row[ndar_col_id_index] = index
            row[ndar_col_name_index] = ndar_cols[index - 1]
            row[ndar_default_index] = ""
        mapping.content.append(row)
def canonicalize(self):
    """Computes the graph implementation of the problem.

    Returns
    -------
    tuple
        (affine objective, constraints dict)
    """
    canon_constr = []
    obj, constr = self.objective.canonical_form
    canon_constr += constr
    for constr in self.constraints:
        canon_constr += constr.canonical_form[1]
    # Remove redundant constraints.
    canon_constr = unique(canon_constr, key=lambda c: c.constr_id)
    constr_map = self._filter_constraints(canon_constr)
    return (obj, constr_map)
def journal_volume_sorting(packages):
    """Return namedtuples for package sorting with volume/issue as key."""
    v = 'dara_Publication_Volume'
    i = 'dara_Publication_Issue'

    def t_construct(vi):
        VIP = namedtuple('VIP', 'volume issue packages')
        pf = filter(
            lambda d: d.get(v, '') == vi[0] and d.get(i, '') == vi[1],
            packages,
        )
        return VIP(vi[0], vi[1], pf)

    sort = tk.request.params.get('sort', False)
    if sort == u'{} desc, {} desc'.format(v, i):
        vi_list = map(lambda d: (d.get(v, ''), d.get(i, '')), packages)
        return map(t_construct, unique(vi_list))
    return False
from heapq import merge
from itertools import groupby
from typing import Iterable, Tuple, TypeVar

from toolz import count, unique

T = TypeVar("T")


def matchingfrequencies(*seqs: Iterable[T], key=None) -> Iterable[Tuple[T, int]]:
    # Merge the per-sequence unique streams and count equal runs.
    # Note: count here is toolz.count (consumes and counts an iterable),
    # not itertools.count.
    for k, g in groupby(merge(*[unique(seq, key=key) for seq in seqs], key=key)):
        yield (k, count(g))
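# Unlike the Counter-based variant earlier, this streaming version relies on
# heapq.merge and itertools.groupby, so it only groups correctly when every
# input sequence is sorted. A quick check on sorted inputs (made-up data):
assert list(matchingfrequencies([1, 1, 2], [2, 3], [2, 4])) == [(1, 1), (2, 3), (3, 1), (4, 1)]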
from toolz import unique


def iseven(x):
    return x % 2 == 0


def test_unique():
    assert tuple(unique((1, 2, 3))) == (1, 2, 3)
    assert tuple(unique((1, 2, 1, 3))) == (1, 2, 3)
    # With a key, unique keeps the first element seen for each key value.
    assert tuple(unique((1, 2, 3), key=iseven)) == (1, 2)
import collections
import operator
import re
from datetime import datetime

from toolz import unique, valmap

# KALintMail is assumed to be a namedtuple-like record
# ('msgid subject date body') defined elsewhere in this module.


def fetchEMail(credentials):
    """Fetch all lint-report emails, deleting outdated and unrelated messages."""
    from imapclient import IMAPClient
    rgx = re.compile(
        r"(\d+) crowdin entries linted for ([a-z]{2}(-[a-zA-Z]{2})?)")
    server = IMAPClient(credentials["host"], use_uid=True, ssl=True)
    server.login(credentials["user"], credentials["password"])
    select_info = server.select_folder('INBOX')
    print('{0} messages in INBOX'.format(select_info[b'EXISTS']))
    # Fetch list of emails.
    messages = server.search(['NOT', 'DELETED'])
    msgmap = collections.defaultdict(list)
    # Fetch, filter and parse messages.
    response = server.fetch(messages, [
        'BODY.PEEK[HEADER.FIELDS (SUBJECT)]',
        'BODY.PEEK[HEADER.FIELDS (DATE)]',
        'RFC822'
    ])
    for msgid, data in response.items():
        try:
            subject = data[b'BODY[HEADER.FIELDS (SUBJECT)]'].decode("utf-8")
        except KeyError:
            continue
        if subject.startswith("Subject: "):
            subject = subject[len("Subject: "):]
        subject = subject.strip()
        # Parse the Date header, stripping any trailing timezone name.
        date = data[b'BODY[HEADER.FIELDS (DATE)]'].decode("utf-8").strip()
        if date.startswith("Date: "):
            date = date[len("Date: "):]
        for tz_name in ("(PST)", "(PDT)", "(UTC)", "(CEST)", "(EST)"):
            if date.endswith(tz_name):
                date = date[:-len(tz_name)].strip()
        try:
            date = datetime.strptime(date, "%a, %d %b %Y %H:%M:%S %z")
        except ValueError:
            date = datetime.strptime(date, "%d %b %Y %H:%M:%S %z")
        # Keep only messages whose subject matches the lint-report pattern.
        match = rgx.match(subject)
        if not match:
            print('Deleting message {0}: "{1}"'.format(msgid, subject))
            server.delete_messages(msgid)
        else:
            msgmap[match.group(2)].append(
                KALintMail(msgid, subject, date, data[b"RFC822"].decode("utf-8")))
    # Filter duplicates and sort by date.
    msgmap = valmap(
        lambda vs: sorted(unique(vs, key=operator.attrgetter("msgid")),
                          key=operator.attrgetter("date")),
        msgmap)
    # Delete all but the most recent message per language.
    msgidsToDelete = set()
    for lang, values in msgmap.items():
        msgidsToDelete.update(v.msgid for v in values[:-1])
    server.delete_messages(msgidsToDelete)
    print("Deleted {0} messages".format(len(msgidsToDelete)))
    # Keep only the latest message per language.
    return valmap(operator.itemgetter(-1), msgmap)