class FrozenOrderedDict(Mapping):
    """
    Frozen OrderedDict.
    """

    def __init__(self, *args, **kwargs):
        self.__dict = OrderedDict(*args, **kwargs)
        self.__hash = None

    def __getitem__(self, item):
        return self.__dict[item]

    def __iter__(self):
        return iter(self.__dict)

    def __len__(self):
        return len(self.__dict)

    def __hash__(self):
        if self.__hash is None:
            self.__hash = reduce(operator.xor, map(hash, self.items()), 0)

        return self.__hash

    def __repr__(self):
        return "{}({!r})".format(self.__class__.__name__, self.items())

    def copy(self, *args, **kwargs):
        new_dict = self.__dict.copy()

        if args or kwargs:
            new_dict.update(OrderedDict(*args, **kwargs))

        return self.__class__(new_dict)
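
A brief usage sketch (assuming the module-level imports the class relies on are in place: OrderedDict, the Mapping ABC from collections.abc, functools.reduce and operator):

fd = FrozenOrderedDict([("a", 1), ("b", 2)])
print(fd["a"], len(fd), list(fd))                  # 1 2 ['a', 'b']
# Equal contents give equal hashes, since the hash is an xor over the items.
print(hash(fd) == hash(FrozenOrderedDict([("b", 2), ("a", 1)])))  # True
# copy() accepts updates and returns a new frozen mapping; the original is untouched.
fd2 = fd.copy(b=3, c=4)
print(list(fd2.items()), list(fd.items()))         # [('a', 1), ('b', 3), ('c', 4)] [('a', 1), ('b', 2)]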
Example #2
    def _classes_in_config_sample(self):
        """
        Yields only classes with own traits, and their subclasses.

        Thus, the produced sample config file will contain all classes
        on which a trait value may be overridden:

        - either on the class owning the trait,
        - or on its subclasses, even if those subclasses do not define
          any traits themselves.
        """
        cls_to_config = OrderedDict(
            (cls, bool(cls.class_own_traits(config=True))) for cls in self._classes_inc_parents()
        )

        def is_any_parent_included(cls):
            return any(b in cls_to_config and cls_to_config[b] for b in cls.__bases__)

        ## Mark "empty" classes for inclusion if any of their parents owns traits,
        #  and loop until no more classes get marked.
        #
        while True:
            to_incl_orig = cls_to_config.copy()
            cls_to_config = OrderedDict(
                (cls, inc_yes or is_any_parent_included(cls)) for cls, inc_yes in cls_to_config.items()
            )
            if cls_to_config == to_incl_orig:
                break
        for cl, inc_yes in cls_to_config.items():
            if inc_yes:
                yield cl
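
The fixed-point loop above is easier to see on a toy class hierarchy. Here is a minimal, self-contained sketch, where has_traits stands in for class_own_traits(config=True) (an assumption, not the traitlets API):

from collections import OrderedDict

class A(object): pass        # pretend A owns a config trait
class B(A): pass             # no traits of its own
class C(B): pass             # no traits of its own
class D(object): pass        # unrelated, no traits

has_traits = OrderedDict([(A, True), (B, False), (C, False), (D, False)])

def is_any_parent_included(cls, marked):
    return any(marked.get(base, False) for base in cls.__bases__)

marked = has_traits.copy()
while True:
    previous = marked.copy()
    marked = OrderedDict((cls, inc or is_any_parent_included(cls, marked)) for cls, inc in marked.items())
    if marked == previous:
        break

print([cls.__name__ for cls, inc in marked.items() if inc])   # ['A', 'B', 'C']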
Example #3
def goal_diff_model(game, df):
    new_game = game.copy()
    new_game["win"] = pd.Series()
    new_game["tie"] = pd.Series()
    new_game["win_or_tie"] = pd.Series()
    state_match = ["time", "diff"]

    for row in new_game.index:

        paramHome = OrderedDict([(k, game.iloc[row][k]) for k in state_match])
        paramAway = paramHome.copy()

        paramAway["diff"] = -1 * paramHome["diff"]

        home = probability_of_consolid_state(paramHome, state_match, df)
        away = probability_of_consolid_state(paramAway, state_match, df)

        if home is None or away is None:
            continue

        win, tie = home_win(home, away), home_tie(home, away)

        new_game["win"].ix[row] = win
        new_game["tie"].ix[row] = tie
        new_game["win_or_tie"].ix[row] = win + tie

    return new_game
def createTrainingVectors(tokenized_texts_dict):
    """
        Given the filenames and their contents, this method creates the training
        vectors by building a unique list of all the words in the training set.
    """
    print("Creating vectors for training data")

    unique_words = []
    for filename, text in tokenized_texts_dict.iteritems():
        # print("Reading {0} and adding to unique word list".format(filename))
        unique_words.extend(word_tokenize(text))

    unique_words = set(unique_words)

    # Creating the initial vector with counts 0 for all training sets
    zero_vector = OrderedDict(zip(unique_words, [0] * len(unique_words)))
    print("Creating the zero vector")

    # For each training file, create an OrderedDict containing its word counts (together with zero counts),
    # and store it in a dict, indexed by its corresponding filename
    vectors = {}
    for filename, token_list in tokenized_texts_dict.iteritems():
        current_vector = zero_vector.copy()
        current_vector.update(Counter(word_tokenize(token_list)))
        vectors[filename] = current_vector

    return vectors, zero_vector
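
The core pattern here, stripped of NLTK (str.split stands in for word_tokenize, purely for the sketch): build one zero-count template, then .copy() it per document and update it with a Counter.

from collections import Counter, OrderedDict

docs = {"a.txt": "the cat sat", "b.txt": "the dog sat down"}

vocab = sorted({word for text in docs.values() for word in text.split()})
zero_vector = OrderedDict((word, 0) for word in vocab)

vectors = {}
for name, text in docs.items():
    vector = zero_vector.copy()           # fresh zero counts for this document
    vector.update(Counter(text.split()))  # overwrite counts for words that occur
    vectors[name] = vector

print(list(vectors["a.txt"].items()))
# [('cat', 1), ('dog', 0), ('down', 0), ('sat', 1), ('the', 1)]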
Example #5
def naive_model(game, df):
    state_rep = ["time", "home_score", "away_score"]

    new_game = game.copy()
    new_game["win"] = pd.Series()
    new_game["tie"] = pd.Series()
    new_game["win_or_tie"] = pd.Series()

    for row in new_game.index:

        params = OrderedDict([(k, game.iloc[row][k]) for k in state_rep])
        params_away = params.copy()
        params_away["home_score"], params_away["away_score"] = params["away_score"], params["home_score"]

        home = probability_of_consolid_state(params, state_rep, df)
        away = probability_of_consolid_state(params_away, state_rep, df)

        if home is None or away is None:
            continue

        win, tie = home_win(home, away), home_tie(home, away)
        new_game["win"].ix[row] = win
        new_game["tie"].ix[row] = tie
        new_game["win_or_tie"].ix[row] = win + tie

    return new_game
    def test_copying(self):
        # Check that ordered dicts are copyable, deepcopyable, picklable,
        # and have a repr/eval round-trip
        pairs = [("c", 1), ("b", 2), ("a", 3), ("d", 4), ("e", 5), ("f", 6)]
        od = OrderedDict(pairs)
        update_test = OrderedDict()
        update_test.update(od)
        for i, dup in enumerate(
            [
                od.copy(),
                copy.copy(od),
                copy.deepcopy(od),
                pickle.loads(pickle.dumps(od, 0)),
                pickle.loads(pickle.dumps(od, 1)),
                pickle.loads(pickle.dumps(od, 2)),
                pickle.loads(pickle.dumps(od, -1)),
                eval(repr(od)),
                update_test,
                OrderedDict(od),
            ]
        ):
            self.assertTrue(dup is not od)
            self.assertEqual(dup, od)
            self.assertEqual(list(dup.items()), list(od.items()))
            self.assertEqual(len(dup), len(od))
            self.assertEqual(type(dup), type(od))
Example #7
def merge_data_for_people(people, models):
    """
        Collect data for a certain set of people from a list of model objects.
        Merge results from models that have the same name.
    """

    # All headers from the models
    all_headers = list(chain.from_iterable([m.analytics_headers() for m in models]))

    # Initialize a dict containing all people each with all headers,
    # and default values of '' for each header
    data = OrderedDict()
    headers = []
    for h in all_headers:
        if h["title"] not in data.keys():
            data[h["title"]] = ""
            headers.append(h)
    persondata = dict((p, data.copy()) for p in people)
    persondata["_headers"] = headers

    for m in models:
        mdata = m.analytics_data_by_person()
        for p in people:
            if p in mdata:
                persondata[p].update(mdata[p])
    return persondata
Example #8
class Buckets(object):
    """Proxy for OrderedDict"""

    def __init__(self, *args, **kwargs):
        self._od = OrderedDict(*args, **kwargs)

    def __getattr__(self, a):
        return getattr(self._od, a)

    def __setitem__(self, *args, **kwargs):
        return self._od.__setitem__(*args, **kwargs)

    def __getitem__(self, *args, **kwargs):
        return self._od.__getitem__(*args, **kwargs)

    def __delitem__(self, *args, **kwargs):
        return self._od.__delitem__(*args, **kwargs)

    def __eq__(self, other):
        if isinstance(other, Buckets):
            return self._od.__eq__(other._od)
        else:
            return self._od.__eq__(other)

    def copy(self, *args, **kwargs):
        new = Buckets()
        new._od = self._od.copy()
        return new
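
Usage sketch (assuming from collections import OrderedDict): plain attribute access falls through __getattr__ to the wrapped OrderedDict, while copy() returns an independent Buckets.

b = Buckets([("a", 1), ("b", 2)])
b["c"] = 3                                   # proxied __setitem__
print(list(b.keys()))                        # ['a', 'b', 'c'] -- keys() found via __getattr__
print(b == OrderedDict([("a", 1), ("b", 2), ("c", 3)]))   # True
c = b.copy()
c["a"] = 99
print(b["a"], c["a"])                        # 1 99 -- the copy has its own OrderedDict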
Example #9
    def _fill_related_objects_cache(self):
        cache = OrderedDict()
        parent_list = self.get_parent_list()
        for parent in self.parents:
            for obj, model in parent._meta.get_all_related_objects_with_model(include_hidden=True):
                if (obj.field.creation_counter < 0 or obj.field.rel.parent_link) and obj.model not in parent_list:
                    continue
                if not model:
                    cache[obj] = parent
                else:
                    cache[obj] = model
        # Collect also objects which are in relation to some proxy child/parent of self.
        proxy_cache = cache.copy()
        for klass in self.apps.get_models(include_auto_created=True):
            if not klass._meta.swapped:
                for f in klass._meta.local_fields + klass._meta.virtual_fields:
                    if (
                        hasattr(f, "rel")
                        and f.rel
                        and not isinstance(f.rel.to, six.string_types)
                        and f.generate_reverse_relation
                    ):
                        if self == f.rel.to._meta:
                            cache[f.related] = None
                            proxy_cache[f.related] = None
                        elif self.concrete_model == f.rel.to._meta.concrete_model:
                            proxy_cache[f.related] = None
        self._related_objects_cache = cache
        self._related_objects_proxy_cache = proxy_cache
Example #10
class TestSerialization(unittest.TestCase):
    def setUp(self):
        self.serialized = OrderedDict()
        self.serialized["typeid"] = "malcolm:core/MethodMeta:1.0"
        self.takes = MapMeta()
        self.takes.set_elements(ElementMap({"in_attr": StringMeta("desc")}))
        self.serialized["takes"] = self.takes.to_dict()
        self.serialized["defaults"] = OrderedDict({"in_attr": "default"})
        self.serialized["description"] = "test_description"
        self.serialized["tags"] = []
        self.serialized["writeable"] = True
        self.serialized["label"] = ""
        self.serialized["returns"] = MapMeta().to_dict()

    def test_to_dict(self):
        m = MethodMeta("test_description")
        m.set_takes(self.takes)
        m.set_defaults(self.serialized["defaults"])
        self.assertEqual(m.to_dict(), self.serialized)

    def test_from_dict(self):
        m = MethodMeta.from_dict(self.serialized.copy())
        self.assertEqual(m.takes.to_dict(), self.takes.to_dict())
        self.assertEqual(m.defaults, self.serialized["defaults"])
        self.assertEqual(m.tags, [])
        self.assertEqual(m.writeable, True)
        self.assertEqual(m.label, "")
        self.assertEqual(m.returns.to_dict(), MapMeta().to_dict())
Example #11
def findDensestsSubgraphParallell(edgeWeights, nodeDegrees, epsilon):
    # Create a sorted dictionary with ascending node degrees. NB! Does not sort itself later!
    nodeDegrees = OrderedDict(sorted(nodeDegrees.iteritems(), key=lambda (k, v): (v, k)))

    avgGraphDensity = computeAverageDensity(edgeWeights, nodeDegrees)
    avgGraphDensityBest = avgGraphDensity
    # Start from the full graph so the returned values are defined even if the
    # density never improves over the initial value.
    newEdgeWeights = edgeWeights.copy()
    newNodeDegrees = nodeDegrees.copy()

    count = 0
    while count < 50:
        print("Iteration #{}".format(count))
        nodeDegreesUnchanged = nodeDegrees.copy()

        for node in nodeDegrees.keys():
            degree = nodeDegreesUnchanged[node]

            if degree <= 2 * (1 + epsilon) * avgGraphDensity:
                del nodeDegrees[node]

                # Delete all edges connected to this node
                for edge in edgeWeights.keys():
                    node1, node2 = edge.split("-")
                    """If edge is a self edge, delete it and don't reduce the degree
                    of "the other" node, since it is yourself"""
                    if node1 == node2:
                        del edgeWeights[edge]
                    else:
                        if node == node1:
                            nodeDegrees[node2] -= edgeWeights[edge]
                            del edgeWeights[edge]
                        elif node == node2:
                            nodeDegrees[node1] -= edgeWeights[edge]
                            del edgeWeights[edge]

        # If no more edges left, we are finished
        if len(edgeWeights) == 0:
            break

        avgGraphDensity = computeAverageDensity(edgeWeights, nodeDegrees)

        if avgGraphDensity > avgGraphDensityBest:
            newEdgeWeights = edgeWeights.copy()
            newNodeDegrees = nodeDegrees.copy()
            avgGraphDensityBest = avgGraphDensity

        count += 1

    return newEdgeWeights, newNodeDegrees
Example #12
class Basic(object):
    """ Basic object deals with Basic HTTP Authorization configuration file.
    It is passed the path to userdb file. """

    def __init__(self, userdb):
        self.userdb = userdb
        self.initial_users = OrderedDict()
        self.new_users = OrderedDict()

    def __enter__(self):
        with open(self.userdb, "r") as users:
            for i in users:
                user, password = i.split(":", 1)
                self.initial_users[user] = password
        self.new_users = self.initial_users.copy()
        return self

    def __exit__(self, type, value, traceback):
        if self.new_users == self.initial_users:
            return
        with open(self.userdb, "w") as userdb:
            for user in self.new_users:
                userdb.write("%s:%s" % (user, self.new_users[user]))

    def __contains__(self, user):
        return user in self.users

    @property
    def users(self):
        """ Returns users in a tuple """
        return list(self.new_users.keys())

    def add(self, user, password):
        """ Adds a user with password """
        if self.__contains__(user):
            raise UserExists
        self.new_users[user] = self._crypt_password(password) + "\n"

    def pop(self, user):
        """ Deletes a user """
        if not self.__contains__(user):
            raise UserNotExists
        self.new_users.pop(user)

    def change_password(self, user, password):
        """ Changes user password """
        if not self.__contains__(user):
            raise UserNotExists
        self.new_users[user] = self._crypt_password(password) + "\n"

    def _crypt_password(self, password):
        """ Crypts password """

        def salt():
            """ Generates some salt """
            symbols = ascii_letters + digits
            return choice(symbols) + choice(symbols)

        return crypt(password, salt())
Example #13
def list_tests(args, config):
    suites = OrderedDict.copy(config["suites"])

    if args.mode == "stingray":
        stingray_suite_keys = ["webapi", "security"]
        for key in suites.keys():
            if key not in stingray_suite_keys:
                del suites[key]

    for test, group in iter_test_lists(suites, args.mode):
        print "%s:%s" % (test, group)
    return True
Example #14
class RDict(object):
    def __init__(self, value_cls):
        self.d = OrderedDict()
        self.value_cls = value_cls

    def __getitem__(self, item):
        return self.d[self._key(item)]

    def __setitem__(self, item, value):
        self.d[self._key(item)] = value

    def __delitem__(self, item):
        del self.d[self._key(item)]  # xxx?  what about iterators

    def __len__(self):
        return len(self.d)

    def copy(self):
        cp = RDict(self.value_cls)
        cp.d = self.d.copy()
        return cp

    def __contains__(self, item):
        return self._key(item) in self.d

    def iter(self):
        return Iterator(self.d)

    def _key(self, item):
        if isinstance(item, str):
            return item
        elif isinstance(item, list):
            return "".join(item)
        else:
            assert isinstance(item, int)
            return str(item)

    def reverse(self, preserve):
        _copy = self.copy()
        _items = _copy.d.items()
        if not preserve:
            pairs = []
            idx = 0
            for k, v in _copy.d.items():

                pairs.append((k, v))
                idx += 1
            pairs.reverse()
            _copy.d = OrderedDict(pairs)
        else:
            _items.reverse()
            _copy.d = OrderedDict(_items)
        return _copy
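
Usage sketch (assuming OrderedDict is imported; the preserve=False path of reverse() is shown): keys are normalized by _key, and reverse() works on a copy so the original order is kept.

rd = RDict(list)
rd["abc"] = [1]
rd[["a", "b"]] = [2]        # list keys are normalized to the string "ab"
rd[7] = [3]                 # int keys are normalized to "7"
print("ab" in rd, 7 in rd)  # True True

rev = rd.reverse(preserve=False)
print(list(rev.d.keys()))   # ['7', 'ab', 'abc']
print(list(rd.d.keys()))    # ['abc', 'ab', '7'] -- original untouched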
Example #15
    def memo_nodes():
        storage = OrderedDict()

        def func(key):
            node = storage.get(key, None)
            if node is None:
                node = Node(key, None, None)
                storage[key] = node
            return node

        func.storage = storage
        func.memo_table = lambda: storage.copy()
        return func
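
Node is not shown above, so a namedtuple stands in for it in this sketch (an assumption), and memo_nodes is assumed to be callable as a plain function. memo_table() hands out a snapshot copy, so later memoization does not leak into it.

from collections import namedtuple, OrderedDict

Node = namedtuple("Node", ["key", "left", "right"])

memo = memo_nodes()
a = memo("a")
print(memo("a") is a)                      # True -- the same key returns the cached node
snapshot = memo.memo_table()               # copy of the storage at this point
memo("b")
print(len(snapshot), len(memo.storage))    # 1 2 -- the snapshot is unaffected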
    def _get_signed_headers_(self, request):

        sorted_headers = OrderedDict(sorted(request.http_headers.items()))

        result = ""

        for header in sorted_headers.copy().keys():

            if result:
                result += ";" + header
            else:
                result += header

        return result.lower()
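
The method only depends on request.http_headers. The same transformation on a plain dict, as a sketch with made-up header values (str.join is an equivalent, shorter way to build the list):

from collections import OrderedDict

http_headers = {"X-Amz-Date": "20240101T000000Z", "Host": "example.com", "Content-Type": "text/plain"}
sorted_headers = OrderedDict(sorted(http_headers.items()))
signed = ";".join(sorted_headers.keys()).lower()
print(signed)   # content-type;host;x-amz-date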
Example #17
    def _get_profile_struct(self):
        default_json_path = os.path.join(self._basefolder, "profiles", "fdmprinter.json")
        with open(default_json_path, "r") as f:
            try:
                raw_profile_dict = json.loads(f.read(), object_pairs_hook=OrderedDict)
            except Exception:
                raise IOError("Couldn't load JSON profile from {path}".format(path=default_json_path))

        profile_struct = OrderedDict()
        for category in raw_profile_dict["categories"]:
            temp_dict = OrderedDict()
            self._find_settings_with_properties(raw_profile_dict["categories"][category], temp_dict)
            profile_struct[raw_profile_dict["categories"][category]["label"]] = temp_dict.copy()

        return profile_struct
    def storeParameters(self):
        delegates = OrderedDict({})
        changedNames = {}

        # Iterate over every entry.
        for entryNum in self.getRowRange():
            layoutItem = self.gridLayout.itemAtPosition(entryNum, 1)

            if layoutItem is None:
                continue

            layout = layoutItem.layout()

            # Iterate over every attribute of this entry.
            # for attribNum in range (layout.getRowCount()):
            name = str(layout.itemAtPosition(0, self.grdTxtCol).widget().text())
            cmd = str(layout.itemAtPosition(1, self.grdTxtCol).widget().text())

            # This is a hack which I'm using to assign arbitrary text to the
            # widget, since there does not seem to be any way to associate user
            # data with a widget.  If anyone has less ugly way to do this,
            # (including restructuring the code) please let me know.
            previousName = str(layout.itemAtPosition(0, self.grdTxtCol).widget().accessibleName())

            if len(name) == 0:
                self.warn("Entry '" + str(entryNum) + "' does not have name!")
                return False, delegates

            # If an execution delegate with this name already exists.
            if name in delegates:
                # self.warnNameError (name)
                self.warn("Entry '" + str(entryNum) + "' has a duplicate name!")
                return False, delegates

            if len(previousName) != 0 and previousName != name:
                # Keep track of these name changes for later.
                changedNames[previousName] = name

            delegates[name] = cmd

        self.delegates = delegates.copy()

        # All parameters have now been verified and saved.  It's safe to
        # notify about all names which have changed.
        for oldName in changedNames.keys():
            self.delegateNameChanged.emit(oldName, changedNames[oldName])

        return True, delegates
Example #19
    def test(self):
        tempDict = OrderedDict()
        for key, value in self.defect.items():
            #   print(key,value)
            #  temp = value.get(key)
            state = value.get("Defect Status", None)
            #   print('Test',key,state)
            if "Closed" in state:
                #  print('Closed')
                None
            elif "Fixed" in state:
                #  print('Fixed')
                None
            else:
                # print('ID',key,state)

                tempDict["Defect ID"] = key
                tempDict["Summary"] = value.get("Summary")
                tempDict["Severity"] = value.get("Severity")
                tempDict["Defect Status"] = value.get("Defect Status")
                tempDict["Status Whiteboard"] = value.get("Status Whiteboard")

                content = self.meeting.get(key, "empty")
                #     print ('Content',content)
                if not "empty" in content:
                    # print('Content',content)
                    for me_key, me_value in content.items():
                        if "Status Whiteboard" in me_key:
                            None
                        elif "Summary" in me_key:
                            None
                        elif "Severity" in me_key:
                            None
                        elif "ID" in me_key:
                            None
                        elif "Defect Status" in me_key:
                            None
                        elif "Priority" in me_key:
                            None
                        else:
                            tempDict[me_key] = me_value

                # print('HH',self.meeting.get(key,None))

                self.combined[key] = tempDict.copy()
                tempDict.clear()

        return self.combined
Example #20
def load_accounts():
    accounts = sorted([p for p in pathlib.Path(os.path.join(BASE_DIR, "accounts")).iterdir() if p.is_file()])
    account_count = 0
    print(os.path.join(BASE_DIR, "accounts"))
    print(accounts)
    for acc in accounts:
        bot_name = os.path.basename(str(acc)).split(".")[0]
        if DEBUG:
            if bot_name not in DEBUG_ACCS:
                continue
        elif not DEBUG:
            if "example" in bot_name.lower():
                continue
        account_count += 1
        account_list[bot_name] = OrderedDict()
        Config = ConfigObj(str(acc))
        has_cred = False
        has_sett = False
        for sec in Config.iteritems():
            sec = tuple(sec)
            if sec[0] == "credentials":
                has_cred = True
            elif sec[0] == "settings":
                has_sett = True
            if "-thread" in sec[0]:
                # Start thread: import settings and creds
                pass
            account_list[bot_name][sec[0].lower()] = sec[1].copy()
        if not has_cred:
            print("Credentials not found for bot: {}".format(bot_name))
            input("Press ENTER to close.")
            sys.exit(0)
        elif not has_sett:
            print("No settings are set for bot: {}".format(bot_name))
            input("Press ENTER to close.")
            sys.exit(0)
        temp = OrderedDict()
        for k, v in account_list[bot_name].items():
            for a, b in v.items():
                a = a.lower()
                try:
                    temp[k][a] = b
                except KeyError:
                    temp[k] = {a: b}
        account_list[bot_name] = temp.copy()
        del temp
    print("Running {0} Accounts!\n".format(account_count))
    return account_list
Example #21
    def __init__(self, species):

        regions = OrderedDict()
        regions["all"] = "All"
        regions["cds"] = "CDS"
        regions["three_prime_utrs"] = "3' UTR"
        regions["five_prime_utrs"] = "5' UTR"
        regions["proxintron500"] = "Proximal\nIntron"
        regions["distintron500"] = "Distal\nIntron"
        regions["exons"] = "Exons"

        assigned_regions = regions.copy()
        del assigned_regions["all"]
        self.species = species
        self.assigned_regions = assigned_regions
        self.features = self.make_features()
Example #22
    def dGetFeedFrame(self, sCsvFile, sTimeFrame, sSymbol, sYear):
        dFeedParams = OrderedDict(sTimeFrame=sTimeFrame, sSymbol=sSymbol, sYear=sYear)
        # PandasMt4.dDF_OHLC[sKey = sSymbol + sTimeFrame + sYear]
        dFeedParams["mFeedOhlc"] = oReadMt4Csv(sCsvFile, **dFeedParams)
        dFeedParams["open_label"] = "O"
        dFeedParams["close_label"] = "C"
        dFeedParams["sKey"] = sSymbol + sTimeFrame + sYear
        dFeedParams["sCsvFile"] = sCsvFile

        self.vAppendHdf("feed/mt4/" + dFeedParams["sKey"], dFeedParams["mFeedOhlc"])
        dMetadata = dFeedParams.copy()
        del dMetadata["mFeedOhlc"]
        self.vSetMetadataHdf("feed/mt4/" + dFeedParams["sKey"], dMetadata)
        self.vSetTitleHdf("feed/mt4", "Mt4")
        self.vSetTitleHdf("feed", "Feeds")
        return dFeedParams
    def normalize_datetime_slices(self, results_by_user, submetrics):
        """
        Starting from a sparse set of timeseries results, fill in default values
        for the specified list of sub-metrics.  Also make sure the chronological
        first timeseries slice is >= self.start_date.
        If self.timeseries is NONE, this is a simple identity function.
        
        Parameters
            results_by_user : dictionary of submetrics dictionaries by user
            submetrics      : list of tuples of the form (label, index, default)
        
        Returns
            the results, filled in with default values
        """
        if self.timeseries.data == TimeseriesChoices.NONE:
            return results_by_user

        slice_delta = self.get_delta_from_choice()
        timeseries_slices = OrderedDict()
        start_slice_key = format_pretty_date(self.start_date.data)
        timeseries_slices[start_slice_key] = None

        first_slice = self.get_first_slice()
        first_slice_key = format_pretty_date(first_slice)
        slice_to_default = first_slice
        while slice_to_default < self.end_date.data:
            date_key = format_pretty_date(slice_to_default)
            timeseries_slices[date_key] = None
            slice_to_default += slice_delta

        for user_id, user_submetrics in results_by_user.iteritems():
            for label, i, default in submetrics:
                if not label or not user_submetrics or label not in user_submetrics:
                    continue
                defaults = timeseries_slices.copy()
                defaults.update(user_submetrics[label])
                for k, v in defaults.iteritems():
                    if not v:
                        defaults[k] = default

                if start_slice_key != first_slice_key:
                    # coerce the first datetime slice to be self.start_date
                    defaults[start_slice_key] = defaults.pop(first_slice_key)

                user_submetrics[label] = defaults

        return results_by_user
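
The filling pattern above, on plain data (date keys as strings, made up for the sketch): copy the slice template per submetric, overlay the sparse results, then replace empty values with the default.

from collections import OrderedDict

slices = OrderedDict((d, None) for d in ["2024-01-01", "2024-01-02", "2024-01-03"])
sparse_results = {"2024-01-02": 5}

filled = slices.copy()            # per-submetric template copy, as in the method above
filled.update(sparse_results)
for key, value in filled.items():
    if not value:
        filled[key] = 0           # this submetric's default

print(list(filled.items()))
# [('2024-01-01', 0), ('2024-01-02', 5), ('2024-01-03', 0)]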
Example #24
    def reorder_layers(self, neworder):
        """
        Reorder layers and redraw figure

        Args:
          neworder (list): New order of layers. Must include all layers
            currently in the figure. See all layers with `fig.layers`
        """
        assert sorted(neworder) == sorted(self.layers.keys()), "Must specify order of all layers"

        new = OrderedDict()
        for i in neworder:
            new[i] = self.layers[i]

        self.layers = new.copy()

        self.redraw(keeptemp=True)
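
The reordering itself is just rebuilding an OrderedDict in the requested key order; a standalone sketch:

from collections import OrderedDict

layers = OrderedDict([("background", 1), ("data", 2), ("labels", 3)])
neworder = ["data", "labels", "background"]
assert sorted(neworder) == sorted(layers.keys()), "Must specify order of all layers"

layers = OrderedDict((key, layers[key]) for key in neworder)
print(list(layers.keys()))   # ['data', 'labels', 'background']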
Example #25
    def getReport(cls):
        q = cls.query()
        q = q.order(-DayEventSummary.day)
        days = {}

        defaultDay = {"New User": 0, "Login": 0, "Create Point": 0, "Edit Point": 0}
        defaultDay = OrderedDict(sorted(defaultDay.items(), key=lambda t: t[0]))

        for day in q.iter():
            if str(day.day) in days:
                days[str(day.day)][day.name] = day.count
            else:
                days[str(day.day)] = defaultDay.copy()
                days[str(day.day)][day.name] = day.count

        logging.info(str(days))
        return days
Example #26
    def parameterDicts(self):
        # One iterator for each sweep
        iterators = [sweep.parameterDicts() for sweep in self.sweeps]

        # Iterate in parallel across sweeps
        # If one of them comes up short, the final entry will just be repeated
        combination = OrderedDict()
        while True:
            doneCount = 0
            for iterator in iterators:
                try:
                    combination.update(iterator.next())
                except StopIteration:
                    doneCount += 1
            if doneCount == len(iterators):
                break
            yield combination.copy()
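
Yielding combination.copy() matters because the generator keeps mutating the same OrderedDict; a minimal sketch of the difference:

from collections import OrderedDict

def sweep():
    combination = OrderedDict()
    for x in (1, 2, 3):
        combination["x"] = x
        yield combination.copy()   # without .copy(), every yielded value would be the same (mutated) dict

print([dict(c) for c in sweep()])  # [{'x': 1}, {'x': 2}, {'x': 3}]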
Example #27
    def __init__(self, valid=None, invalid=None, valid_equivalent=None):
        if valid is None:
            valid = []
        if invalid is None:
            invalid = []
        if valid_equivalent is None:
            valid_equivalent = OrderedDict()

        # Nodes that are valid to have in the graph computing outputs
        self.valid = set(valid)

        # Nodes that are NOT valid to have in the graph computing outputs
        self.invalid = set(invalid)

        # Mapping from invalid variables to equivalent valid ones.
        self.valid_equivalent = valid_equivalent.copy()
        self.valid.update(list(valid_equivalent.values()))
        self.invalid.update(list(valid_equivalent.keys()))
Example #28
def regions_generator():

    """
    returns two ordered dicts of regions: one without the "all" region and one with it, in that order
    """

    # lazy refactor, should turn entire thing into object, make this a field
    regions = OrderedDict()
    regions["all"] = "All"
    regions["cds"] = "CDS"
    regions["three_prime_utrs"] = "3' UTR"
    regions["five_prime_utrs"] = "5' UTR"
    regions["proxintron500"] = "Proximal\nIntron"
    regions["distintron500"] = "Distal\nIntron"

    # the "all" category would break some analyses; create a copy and remove it
    assigned_regions = regions.copy()
    del assigned_regions["all"]
    return assigned_regions, regions
Example #29
class VectorParser(object):
    def __init__(self):
        self.idxs_and_shapes = OrderedDict()
        self.vect = np.zeros((0,))

    def add_shape(self, name, shape):
        start = len(self.vect)
        size = np.prod(shape)
        self.idxs_and_shapes[name] = (slice(start, start + size), shape)
        self.vect = np.concatenate((self.vect, np.zeros(size)), axis=0)

    def new_vect(self, vect):
        assert vect.size == self.vect.size
        new_parser = self.empty_copy()
        new_parser.vect = vect
        return new_parser

    def empty_copy(self):
        """Creates a parser with a blank vector."""
        new_parser = VectorParser()
        new_parser.idxs_and_shapes = self.idxs_and_shapes.copy()
        new_parser.vect = None
        return new_parser

    def as_dict(self):
        return {k: self[k] for k in self.names}

    @property
    def names(self):
        return self.idxs_and_shapes.keys()

    def __getitem__(self, name):
        idxs, shape = self.idxs_and_shapes[name]
        return np.reshape(self.vect[idxs], shape)

    def __setitem__(self, name, val):
        if isinstance(val, list):
            val = np.array(val)
        if name not in self.idxs_and_shapes:
            self.add_shape(name, val.shape)

        idxs, shape = self.idxs_and_shapes[name]
        self.vect[idxs].reshape(shape)[:] = val
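
Usage sketch (the class assumes numpy is available as np): each named block lives in one flat vector, and new_vect() reuses a copy of the layout with different underlying data.

import numpy as np

parser = VectorParser()
parser["w"] = [[1.0, 2.0], [3.0, 4.0]]       # registers shape (2, 2) and writes into the flat vector
parser["b"] = [0.5, 0.5]
print(parser.vect)                           # [1.  2.  3.  4.  0.5 0.5]
print(parser["w"].shape, parser["b"].shape)  # (2, 2) (2,)

other = parser.new_vect(np.arange(6.0))      # same slices/shapes, different data
print(other["w"])                            # [[0. 1.]
                                             #  [2. 3.]]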
Example #30
def UpdateSplitSecurityIndex(mssqlDict, mongoDict):
    splitSecurityIndex = SplitSecurity(mssqlDict, mongoDict)
    splitDict = {"DAY": 200, "WEEK": 100, "MONTH": 50}
    for item in splitDict:
        # print item
        securityCodeList = splitSecurityIndex.GetSecurityCodeList(item)
        splitRangeList = splitSecurityIndex.SplitSecuritySubarea(securityCodeList, splitDict[item])
        num = 0
        for mongoHandle in splitSecurityIndex.mongoHandle:
            mongoHandle.drop("SplitIndex", item)
        document = OrderedDict()
        for splitRange in splitRangeList:
            document["_id"] = num
            document["Range"] = [splitRange["min"], splitRange["max"]]
            # print num,'min:',range['min'],', max:',range['max']
            for mongoHandle in splitSecurityIndex.mongoHandle:
                mongoHandle.insert("SplitIndex", item, document.copy())
            document.clear()
            num += 1
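
The document.copy() / document.clear() pair above is the usual way to reuse one buffer dict while storing independent snapshots; a standalone sketch:

from collections import OrderedDict

stored = []
document = OrderedDict()
for num, (low, high) in enumerate([(0, 9), (10, 19)]):
    document["_id"] = num
    document["Range"] = [low, high]
    stored.append(document.copy())   # snapshot; appending `document` itself would alias the buffer
    document.clear()

print([dict(d) for d in stored])     # [{'_id': 0, 'Range': [0, 9]}, {'_id': 1, 'Range': [10, 19]}]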