Example #1
1
class FilePool:
    """Keep at most *size* file objects open, evicting the least recently used.

    Tracks cache hits/misses; evicted files are closed before removal.
    """

    def __init__(self, size, compression=None):
        self.size = size                # max number of simultaneously open files
        self.compression = compression  # NOTE(review): stored but never used here
        self.pool = OrderedDict()       # filename -> open file, LRU order (oldest first)
        self.hit = 0
        self.miss = 0

    def open(self, filename, mode, encoding=None, file_type=None, compression=None):
        """Return a (possibly pooled) file object for *filename*.

        On a hit the entry becomes most recently used; on a miss the file is
        opened and, if the pool is full, the least recently used file is
        closed and evicted.  *file_type* and *compression* are accepted for
        interface compatibility but are not forwarded to the builtin open().
        """
        if filename in self.pool:
            # BUG FIX: a hit must move the entry to the MRU end (last=True).
            # The original moved it to the front, which is the end that
            # popitem(last=False) evicts from -- so hits were evicted first.
            self.pool.move_to_end(filename)
            self.hit += 1
            return self.pool[filename]

        # BUG FIX: the original passed encoding/file_type/compression
        # positionally, so `encoding` landed in open()'s `buffering` slot
        # and raised TypeError for any string encoding.
        f = open(filename, mode, encoding=encoding)

        if len(self.pool) >= self.size:
            _, old_f = self.pool.popitem(last=False)  # evict the LRU entry
            old_f.close()
        self.pool[filename] = f

        self.miss += 1
        return f

    def close(self):
        """Close every pooled file and empty the pool."""
        for f in self.pool.values():
            f.close()
        # BUG FIX: drop the closed handles, otherwise a later open() of the
        # same name would return a closed file object.
        self.pool.clear()
def create_ordered_dict_and_sort(dictionary, key=None):
    """Return an OrderedDict copy of *dictionary*.

    When a truthy *key* is given, that entry is moved to the front.
    """
    ordered = OrderedDict(dictionary)
    if key:
        ordered.move_to_end(key, last=False)
    return ordered
Exemple #3
0
def format_analyse(f_in, f_out, f_dic):
    """Parse a tab-separated analysis file and hand the result to format_data.

    Records are delimited by lines of '+' characters; lines of '-' characters
    are skipped.  Within a record, the first data line carries the grade token
    (its leading sign decides "+1"/"-1") and the following lines carry words
    (column 3 of each tab-separated line).

    :param f_in:  path of the input analysis file
    :param f_out: output path, forwarded to format_data
    :param f_dic: dictionary path, forwarded to format_data
    """
    dic = OrderedDict()  # word -> 0, ordered by most recent occurrence
    grades = []          # one grade token per record
    lines = []           # one list of words per record
    l = []               # words of the record currently being read
    newline = True       # True while waiting for a record's grade line
    with open(f_in, 'r') as fin:
        for line in fin:
            # separator made of '-' characters: ignore
            if line.startswith("------------------------"):
                continue
            # separator made of '+' characters: current record is complete
            if line.startswith("++++++++++++++++++++++++"):
                lines.append(l)
                newline = True
                continue
            # third tab-separated column holds the token of interest
            word = line.split("\t")[2]
            if newline:
                # first data line of a record is its grade
                l = []
                grades.append(word)
                newline = False
                continue
            l.append(word)
            dic[word] = 0
            dic.move_to_end(word)  # refresh position for repeated words
    # collapse grade tokens to "+1"/"-1" based on their leading sign
    for i in range(len(grades)):
        grades[i] = ("+1" if grades[i][0] == '+' else "-1")
    format_data(f_out, (grades, lines), f_dic, dic, True)
def simulateCache(requests, size):
    """Cache simulator

    Simulates an LRU cache of the given size over the request stream and
    prints hit statistics.

    Note that this assumes that the python equality is sufficient
    to distinguish between unique requests; this may not be true,
    depending upon the hash and equality function implementations
    used in the C++ class.

    Parameters
    ----------
    requests : `list`
        List of cache requests.
    size : `int`
        Size of cache to simulate
    """
    print("Total number of requests:", len(requests))
    counts = Counter(requests)
    print("Number of unique requests:", len(counts))
    cache = OrderedDict()
    hits = defaultdict(int)
    for req in requests:
        if req in cache:
            # hit: count it and mark the entry most recently used
            hits[req] += 1
            cache.move_to_end(req)
        else:
            # miss: insert and evict the least recently used entry if full
            cache[req] = True
            if len(cache) > size:
                cache.popitem(last=False)
    print("Number of cache hits:", sum(hits.values()))
    # BUG FIX: min()/max() raise ValueError on an empty sequence, so a run
    # with zero hits used to crash here.
    if hits:
        print("Minimum/maximum cache hits:", min(hits.values()), max(hits.values()))
    else:
        print("Minimum/maximum cache hits:", 0, 0)
Example #5
0
    def serialize(self, include_type=False, simple_geometry=False, geometry=True, **kwargs):
        """Serialize this location into an OrderedDict.

        Optionally prepends a 'type' marker, and adds simple point/bounds
        data and/or the full geometry.  The 'grid_square' entry is removed
        when the grid feature is disabled.
        """
        data = OrderedDict()
        data['id'] = self.pk
        data['slug'] = self.pk
        data['icon'] = self.icon
        data['title'] = self.title
        data['subtitle'] = self.subtitle
        data['level'] = self.level.pk
        data['space'] = self.space.pk if self.space else None
        data['areas'] = tuple(area.pk for area in self.areas)
        data['grid_square'] = self.grid_square
        data['near_area'] = self.near_area.pk if self.near_area else None
        data['near_poi'] = self.near_poi.pk if self.near_poi else None
        data['altitude'] = None if self.altitude is None else round(self.altitude, 2)

        if not grid.enabled:
            del data['grid_square']
        if include_type:
            # the 'type' marker leads the serialized form
            data['type'] = 'custom'
            data.move_to_end('type', last=False)
        if simple_geometry:
            data['point'] = (self.level.pk, self.x, self.y)
            lower = (int(math.floor(self.x)), int(math.floor(self.y)))
            upper = (int(math.ceil(self.x)), int(math.ceil(self.y)))
            data['bounds'] = (lower, upper)

        if geometry:
            data['geometry'] = self.serialized_geometry

        return data
def choose_backend():
    """
    Function that tries to setup backend, chosen by user, and if failed,
    setup one of the allowable backends
    """
    global _backend, _backend_chosen
    previous = _backend
    candidates = OrderedDict([
        ('psutil', has_psutil),
        ('posix', os.name == 'posix'),
        ('tracemalloc', has_tracemalloc),
        ('no_backend', True)
    ])
    # try the user's preferred backend first
    candidates.move_to_end(_backend, last=False)
    _backend = next(name for name, available in candidates.items() if available)
    if _backend == 'no_backend':
        raise NotImplementedError('Tracemalloc or psutil module is required for non-unix '
                                  'platforms')
    if _backend != previous:
        print('{} can not be used, {} used instead'.format(previous, _backend))
    _backend_chosen = True
Example #7
0
 def settings_form_fields(self):
     """Form fields shown on this payment provider's settings page.

     Prepends the provider-specific I18n text fields (public name, checkout
     description, e-mail instructions, pending-order description) to the
     fields inherited from the base provider, then moves the inherited
     '_enabled' switch to the front of the form.
     """
     d = OrderedDict(
         [
             ('public_name', I18nFormField(
                 label=_('Payment method name'),
                 widget=I18nTextInput,
             )),
             ('checkout_description', I18nFormField(
                 label=_('Payment process description during checkout'),
                 help_text=_('This text will be shown during checkout when the user selects this payment method. '
                             'It should give a short explanation on this payment method.'),
                 widget=I18nTextarea,
             )),
             ('email_instructions', I18nFormField(
                 label=_('Payment process description in order confirmation emails'),
                 help_text=_('This text will be included for the {payment_info} placeholder in order confirmation '
                             'mails. It should instruct the user on how to proceed with the payment. You can use'
                             'the placeholders {order}, {total}, {currency} and {total_with_currency}'),
                 widget=I18nTextarea,
                 validators=[PlaceholderValidator(['{order}', '{total}', '{currency}', '{total_with_currency}'])],
             )),
             ('pending_description', I18nFormField(
                 label=_('Payment process description for pending orders'),
                 help_text=_('This text will be shown on the order confirmation page for pending orders. '
                             'It should instruct the user on how to proceed with the payment. You can use'
                             'the placeholders {order}, {total}, {currency} and {total_with_currency}'),
                 widget=I18nTextarea,
                 validators=[PlaceholderValidator(['{order}', '{total}', '{currency}', '{total_with_currency}'])],
             )),
         ] + list(super().settings_form_fields.items())
     )
     # '_enabled' is inherited from the base class; surface it first.
     d.move_to_end('_enabled', last=False)
     return d
Example #8
0
def main():
    """Entry point for the thinkblink flag/blinker CLI.

    Reads ~/.thinkblink.conf, applies the flag operations requested via the
    parsed command-line ``args`` (generate / <flag> / add / first / delete /
    set / unset / toggle / list), persists the config, and then blinks once
    per flag position while any flag is set, re-reading the config between
    passes.
    """
    config_file = expanduser('~') + '/' + '.thinkblink.conf'

    run_loop = False  # NOTE(review): never used below

    if args.generate:
        # write a fresh default config file and stop
        make_default_config(config_file)
        print('Generated default configuration')
        sys.exit()

    config = read_config(config_file)

    if len(set_flags(config)) > 0:
        # flags already set: another invocation should be blinking already
        if args.verbose: print('A blinker process should already be running')
        sys.exit()

    # positional <flag> argument (docopt-style name): set that flag
    if args.__getattribute__('<flag>'):
        config['flags'][args.__getattribute__('<flag>')] = '1'
    if args.add:
        # register a new flag, initially unset
        config['flags'][args.add] = None
    if args.first:
        # register a new flag and move it to the front of the flag order
        config['flags'][args.first] = None
        temp = OrderedDict(config['flags'])
        temp.move_to_end(args.first, last=False)
        config['flags'] = temp
    if args.delete:
        config['flags'].pop(args.delete)
    if args.set:
        config['flags'][args.set] = '1'
    if args.unset:
        config['flags'][args.unset] = None
    if args.toggle:
        config['flags'][args.toggle] = toggle_flag(config['flags'][args.toggle])
    with open(config_file, 'w') as f: config.write(f)

    if args.list:
        # print each flag, with its 1-based position when verbose
        for i, flag in enumerate(map(str, config['flags'].keys())):
            print(flag + '{end}'.format(end=' (' + str(i+1) + ')' if args.verbose else ''))

    if len(set_flags(config)):
        sync(config['files'])
    # Blink loop: for each set flag, blink as many times as its 1-based
    # position in the flag order, pausing between flags.  If the watched
    # files changed unexpectedly (another writer), re-sync and restart
    # the pass.
    while len(set_flags(config)):
        start_over = False
        for flag in set_flags(config):
            l = list(config['flags'].keys()).index(flag) + 1
            for i in range(l):
                if unexpected_diff(config['files']):
                    sync(config['files'])
                    start_over = True
                    # NOTE(review): toggling the last set flag here looks
                    # like an acknowledgement of the external change --
                    # confirm against the protocol before relying on it.
                    top_flag = set_flags(config)[-1]
                    config['flags'][top_flag] = toggle_flag(config['flags'][top_flag])
                    with open(config_file, 'w') as f: config.write(f)
                    break
                else:
                    toggle_light(config['files'], config['values'])
                    if i != l-1: time.sleep(0.25)
            if(start_over): break
            time.sleep(0.75)
        config = read_config(config_file)
 def _rearrange(self, table, cols):
     """Reorder every row of *table* so that *cols* come first, in the
     given order, followed by the remaining keys in reverse of their
     original order."""
     rearranged = []
     for row in table:
         src = OrderedDict(row)
         tail = [key for key in src if key not in cols]
         tail.reverse()
         rearranged.append(OrderedDict((key, src[key]) for key in list(cols) + tail))
     return rearranged
def read_dic(f_dict):
    """Read one entry per line from *f_dict* into an OrderedDict of zeros.

    A line is kept only when it is non-empty and every whitespace-separated
    token is purely alphabetic.  Re-read entries are moved to the end, so
    the dict is ordered by last occurrence.

    :param f_dict: path to the dictionary file
    :return: OrderedDict mapping entry -> 0
    """
    dic = OrderedDict()
    with open(f_dict, 'r') as f:
        for line in f:
            w = line.replace("\n", "")
            tokens = w.split()
            # BUG FIX: the original tested `if map(str.isalpha, ...)`; a map
            # object is always truthy, so the alphabetic filter never
            # rejected anything.  Require all tokens to be alphabetic.
            if tokens and all(token.isalpha() for token in tokens):
                dic[w] = 0
                dic.move_to_end(w)
    return dic
Example #11
0
def choose_labels(alternatives):
    """
    Prompt the user select several labels from the provided alternatives.

    At least one label must be selected.

    :param list alternatives: Sequence of options that are available to select from
    :return: Several selected labels
    :raises ValueError: if *alternatives* is empty
    :raises TypeError: if *alternatives* is not a list
    """
    if not alternatives:
        raise ValueError

    if not isinstance(alternatives, list):
        raise TypeError

    # map menu numbers "1".."N" to the selectable values
    choice_map = OrderedDict(
        ('{}'.format(i), value) for i, value in enumerate(alternatives, 1)
    )
    # prepend a termination option
    input_terminator = '0'
    choice_map.update({input_terminator: '<done>'})
    choice_map.move_to_end('0', last=False)

    choice_indexes = choice_map.keys()

    # one "<number> - <label>" line per option
    choice_lines = ['{} - {}'.format(*c) for c in choice_map.items()]
    prompt = '\n'.join(
        (
            'Select labels:',
            '\n'.join(choice_lines),
            'Choose from {}'.format(', '.join(choice_indexes)),
        )
    )

    user_choices = set()
    user_choice = None

    # keep prompting until the terminator is entered with >= 1 selection
    while not user_choice == input_terminator:
        if user_choices:
            note('Selected labels: [{}]'.format(', '.join(user_choices)))

        user_choice = click.prompt(
            prompt, type=click.Choice(choice_indexes), default=input_terminator
        )
        done = user_choice == input_terminator
        # NOTE(review): user_choices stores label *values*, so testing the
        # numeric index against it is almost always True; harmless because
        # set.add() deduplicates anyway -- confirm intent.
        new_selection = user_choice not in user_choices
        nothing_selected = not user_choices

        if not done and new_selection:
            user_choices.add(choice_map[user_choice])

        if done and nothing_selected:
            # refuse to finish with nothing selected; reset so the loop repeats
            error('Please select at least one label')
            user_choice = None

    return user_choices
class memoize_mask(object):
    '''
    Decorator. Caches wktToMask keyed to model_id and the WKT string
    '''
    def __init__(self, func, maxsize=50):
        '''
        Args:
            func: the function to wrap
            maxsize (int): Max size of cache (in MB)
        '''

        self.hits = self.misses = 0
        self.func = func
        self.maxsize = maxsize
        self.cache = OrderedDict()  # (model_id, wkt) -> result, LRU order

    def __call__(self, *args):
        # expected positional signature of the wrapped function
        nc, fname, wkt, varname = args
        # If we have no key, automatic cache miss
        if (not hasattr(nc, 'model_id')):
            log.debug('Cache MISS (attribute \'model_id\' not found)')
            return self.func(*args)

        # Set key to model_id and wkt polygon
        key = (nc.model_id, wkt)
        log.debug('Checking cache for key %s', key)

        with cache_lock:
            try:
                result = self.cache[key]
                self.cache.move_to_end(key) # record recent use of this key
                self.hits += 1
                log.debug('Cache HIT')
                return result
            except KeyError:
                pass

        # NOTE(review): the lock is released while computing, so two threads
        # can compute the same key concurrently; the later write wins.
        # Presumably a deliberate trade-off to avoid holding the lock during
        # a slow computation -- confirm.
        log.debug('Cache MISS')
        result = self.func(*args)

        with cache_lock:
            self.cache[key] = result
            self.misses += 1
            # getsize is defined elsewhere; presumably a deep size in bytes
            while getsize(self.cache) > self.maxsize * 1024 * 1024: # convert to MB
                self.cache.popitem(0) # Purge least recently used cache entry

        return result

    def cache_clear(self):
        # drop all cached results and reset the hit/miss statistics
        with cache_lock:
            self.cache.clear()
            self.hits = 0
            self.misses = 0
class LFU_LRU(abstractLFU):
    # LFU cache that breaks frequency ties in LRU order.  The frequency
    # bookkeeping (least_freq_elements_list, _updateElement) lives in
    # abstractLFU (defined elsewhere); this subclass only adds an
    # OrderedDict to track recency.
    def __init__(self, cache_size=1000):
        super(LFU_LRU, self).__init__(cache_size)
        self.cacheDict = OrderedDict()  # element -> entry, recency order

    def find_evict_key(self):
        """Return the eviction victim taken from the least-frequent list.

        NOTE(review): popleft() presumably yields the least recently used
        among the least frequent -- confirm in abstractLFU.
        """
        evict_key = self.least_freq_elements_list.popleft()
        return evict_key

    def _updateElement(self, element):
        """Record an access: update frequency, then mark most recently used."""
        super()._updateElement(element)
        self.cacheDict.move_to_end(element)
    def _querystring(self, params):
        """Build the signed, URL-encoded body for a POST to the MWS endpoint.

        Adds the parameters Amazon MWS requires on every call -- access key,
        HmacSHA256 signature method, signature version 2, the API section
        version, an ISO8601 timestamp and (unless already supplied) the
        SellerId -- signs the canonical sorted parameter string, and returns
        the UTF-8 encoded body with the signature appended last.
        """
        timestamp = datetime.datetime.utcnow().replace(
            microsecond=0).isoformat(sep='T') + 'Z'
        query = {'AWSAccessKeyId': self.mws_access_key,
                 'SignatureMethod': 'HmacSHA256',
                 'SignatureVersion': '2',
                 'Version': self._api_version,
                 'Timestamp': timestamp}

        if 'SellerId' not in params:
            query['SellerId'] = self.merchant_id

        query.update(params)

        # canonical string: POST, host, path, then the sorted encoded params
        endpoint_parts = parse.urlparse(self._mws_endpoint)
        encoded = parse.urlencode(sorted(query.items()))
        encoded = encoded.replace('+', '%20').replace('*', '%2A').replace('%7E', '~')
        string_to_sign = "POST\n{}\n{}\n{}".format(
            endpoint_parts[1], endpoint_parts[2], encoded)

        query['Signature'] = self._sign(string_to_sign)

        # sort for a stable body, but the signature always goes last
        ordered_parameters = OrderedDict(sorted(query.items()))
        ordered_parameters.move_to_end('Signature')
        return parse.urlencode(ordered_parameters).encode(encoding='utf_8')
    def load(self, base_settings):
        """Merge local settings from file with ``base_settings``.

        Returns a new OrderedDict containing the base settings and the
        loaded settings. Ordering is:

            - base settings
            - settings from extended file(s), if any
            - settings from file

        When a setting is overridden, it gets moved to the end.

        Returns ``None`` (after printing a warning) when the local settings
        file does not exist.
        """
        if not os.path.exists(self.file_name):
            self.print_warning(
                'Local settings file `{0}` not found'.format(self.file_name))
            return
        is_upper = lambda k: k == k.upper()
        # start from the UPPER_CASE entries of the base settings
        settings = OrderedDict((k, v) for (k, v) in base_settings.items() if is_upper(k))
        for k, v in self.read_file().items():
            # dotted keys address nested containers, e.g. "A.b.0"
            names = k.split('.')
            v = self._parse_setting(v, expand_vars=True)
            obj = settings
            # walk (and create as needed) every intermediate container
            for name, next_name in zip(names[:-1], names[1:]):
                next_name = self._convert_name(next_name)
                next_is_seq = isinstance(next_name, int)
                # an int segment means the *next* level must be a list
                default = [PLACEHOLDER] * (next_name + 1) if next_is_seq else {}
                if isinstance(obj, Mapping):
                    if name not in obj:
                        obj[name] = default
                elif isinstance(obj, Sequence):
                    name = int(name)
                    # pad the list so the index exists
                    while name >= len(obj):
                        obj.append(PLACEHOLDER)
                    if obj[name] is PLACEHOLDER:
                        obj[name] = default
                obj = obj[name]
            name = self._convert_name(names[-1])
            try:
                curr_v = obj[name]
            except (KeyError, IndexError):
                pass
            else:
                # remember LocalSetting placeholders that received a value
                if isinstance(curr_v, LocalSetting):
                    curr_v.value = v
                    self.registry[curr_v] = name
            obj[name] = v
            # overridden top-level settings move to the end (see docstring)
            settings.move_to_end(names[0])
        settings.pop('extends', None)  # 'extends' is a directive, not a setting
        self._do_interpolation(settings, settings)
        return settings
Example #16
0
class LRUCache:
    """A fixed-capacity least-recently-used cache over an OrderedDict.

    .. note:: LRUCache is not thread safe.
    """

    def __init__(self, max_size=128):
        """
        :param max_size: The max number of entries in the cache.
        """
        self.max_size = max_size
        self.queue = OrderedDict()

    def get(self, key):
        """Return the entry stored under *key*, or None on a miss.

        A hit marks the entry as most recently used.
        """
        entry = self.queue.get(key, _MISSING)
        if entry is _MISSING:
            LOG.debug('Cache miss for key: %s', key)
            return None
        LOG.debug('Cache hit for key: %s; value: %s', key, entry)
        self.queue.move_to_end(key)
        return entry

    def set(self, key, value):
        """Store *key* -> *value*, evicting the oldest entries past max_size."""
        previous = self.queue.pop(key, _MISSING)
        LOG.debug('Set key: %s; value: %s; prev: %s', key, value, previous)
        self.queue[key] = value
        LOG.debug('Cache size: %s', len(self.queue))
        while len(self.queue) > self.max_size:
            self.queue.popitem(last=False)

    def clear(self):
        """Drop every cache entry."""
        LOG.debug('Clear cache, size: %s', len(self.queue))
        self.queue = OrderedDict()
Example #17
0
    def getmaininfo(self, json=False):
        """Assemble the main-info message from all MI controllers.

        Returns UTF-8 JSON bytes when *json* is true, otherwise the XML
        "hello" reply produced by the XML generator.
        """
        message = OrderedDict()
        for controller in self._mi_controller_array:
            message[controller.name] = controller.mi_todict()
        message['BioReactorModel'] = self._version_info['Model']
        # these two controllers go last, in this order
        message.move_to_end('SecondaryHeat')
        message.move_to_end('MainGas')
        if not json:
            return self.xml_gen.create_hello_xml(message, "Message", "True", self.true_reply_xml_encoding)
        reply = OrderedDict((
            ("result", "True"),
            ("message", message)
        ))
        return json_dumps(reply).encode('utf-8')
Example #18
0
def timefreq_fft ( x, **kwargs ):
    '''Labeled analogue of scipy.signal.spectrogram; runs along x's 'time' axis.

    Inputs:
    x - LabeledArray containing the data; must have a 'time' axis, and probably
        shouldn't have a 'frequency' axis
    **kwargs - (Optional) Keyword arguments passed to spectrogram

    Output is a LabeledArray with the same axes as x except for:
        'time' - Determined by the window properties; uses the values returned
            by spectrogram
        'frequency' - Added based on the values returned by spectrogram
    '''
    
    # Make sure we don't override axis in the kwargs
    kwargs.pop( 'axis', None )

    time_axis = x.axis_index( 'time' )  # NOTE(review): unused below
    other_axes = [ k
                   for k in x.axes
                   if not k == 'time' ]
    
    ## Compute spectrograms
    
    # Test with first sample along each non-time axis
    # (slice of (axis, 0) pairs chained together)
    test_slice = functools.reduce( lambda a, b : a + b,
                                  tuple( zip( other_axes,
                                              ( 0 for axis in other_axes ) ) ) )
    f_spec, t_spec, ret_test = sig.spectrogram( x[test_slice], **kwargs )
    
    # Form new axes: spectrogram times are offset by the original start time;
    # 'time' is moved to the last position and 'frequency' to the first
    ret_axes = OrderedDict( x.axes )
    ret_axes['time'] = t_spec + x.axes['time'][0]
    ret_axes.move_to_end( 'time', last = True )
    ret_axes['frequency'] = f_spec
    ret_axes.move_to_end( 'frequency', last = False )
    # Allocate full result
    ret = LabeledArray( axes = ret_axes )
    
    # Compute for each trial
    for x_cur, i in x.iter_over( other_axes, return_index = True ):
        cur_slice = functools.reduce( lambda a, b : a + b,
                                      tuple( zip( other_axes,
                                                  i ) ) )
        # f_cur/t_cur are recomputed per trial but assumed to match the test
        # run above; only the spectrogram values are stored
        f_cur, t_cur, ret_cur = sig.spectrogram( x_cur.array, **kwargs )
        ret[cur_slice] = ret_cur
    
    return ret
Example #19
0
    def settings_form_fields(self):
        """Settings form fields for the PayPal payment provider.

        When PayPal Connect is configured (connect client id present, no
        secret), either a read-only connected-account field is shown, or
        nothing at all while no account is connected yet.  Otherwise the
        manual API credential fields are offered.  Inherited fields are
        appended and the '_enabled' switch is moved to the front.
        """
        if self.settings.connect_client_id and not self.settings.secret:
            # PayPal connect
            if self.settings.connect_user_id:
                fields = [
                    ('connect_user_id',
                     forms.CharField(
                         label=_('PayPal account'),
                         disabled=True
                     )),
                ]
            else:
                # not connected yet: nothing to configure
                return {}
        else:
            # manual API credentials
            fields = [
                ('client_id',
                 forms.CharField(
                     label=_('Client ID'),
                     max_length=80,
                     min_length=80,
                     help_text=_('<a target="_blank" rel="noopener" href="{docs_url}">{text}</a>').format(
                         text=_('Click here for a tutorial on how to obtain the required keys'),
                         docs_url='https://docs.pretix.eu/en/latest/user/payments/paypal.html'
                     )
                 )),
                ('secret',
                 forms.CharField(
                     label=_('Secret'),
                     max_length=80,
                     min_length=80,
                 )),
                ('endpoint',
                 forms.ChoiceField(
                     label=_('Endpoint'),
                     initial='live',
                     choices=(
                         ('live', 'Live'),
                         ('sandbox', 'Sandbox'),
                     ),
                 )),
            ]

        d = OrderedDict(
            fields + list(super().settings_form_fields.items())
        )

        # the inherited '_enabled' switch goes first in the form
        d.move_to_end('_enabled', False)
        return d
Example #20
0
class SpriteGroupBase(SpriteGroupBase):
    """Sprite group backed by an OrderedDict, giving a stable render order."""

    def __init__(self):
        super().__init__()
        self._members = OrderedDict()  # sprite -> 0; key order is render order

    def __contains__(self, item):
        return item in self._members

    def __len__(self):
        return len(self._members)

    def __iter__(self):
        # BUG FIX: returning the keys *view* made iter(group) raise
        # TypeError ("iter() returned non-iterator"); return a real iterator.
        return iter(self._members)

    def add_internal(self, sprite):
        self._members[sprite] = 0

    def remove_internal(self, sprite):
        del self._members[sprite]

    def sprites(self):
        return self._members.keys()

    def add(self, sprite):
        # register membership on both the group and the sprite
        if sprite not in self._members:
            self.add_internal(sprite)
            sprite.add_internal(self)

    def remove(self, sprite):
        if sprite in self._members:
            self.remove_internal(sprite)
            sprite.remove_internal(self)

    def clear(self):
        # BUG FIX: iterate over a snapshot; removing entries while iterating
        # the live dict raised "dictionary changed size during iteration".
        for sprite in list(self._members):
            sprite.remove_internal(self)
            self.remove_internal(sprite)

    # support render order operations
    def move_sprite_to_back(self, sprite):
        self._members.move_to_end(sprite, last=True)

    def move_sprite_to_front(self, sprite):
        self._members.move_to_end(sprite, last=False)
Example #21
0
 def __new__(mcls, clsname, bases, namespace, **kargs):
     """Normalize the class namespace before MetaStruct consumes it.

     Guarantees an ordered namespace, a tuple-valued ``_fields`` entry,
     and one Field definition per field name, placed in declaration order.
     """
     namespace = OrderedDict(namespace)

     # _fields defaults to empty and is always stored as a tuple
     fields = tuple(namespace.get('_fields', ()))
     namespace['_fields'] = fields

     for fname in fields:
         if fname in namespace:
             # explicit definition provided: move it into field order
             namespace.move_to_end(fname)
         else:
             # no explicit definition: supply a default Field
             namespace[fname] = Field()

     return super().__new__(mcls, clsname, bases, namespace, **kargs)
def result_generator(count):
    """Yield metadata dicts for the first *count* restaurants on the page.

    Loads a saved inspection-results page, extracts per-restaurant metadata
    and score data, merges them, and moves the sorting key to the front of
    each dict before yielding it.
    """
    use_params = {
        'Inspection_Start': '01/26/2015',
        'Inspection_End': '201/26/2016',  # NOTE(review): looks like a typo for '01/26/2016'; unused while the live request below is commented out
        'Zip_Code': '98101'
    }
    # html = get_inspection_page(**use_params)
    html = load_inspection_page('inspection_page.html')
    parsed = parse_source(html)
    content_col = parsed.find("td", id="contentcol")
    data_list = restaurant_data_generator(content_col)
    for data_div in data_list[:int(count)]:
        metadata = OrderedDict(extract_restaurant_metadata(data_div))
        inspection_data = get_score_data(data_div)
        metadata.update(inspection_data)
        # check_sorting() presumably names the active sort field -- confirm;
        # that key is surfaced first in the yielded dict
        metadata.move_to_end(check_sorting(), last=False)
        print(metadata)
        yield metadata
Example #23
0
def build_dic(lines):
    """Build a word dictionary and tokenized lines from tab-separated input.

    Each line is expected to have at least two tab-separated fields; lines
    without a tab are removed from *lines* in place.  The second field is
    cleaned and expanded into bigrams plus single words.

    :param lines: list of raw input lines (mutated: tab-less lines removed)
    :return: (OrderedDict word -> 0 ordered by last occurrence,
              list of token lists, one per kept line)
    """
    dic = OrderedDict()
    final_lines = []
    print("Building dictionnary...")
    # BUG FIX: iterate over a snapshot -- removing from `lines` while
    # iterating it skipped the element following each removed line.
    for line in list(lines):
        parts = line.split("\t")
        if len(parts) == 1:
            lines.remove(line)  # keep the in-place cleanup callers may rely on
            continue
        cleaned = clean_string(parts[1])
        words = bigrams(cleaned) + cleaned.split()
        final_lines.append(words)
        for w in words:
            dic[w] = 0
            dic.move_to_end(w)  # order by most recent occurrence
    done()
    return (dic, final_lines)
Example #24
0
    def rules(self, node):
        """Return the list of decision rules on the path from the root to *node*.

        Rules are collected recursively from the parent and extended with the
        rule implied by the parent's split for this particular child; rules
        on the same attribute are merged instead of duplicated (except for
        discrete splits, which stay on separate lines).
        """
        if node != self.root:
            parent = self.parent(node)
            # Convert the parent list of rules into an ordered dict
            pr = OrderedDict([(r.attr_name, r) for r in self.rules(parent)])

            parent_attr = self.attribute(parent)
            # Get the parent attribute type
            parent_attr_cv = parent_attr.compute_value

            is_left_child = self.__left_child(parent) == node

            # The parent split variable is discrete
            if isinstance(parent_attr_cv, Indicator) and \
                    hasattr(parent_attr_cv.variable, 'values'):
                values = parent_attr_cv.variable.values
                attr_name = parent_attr_cv.variable.name
                # NOTE(review): `not x * y` binds as `not (x * y)` -- assumed
                # intentional (eq is True only for binary splits on the left
                # branch); confirm before simplifying.
                eq = not is_left_child * (len(values) != 2)
                value = values[abs(parent_attr_cv.value -
                                   is_left_child * (len(values) == 2))]
                new_rule = DiscreteRule(attr_name, eq, value)
                # Since discrete variables should appear in their own lines
                # they must not be merged, so the dict key is set with the
                # value, so the same keys can exist with different values
                # e.g. #legs ≠ 2 and #legs ≠ 4
                attr_name = attr_name + '_' + value
            # The parent split variable is continuous
            else:
                attr_name = parent_attr.name
                # left child takes the inclusive side of the threshold
                sign = not is_left_child
                value = self._tree.threshold[self.parent(node)]
                new_rule = ContinuousRule(attr_name, sign, value,
                                          inclusive=is_left_child)

            # Check if a rule with that attribute exists
            if attr_name in pr:
                # merged rule moves to the end: most recently refined last
                pr[attr_name] = pr[attr_name].merge_with(new_rule)
                pr.move_to_end(attr_name)
            else:
                pr[attr_name] = new_rule

            return list(pr.values())
        else:
            # the root carries no constraints
            return []
Example #25
0
class LRUCache:
    """Least-recently-used cache with -1 as the miss sentinel."""

    def __init__(self, capacity):
        self.capacity = capacity
        self.cache = OrderedDict()

    def get(self, key):
        """Return the value for *key*, or -1 on a miss.

        A hit (any stored value other than -1) refreshes the key's recency.
        """
        value = self.cache.get(key, -1)
        if value == -1:
            return -1
        self.cache.move_to_end(key, last=True)
        return value

    def set(self, key, value):
        """Store *key* -> *value*, evicting the LRU entry when full."""
        if self.cache.get(key, -1) != -1:
            # re-inserting an existing key: drop it so it lands at the MRU end
            del self.cache[key]
        elif len(self.cache) == self.capacity:
            self.cache.popitem(last=False)
        self.cache[key] = value
Example #26
0
class LocalCacheProxy:
    """LRU-buffered in-process proxy in front of the django cache.

    Only usable for values that never change: locally buffered entries are
    flushed only when the MapUpdate cache key changes.
    """

    def __init__(self, maxsize=128):
        self._maxsize = maxsize      # LRU capacity of the local buffer
        self._mapupdate = None       # last seen MapUpdate cache key
        self._items = OrderedDict()  # local LRU buffer, key -> value

    def get(self, key, default=None):
        """Return the value for *key*, checking the local buffer first."""
        if self._mapupdate is None:
            self._check_mapupdate()
        if key in self._items:
            # local hit: refresh recency
            self._items.move_to_end(key, last=True)
            return self._items[key]
        # not buffered locally: ask the django cache
        fetched = cache.get(key, default=NoneFromCache)
        if fetched is NoneFromCache:
            return default
        # remember the value locally for next time
        self._items[key] = fetched
        self._prune()
        return fetched

    def _prune(self):
        """Evict the oldest buffered entries until within maxsize."""
        while len(self._items) > self._maxsize:
            self._items.popitem(last=False)

    def _check_mapupdate(self):
        """Flush the local buffer whenever the map data changed."""
        mapupdate = MapUpdate.current_cache_key()
        if self._mapupdate != mapupdate:
            self._items = OrderedDict()
            self._mapupdate = mapupdate

    def set(self, key, value, expire):
        """Write *key* through to the django cache and the local buffer."""
        self._check_mapupdate()
        cache.set(key, value, expire)
        self._items[key] = value
        self._prune()
Example #27
0
def cleanup(raw_dict):
    """Translate keys, fix values, drop irrelevant entries and order the rest.

    Final ordering: 'points' (if present) then 'id' first; 'site' (if
    present) and 'syllabus' last.  Returns {} when nothing relevant remains.
    """
    od = OrderedDict()
    for raw_key, raw_value in raw_dict.items():
        key = trans[raw_key]
        if key in irrelevant:
            continue
        od[key] = fix(key, raw_value)
    if not od:
        return {}
    if 'points' in od:
        od.move_to_end('points', last=False)
    od.move_to_end('id', last=False)
    if 'site' in od:
        od.move_to_end('site')
    od.move_to_end('syllabus')
    return od
Example #28
0
class LRUCache:
    """Bounded cache keeping entries in order of last access/insertion."""

    def __init__(self, size):
        self.od = OrderedDict()
        self.size = size

    def get(self, key, default=None):
        """Return the value for *key* (marking it most recent) or *default*."""
        if key not in self.od:
            return default
        self.od.move_to_end(key)
        return self.od[key]

    def __setitem__(self, key, value):
        """Add or refresh *key*, evicting the oldest entry when at capacity."""
        if key in self.od:
            # refresh: drop the old slot so the key lands at the MRU end
            del self.od[key]
        elif len(self.od) == self.size:
            self.od.popitem(last=False)
        self.od[key] = value
def choicelist_queryset_to_translated_dict(queryset,language_code,ordered=True,id_prefix='_',shortlist=False,choices_to_exclude=None):
    """Build (prefixed machine value -> translated name) choices from a queryset.

    The name field is chosen from ``settings.LANGUAGES`` via *language_code*
    (falling back to ``english_name`` when the language is unknown or the
    model lacks the localized field).  Choices listed in *choices_to_exclude*
    are skipped.  Results are sorted by display name.

    Returns an ``OrderedDict`` when *ordered* is true, else a list of pairs.
    Unless *shortlist* is true, the pseudo-choices '-' (``<prefix>0``) and
    'N/A' (``<prefix>1``) are prepended.
    """
    codes_to_adjectives = dict(settings.LANGUAGES)

    if language_code not in codes_to_adjectives:
        adjective = 'english'
    else:
        adjective = codes_to_adjectives[language_code].lower()

    try:
        raw_choice_list = [
            (id_prefix + str(choice.machine_value), getattr(choice, adjective + '_name'))
            for choice in queryset
            if choices_to_exclude is None or choice not in choices_to_exclude
        ]
    except AttributeError:
        # The model has no '<language>_name' field: fall back to English.
        # BUG FIX: rebuild the list from scratch — the original kept the
        # entries appended before the failure and then re-iterated the whole
        # queryset, duplicating them.
        raw_choice_list = [
            (id_prefix + str(choice.machine_value), getattr(choice, 'english_name'))
            for choice in queryset
            if choices_to_exclude is None or choice not in choices_to_exclude
        ]

    # Sort once by display name; both branches below need the same order.
    sorted_choices = sorted(raw_choice_list, key=lambda pair: pair[1])

    if ordered:
        sorted_choice_list = OrderedDict(sorted_choices)
        if not shortlist:
            # Prepend the 'N/A' and '-' pseudo-choices (in that insertion
            # order, so '-' ends up first).
            sorted_choice_list.update({id_prefix + '1': 'N/A'})
            sorted_choice_list.move_to_end(id_prefix + '1', last=False)
            sorted_choice_list.update({id_prefix + '0': '-'})
            sorted_choice_list.move_to_end(id_prefix + '0', last=False)
        return sorted_choice_list

    if shortlist:
        return sorted_choices
    return [(id_prefix + '0', '-'), (id_prefix + '1', 'N/A')] + sorted_choices
def merge_obj(result, obj, position=None):
    """Recursively merge *obj* into *result* and return the merged mapping.

    Rules:
      - a non-dict *obj* replaces the result outright (returned as-is);
      - a value of ``None`` deletes the corresponding key from *result*;
      - nested dicts are merged recursively;
      - with ``position='first'`` every *new* key is moved to the front of
        the OrderedDict (on Python < 3.2 via the external ``move_to_start``
        helper); with ``'last'``/``None`` insertion order is kept.

    When *position* is given, both mappings must be ``OrderedDict``s,
    otherwise ``ValueError`` is raised.

    NOTE(review): if *result* is not a dict it is replaced with a fresh
    (Ordered)dict, so the caller's original object is not mutated in that
    case.
    """
    if not isinstance(result, dict):
        result = OrderedDict() if position else {}

    # A non-dict value simply replaces whatever was there.
    if not isinstance(obj, dict):
        return obj

    if position:
        if position not in ('first', 'last'):
            raise ValueError("position can either be first or last")
        if not isinstance(result, OrderedDict) or not isinstance(obj, OrderedDict):
            raise ValueError("If using position all dicts need to be OrderedDicts")

    for key, value in obj.items():
        if isinstance(value, dict):
            # Merge nested dicts recursively, creating the target on demand.
            target = result.get(key)
            if isinstance(target, dict):
                merge_obj(target, value, position)
                continue
            result[key] = OrderedDict() if position else {}
            if position and position == 'first':
                # Newly created nested dict goes to the front.
                if sys.version_info >= (3, 2):
                    result.move_to_end(key, False)
                else:
                    move_to_start(result, key)
            merge_obj(result[key], value, position)
            continue
        if value is None:
            # None is a deletion marker, not a stored value.
            result.pop(key, None)
            continue
        if key not in result and position == 'first':
            # New scalar key with position='first': insert, then move front.
            result[key] = value
            if sys.version_info >= (3, 2):
                result.move_to_end(key, False)
            else:
                move_to_start(result, key)
        else:
            # Existing key keeps its position; new key appends at the end.
            result[key] = value

    return result
Exemple #31
0
class BufferPool:
    """
  A buffer pool implementation.

  Since the buffer pool is a cache, we do not provide any serialization methods.

  >>> schema = DBSchema('employee', [('id', 'int'), ('age', 'int')])
  >>> bp = BufferPool()
  >>> fm = Storage.FileManager.FileManager(bufferPool=bp)
  >>> bp.setFileManager(fm)

  # Check initial buffer pool size
  >>> len(bp.pool.getbuffer()) == bp.poolSize
  True

  """

    # Default to a 10 MB buffer pool.
    defaultPoolSize = 10 * (1 << 20)

    def __init__(self, **kwargs):
        """Create the pool.

        Keyword arguments (with defaults if not present):
          pageSize : page size in bytes (default: io.DEFAULT_BUFFER_SIZE)
          poolSize : total pool size in bytes (default: defaultPoolSize)
        """
        self.pageSize = kwargs.get("pageSize", io.DEFAULT_BUFFER_SIZE)
        self.poolSize = kwargs.get("poolSize", BufferPool.defaultPoolSize)
        # One contiguous backing buffer; pages are fixed-size slices of it.
        self.pool = io.BytesIO(b'\x00' * self.poolSize)

        # frames   : frame offset -> resident page (None when the frame is free)
        self.frames = {
            offset: None
            for offset in range(0, self.poolSize, self.pageSize)
        }
        # backward : pageId -> frame offset, for resident pages only
        self.backward = dict()
        # Offsets of free frames, consumed front-first.
        self.freeList = list(self.frames.keys())
        # LRU bookkeeping: pageId -> offset, least-recently-used first.
        self.replaceQ = OrderedDict()

    def setFileManager(self, fileMgr):
        """Attach the file manager used to read/write pages on disk."""
        self.fileMgr = fileMgr

    # Basic statistics

    def numPages(self):
        """Total number of frames in the pool."""
        return math.floor(self.poolSize / self.pageSize)

    def numFreePages(self):
        """Number of currently unoccupied frames."""
        return len(self.freeList)

    def size(self):
        """Pool capacity in bytes."""
        return self.poolSize

    def freeSpace(self):
        """Unoccupied capacity in bytes."""
        return self.numFreePages() * self.pageSize

    def usedSpace(self):
        """Occupied capacity in bytes."""
        return self.size() - self.freeSpace()

    # Buffer pool operations

    def hasPage(self, pageId):
        """Return whether *pageId* is resident in the pool."""
        return (pageId in self.backward)

    def getPage(self, pageId):
        """Return the in-memory page for *pageId*, loading it on a miss.

        On a hit the page is marked most-recently-used.  On a miss the LRU
        page is evicted first if no frame is free, then the page is read
        from disk into the freed frame.  (Single requestor is assumed, so
        no pinning and no concurrency handling.)
        """
        if self.hasPage(pageId):
            # Cache hit: refresh the LRU position.
            self.replaceQ.move_to_end(pageId, last=True)
        else:
            # Cache miss: ensure a free frame exists, then load into it.
            # This unifies the two previously duplicated miss branches —
            # evictPage() guarantees at least one entry in freeList.
            if not self.freeList:
                self.evictPage()
            offset = self.freeList.pop(0)
            buffer = self.pool.getbuffer()[offset:(offset + self.pageSize)]
            page = self.fileMgr.readPage(pageId, buffer)
            # Register the page in all bookkeeping structures.
            self.frames[offset] = page
            self.backward[pageId] = offset
            self.replaceQ[pageId] = offset

        return self.frames[self.backward[pageId]]

    def discardPage(self, pageId):
        """Drop *pageId* from the pool WITHOUT flushing it to disk,
        returning its frame to the free list."""
        offset = self.backward.pop(pageId)
        # Clear the stale frame reference so the evicted page can be GC'd.
        self.frames[offset] = None
        self.freeList.append(offset)
        self.replaceQ.pop(pageId)

    def flushPage(self, pageId):
        """Write the resident page for *pageId* back to disk."""
        offset = self.backward[pageId]
        self.fileMgr.writePage(self.frames[offset])

    # Evict using LRU policy.
    # LRU is implemented with an OrderedDict: pages are moved to the end on
    # every access in getPage(), so the front entry is the LRU victim.
    def evictPage(self):
        """Evict the least-recently-used page, flushing it first if dirty.

        Frees exactly one frame.  (Page pins are ignored — single requestor.)
        """
        (pageId, offset) = self.replaceQ.popitem(last=False)
        # BUG FIX: the dirty check must inspect the page object in the frame;
        # the original called .isDirty() on self.backward[pageId], which is
        # the integer frame offset and would raise AttributeError.
        if self.frames[offset].isDirty():
            # Flush while self.backward still maps pageId -> offset.
            self.flushPage(pageId)
        # Release the frame and drop all bookkeeping for the page.
        self.backward.pop(pageId, None)
        self.frames[offset] = None
        self.freeList.append(offset)
Exemple #32
0
 def settings_form_fields(self):
     """Return the plugin's settings form fields as an OrderedDict.

     Extends the parent's fields with the generic ``BankTransfer`` fields
     plus an 'omit_hyphen' option, then moves the shared fields to the
     front.  Successive ``move_to_end(..., last=False)`` calls produce the
     reverse of the call order, so '_enabled' ends up first, then 'ack',
     'bank_details_type', the SEPA fields, and 'bank_details'.
     """
     d = OrderedDict(
         list(super().settings_form_fields.items()) +
         list(BankTransfer.form_fields().items()) + [
             ('omit_hyphen',
              forms.BooleanField(label=_(
                  'Do not include a hypen in the payment reference.'),
                                 help_text=_(
                                     'This is required in some countries.'),
                                  required=False)),
         ])
     d.move_to_end('bank_details', last=False)
     d.move_to_end('bank_details_sepa_bank', last=False)
     d.move_to_end('bank_details_sepa_bic', last=False)
     d.move_to_end('bank_details_sepa_iban', last=False)
     d.move_to_end('bank_details_sepa_name', last=False)
     d.move_to_end('bank_details_type', last=False)
     d.move_to_end('ack', last=False)
     d.move_to_end('_enabled', last=False)
     return d
Exemple #33
0
class HistoryList(MyTreeView, AcceptFileDragDrop):
    """Tree view showing the wallet's transaction history.

    Data flow: ``self.transactions`` (OrderedDict: txid -> history item
    dict) mirrors the wallet's full history; ``self.txid_to_items`` maps
    each txid to its row of QStandardItems.  Rows live in ``self.std_model``
    and are displayed through the ``HistorySortModel`` proxy so they can be
    sorted without reordering the source model.
    """
    filter_columns = [1, 2, 3]  # Date, Description, Amount
    # Custom item-data roles: the transaction hash attached to every cell,
    # and the raw value used by the sort proxy.
    TX_HASH_ROLE = Qt.UserRole
    SORT_ROLE = Qt.UserRole + 1

    def should_hide(self, proxy_row):
        """Return True when the row's date falls outside the selected
        start/end timestamp window, False when it is inside.

        NOTE(review): implicitly returns None (falsy) when no window is
        active — callers appear to treat that the same as False.
        """
        if self.start_timestamp and self.end_timestamp:
            item = self.item_from_coordinate(proxy_row, 0)
            txid = item.data(self.TX_HASH_ROLE)
            date = self.transactions[txid]['date']
            if date:
                in_interval = self.start_timestamp <= date <= self.end_timestamp
                if not in_interval:
                    return True
            return False

    def __init__(self, parent=None):
        super().__init__(parent, self.create_menu, 2)
        self.std_model = QStandardItemModel(self)
        self.proxy = HistorySortModel(self)
        self.proxy.setSourceModel(self.std_model)
        self.setModel(self.proxy)

        # txid -> list of QStandardItems making up the row.
        self.txid_to_items = {}
        # txid -> history item dict, in wallet history order.
        self.transactions = OrderedDict()
        self.summary = {}
        self.blue_brush = QBrush(QColor("#1E1EFF"))
        self.red_brush = QBrush(QColor("#BC1E1E"))
        self.monospace_font = QFont(MONOSPACE_FONT)
        self.config = parent.config
        AcceptFileDragDrop.__init__(self, ".txn")
        self.setSortingEnabled(True)
        # Optional date-filter window; None means "no filter".
        self.start_timestamp = None
        self.end_timestamp = None
        self.years = []
        self.create_toolbar_buttons()

        self.wallet = self.parent.wallet  # type: Abstract_Wallet
        fx = self.parent.fx
        r = self.wallet.get_full_history(domain=self.get_domain(),
                                         from_timestamp=None,
                                         to_timestamp=None,
                                         fx=fx)
        self.transactions.update([(x['txid'], x) for x in r['transactions']])
        self.summary = r['summary']
        if not self.years and self.transactions:
            # Derive the selectable year range from the first and last
            # transactions (history is assumed chronologically ordered).
            start_date = next(iter(
                self.transactions.values())).get('date') or date.today()
            end_date = next(iter(reversed(
                self.transactions.values()))).get('date') or date.today()
            self.years = [
                str(i) for i in range(start_date.year, end_date.year + 1)
            ]
            self.period_combo.insertItems(1, self.years)
        if fx: fx.history_used_spot = False
        self.refresh_headers()
        for tx_item in self.transactions.values():
            self.insert_tx(tx_item)
        self.sortByColumn(0, Qt.AscendingOrder)

    #def on_activated(self, idx: QModelIndex):
    #    # TODO use siblingAtColumn when min Qt version is >=5.11
    #    self.edit(idx.sibling(idx.row(), 2))

    def format_date(self, d):
        """Return *d* as an ISO date string, or the localized 'None'."""
        return str(datetime.date(d.year, d.month, d.day)) if d else _('None')

    def refresh_headers(self):
        """Rebuild column headers, adding or removing the fiat value /
        acquisition-price / capital-gains columns per the fx settings."""
        headers = ['', _('Date'), _('Description'), _('Amount'), _('Balance')]
        fx = self.parent.fx
        if fx and fx.show_history():
            headers.extend(['%s ' % fx.ccy + _('Value')])
            self.editable_columns |= {5}
            if fx.get_history_capital_gains_config():
                headers.extend(['%s ' % fx.ccy + _('Acquisition price')])
                headers.extend(['%s ' % fx.ccy + _('Capital Gains')])
        else:
            self.editable_columns -= {5}
        col_count = self.std_model.columnCount()
        diff = col_count - len(headers)
        if col_count > len(headers):
            # Model has surplus (fiat) columns: remove them and trim the
            # cached item rows to match.
            if diff == 2:
                self.std_model.removeColumns(6, diff)
            else:
                assert diff in [1, 3]
                self.std_model.removeColumns(5, diff)
            for items in self.txid_to_items.values():
                # NOTE(review): trims rows only down to the OLD column count
                # (col_count); comparing against len(headers) may have been
                # intended — confirm.
                while len(items) > col_count:
                    items.pop()
        elif col_count < len(headers):
            # New columns needed: start from scratch; rows get re-inserted
            # by the next update().
            self.std_model.clear()
            self.txid_to_items.clear()
            self.transactions.clear()
            self.summary.clear()
        self.update_headers(headers, self.std_model)

    def get_domain(self):
        '''Replaced in address_dialog.py'''
        return self.wallet.get_addresses()

    def on_combo(self, x):
        """Handle period-combo selection: set the start/end timestamp
        window ('All', a specific year, or 'Custom') and re-filter rows."""
        s = self.period_combo.itemText(x)
        x = s == _('Custom')
        # The manual date pickers only make sense for the 'Custom' period.
        self.start_button.setEnabled(x)
        self.end_button.setEnabled(x)
        if s == _('All'):
            self.start_timestamp = None
            self.end_timestamp = None
            self.start_button.setText("-")
            self.end_button.setText("-")
        else:
            try:
                year = int(s)
            except:
                return
            # A year selection covers [Jan 1 of year, Jan 1 of year+1].
            self.start_timestamp = start_date = datetime.datetime(year, 1, 1)
            self.end_timestamp = end_date = datetime.datetime(year + 1, 1, 1)
            self.start_button.setText(
                _('From') + ' ' + self.format_date(start_date))
            self.end_button.setText(_('To') + ' ' + self.format_date(end_date))
        self.hide_rows()

    def create_toolbar_buttons(self):
        """Create the period combo box and the start/end date buttons."""
        self.period_combo = QComboBox()
        self.start_button = QPushButton('-')
        self.start_button.pressed.connect(self.select_start_date)
        self.start_button.setEnabled(False)
        self.end_button = QPushButton('-')
        self.end_button.pressed.connect(self.select_end_date)
        self.end_button.setEnabled(False)
        self.period_combo.addItems([_('All'), _('Custom')])
        self.period_combo.activated.connect(self.on_combo)

    def get_toolbar_buttons(self):
        """Return the widgets to be placed in the history toolbar."""
        return self.period_combo, self.start_button, self.end_button

    def on_hide_toolbar(self):
        """Clear the date filter when the toolbar is hidden."""
        self.start_timestamp = None
        self.end_timestamp = None
        self.hide_rows()

    def save_toolbar_state(self, state, config):
        """Persist the toolbar visibility flag in the wallet config."""
        config.set_key('show_toolbar_history', state)

    def select_start_date(self):
        """Ask for a start date via calendar dialog and re-filter."""
        self.start_timestamp = self.select_date(self.start_button)
        self.hide_rows()

    def select_end_date(self):
        """Ask for an end date via calendar dialog and re-filter."""
        self.end_timestamp = self.select_date(self.end_button)
        self.hide_rows()

    def select_date(self, button):
        """Show a modal calendar dialog; return the picked day as a
        datetime (midnight) and update *button*'s label, or None if
        cancelled or no date was clicked."""
        d = WindowModalDialog(self, _("Select date"))
        d.setMinimumSize(600, 150)
        d.date = None
        vbox = QVBoxLayout()

        def on_date(date):
            d.date = date

        cal = QCalendarWidget()
        cal.setGridVisible(True)
        cal.clicked[QDate].connect(on_date)
        vbox.addWidget(cal)
        vbox.addLayout(Buttons(OkButton(d), CancelButton(d)))
        d.setLayout(vbox)
        if d.exec_():
            if d.date is None:
                return None
            date = d.date.toPyDate()
            button.setText(self.format_date(date))
            return datetime.datetime(date.year, date.month, date.day)

    def show_summary(self):
        """Show a modal dialog summarizing the history (balances, income,
        expenditures, capital gains) from ``self.summary``."""
        h = self.summary
        if not h:
            self.parent.show_message(_("Nothing to summarize."))
            return
        start_date = h.get('start_date')
        end_date = h.get('end_date')
        format_amount = lambda x: self.parent.format_amount(
            x.value) + ' ' + self.parent.base_unit()
        d = WindowModalDialog(self, _("Summary"))
        d.setMinimumSize(600, 150)
        vbox = QVBoxLayout()
        grid = QGridLayout()
        grid.addWidget(QLabel(_("Start")), 0, 0)
        grid.addWidget(QLabel(self.format_date(start_date)), 0, 1)
        grid.addWidget(QLabel(str(h.get('start_fiat_value')) + '/BTC'), 0, 2)
        grid.addWidget(QLabel(_("Initial balance")), 1, 0)
        grid.addWidget(QLabel(format_amount(h['start_balance'])), 1, 1)
        grid.addWidget(QLabel(str(h.get('start_fiat_balance'))), 1, 2)
        grid.addWidget(QLabel(_("End")), 2, 0)
        grid.addWidget(QLabel(self.format_date(end_date)), 2, 1)
        grid.addWidget(QLabel(str(h.get('end_fiat_value')) + '/BTC'), 2, 2)
        grid.addWidget(QLabel(_("Final balance")), 4, 0)
        grid.addWidget(QLabel(format_amount(h['end_balance'])), 4, 1)
        grid.addWidget(QLabel(str(h.get('end_fiat_balance'))), 4, 2)
        grid.addWidget(QLabel(_("Income")), 5, 0)
        grid.addWidget(QLabel(format_amount(h.get('income'))), 5, 1)
        grid.addWidget(QLabel(str(h.get('fiat_income'))), 5, 2)
        grid.addWidget(QLabel(_("Expenditures")), 6, 0)
        grid.addWidget(QLabel(format_amount(h.get('expenditures'))), 6, 1)
        grid.addWidget(QLabel(str(h.get('fiat_expenditures'))), 6, 2)
        grid.addWidget(QLabel(_("Capital gains")), 7, 0)
        grid.addWidget(QLabel(str(h.get('capital_gains'))), 7, 2)
        grid.addWidget(QLabel(_("Unrealized gains")), 8, 0)
        grid.addWidget(QLabel(str(h.get('unrealized_gains', ''))), 8, 2)
        vbox.addLayout(grid)
        vbox.addLayout(Buttons(CloseButton(d)))
        d.setLayout(vbox)
        d.exec_()

    def plot_history_dialog(self):
        """Plot the history with matplotlib, or explain why it can't."""
        if plot_history is None:
            self.parent.show_message(
                _("Can't plot history.") + '\n' +
                _("Perhaps some dependencies are missing...") +
                " (matplotlib?)")
            return
        try:
            plt = plot_history(list(self.transactions.values()))
            plt.show()
        except NothingToPlotException as e:
            self.parent.show_message(str(e))

    def insert_tx(self, tx_item):
        """Append a row for *tx_item* to the model and register it in
        ``self.txid_to_items``; also fills fiat columns when enabled."""
        fx = self.parent.fx
        tx_hash = tx_item['txid']
        height = tx_item['height']
        conf = tx_item['confirmations']
        timestamp = tx_item['timestamp']
        value = tx_item['value'].value
        balance = tx_item['balance'].value
        label = tx_item['label']
        tx_mined_status = TxMinedStatus(height, conf, timestamp, None)
        status, status_str = self.wallet.get_tx_status(tx_hash,
                                                       tx_mined_status)
        has_invoice = self.wallet.invoices.paid.get(tx_hash)
        v_str = self.parent.format_amount(value,
                                          is_diff=True,
                                          whitespaces=True)
        balance_str = self.parent.format_amount(balance, whitespaces=True)
        entry = ['', status_str, label, v_str, balance_str]
        item = [QStandardItem(e) for e in entry]
        # Sort on the raw numeric values, not the formatted strings.
        item[3].setData(value, self.SORT_ROLE)
        item[4].setData(balance, self.SORT_ROLE)
        if has_invoice:
            item[2].setIcon(self.icon_cache.get(":icons/seal"))
        for i in range(len(entry)):
            self.set_item_properties(item[i], i, tx_hash)
        if value and value < 0:
            # Outgoing transactions are shown in red.
            item[2].setForeground(self.red_brush)
            item[3].setForeground(self.red_brush)
        self.txid_to_items[tx_hash] = item
        self.update_item(tx_hash, self.wallet.get_tx_height(tx_hash))
        source_row_idx = self.std_model.rowCount()
        self.std_model.insertRow(source_row_idx, item)
        new_idx = self.std_model.index(source_row_idx, 0)
        history = fx.show_history()
        if history:
            self.update_fiat(tx_hash, tx_item)
        self.hide_row(self.proxy.mapFromSource(new_idx).row())

    def set_item_properties(self, item, i, tx_hash):
        """Apply per-column alignment/font/editability and attach the
        txid to the cell (column index *i*)."""
        if i > 2:
            item.setTextAlignment(Qt.AlignRight | Qt.AlignVCenter)
        if i != 1:
            item.setFont(self.monospace_font)
        item.setEditable(i in self.editable_columns)
        item.setData(tx_hash, self.TX_HASH_ROLE)

    def ensure_fields_available(self, items, idx, txid):
        """Extend the cached row *items* with model items up to column
        *idx*, creating/configuring any that were not cached yet."""
        while len(items) < idx + 1:
            row = list(self.transactions.keys()).index(txid)
            qidx = self.std_model.index(row, len(items))
            assert qidx.isValid(), (self.std_model.columnCount(), idx)
            item = self.std_model.itemFromIndex(qidx)
            self.set_item_properties(item, len(items), txid)
            items.append(item)

    @profiler
    def update(self):
        """Reconcile the view with the wallet's current full history.

        Fast path: exactly one new transaction appended.  Otherwise walk
        the fresh history, inserting/updating rows, then remove rows whose
        txid no longer appears.
        """
        fx = self.parent.fx
        r = self.wallet.get_full_history(domain=self.get_domain(),
                                         from_timestamp=None,
                                         to_timestamp=None,
                                         fx=fx)
        seen = set()
        history = fx.show_history()
        tx_list = list(self.transactions.values())
        if r['transactions'] == tx_list:
            # Nothing changed.
            return
        if r['transactions'][:-1] == tx_list:
            print_error('history_list: one new transaction')
            row = r['transactions'][-1]
            txid = row['txid']
            if txid not in self.transactions:
                self.transactions[txid] = row
                # (move_to_end is a no-op for a freshly inserted key)
                self.transactions.move_to_end(txid, last=True)
                self.insert_tx(row)
                return
            else:
                print_error(
                    'history_list: tx added but txid is already in list (weird), txid: ',
                    txid)
        for idx, row in enumerate(r['transactions']):
            txid = row['txid']
            seen.add(txid)
            if txid not in self.transactions:
                self.transactions[txid] = row
                self.transactions.move_to_end(txid, last=True)
                self.insert_tx(row)
                continue
            old = self.transactions[txid]
            if old == row:
                continue
            # Existing row whose data changed: refresh status, fiat and
            # balance columns, then update the stored dict in place.
            self.update_item(txid, self.wallet.get_tx_height(txid))
            if history:
                self.update_fiat(txid, row)
            balance_str = self.parent.format_amount(row['balance'].value,
                                                    whitespaces=True)
            self.txid_to_items[txid][4].setText(balance_str)
            self.txid_to_items[txid][4].setData(row['balance'].value,
                                                self.SORT_ROLE)
            old.clear()
            old.update(**row)
        # Remove rows for transactions that disappeared from the history.
        removed = 0
        l = list(enumerate(self.transactions.keys()))
        for idx, txid in l:
            if txid not in seen:
                del self.transactions[txid]
                del self.txid_to_items[txid]
                # Earlier removals shift the remaining rows up.
                items = self.std_model.takeRow(idx - removed)
                removed_txid = items[0].data(self.TX_HASH_ROLE)
                assert removed_txid == txid, (idx, removed)
                removed += 1
        self.apply_filter()

    def update_fiat(self, txid, row):
        """Fill/refresh the fiat value column (and, when capital gains are
        enabled, acquisition price and capital gain) for *txid*."""
        cap_gains = self.parent.fx.get_history_capital_gains_config()
        items = self.txid_to_items[txid]
        self.ensure_fields_available(items, 7 if cap_gains else 5, txid)
        if not row['fiat_default'] and row['fiat_value']:
            # User-overridden fiat values are highlighted in blue.
            items[5].setForeground(self.blue_brush)
        value_str = self.parent.fx.format_fiat(row['fiat_value'].value)
        items[5].setText(value_str)
        items[5].setData(row['fiat_value'].value, self.SORT_ROLE)
        # fixme: should use is_mine
        if row['value'].value < 0 and cap_gains:
            acq = row['acquisition_price'].value
            items[6].setText(self.parent.fx.format_fiat(acq))
            items[6].setData(acq, self.SORT_ROLE)
            cg = row['capital_gain'].value
            items[7].setText(self.parent.fx.format_fiat(cg))
            items[7].setData(cg, self.SORT_ROLE)

    def update_on_new_fee_histogram(self):
        pass
        # TODO update unconfirmed tx'es

    def on_edited(self, index, user_role, text):
        """Handle an in-place cell edit: column 2 sets the tx label,
        column 5 sets a manual fiat value."""
        row, column = index.row(), index.column()
        item = self.item_from_coordinate(row, column)
        key = item.data(self.TX_HASH_ROLE)
        # fixme
        if column == 2:
            self.wallet.set_label(key, text)
            self.update_labels()
            self.parent.update_completions()
        elif column == 5:
            tx_item = self.transactions[key]
            self.wallet.set_fiat_value(key, self.parent.fx.ccy, text,
                                       self.parent.fx, tx_item['value'].value)
            value = tx_item['value'].value
            if value is not None:
                # Recompute derived fiat fields after the manual override.
                fee = tx_item['fee']
                fiat_fields = self.wallet.get_tx_item_fiat(
                    key, value, self.parent.fx, fee.value if fee else None)
                tx_item.update(fiat_fields)
                self.update_fiat(key, tx_item)
        else:
            assert False

    def mouseDoubleClickEvent(self, event: QMouseEvent):
        """Double-click: edit an editable cell (default behaviour) or open
        the transaction dialog for a non-editable one."""
        idx = self.indexAt(event.pos())
        item = self.item_from_coordinate(idx.row(), idx.column())
        if not item or item.isEditable():
            super().mouseDoubleClickEvent(event)
        elif item:
            tx_hash = item.data(self.TX_HASH_ROLE)
            self.show_transaction(tx_hash)

    def show_transaction(self, tx_hash):
        """Open the transaction-details dialog for *tx_hash*."""
        tx = self.wallet.transactions.get(tx_hash)
        if not tx:
            return
        label = self.wallet.get_label(
            tx_hash
        ) or None  # prefer 'None' if not defined (force tx dialog to hide Description field if missing)
        self.parent.show_transaction(tx, label)

    def update_labels(self):
        """Re-read every row's label from the wallet (column 2)."""
        root = self.std_model.invisibleRootItem()
        child_count = root.rowCount()
        for i in range(child_count):
            item = root.child(i, 2)
            txid = item.data(self.TX_HASH_ROLE)
            label = self.wallet.get_label(txid)
            item.setText(label)

    def update_item(self, tx_hash, tx_mined_status):
        """Refresh the status icon/tooltip (column 0) and status text
        (column 1) for *tx_hash* from its mined status."""
        conf = tx_mined_status.conf
        status, status_str = self.wallet.get_tx_status(tx_hash,
                                                       tx_mined_status)
        icon = self.icon_cache.get(":icons/" + TX_ICONS[status])
        if tx_hash not in self.txid_to_items:
            return
        items = self.txid_to_items[tx_hash]
        items[0].setIcon(icon)
        items[0].setToolTip(
            str(conf) + _(" confirmation" + ("s" if conf != 1 else "")))
        items[0].setData((status, conf), self.SORT_ROLE)
        items[1].setText(status_str)

    def create_menu(self, position: QPoint):
        """Build and show the context menu for the row under *position*
        (copy, edit, details, RBF/CPFP, invoice, block explorer)."""
        org_idx: QModelIndex = self.indexAt(position)
        idx = self.proxy.mapToSource(org_idx)
        item: QStandardItem = self.std_model.itemFromIndex(idx)
        assert item, 'create_menu: index not found in model'
        tx_hash = idx.data(self.TX_HASH_ROLE)
        column = idx.column()
        assert tx_hash, "create_menu: no tx hash"
        tx = self.wallet.transactions.get(tx_hash)
        assert tx, "create_menu: no tx"
        if column == 0:
            column_title = _('Transaction ID')
            column_data = tx_hash
        else:
            column_title = self.std_model.horizontalHeaderItem(column).text()
            column_data = item.text()
        tx_URL = block_explorer_URL(self.config, 'tx', tx_hash)
        height = self.wallet.get_tx_height(tx_hash).height
        is_relevant, is_mine, v, fee = self.wallet.get_wallet_delta(tx)
        is_unconfirmed = height <= 0
        pr_key = self.wallet.invoices.paid.get(tx_hash)
        menu = QMenu()
        if height == TX_HEIGHT_LOCAL:
            menu.addAction(_("Remove"), lambda: self.remove_local_tx(tx_hash))
        menu.addAction(
            _("Copy {}").format(column_title),
            lambda: self.parent.app.clipboard().setText(column_data))
        for c in self.editable_columns:
            label = self.std_model.horizontalHeaderItem(c).text()
            # TODO use siblingAtColumn when min Qt version is >=5.11
            persistent = QPersistentModelIndex(
                org_idx.sibling(org_idx.row(), c))
            menu.addAction(_("Edit {}").format(label),
                           lambda p=persistent: self.edit(QModelIndex(p)))
        menu.addAction(_("Details"), lambda: self.show_transaction(tx_hash))
        if is_unconfirmed and tx:
            # note: the current implementation of RBF *needs* the old tx fee
            rbf = is_mine and not tx.is_final() and fee is not None
            if rbf:
                menu.addAction(_("Increase fee"),
                               lambda: self.parent.bump_fee_dialog(tx))
            else:
                child_tx = self.wallet.cpfp(tx, 0)
                if child_tx:
                    menu.addAction(_("Child pays for parent"),
                                   lambda: self.parent.cpfp(tx, child_tx))
        if pr_key:
            menu.addAction(self.icon_cache.get(":icons/seal"),
                           _("View invoice"),
                           lambda: self.parent.show_invoice(pr_key))
        if tx_URL:
            menu.addAction(_("View on block explorer"),
                           lambda: webbrowser.open(tx_URL))
        menu.exec_(self.viewport().mapToGlobal(position))

    def remove_local_tx(self, delete_tx):
        """Remove a local (unbroadcast) transaction and all transactions
        that depend on it, after user confirmation."""
        to_delete = {delete_tx}
        to_delete |= self.wallet.get_depending_transactions(delete_tx)
        question = _("Are you sure you want to remove this transaction?")
        if len(to_delete) > 1:
            question = _(
                "Are you sure you want to remove this transaction and {} child transactions?"
                .format(len(to_delete) - 1))
        answer = QMessageBox.question(self.parent, _("Please confirm"),
                                      question, QMessageBox.Yes,
                                      QMessageBox.No)
        if answer == QMessageBox.No:
            return
        for tx in to_delete:
            self.wallet.remove_transaction(tx)
        self.wallet.save_transactions(write=True)
        # need to update at least: history_list, utxo_list, address_list
        self.parent.need_update.set()

    def onFileAdded(self, fn):
        """Drag-and-drop hook: import a .txn file into the wallet."""
        try:
            with open(fn) as f:
                tx = self.parent.tx_from_text(f.read())
                self.parent.save_transaction_into_wallet(tx)
        except IOError as e:
            self.parent.show_error(e)

    def export_history_dialog(self):
        """Ask for a target file and export the history to CSV or JSON."""
        d = WindowModalDialog(self, _('Export History'))
        d.setMinimumSize(400, 200)
        vbox = QVBoxLayout(d)
        defaultname = os.path.expanduser('~/electrum-history.csv')
        select_msg = _('Select file to export your wallet transactions to')
        hbox, filename_e, csv_button = filename_field(self, self.config,
                                                      defaultname, select_msg)
        vbox.addLayout(hbox)
        vbox.addStretch(1)
        hbox = Buttons(CancelButton(d), OkButton(d, _('Export')))
        vbox.addLayout(hbox)
        #run_hook('export_history_dialog', self, hbox)
        self.update()
        if not d.exec_():
            return
        filename = filename_e.text()
        if not filename:
            return
        try:
            self.do_export_history(filename, csv_button.isChecked())
        except (IOError, os.error) as reason:
            export_error_label = _(
                "Electrum was unable to produce a transaction export.")
            self.parent.show_critical(export_error_label + "\n" + str(reason),
                                      title=_("Unable to export history"))
            return
        self.parent.show_message(
            _("Your wallet history has been successfully exported."))

    def do_export_history(self, file_name, is_csv):
        """Write ``self.transactions`` to *file_name* as CSV (with a
        header row) when *is_csv*, otherwise as JSON."""
        history = self.transactions.values()
        lines = []
        if is_csv:
            for item in history:
                lines.append([
                    item['txid'],
                    item.get('label', ''), item['confirmations'],
                    item['value'],
                    item.get('fiat_value', ''),
                    item.get('fee', ''),
                    item.get('fiat_fee', ''), item['date']
                ])
        with open(file_name, "w+", encoding='utf-8') as f:
            if is_csv:
                import csv
                transaction = csv.writer(f, lineterminator='\n')
                transaction.writerow([
                    "transaction_hash", "label", "confirmations", "value",
                    "fiat_value", "fee", "fiat_fee", "timestamp"
                ])
                for line in lines:
                    transaction.writerow(line)
            else:
                from electrum.util import json_encode
                f.write(json_encode(history))
Exemple #34
0
class ThekeHistoryBar(Gtk.ButtonBox):
    """Bar of buttons giving quick access to recently visited documents.

    ``self.history`` is an OrderedDict mapping button label -> ThekeUri,
    ordered from oldest to most recently visited. The button for history
    entry i is child i + 1 of the bar (index 0 is presumably the home
    button from the Gtk template — the +1 offsets below rely on it).
    """

    __gtype_name__ = "ThekeHistoryBar"

    # Home button declared in the Gtk template.
    _home_button = Gtk.Template.Child()

    def __init__(self):
        super().__init__()
        # Click callback; wired later via set_button_clicked_callback().
        self.on_button_clicked = None

        # label -> ThekeUri, oldest first.
        self.history = OrderedDict()
        self.clipboard = Gtk.Clipboard.get(Gdk.SELECTION_CLIPBOARD)

        # Menu appearing right clicking on a button
        self.button_right_click_menu = Gtk.Menu()
        self.menu_copy_uri_to_clipboard = Gtk.MenuItem("Copier l'uri")
        self.menu_copy_uri_to_clipboard.connect(
            'activate', self.handle_menu_copy_uri_to_clipboard)

        self.button_right_click_menu.append(self.menu_copy_uri_to_clipboard)
        self.menu_copy_uri_to_clipboard.show()

        # Set the home button
        self._home_button.set_tooltip_text(
            theke.uri.inAppURI['welcome'].shortTitle)
        self._home_button.uri = home_uri

    def set_button_clicked_callback(self, on_button_clicked_callback):
        """Register the callback invoked when any history button is clicked."""
        self.on_button_clicked = on_button_clicked_callback
        self._home_button.connect('clicked', self.on_button_clicked)

    def add_uri_to_history(self, label, uri):
        """Add an uri to the HistoryBar.

        If *label* is already present, its button is moved to the end of
        the bar and its uri refreshed; otherwise a new button is created,
        evicting the oldest entry when MAX_NUMBER_OF_BUTTONS is reached.

        @param label: (string) label to print in the HistoryBar
        @param uri: (ThekeUri) Uri of the new item
        """

        if uri == home_uri:
            # The home uri is not added to the history as it is alway here.
            return

        try:
            historyIndex = list(self.history.keys()).index(label)
            button = self.get_children()[historyIndex + 1]

            # The visited uri is already in the history,
            # move its button at the end of the bar
            self.history.move_to_end(label)
            self.reorder_child(button, -1)

            # Update the uri
            # (necessary if, for example, sources changed)
            self.history[label] = uri
            button.uri = uri
            button.set_tooltip_text(str(uri))

        except ValueError:
            # This uri does not exist in the history
            if len(self.history) >= MAX_NUMBER_OF_BUTTONS:
                # Evict the oldest entry and its button (child 0 is kept).
                self.history.popitem(last=False)
                self.remove(self.get_children()[1])

            self.history[label] = uri

            button = Gtk.Button(label=label, use_underline=False)
            button.uri = uri
            button.set_tooltip_text(str(uri))

            button.connect('button-release-event', self.on_button_release)
            button.connect('clicked', self.on_button_clicked)
            button.show_all()

            self.pack_start(button, False, False, 0)

    def save_scrolled_value(self, label, value) -> None:
        """Save the scrolled value of the current document view
        """
        try:
            historyIndex = list(self.history.keys()).index(label)
            button = self.get_children()[historyIndex + 1]
            # Stored on the button itself; read back by get_scrolled_value().
            button.scrolledValue = value

        except ValueError:
            # This label does not exist in the history
            pass

    def get_scrolled_value(self, label) -> int:
        """Return the scrolled value save in the entry of the history
        """
        try:
            historyIndex = list(self.history.keys()).index(label)
            button = self.get_children()[historyIndex + 1]
            return button.scrolledValue

        except ValueError:
            # Unknown label: default to the top of the document.
            return 0

        except AttributeError:
            # Entry exists but no scrolled value was ever saved for it.
            return 0

    def on_button_release(self, button, event):
        """Show the context menu on right click; let other clicks through."""
        if event.type == Gdk.EventType.BUTTON_RELEASE:
            if event.button == 3:  # Right click
                self.button_right_click_menu.popup_at_widget(
                    button, Gdk.Gravity.SOUTH_WEST, Gdk.Gravity.NORTH_WEST,
                    None)
                # Remember which uri the menu action should operate on.
                self.menu_copy_uri_to_clipboard.uri = button.uri
                return True
            else:
                return False
        return False

    def handle_menu_copy_uri_to_clipboard(self, menu_item):
        """Copy the uri of the right-clicked button to the clipboard."""
        self.clipboard.set_text(menu_item.uri.get_encoded_URI(), -1)
Exemple #35
0
class ArgumentCache(object):
    """
    >>> cache = ArgumentCache()
    >>> "foo" in cache
    False
    >>> cache['foo']
    Traceback (most recent call last):
        ...
    KeyError: 'foo'
    >>> len(cache)
    0
    >>> key = cache.add("Hello, world!")
    >>> key
    'bea2c9d7fd040292e0424938af39f7d6334e8d8a'
    >>> cache[key]
    'Hello, world!'
    >>> key in cache
    True
    >>> len(cache)
    1
    >>> cache.get_missing([
    ...    ('bar', key),
    ...    ('baz', '1111111111111111111111111111111111111111'),
    ... ])
    ['baz']
    >>> cache.add_many(['value1', 'value2'])
    ['daf626c4ebd6bdd697e043111454304e5fb1459e', '849988af22dbd04d3e353caf77f9d81241ca9ee2']
    >>> cache['daf626c4ebd6bdd697e043111454304e5fb1459e']
    'value1'
    >>> cache['849988af22dbd04d3e353caf77f9d81241ca9ee2']
    'value2'
    >>> cache[key]
    'Hello, world!'
    >>> len(cache)
    3
    >>> cache.clear()
    >>> len(cache)
    0

    Size of ArgumentCache can be limited:

    >>> cache = ArgumentCache(0)
    Traceback (most recent call last):
        ...
    ValueError: maxsize must be greater than 0
    >>> cache = ArgumentCache(2)  # limit it to 2 elements
    >>> cache.add_many(['value1', 'value2'])
    ['daf626c4ebd6bdd697e043111454304e5fb1459e', '849988af22dbd04d3e353caf77f9d81241ca9ee2']
    >>> len(cache)
    2
    >>> cache.add("Hello, world!")
    'bea2c9d7fd040292e0424938af39f7d6334e8d8a'
    >>> len(cache)
    2
    >>> cache["bea2c9d7fd040292e0424938af39f7d6334e8d8a"]
    'Hello, world!'
    >>> cache['849988af22dbd04d3e353caf77f9d81241ca9ee2']
    'value2'
    >>> cache['daf626c4ebd6bdd697e043111454304e5fb1459e']
    Traceback (most recent call last):
        ...
    KeyError: 'daf626c4ebd6bdd697e043111454304e5fb1459e'
    >>> cache.add("foo")
    'd465e627f9946f2fa0d2dc0fc04e5385bc6cd46d'
    >>> len(cache)
    2
    >>> 'bea2c9d7fd040292e0424938af39f7d6334e8d8a' in cache
    False
    """
    def __init__(self, maxsize=None):
        # No explicit limit means unbounded capacity.
        limit = float("+inf") if maxsize is None else maxsize
        if limit <= 0:
            raise ValueError("maxsize must be greater than 0")
        self.maxsize = limit
        self._values = OrderedDict()

    def add(self, value):
        """Store *value* under its content hash and return the key.

        Re-adding an existing value refreshes its recency; otherwise the
        oldest entries are evicted until there is room.
        """
        key = self.get_key(value)
        values = self._values
        if key in values:
            # Drop and re-insert so the key moves to the "newest" end.
            del values[key]
        else:
            while len(values) >= self.maxsize:
                values.popitem(last=False)
        values[key] = value
        return key

    def __getitem__(self, key):
        # move_to_end raises KeyError for unknown keys, matching the
        # semantics of plain dict access while refreshing recency.
        values = self._values
        values.move_to_end(key)
        return values[key]

    def __contains__(self, key):
        # Membership testing does not refresh recency.
        return key in self._values

    def __len__(self):
        return len(self._values)

    def clear(self):
        """Forget every cached value."""
        self._values.clear()

    def get_missing(self, items):
        """Given (name, key) pairs, return the names whose key is absent."""
        missing = []
        for name, key in items:
            if key not in self:
                missing.append(name)
        return missing

    def add_many(self, values):
        """
        Add all values from ``values`` list to cache. Return a list of keys.
        """
        return list(map(self.add, values))

    @classmethod
    def get_key(cls, value):
        """Deterministic SHA1 key for any JSON-serializable value."""
        serialized = json.dumps(value, sort_keys=True, ensure_ascii=False)
        return hashlib.sha1(serialized.encode('utf8')).hexdigest()
Exemple #36
0
class LRUCache(MutableMapping[K, V]):
    """Thread-safe LRUCache based on an OrderedDict.

    All dict operations (__getitem__, __setitem__, __contains__) update the
    priority of the relevant key and take O(1) time. The dict is iterated over
    in order from the oldest to newest key, which means that a complete pass
    over the dict should not affect the order of any entries.

    When a new item is set and the maximum size of the cache is exceeded, the
    oldest item is dropped and called with ``on_evict(key, value)``.

    The ``maxsize`` property can be used to view or adjust the capacity of
    the cache, e.g., ``cache.maxsize = new_size``.
    """

    _cache: "OrderedDict[K, V]"
    _maxsize: int
    _lock: threading.RLock
    _on_evict: Optional[Callable[[K, V], Any]]

    __slots__ = ("_cache", "_lock", "_maxsize", "_on_evict")

    def __init__(
        self, maxsize: int, on_evict: Optional[Callable[[K, V], Any]] = None
    ):
        """
        Parameters
        ----------
        maxsize : int
            Integer maximum number of items to hold in the cache.
        on_evict: callable, optional
            Function to call like ``on_evict(key, value)`` when items are
            evicted.
        """
        if not isinstance(maxsize, int):
            raise TypeError("maxsize must be an integer")
        if maxsize < 0:
            raise ValueError("maxsize must be non-negative")
        self._maxsize = maxsize
        self._cache = OrderedDict()
        self._lock = threading.RLock()
        self._on_evict = on_evict

    def __getitem__(self, key: K) -> V:
        # record recent use of the key by moving it to the front of the list
        with self._lock:
            value = self._cache[key]
            self._cache.move_to_end(key)
            return value

    def _enforce_size_limit(self, capacity: int) -> None:
        """Shrink the cache if necessary, evicting the oldest items.
        """
        while len(self._cache) > capacity:
            key, value = self._cache.popitem(last=False)
            if self._on_evict is not None:
                self._on_evict(key, value)

    def __setitem__(self, key: K, value: V) -> None:
        with self._lock:
            if key in self._cache:
                # insert the new value at the end
                del self._cache[key]
                self._cache[key] = value
            elif self._maxsize:
                # make room if necessary
                self._enforce_size_limit(self._maxsize - 1)
                self._cache[key] = value
            elif self._on_evict is not None:
                # not saving, immediately evict
                self._on_evict(key, value)

    def __delitem__(self, key: K) -> None:
        # Take the lock so deletion is atomic with respect to concurrent
        # __setitem__/_enforce_size_limit (the class advertises itself as
        # thread-safe; this was the only unlocked mutator).
        with self._lock:
            del self._cache[key]

    def __iter__(self) -> Iterator[K]:
        # create a list, so accessing the cache during iteration cannot change
        # the iteration order
        return iter(list(self._cache))

    def __len__(self) -> int:
        return len(self._cache)

    @property
    def maxsize(self) -> int:
        """Maximum number of items can be held in the cache."""
        return self._maxsize

    @maxsize.setter
    def maxsize(self, size: int) -> None:
        """Resize the cache, evicting the oldest items if necessary."""
        if size < 0:
            raise ValueError("maxsize must be non-negative")
        with self._lock:
            self._enforce_size_limit(size)
            self._maxsize = size
Exemple #37
0
class GPGDatabase(object):
    """Ordered collection of key resources (public and secret).

    New/changed keys go to the *first* resource whose ``secret`` flag
    matches the key type; a primary resource is kept at the front of
    the ordering by register_resource().
    """

    _resources = None

    def __init__(self):
        self._resources = OrderedDict()

    def load_default_resources(self, force=False, read_only=False):
        """Register every resource listed in DEFAULT_RESOURCES."""
        for (filename, secret) in DEFAULT_RESOURCES:
            self.add_resource(filename, force=force, primary=False,
                              default=True, read_only=read_only,
                              secret=secret)

    def add_resource(self, filename, force=False, primary=False,
                     default=False, read_only=False, secret=False):
        """Load the resource stored at *filename* and register it."""
        resource = get_resource(filename, force, secret, read_only, default)
        resource = self.register_resource(resource.filename, resource, primary)
        return resource

    def _apply_key_op(self, key, op_name):
        """Dispatch ``resource.<op_name>(key)`` to the first resource whose
        secret flag matches the key type.

        Factors out the loops previously triplicated across add_key,
        delete_key and update_key. Raises TypeError when *key* is neither
        a transferable public nor secret key.
        """
        if isinstance(key, TransferablePublicKey):
            want_secret = False
        elif isinstance(key, TransferableSecretKey):
            want_secret = True
        else:
            raise TypeError
        for resource in self._resources.values():
            if bool(resource.secret) == want_secret:
                getattr(resource, op_name)(key)
                break

    def add_key(self, key):
        """Add *key* to the first matching (public/secret) resource."""
        self._apply_key_op(key, 'add_transferrable_key')

    def delete_key(self, key):
        """Delete *key* from the first matching resource."""
        self._apply_key_op(key, 'delete_transferrable_key')

    def update_key(self, key):
        """Update *key* in the first matching resource."""
        self._apply_key_op(key, 'update_transferrable_key')

    def register_resource(self, name, resource, primary):
        """Register *resource* under *name* and return the stored resource.

        Uses setdefault semantics: an already-registered name keeps its
        existing resource. A primary resource is moved to the front.
        """
        resource = self._resources.setdefault(name, resource)
        if primary:
            self._resources.move_to_end(name, last=False)
        return resource

    def _matches_user_id(self, key, user_id):
        """True when *user_id* is a case-insensitive substring of any of
        the key's user ids."""
        match = False
        for uid in key.user_ids:
            if user_id.lower() in uid.user_id.lower():
                match = True
                break
        return match

    def keys(self):
        """Yield every key held by every registered resource."""
        for resource in self._resources.values():
            for item in resource.keys():
                yield item

    def search(self, fingerprint=None, key_id=None, user_id=None):
        """Search all resources by fingerprint, key id and/or user id.

        With a fingerprint or key id, each resource is queried directly
        and the optional user_id further filters matches; with user_id
        alone, every key of every resource is scanned. Returns a list
        (empty when no criterion is given).
        """
        results = []
        if fingerprint is None and key_id is None and user_id is None:
            return results
        for resource in self._resources.values():
            if fingerprint or key_id:
                try:
                    key = resource.get_transferrable_key(fingerprint or key_id)
                except KeyError:
                    continue
                if user_id is not None:
                    if self._matches_user_id(key, user_id):
                        results.append(key)
                else:
                    results.append(key)
            else:
                # User ID only. Be really dumb and iterate.
                for key in resource.values():
                    if self._matches_user_id(key, user_id):
                        results.append(key)
        return results
class ProcessManager(object):
    def __init__(self,
                 label='default',
                 ros_host='localhost',
                 ros_port=11311,
                 gazebo_host='localhost',
                 gazebo_port=11345,
                 output_log_dir=None):
        """Set up logging, the ROS/Gazebo network configuration, the
        default stages (roscore, pre-simulation) and signal handlers.

        @param label: non-empty name for this manager instance
        @param ros_host/ros_port: address of the ROS master
        @param gazebo_host/gazebo_port: address of the Gazebo server
        @param output_log_dir: log directory; created if missing,
            defaults to get_log_dir() when None
        """
        assert isinstance(label, str)
        assert len(label) > 0

        self.__LABEL = label
        self._logger = create_logger(name='process_manager',
                                     output_dir=output_log_dir)
        self._log_dir = get_log_dir() \
            if output_log_dir is None else output_log_dir

        if not os.path.isdir(self._log_dir):
            os.makedirs(self._log_dir)

        self._ros_config = ROSConfig(ros_host=ros_host,
                                     ros_port=ros_port,
                                     gazebo_host=gazebo_host,
                                     gazebo_port=gazebo_port)

        # name -> task object; tasks are registered via init_task().
        self._tasks = dict()
        self._logger.info('Simulation manager')

        # Ordered stage name -> Stage; execution follows insertion order.
        self._stages = OrderedDict()
        # Per default, roscore is always the first stage
        self._stages['roscore'] = Stage('roscore')
        # Pre-simulation tasks must come after roscore, the list can be empty
        self._stages['pre-simulation'] = Stage('pre-simulation')
        # All tasks with no specified stage will be executed last
        self._current_stage = None
        # True if run_all_tasks is called
        self._is_running = Event()
        self._is_running.clear()
        # Run all tasks thread
        self._run_tasks_thread = None

        # Paths of rosbag files created by recording tasks.
        self._recording_filenames = list()

        # May raise ValueError when not called from the main thread.
        try:
            signal.signal(signal.SIGTERM, self._signal_handler)
            signal.signal(signal.SIGINT, self._signal_handler)
        except ValueError as ex:
            self._logger.error(
                'Failed to link signal handler, message={}'.format(ex))

    def __del__(self):
        """Kill every running task, unlock the reserved ROS/Gazebo ports
        and join the run-all-tasks thread (2 s timeout)."""
        self._logger.info('Killing process manager and all its tasks...')
        for task in self._tasks:
            if self.is_task_running(task):
                self._logger.info('Task <{}> still running'.format(task))
                self.kill_task(task)
                self._logger.info('Task <{}> killed'.format(task))
            else:
                self._logger.info('Task <{}> not running'.format(task))
        self._logger.info('Unlocking ports')
        self._logger.info(self._ros_config)
        self._ros_config.unlock_port(self._ros_config.ros_port)
        self._ros_config.unlock_port(self._ros_config.gazebo_port)

        if self._run_tasks_thread:
            self._run_tasks_thread.join(timeout=2)
            del self._run_tasks_thread

    @property
    def ros_config(self):
        """ROSConfig: ROS/Gazebo network configuration (read-only)."""
        return self._ros_config

    @property
    def stages(self):
        """OrderedDict: stage name -> Stage, in execution order."""
        return self._stages

    @property
    def current_stage(self):
        """Name of the stage being executed, or None before the first run."""
        return self._current_stage

    @property
    def recording_filenames(self):
        """list: rosbag paths registered by create_recording_task()."""
        return self._recording_filenames

    @property
    def log_dir(self):
        """str: directory where logs are written (created in __init__)."""
        return self._log_dir

    def _signal_handler(self, signal, handler):
        """Handle SIGTERM/SIGINT by tearing the manager down.

        NOTE(review): the first parameter shadows the ``signal`` module,
        and the second — despite the name — is the interrupted stack
        frame that Python passes to signal handlers, not a handler.
        Names are left untouched here to avoid breaking keyword callers.
        """
        self._logger.warning('SIGNAL RECEIVED=%d', int(signal))
        # Explicit __del__ call performs the full task/port cleanup.
        self.__del__()

    def _task_terminated_callback(self, name):
        """If the terminated task *name* was required, kill every other task."""
        if name not in self._tasks:
            return
        if not self._tasks[name].required:
            return
        self._logger.warning('Required task <{}> has been terminated,'
                             ' killing all tasks'.format(name))
        for other in self._tasks:
            if other == name:
                continue
            self.kill_task(other)
            self._logger.info('Task <{}> killed'.format(other))

    def _run_all_tasks(self):
        """Execute all configured stages in order.

        For each stage: check the start conditions, run the pre-stage
        functions, start the stage's tasks (run_stage), check the end
        conditions and run the post-stage functions. Any failure kills
        all tasks and aborts the sequence.

        Bug fix: the success message and ``self._is_running.set()`` now
        run only when every stage succeeded (``for``/``else``). The
        original executed them unconditionally after the loop, silently
        overriding the ``clear()`` done on every failure path.
        """
        assert len(self._stages) > 0, 'No stages found'
        assert list(self._stages.keys())[0] == 'roscore', 'First stage must' \
            ' be roscore'
        # TODO: Set the task timeout as configurable
        timeout = 30
        self._is_running.clear()
        for idx, stage in enumerate(self._stages):
            self._logger.info('Starting #{} stage={}'.format(idx, stage))
            self._logger.info('  Tasks={}'.format(
                self._stages[stage].get_tasks()))
            self._current_stage = stage

            if self._stages[stage].test_start_condition():
                self._logger.info(
                    'Stage #{} {} starting conditions fulfilled'.format(
                        idx, stage))
            else:
                self._logger.info(
                    'Stage #{} {} starting conditions failed'.format(
                        idx, stage))
                self.kill_all_tasks()
                self._is_running.clear()
                break

            self._logger.info('Stage #{} {} - running pre-stage '
                              'functions'.format(idx, stage))
            if not self._stages[stage].run_pre_stage_fcns():
                self._logger.info(
                    'Stage #{} {} - error while running pre-stage'
                    ' functions'.format(idx, stage))
                self.kill_all_tasks()
                self._is_running.clear()
                break

            if self.run_stage(stage, timeout):
                self._logger.info('Stage #{} {} started successfully'.format(
                    idx, stage))
            else:
                self.kill_all_tasks()
                self._is_running.clear()
                break

            if self._stages[stage].test_end_condition():
                self._logger.info(
                    'Stage #{} {} end conditions fulfilled'.format(idx, stage))
            else:
                self._logger.info('Stage #{} {} end conditions failed'.format(
                    idx, stage))
                self.kill_all_tasks()
                self._is_running.clear()
                break

            self._logger.info('Stage #{} {}  - running post-stage '
                              'functions'.format(idx, stage))
            if not self._stages[stage].run_post_stage_fcns():
                self._logger.info(
                    'Stage #{} {}  - error while running post-stage'
                    ' functions'.format(idx, stage))
                self.kill_all_tasks()
                self._is_running.clear()
                break
        else:
            # Reached only when no stage failed (loop completed, no break).
            self._logger.info('Stage <{}>: ALL TASKS RUNNING'.format(stage))
            self._is_running.set()
        self._logger.info('run_all_tasks: finished')

    def is_running(self):
        """Return True once run_all_tasks has brought every stage up."""
        running_event = self._is_running
        return running_event.is_set()

    def is_roscore_running(self, timeout=30):
        """Poll the ROS master until it answers or *timeout* seconds pass."""
        from . import is_roscore_running
        deadline = time() + timeout
        while time() < deadline:
            if is_roscore_running(self._ros_config.ros_master_uri):
                return True
        # One final check so timeout=0 still queries the master once.
        return is_roscore_running(self._ros_config.ros_master_uri)

    def is_gazebo_running(self, timeout=30):
        """Poll Gazebo until it answers or *timeout* seconds pass."""
        from . import is_gazebo_running
        deadline = time() + timeout
        while time() < deadline:
            if is_gazebo_running(self._ros_config.ros_master_uri):
                return True
        # One final check so timeout=0 still queries once.
        return is_gazebo_running(self._ros_config.ros_master_uri)

    def has_topics(self, topics, timeout=30):
        """Poll until every topic in *topics* is advertised on the master.

        @param topics: list of topic names (strings)
        @param timeout: polling window in seconds
        @return True when all topics appeared, False otherwise.
        """
        assert isinstance(topics, list), 'Topics input must be a list'
        for topic in topics:
            assert isinstance(topic, str), 'Topic name must be a string'

        deadline = time() + timeout
        found = False
        while time() < deadline:
            advertised = self.get_rostopic_list()
            if advertised is None:
                # Master unreachable: remember the failure and retry.
                found = False
                continue
            found = all(topic in advertised for topic in topics)
            if found:
                return True
        return found

    def set_rosparam(self, params):
        """Publish the *params* dict to the ROS parameter server.

        Returns False (after logging) when roscore is unreachable,
        True once the parameters have been sent.
        """
        from . import set_rosparam
        if not self.is_roscore_running(timeout=0):
            self._logger.error(
                'roscore is not running! Cannot publish parameter')
            return False
        assert isinstance(params, dict), 'Parameter structure must be a dict'
        set_rosparam(params, self._ros_config.ros_master_uri)
        return True

    def get_rostopic_list(self):
        """Return the topics advertised on the ROS master.

        Callers (e.g. has_topics) treat a None return as "master
        unreachable".
        """
        from . import get_rostopic_list
        return get_rostopic_list(self._ros_config.ros_master_uri)

    def has_services(self, services, timeout=30):
        """Poll until every service in *services* exists on the master.

        @param services: list of service names (strings)
        @param timeout: polling window in seconds, must be > 0
        @return True when all services appeared, False otherwise.

        Bug fix: the result flag is initialized before the loop; the
        original bound it only inside the loop body, raising
        UnboundLocalError at the final return if the body never ran.
        """
        from . import get_rosservice_list
        assert timeout > 0, 'Timeout must be greater than zero'
        assert isinstance(services, list), 'Services input must be a list'
        for serv in services:
            assert isinstance(serv, str), 'Service name must be a string'

        start_time = time()
        has_services = False
        while time() - start_time < timeout:
            current_serv = get_rosservice_list(self._ros_config.ros_master_uri)
            if current_serv is None:
                # Master unreachable: remember the failure and retry.
                has_services = False
                continue
            has_services = all(serv in current_serv for serv in services)
            if has_services:
                return True
        return has_services

    def has_param(self, params, timeout=30):
        """Poll until every parameter in *params* exists on the server.

        @param params: list of parameter names (strings)
        @param timeout: polling window in seconds, must be > 0
        @return True when all parameters appeared, False otherwise.

        Bug fix: the result flag is initialized before the loop; the
        original bound it only inside the loop body, raising
        UnboundLocalError at the final return if the body never ran.
        """
        from . import get_rosparam_list
        assert timeout > 0, 'Timeout must be greater than zero'
        assert isinstance(params, list), 'Parameters input must be a list'
        for param in params:
            assert isinstance(param, str), 'Parameter name must be a string'

        start_time = time()
        has_params = False
        while time() - start_time < timeout:
            current_params = get_rosparam_list(self._ros_config.ros_master_uri)
            if current_params is None:
                # Parameter server unreachable: remember and retry.
                has_params = False
                continue
            has_params = all(param in current_params for param in params)
            if has_params:
                return True
        return has_params

    def rosbags_exist(self, bags=None):
        """Return True when every recorded and every given rosbag exists.

        @param bags: optional extra list of rosbag file paths to check
            in addition to self._recording_filenames

        Bug fix: the original used the mutable default ``bags=list()``;
        a None default avoids sharing one list across all calls.
        """
        bags = [] if bags is None else bags
        for rosbag_file in self._recording_filenames:
            if not os.path.isfile(rosbag_file):
                return False
        for rosbag_file in bags:
            if not os.path.isfile(rosbag_file):
                return False
        return True

    def run_stage(self, stage, timeout=30):
        """Start every task of *stage* and wait until all of them run.

        For the 'roscore' stage, starts roscore and polls until the
        master answers. For other stages, runs each task and waits up
        to *timeout* seconds for all of them to report running.

        @return True on success, False on failure.
        """
        from . import is_roscore_running
        assert stage in self._stages, 'Invalid stage name'

        # Test if there are any tasks to run
        # If the stage is a roscore stage, a roscore task will be
        # created if not already available
        if len(self._stages[stage].get_tasks()) == 0 and stage != 'roscore':
            self._logger.info('No tasks for stage <{}>, '
                              'skipping...'.format(stage))
            return True

        if stage == 'roscore':
            self.run_roscore()
            start_time = time()
            while time() - start_time < timeout:
                if is_roscore_running(self._ros_config.ros_master_uri):
                    self._logger.info('roscore is running')
                    return True
                self._logger.info('Waiting for roscore')
                sleep(0.5)
            if not is_roscore_running(self._ros_config.ros_master_uri):
                self._logger.info('roscore is NOT running')
                return False
            # Bug fix: the original fell off the end here (returning None,
            # which callers treat as failure) when roscore only came up at
            # the final check above.
            self._logger.info('roscore is running')
            return True
        else:
            for task in self._stages[stage].get_tasks():
                self._logger.info('Starting task <{}>'.format(task))
                self._tasks[task].run()
            self._logger.info('Waiting for all tasks from stage {}'
                              ' to start...'.format(stage))
            start_time = time()
            while time() - start_time < timeout:
                running = [
                    self._tasks[name].is_running()
                    for name in self._stages[stage].get_tasks()
                ]
                if sum(running) == len(self._stages[stage].get_tasks()):
                    break
                self._logger.info('Waiting for tasks={}'.format(
                    self._stages[stage].get_tasks()))
                sleep(0.5)
            # Final check after the loop (or early break above).
            running = [
                self._tasks[name].is_running()
                for name in self._stages[stage].get_tasks()
            ]
            if sum(running) != len(self._stages[stage].get_tasks()):
                self._logger.error('Error! Not all tasks started. Killing '
                                   'remaining tasks')
                return False
            return True

    def get_tasks_from_stage(self, stage_name):
        """Return the task list of *stage_name*, or None when unknown."""
        stage = self._stages.get(stage_name)
        return None if stage is None else stage.get_tasks()

    def get_gazebo_proxy(self):
        """Create a GazeboProxy bound to this manager's ROS/Gazebo
        network configuration (30 s timeout)."""
        return GazeboProxy(ros_host=self.ros_config.ros_host,
                           ros_port=self.ros_config.ros_port,
                           gazebo_host=self.ros_config.gazebo_host,
                           gazebo_port=self.ros_config.gazebo_port,
                           timeout=30)

    def create_simulation_timer_task(self, simulation_timeout):
        """Initialize a task that stops the simulation after
        *simulation_timeout* seconds.

        Bug fix: uses deepcopy — the original shallow ``.copy()`` shared
        the nested 'params' dict with the module-level
        TASK_SIMULATION_TIMER template, so the assignment below mutated
        the template for every later call.
        """
        from copy import deepcopy
        task = deepcopy(TASK_SIMULATION_TIMER)
        task['params']['timeout'] = simulation_timeout
        self._logger.info('Preparing simulation timer task')
        self._logger.info('Simulation timeout={}'.format(simulation_timeout))
        return self.init_task(**task)

    def create_recording_task(self,
                              filename,
                              topics,
                              stage='pre-simulation',
                              process_timeout=None):
        """Initialize a rosbag-record task for *topics* into *filename*.

        Bug fixes:
        * deepcopy — the original shallow ``.copy()`` shared the nested
          'params' dict with the TASK_ROSBAG_RECORD_TOPICS template, so
          the assignments below mutated the template itself;
        * the original built ``'{} {}'`` placeholders and then discarded
          the ``str.format`` result, leaving literal braces in the task
          parameters; the topics are now actually joined.
        """
        assert isinstance(topics, list), 'Topics must be given as a list'
        for topic in topics:
            assert isinstance(topic, str), 'Each topic must be a string'

        from copy import deepcopy
        task = deepcopy(TASK_ROSBAG_RECORD_TOPICS)
        task['required'] = False
        task['process_timeout'] = process_timeout
        task['params']['filename'] = filename
        task['params']['topics'] = ' '.join(topics)

        if self.init_task(**task):
            self._recording_filenames.append(filename)
            return True
        else:
            return False

    def create_ros_core_task(self, process_timeout=None):
        """Initialize the roscore task (required per default).

        Bug fix: uses deepcopy — the original shallow ``.copy()`` shared
        the nested 'params' dict with the TASK_ROS_CORE template, so the
        port assignment below mutated the template.
        """
        if self.is_roscore_running(0):
            self._logger.warning('roscore is already running')
            return False
        from copy import deepcopy
        task = deepcopy(TASK_ROS_CORE)
        # roscore is required per default
        task['required'] = True
        task['process_timeout'] = process_timeout
        task['params']['port'] = self._ros_config.ros_port
        return self.init_task(**task)

    def create_gazebo_empty_world_task(self,
                                       required=False,
                                       process_timeout=None,
                                       simulation_timeout=None,
                                       gui=True,
                                       paused=False):
        """Initialize a Gazebo task that loads the empty world.

        Bug fix: uses deepcopy — the original shallow ``.copy()`` shared
        the nested 'params' dict with the TASK_GAZEBO_EMPTY_WORLD
        template, so the gui/paused assignments mutated the template.
        """
        assert isinstance(required, bool), 'Input flag required must be ' \
            'a boolean'
        from copy import deepcopy
        task = deepcopy(TASK_GAZEBO_EMPTY_WORLD)
        task['required'] = required
        task['process_timeout'] = process_timeout
        task['simulation_timeout'] = simulation_timeout
        task['params']['gui'] = gui
        task['params']['paused'] = paused
        return self.init_task(**task)

    def create_gazebo_task(self,
                           name='gazebo',
                           world='worlds/empty.world',
                           gui=True,
                           physics='ode',
                           paused=False,
                           required=False,
                           process_timeout=None,
                           simulation_timeout=None):
        """Initialize a Gazebo task for an arbitrary world file.

        Bug fix: uses deepcopy — the original shallow ``.copy()`` shared
        the nested 'params' dict with the TASK_GAZEBO_EMPTY_WORLD
        template, so every param assignment below mutated the template.
        """
        from copy import deepcopy

        world_file = world

        # Copy the empty world template
        task_description = deepcopy(TASK_GAZEBO_EMPTY_WORLD)
        task_description['name'] = name
        task_description['params']['world_name'] = world_file
        task_description['params']['gui'] = gui
        task_description['params']['physics'] = physics
        task_description['params']['paused'] = paused
        task_description['required'] = required
        task_description['process_timeout'] = process_timeout
        task_description['simulation_timeout'] = simulation_timeout
        return self.init_task(**task_description)

    def create_rviz_task(self, required=False, process_timeout=None):
        """Initialize an RViz task from the TASK_RVIZ template.

        Only top-level keys are overridden, so a shallow copy is safe.
        """
        task = TASK_RVIZ.copy()
        task.update(required=required, process_timeout=process_timeout)
        return self.init_task(**task)

    def create_rqt_task(self, required=False, process_timeout=None):
        """Initialize an rqt task from the TASK_RQT template.

        Only top-level keys are overridden, so a shallow copy is safe.
        """
        task = TASK_RQT.copy()
        task.update(required=required, process_timeout=process_timeout)
        return self.init_task(**task)

    def add_stage(self, name):
        """Register a new, empty stage named *name*.

        Returns False (after logging) when the stage already exists.
        A stage called 'last-tasks' is always kept at the end of the
        execution order.
        """
        if name in self._stages:
            self._logger.error('Stage <{}> already exists'.format(name))
            return False
        self._stages[name] = Stage(name)
        # Move last-tasks always to the end of the execution list
        # (move_to_end only exists on Python >= 3.2).
        if name == 'last-tasks' and sys.version_info >= (3, 2):
            self._stages.move_to_end(name)
        return True

    def init_task(self,
                  name,
                  command,
                  has_gazebo,
                  type=None,
                  params=None,
                  required=False,
                  process_timeout=None,
                  simulation_timeout=None,
                  stage=None):
        """Register a new task and assign it to an execution stage.

        Args:
            name: Unique task identifier.
            command: Command line the task executes.
            has_gazebo: True when the task starts a Gazebo instance.
            type: Optional task type forwarded to Task.
            params: Optional dict of task parameters (defaults to empty).
            required: When True the whole run ends once this task ends.
            process_timeout: Process timeout in seconds; derived from
                simulation_timeout (x100) or defaulted to 1e4 when omitted.
            simulation_timeout: Optional simulation-time timeout.
            stage: Stage name; None appends the task to 'last-tasks'.

        Returns:
            bool: True when the task was created, False on any conflict.
        """
        # BUGFIX: the parameter used to default to a shared, mutable dict()
        # instance (classic mutable-default-argument trap).
        if params is None:
            params = dict()
        self._logger.info('Initialize task, name={}, command={}'.format(
            name, command))
        if name in self._tasks:
            self._logger.error('Task {} already exists'.format(name))
            return False

        self._logger.info('Test if Gazebo is already running...')
        if has_gazebo and self.is_gazebo_running(0):
            self._logger.error('An instance of Gazebo is already running for'
                               ' network configuration=')
            self._logger.error(self._ros_config)
            return False

        if stage == 'roscore':
            self._logger.info('roscore stage')
            assert 'roscore' in command, 'For the roscore stage only the ' \
                'roscore command is allowed'
            # Only a single roscore task is ever allowed in this stage.
            if len(self._stages['roscore'].get_tasks()) == 0:
                self._stages['roscore'].add_task(name)
            else:
                self._logger.error('There is already a roscore task for the'
                                   ' roscore stage')
                return False
        elif stage in self._stages:
            self._logger.info('Stage <{}> already exists'.format(stage))
            if name in self._stages[stage].get_tasks():
                self._logger.error('Task <{}> already exists in stage '
                                   '<{}>'.format(name, stage))
                return False
            self._stages[stage].add_task(name)
        elif stage is None:
            # No stage provided: fall back to the catch-all last stage
            stage = 'last-tasks'
            if stage not in self._stages:
                if not self.add_stage(stage):
                    return False
            if name in self._stages[stage].get_tasks():
                self._logger.error('Task <{}> already exists in stage '
                                   '<{}>'.format(name, stage))
                return False

            self._logger.info('Adding <{}> to stage <{}>'.format(name, stage))
            self._stages[stage].add_task(name)
        else:
            # Adding a new stage
            self._logger.info('Creating <{}> stage'.format(stage))
            if not self.add_stage(stage):
                return False
            if name in self._stages[stage].get_tasks():
                self._logger.error('Task <{}> already exists in stage '
                                   '<{}>'.format(name, stage))
                return False
            # BUGFIX: format arguments were swapped (stage, name), producing
            # a log line like 'Adding <stage> to stage <task>'.
            self._logger.info('Adding <{}> to stage <{}>'.format(name, stage))

            self._stages[stage].add_task(name)

        self._logger.info('<{}> task added to <{}> stage'.format(name, stage))

        # Setting a minimal process timeout in case none was provided
        if simulation_timeout is not None and process_timeout is None:
            process_timeout = 100 * simulation_timeout
        elif process_timeout is None:
            process_timeout = 1e4

        self._tasks[name] = Task(
            task_name=name,
            command=command,
            params=params,
            config=self._ros_config,
            # BUGFIX: was hard-coded to True, silently ignoring the
            # has_gazebo argument for every task.
            has_gazebo=has_gazebo,
            type=type,
            required=required,
            process_timeout=process_timeout,
            task_killed_callback=self._task_terminated_callback,
            stage=stage,
            output_log_dir=self._log_dir,
            simulation_timeout=simulation_timeout)

        if simulation_timeout is not None and simulation_timeout > 0:
            if self.create_simulation_timer_task(simulation_timeout):
                self._logger.info('Simulation timer created')
            else:
                self._logger.warning('Could not create simulation timer')

        self._logger.info('Task <{}> created'.format(name))
        return True

    def run_roscore(self):
        """Ensure a roscore task exists and is running; block until it is up.

        Raises:
            RuntimeError: if roscore does not respond within 30 seconds.
        """
        if 'roscore' not in self._tasks:
            self._logger.info('Adding a roscore task')
            self.create_ros_core_task()
            self._logger.info('roscore task added')

        if 'roscore' in self._tasks:
            if not self._tasks['roscore'].is_running():
                self._tasks['roscore'].run()

        timeout = 30
        start_time = time()
        while time() - start_time < timeout:
            if self.is_roscore_running():
                break
            # BUGFIX: poll instead of busy-spinning a full CPU core while
            # waiting for roscore to come up.
            sleep(0.1)
        if not self.is_roscore_running():
            raise RuntimeError('roscore did not start')

    def run_task(self, task_name):
        """Run a single named task, starting roscore first if necessary.

        Unknown task names and 'roscore' itself are silently ignored
        (roscore is managed by run_roscore).
        """
        self._is_running.set()
        self.run_roscore()

        task = self._tasks.get(task_name)
        if task is not None and task_name != 'roscore':
            task.run()
            # Gazebo tasks are accompanied by the simulation timer, if any.
            if 'simulation_timer' in self._tasks and task.has_gazebo:
                self._tasks['simulation_timer'].run()
        self._is_running.clear()

    def run_all_tasks(self):
        """Run every registered task in a background daemon thread.

        Returns:
            bool: False when a previous run is still in progress, True once
            the worker thread has been started. (The original returned None
            on success while returning False on failure.)
        """
        if self._run_tasks_thread is not None:
            if self._run_tasks_thread.is_alive():
                self._logger.error('Run all tasks thread is still running')
                return False
            del self._run_tasks_thread
        self._run_tasks_thread = Thread(target=self._run_all_tasks)
        self._run_tasks_thread.daemon = True
        self._run_tasks_thread.start()
        return True

    def kill_task(self, task_name):
        """Kill the named task; unknown names are silently ignored."""
        task = self._tasks.get(task_name)
        if task is not None:
            task.kill()
            self._logger.info('Task <{}> killed'.format(task_name))

    def kill_all_tasks(self):
        """Kill every registered task."""
        for task in self._tasks.values():
            task.kill()

    def remove_task(self, task_name):
        """Kill and deregister a task; log when the name is unknown."""
        if task_name not in self._tasks:
            self._logger.info('No task with name {} found'.format(task_name))
            return
        # Kill before removing so the task is still registered if kill fails.
        self._tasks[task_name].kill()
        del self._tasks[task_name]

    def get_task_list(self):
        """Return the names of all registered tasks as a list."""
        return [*self._tasks]

    def is_task_running(self, task_name):
        """Return True when the named task exists and is currently running."""
        task = self._tasks.get(task_name)
        if task is None:
            self._logger.warning('Task <{}> does not exist'.format(task_name))
            return False
        return task.is_running()

    def clear_tasks_list(self):
        """Kill any still-running tasks and drop the whole task registry."""
        for task in self._tasks.values():
            if task.is_running():
                task.kill()

        self._tasks = dict()

    def add_stage_start_condition(self, stage, fcn):
        """Register callable *fcn* as a start condition for *stage*."""
        # NOTE(review): assert is stripped under `python -O`; these
        # validations would then silently disappear — consider raising.
        assert stage in self._stages, 'Invalid stage name'
        assert callable(fcn), 'Invalid function'
        self._logger.info('Adding start condition to stage {}, fcn={}'.format(
            stage, fcn))
        self._stages[stage].add_start_condition(fcn)

    def get_num_start_conditions(self, stage):
        """Return how many start conditions *stage* has."""
        assert stage in self._stages, 'Invalid stage name'
        return self._stages[stage].get_num_start_conditions()

    def add_stage_end_condition(self, stage, fcn):
        """Register callable *fcn* as an end condition for *stage*."""
        assert stage in self._stages, 'Invalid stage name'
        assert callable(fcn), 'Invalid function'
        self._logger.info('Adding end condition to stage {}, fcn={}'.format(
            stage, fcn))
        self._stages[stage].add_end_condition(fcn)

    def get_num_end_conditions(self, stage):
        """Return how many end conditions *stage* has."""
        assert stage in self._stages, 'Invalid stage name'
        return self._stages[stage].get_num_end_conditions()

    def add_pre_stage_fcn(self, stage, fcn):
        """Register callable *fcn* to run before *stage* starts."""
        assert stage in self._stages, 'Invalid stage name'
        assert callable(fcn), 'Invalid function'
        self._logger.info('Adding pre-stage function to stage {}, '
                          'fcn={}'.format(stage, fcn))
        self._stages[stage].add_pre_stage_fcn(fcn)

    def get_num_pre_stage_fcns(self, stage):
        """Return how many pre-stage functions *stage* has."""
        assert stage in self._stages, 'Invalid stage name'
        return self._stages[stage].get_num_pre_stage_fcns()

    def add_post_stage_fcn(self, stage, fcn):
        """Register callable *fcn* to run after *stage* finishes."""
        assert stage in self._stages, 'Invalid stage name'
        assert callable(fcn), 'Invalid function'
        self._logger.info('Adding post-stage function to stage {}, '
                          'fcn={}'.format(stage, fcn))
        self._stages[stage].add_post_stage_fcn(fcn)

    def get_num_post_stage_fcns(self, stage):
        """Return how many post-stage functions *stage* has."""
        assert stage in self._stages, 'Invalid stage name'
        return self._stages[stage].get_num_post_stage_fcns()

    def has_required_tasks(self):
        """Return True when at least one registered task is required.

        Uses any(), which short-circuits on the first required task; the
        original manual OR-accumulation loop always scanned every task.
        """
        return any(task.required for task in self._tasks.values())

    def have_all_tasks_ended(self):
        """Return True when no registered task is still running.

        Replaces the sum(...) == len(...) counting idiom with all(), which
        short-circuits on the first still-running task. Vacuously True when
        the registry is empty, matching the original behavior.
        """
        return all(not task.is_running() for task in self._tasks.values())

    def wait(self, task_name=None, timeout=30):
        """Block until tasks finish.

        With ``task_name=None``, joins the run-all-tasks worker thread,
        waits for required tasks (or all tasks) to end, then kills the
        remainder and releases the ROS/Gazebo ports. With a task name,
        waits only on that task. ``timeout`` bounds the initial wait for
        the running flag, not the overall wait.
        """
        self._logger.info('Wait for tasks for finish')
        if task_name is None:
            if self._run_tasks_thread is None:
                # NOTE(review): this message reads like the opposite of what
                # the branch means (the thread was NEVER created) — confirm
                # the intended wording.
                self._logger.info('Run all tasks thread was created')
                return

            # Wait until is_running is set
            self._is_running.wait(timeout)
            self._run_tasks_thread.join()
            del self._run_tasks_thread
            self._run_tasks_thread = None

            self._logger.info('Waiting for all tasks to finish')

            # First wait for all required tasks to finish
            if self.has_required_tasks():
                required_tasks_running = True
                while required_tasks_running:
                    # Poll each required task with a tiny per-task timeout;
                    # exit as soon as any required task has ended.
                    for name in self._tasks:
                        if self._tasks[name].required:
                            if self._tasks[name].wait(0.001):
                                self._logger.info('Required task <{}> was '
                                                  'terminated or has not '
                                                  'started!'.format(name))
                                required_tasks_running = False
                                break
                            # else:
                            # sleep(1)
            else:
                while not self.have_all_tasks_ended():
                    sleep(0.3)

            # Kill all tasks
            self.kill_all_tasks()
            self._logger.info('All tasks finished')
            self._ros_config.unlock_port(self._ros_config.ros_port)
            self._ros_config.unlock_port(self._ros_config.gazebo_port)
        elif task_name in self._tasks:
            # Wait until is_running is set
            self._is_running.wait(timeout)
            self._tasks[task_name].wait()
            self._logger.info('Task <{}> finished'.format(task_name))
            self._ros_config.unlock_port(self._ros_config.ros_port)
            self._ros_config.unlock_port(self._ros_config.gazebo_port)
        else:
            self._logger.error(
                'Task with name <{}> does not exist'.format(task_name))
Exemple #39
0
class MemCacheUnit(abc.ABC):
    """Memory Cache Unit.

    An LRU-style cache over an OrderedDict with an optional size budget.
    Subclasses define how the "size" of a stored value is measured by
    implementing ``_get_value_size``. A ``size_limit`` of 0 (the default)
    means the cache is unbounded.
    """
    def __init__(self, *args, **kwargs):
        self.size_limit = kwargs.pop("size_limit", 0)
        self._size = 0
        self.od = OrderedDict()

    def __setitem__(self, key, value):
        # TODO: thread safe?__setitem__ failure might cause inconsistent size?

        # Account for the incoming value (and any value it replaces) first.
        self._adjust_size(key, value)

        self.od[key] = value

        # Most-recently-used entries live at the right end of the dict.
        self.od.move_to_end(key)

        # Evict least-recently-used entries until we fit within the budget.
        while self.limited and self._size > self.size_limit:
            self.popitem(last=False)

    def __getitem__(self, key):
        # A read also counts as a "use" for LRU ordering.
        value = self.od[key]
        self.od.move_to_end(key)
        return value

    def __contains__(self, key):
        return key in self.od

    def __len__(self):
        return len(self.od)

    def __repr__(self):
        return f"{self.__class__.__name__}<size_limit:{self.size_limit if self.limited else 'no limit'} total_size:{self._size}>\n{self.od.__repr__()}"

    def set_limit_size(self, limit):
        """Replace the size budget (takes effect on the next insertion)."""
        self.size_limit = limit

    @property
    def limited(self):
        """True when a positive size limit is configured."""
        return self.size_limit > 0

    @property
    def total_size(self):
        """Current accumulated size of all cached values."""
        return self._size

    def clear(self):
        """Drop every entry and reset the size accounting."""
        self._size = 0
        self.od.clear()

    def popitem(self, last=True):
        """Remove and return a (key, value) pair, adjusting the size."""
        key, value = self.od.popitem(last=last)
        self._size -= self._get_value_size(value)

        return key, value

    def pop(self, key):
        """Remove *key* and return its value, adjusting the size."""
        value = self.od.pop(key)
        self._size -= self._get_value_size(value)

        return value

    def _adjust_size(self, key, value):
        # Replacing an existing key frees its old footprint first.
        if key in self.od:
            self._size -= self._get_value_size(self.od[key])

        self._size += self._get_value_size(value)

    @abc.abstractmethod
    def _get_value_size(self, value):
        """Return the size contribution of *value* (subclass-defined)."""
        raise NotImplementedError
Exemple #40
0

# Demo: sorting a plain dict into an OrderedDict and reordering entries.
# (The bare Russian strings below are the original author's inline notes;
# they are no-op expression statements and are kept verbatim.)
b = {'cat': 5, 'dog': 2, 'mouse': 4}
# "Sort by value"
"Произведём сортировку по значению"
new_b = OrderedDict(sorted(b.items(), key=lambda x: x[1]))

print(new_b)
# OrderedDict([('dog', 2), ('mouse', 4), ('cat', 5)])

# "Compare two dicts. For OrderedDict the order matters."
"Сравним два словаря. Для него важен порядок"
print(new_a == new_b)
# False


# "Move an element to the end"
"Перенесём в конец элемент"
new_b.move_to_end('mouse')

print(new_b)
# OrderedDict([('dog', 2), ('cat', 5), ('mouse', 4)])

# "Move an element to the front"
"Перенесём в начало элемент"
new_b.move_to_end('mouse', last=False)

print(new_b)
# OrderedDict([('mouse', 4), ('dog', 2), ('cat', 5)])


# "Remove the last element"
"Удалим последний элемент"
new_b.popitem()

print(new_b)
Exemple #41
0
class ConnectionState:
    def __init__(self, *, dispatch, chunker, handlers, syncer, http, loop,
                 **options):
        """Hold all per-connection session state.

        The callables (dispatch, chunker, syncer) and the HTTP client are
        injected by the owning client; ``options`` carries user settings
        such as max_messages, activity, and status.
        """
        self.loop = loop
        self.http = http
        # The message cache size is clamped to at least 100 entries.
        self.max_messages = max(options.get('max_messages', 5000), 100)
        self.dispatch = dispatch
        self.chunker = chunker
        self.syncer = syncer
        # Unknown until identified; set elsewhere after login.
        self.is_bot = None
        self.handlers = handlers
        self.shard_count = None
        self._ready_task = None
        self._fetch_offline = options.get('fetch_offline_members', True)
        self.heartbeat_timeout = options.get('heartbeat_timeout', 60.0)
        self._listeners = []

        activity = options.get('activity', None)
        if activity:
            if not isinstance(activity, _ActivityTag):
                raise TypeError(
                    'activity parameter must be one of Game, Streaming, or Activity.'
                )

            # Store the wire (dict) representation, not the rich object.
            activity = activity.to_dict()

        status = options.get('status', None)
        if status:
            if status is Status.offline:
                # The gateway expects 'invisible' rather than 'offline'.
                status = 'invisible'
            else:
                status = str(status)

        self._activity = activity
        self._status = status

        self.clear()

    def clear(self):
        """Reset every cache to its empty state (used on (re)connect)."""
        self.user = None
        # Weak values: user objects are dropped automatically once nothing
        # else (members, messages) references them.
        self._users = weakref.WeakValueDictionary()
        self._emojis = {}
        self._calls = {}
        self._guilds = {}
        self._voice_clients = {}

        # LRU of max size 128
        self._private_channels = OrderedDict()
        # extra dict to look up private channels by user id
        self._private_channels_by_user = {}
        self._messages = deque(maxlen=self.max_messages)

    def process_listeners(self, listener_type, argument, result):
        """Resolve waiting listeners of *listener_type* against *argument*.

        Each matching listener's predicate is evaluated; fulfilled,
        cancelled, or errored listeners are removed afterwards. A chunk
        listener stops the scan after its first match.
        """
        removed = []
        for i, listener in enumerate(self._listeners):
            if listener.type != listener_type:
                continue

            future = listener.future
            if future.cancelled():
                removed.append(i)
                continue

            try:
                passed = listener.predicate(argument)
            except Exception as exc:
                # A failing predicate rejects the waiter with that error.
                future.set_exception(exc)
                removed.append(i)
            else:
                if passed:
                    future.set_result(result)
                    removed.append(i)
                    if listener.type == ListenerType.chunk:
                        break

        # Delete from the back so earlier indices remain valid.
        for index in reversed(removed):
            del self._listeners[index]

    def call_handlers(self, key, *args, **kwargs):
        try:
            func = self.handlers[key]
        except KeyError:
            pass
        else:
            func(*args, **kwargs)

    @property
    def self_id(self):
        u = self.user
        return u.id if u else None

    @property
    def voice_clients(self):
        return list(self._voice_clients.values())

    def _get_voice_client(self, guild_id):
        return self._voice_clients.get(guild_id)

    def _add_voice_client(self, guild_id, voice):
        self._voice_clients[guild_id] = voice

    def _remove_voice_client(self, guild_id):
        self._voice_clients.pop(guild_id, None)

    def _update_references(self, ws):
        for vc in self.voice_clients:
            vc.main_ws = ws

    def store_user(self, data):
        """Return the cached User for payload *data*, creating it if needed."""
        # this way is 300% faster than `dict.setdefault`.
        user_id = int(data['id'])
        try:
            return self._users[user_id]
        except KeyError:
            user = User(state=self, data=data)
            # Discriminator '0000' marks webhook pseudo-users; those are
            # returned but never cached.
            if user.discriminator != '0000':
                self._users[user_id] = user
            return user

    def get_user(self, id):
        return self._users.get(id)

    def store_emoji(self, guild, data):
        """Create an Emoji from payload *data*, cache it by ID, return it."""
        emoji_id = int(data['id'])
        self._emojis[emoji_id] = emoji = Emoji(guild=guild,
                                               state=self,
                                               data=data)
        return emoji

    @property
    def guilds(self):
        return list(self._guilds.values())

    def _get_guild(self, guild_id):
        return self._guilds.get(guild_id)

    def _add_guild(self, guild):
        self._guilds[guild.id] = guild

    def _remove_guild(self, guild):
        self._guilds.pop(guild.id, None)

        for emoji in guild.emojis:
            self._emojis.pop(emoji.id, None)

        del guild

    @property
    def emojis(self):
        return list(self._emojis.values())

    def get_emoji(self, emoji_id):
        return self._emojis.get(emoji_id)

    @property
    def private_channels(self):
        return list(self._private_channels.values())

    def _get_private_channel(self, channel_id):
        try:
            value = self._private_channels[channel_id]
        except KeyError:
            return None
        else:
            self._private_channels.move_to_end(channel_id)
            return value

    def _get_private_channel_by_user(self, user_id):
        return self._private_channels_by_user.get(user_id)

    def _add_private_channel(self, channel):
        """Cache a private channel, evicting LRU entries beyond 128 (bots)."""
        channel_id = channel.id
        self._private_channels[channel_id] = channel

        # Bots can accumulate unbounded DMs; cap the LRU cache at 128 and
        # drop the oldest entry (and its by-user index) when exceeded.
        if self.is_bot and len(self._private_channels) > 128:
            _, to_remove = self._private_channels.popitem(last=False)
            if isinstance(to_remove, DMChannel):
                self._private_channels_by_user.pop(to_remove.recipient.id,
                                                   None)

        # DM channels are additionally indexed by recipient user ID.
        if isinstance(channel, DMChannel):
            self._private_channels_by_user[channel.recipient.id] = channel

    def add_dm_channel(self, data):
        """Create a DMChannel from payload *data*, cache it, and return it."""
        channel = DMChannel(me=self.user, state=self, data=data)
        self._add_private_channel(channel)
        return channel

    def _remove_private_channel(self, channel):
        """Drop a private channel from both lookup caches."""
        self._private_channels.pop(channel.id, None)
        if isinstance(channel, DMChannel):
            self._private_channels_by_user.pop(channel.recipient.id, None)

    def _get_message(self, msg_id):
        """Find a cached message by ID (scanning newest first), or None."""
        return utils.find(lambda m: m.id == msg_id, reversed(self._messages))

    def _add_guild_from_data(self, guild):
        """Build a Guild from a payload dict, cache it, and return it."""
        guild = Guild(data=guild, state=self)
        self._add_guild(guild)
        return guild

    def chunks_needed(self, guild):
        """Yield one chunk receipt per 1000 members of *guild*."""
        for _ in range(math.ceil(guild._member_count / 1000)):
            yield self.receive_chunk(guild.id)

    def _get_guild_channel(self, data):
        """Resolve (channel, guild) for an event payload.

        Payloads without 'guild_id' (DMs) fall back to the global channel
        lookup with guild=None.
        """
        try:
            guild = self._get_guild(int(data['guild_id']))
        except KeyError:
            channel = self.get_channel(int(data['channel_id']))
            guild = None
        else:
            # `guild and ...` keeps channel falsy when the guild is unknown.
            channel = guild and guild.get_channel(int(data['channel_id']))

        return channel, guild

    async def request_offline_members(self, guilds):
        """Request offline member chunks for *guilds* and await them all."""
        # get all the chunks
        chunks = []
        for guild in guilds:
            chunks.extend(self.chunks_needed(guild))

        # we only want to request ~75 guilds per chunk request.
        splits = [guilds[i:i + 75] for i in range(0, len(guilds), 75)]
        for split in splits:
            await self.chunker(split)

        # wait for the chunks
        if chunks:
            try:
                # 30 seconds of grace per expected chunk.
                await utils.sane_wait_for(chunks,
                                          timeout=len(chunks) * 30.0,
                                          loop=self.loop)
            except asyncio.TimeoutError:
                log.info('Somehow timed out waiting for chunks.')

    async def _delay_ready(self):
        """Defer the 'ready' dispatch until initial guild data has settled."""
        try:
            launch = self._ready_state.launch

            # only real bots wait for GUILD_CREATE streaming
            if self.is_bot:
                while not launch.is_set():
                    # this snippet of code is basically waiting 2 seconds
                    # until the last GUILD_CREATE was sent
                    # (GUILD_CREATE handlers clear the event elsewhere,
                    # which keeps this loop alive while guilds stream in)
                    launch.set()
                    await asyncio.sleep(2, loop=self.loop)

            # First column of the (guild, unavailable) pairs collected
            # during READY; empty when there were none.
            guilds = next(zip(*self._ready_state.guilds), [])
            if self._fetch_offline:
                await self.request_offline_members(guilds)

            for guild, unavailable in self._ready_state.guilds:
                if unavailable is False:
                    self.dispatch('guild_available', guild)
                else:
                    self.dispatch('guild_join', guild)

            # remove the state
            try:
                del self._ready_state
            except AttributeError:
                pass  # already been deleted somehow

            # call GUILD_SYNC after we're done chunking
            if not self.is_bot:
                log.info('Requesting GUILD_SYNC for %s guilds',
                         len(self.guilds))
                await self.syncer([s.id for s in self.guilds])
        except asyncio.CancelledError:
            pass
        else:
            # dispatch the event
            self.call_handlers('ready')
            self.dispatch('ready')
        finally:
            self._ready_task = None

    def parse_ready(self, data):
        """Handle the READY payload: rebuild all caches from scratch."""
        # A second READY supersedes any in-flight delayed-ready task.
        if self._ready_task is not None:
            self._ready_task.cancel()

        self._ready_state = ReadyState(launch=asyncio.Event(), guilds=[])
        self.clear()
        self.user = ClientUser(state=self, data=data['user'])

        guilds = self._ready_state.guilds
        for guild_data in data['guilds']:
            guild = self._add_guild_from_data(guild_data)
            # Track guilds that still need member data or availability info.
            if (not self.is_bot and not guild.unavailable) or guild.large:
                guilds.append((guild, guild.unavailable))

        for relationship in data.get('relationships', []):
            try:
                r_id = int(relationship['id'])
            except KeyError:
                continue
            else:
                self.user._relationships[r_id] = Relationship(
                    state=self, data=relationship)

        for pm in data.get('private_channels', []):
            factory, _ = _channel_factory(pm['type'])
            self._add_private_channel(
                factory(me=self.user, data=pm, state=self))

        self.dispatch('connect')
        # Finish the ready sequence (chunking etc.) asynchronously.
        self._ready_task = asyncio.ensure_future(self._delay_ready(),
                                                 loop=self.loop)

    def parse_resumed(self, data):
        """Handle RESUMED: simply notify listeners."""
        self.dispatch('resumed')

    def parse_message_create(self, data):
        """Handle MESSAGE_CREATE: build, dispatch, and cache the message."""
        channel, _ = self._get_guild_channel(data)
        message = Message(channel=channel, data=data, state=self)
        self.dispatch('message', message)
        self._messages.append(message)
        # Channel types 0 (text) and 5 (news) track their last message ID.
        if channel and channel._type in (0, 5):
            channel.last_message_id = message.id

    def parse_message_delete(self, data):
        """Handle MESSAGE_DELETE: raw event always, rich event if cached."""
        raw = RawMessageDeleteEvent(data)
        found = self._get_message(raw.message_id)
        raw.cached_message = found
        self.dispatch('raw_message_delete', raw)
        if found is not None:
            self.dispatch('message_delete', found)
            self._messages.remove(found)

    def parse_message_delete_bulk(self, data):
        """Handle MESSAGE_DELETE_BULK analogously to single deletes."""
        raw = RawBulkMessageDeleteEvent(data)
        found_messages = [
            message for message in self._messages
            if message.id in raw.message_ids
        ]
        raw.cached_messages = found_messages
        self.dispatch('raw_bulk_message_delete', raw)
        if found_messages:
            self.dispatch('bulk_message_delete', found_messages)
            for msg in found_messages:
                self._messages.remove(msg)

    def parse_message_update(self, data):
        """Handle MESSAGE_UPDATE; rich edit event only for cached messages."""
        raw = RawMessageUpdateEvent(data)
        self.dispatch('raw_message_edit', raw)
        message = self._get_message(raw.message_id)
        if message is not None:
            # Keep a pre-edit copy so listeners can diff old vs new.
            older_message = copy.copy(message)
            if 'call' in data:
                # call state message edit
                message._handle_call(data['call'])
            elif 'content' not in data:
                # embed only edit
                message.embeds = [Embed.from_dict(d) for d in data['embeds']]
            else:
                message._update(channel=message.channel, data=data)

            self.dispatch('message_edit', older_message, message)

    def parse_message_reaction_add(self, data):
        """Handle MESSAGE_REACTION_ADD: raw always, rich when cached."""
        emoji_data = data['emoji']
        emoji_id = utils._get_as_snowflake(emoji_data, 'id')
        emoji = PartialEmoji.with_state(self,
                                        animated=emoji_data['animated'],
                                        id=emoji_id,
                                        name=emoji_data['name'])
        raw = RawReactionActionEvent(data, emoji)
        self.dispatch('raw_reaction_add', raw)

        # rich interface here
        message = self._get_message(raw.message_id)
        if message is not None:
            emoji = self._upgrade_partial_emoji(emoji)
            reaction = message._add_reaction(data, emoji, raw.user_id)
            user = self._get_reaction_user(message.channel, raw.user_id)
            if user:
                self.dispatch('reaction_add', reaction, user)

    def parse_message_reaction_remove_all(self, data):
        """Handle MESSAGE_REACTION_REMOVE_ALL for a message."""
        raw = RawReactionClearEvent(data)
        self.dispatch('raw_reaction_clear', raw)

        message = self._get_message(raw.message_id)
        if message is not None:
            # Keep the old reaction list for the rich event's listeners.
            old_reactions = message.reactions.copy()
            message.reactions.clear()
            self.dispatch('reaction_clear', message, old_reactions)

    def parse_message_reaction_remove(self, data):
        """Handle MESSAGE_REACTION_REMOVE: raw always, rich when cached."""
        emoji_data = data['emoji']
        emoji_id = utils._get_as_snowflake(emoji_data, 'id')
        emoji = PartialEmoji.with_state(self,
                                        animated=emoji_data['animated'],
                                        id=emoji_id,
                                        name=emoji_data['name'])
        raw = RawReactionActionEvent(data, emoji)
        self.dispatch('raw_reaction_remove', raw)

        message = self._get_message(raw.message_id)
        if message is not None:
            emoji = self._upgrade_partial_emoji(emoji)
            try:
                reaction = message._remove_reaction(data, emoji, raw.user_id)
            except (AttributeError, ValueError):  # eventual consistency lol
                pass
            else:
                user = self._get_reaction_user(message.channel, raw.user_id)
                if user:
                    self.dispatch('reaction_remove', reaction, user)

    def parse_presence_update(self, data):
        """Handle PRESENCE_UPDATE: update or create the guild member."""
        guild_id = utils._get_as_snowflake(data, 'guild_id')
        guild = self._get_guild(guild_id)
        if guild is None:
            log.warning(
                'PRESENCE_UPDATE referencing an unknown guild ID: %s. Discarding.',
                guild_id)
            return

        user = data['user']
        member_id = int(user['id'])
        member = guild.get_member(member_id)
        if member is None:
            if 'username' not in user:
                # sometimes we receive 'incomplete' member data post-removal.
                # skip these useless cases.
                return

            member, old_member = Member._from_presence_update(guild=guild,
                                                              data=data,
                                                              state=self)
            guild._add_member(member)
        else:
            # Keep a pre-update copy so listeners can diff old vs new.
            old_member = Member._copy(member)
            user_update = member._presence_update(data=data, user=user)
            if user_update:
                self.dispatch('user_update', user_update[0], user_update[1])

        self.dispatch('member_update', old_member, member)

    def parse_user_update(self, data):
        """Handle USER_UPDATE: replace the cached ClientUser."""
        self.user = ClientUser(state=self, data=data)

    def parse_channel_delete(self, data):
        guild = self._get_guild(utils._get_as_snowflake(data, 'guild_id'))
        channel_id = int(data['id'])
        if guild is not None:
            channel = guild.get_channel(channel_id)
            if channel is not None:
                guild._remove_channel(channel)
                self.dispatch('guild_channel_delete', channel)
        else:
            # the reason we're doing this is so it's also removed from the
            # private channel by user cache as well
            channel = self._get_private_channel(channel_id)
            if channel is not None:
                self._remove_private_channel(channel)
                self.dispatch('private_channel_delete', channel)

    def parse_channel_update(self, data):
        channel_type = try_enum(ChannelType, data.get('type'))
        channel_id = int(data['id'])
        if channel_type is ChannelType.group:
            channel = self._get_private_channel(channel_id)
            old_channel = copy.copy(channel)
            channel._update_group(data)
            self.dispatch('private_channel_update', old_channel, channel)
            return

        guild_id = utils._get_as_snowflake(data, 'guild_id')
        guild = self._get_guild(guild_id)
        if guild is not None:
            channel = guild.get_channel(channel_id)
            if channel is not None:
                old_channel = copy.copy(channel)
                channel._update(guild, data)
                self.dispatch('guild_channel_update', old_channel, channel)
            else:
                log.warning(
                    'CHANNEL_UPDATE referencing an unknown channel ID: %s. Discarding.',
                    channel_id)
        else:
            log.warning(
                'CHANNEL_UPDATE referencing an unknown guild ID: %s. Discarding.',
                guild_id)

    def parse_channel_create(self, data):
        """Handle the CHANNEL_CREATE gateway event.

        Builds the appropriate channel class from the raw payload, registers
        it in the private-channel or guild cache, and dispatches
        ``private_channel_create`` / ``guild_channel_create``.
        """
        factory, ch_type = _channel_factory(data['type'])
        if factory is None:
            log.warning(
                'CHANNEL_CREATE referencing an unknown channel type %s. Discarding.',
                data['type'])
            return

        channel = None

        if ch_type in (ChannelType.group, ChannelType.private):
            channel_id = int(data['id'])
            # Only register a DM/group once; duplicate payloads are ignored.
            if self._get_private_channel(channel_id) is None:
                channel = factory(me=self.user, data=data, state=self)
                self._add_private_channel(channel)
                self.dispatch('private_channel_create', channel)
        else:
            guild_id = utils._get_as_snowflake(data, 'guild_id')
            guild = self._get_guild(guild_id)
            if guild is not None:
                channel = factory(guild=guild, state=self, data=data)
                guild._add_channel(channel)
                self.dispatch('guild_channel_create', channel)
            else:
                log.warning(
                    'CHANNEL_CREATE referencing an unknown guild ID: %s. Discarding.',
                    guild_id)
                return

    def parse_channel_pins_update(self, data):
        """Handle the CHANNEL_PINS_UPDATE gateway event.

        Dispatches ``private_channel_pins_update`` or
        ``guild_channel_pins_update`` with the channel and the timestamp of
        the most recent pin (``None`` when the field is absent or null).
        """
        channel_id = int(data['channel_id'])
        channel = self.get_channel(channel_id)
        if channel is None:
            log.warning(
                'CHANNEL_PINS_UPDATE referencing an unknown channel ID: %s. Discarding.',
                channel_id)
            return

        # Use .get(): the gateway may omit last_pin_timestamp entirely (e.g.
        # when the last pin was removed); the old data['last_pin_timestamp']
        # lookup raised KeyError there, and also read the dict twice.
        raw_ts = data.get('last_pin_timestamp')
        last_pin = utils.parse_time(raw_ts) if raw_ts else None

        try:
            # I have not imported discord.abc in this file
            # the isinstance check is also 2x slower than just checking this attribute
            # so we're just gonna check it since it's easier and faster and lazier
            channel.guild
        except AttributeError:
            self.dispatch('private_channel_pins_update', channel, last_pin)
        else:
            self.dispatch('guild_channel_pins_update', channel, last_pin)

    def parse_channel_recipient_add(self, data):
        """Handle CHANNEL_RECIPIENT_ADD: a user joined a group DM."""
        channel = self._get_private_channel(int(data['channel_id']))
        user = self.store_user(data['user'])
        # NOTE(review): assumes the group channel is cached; a None result
        # from _get_private_channel would raise AttributeError — confirm.
        channel.recipients.append(user)
        self.dispatch('group_join', channel, user)

    def parse_channel_recipient_remove(self, data):
        """Handle CHANNEL_RECIPIENT_REMOVE: a user left a group DM."""
        channel = self._get_private_channel(int(data['channel_id']))
        user = self.store_user(data['user'])
        # Only dispatch when the user was actually in the recipient list.
        if user in channel.recipients:
            channel.recipients.remove(user)
            self.dispatch('group_remove', channel, user)

    def parse_guild_member_add(self, data):
        """Handle GUILD_MEMBER_ADD: cache the new member and fire member_join."""
        guild = self._get_guild(int(data['guild_id']))
        if guild is None:
            log.warning(
                'GUILD_MEMBER_ADD referencing an unknown guild ID: %s. Discarding.',
                data['guild_id'])
            return

        new_member = Member(guild=guild, data=data, state=self)
        guild._add_member(new_member)
        guild._member_count += 1
        self.dispatch('member_join', new_member)

    def parse_guild_member_remove(self, data):
        """Handle GUILD_MEMBER_REMOVE: evict the member and fire member_remove."""
        guild = self._get_guild(int(data['guild_id']))
        if guild is None:
            log.warning(
                'GUILD_MEMBER_REMOVE referencing an unknown guild ID: %s. Discarding.',
                data['guild_id'])
            return

        member = guild.get_member(int(data['user']['id']))
        if member is None:
            # Unknown member: nothing cached to remove, no event to fire.
            return
        guild._remove_member(member)
        guild._member_count -= 1
        self.dispatch('member_remove', member)

    def parse_guild_member_update(self, data):
        """Handle GUILD_MEMBER_UPDATE: refresh a cached member and dispatch."""
        guild = self._get_guild(int(data['guild_id']))
        user_id = int(data['user']['id'])
        if guild is None:
            log.warning(
                'GUILD_MEMBER_UPDATE referencing an unknown guild ID: %s. Discarding.',
                data['guild_id'])
            return

        member = guild.get_member(user_id)
        if member is None:
            log.warning(
                'GUILD_MEMBER_UPDATE referencing an unknown member ID: %s. Discarding.',
                user_id)
            return

        # Snapshot the pre-update state for the event's "before" argument.
        snapshot = copy.copy(member)
        member._update(data)
        self.dispatch('member_update', snapshot, member)

    def parse_guild_emojis_update(self, data):
        """Handle GUILD_EMOJIS_UPDATE: rebuild the guild's emoji tuple."""
        guild = self._get_guild(int(data['guild_id']))
        if guild is None:
            log.warning(
                'GUILD_EMOJIS_UPDATE referencing an unknown guild ID: %s. Discarding.',
                data['guild_id'])
            return

        # Drop the stale emojis from the global cache before re-storing.
        before_emojis = guild.emojis
        for stale in before_emojis:
            self._emojis.pop(stale.id, None)
        guild.emojis = tuple(
            self.store_emoji(guild, raw) for raw in data['emojis'])
        self.dispatch('guild_emojis_update', guild, before_emojis,
                      guild.emojis)

    def _get_create_guild(self, data):
        """Return the guild object for a GUILD_CREATE payload.

        ``unavailable == False`` usually means a cached guild came back
        online; in that case refresh the cached object in place instead of
        creating a new one.
        """
        if data.get('unavailable') is False:
            cached = self._get_guild(int(data['id']))
            if cached is not None:
                cached.unavailable = False
                cached._from_data(data)
                return cached

        return self._add_guild_from_data(data)

    async def _chunk_and_dispatch(self, guild, unavailable):
        """Request member chunks for *guild*, await them, then dispatch.

        Dispatches ``guild_available`` when the guild transitioned from
        unavailable, otherwise ``guild_join``.
        """
        chunks = list(self.chunks_needed(guild))
        await self.chunker(guild)
        if chunks:
            try:
                # One second per expected chunk is used as the overall timeout.
                await utils.sane_wait_for(chunks,
                                          timeout=len(chunks),
                                          loop=self.loop)
            except asyncio.TimeoutError:
                log.info('Somehow timed out waiting for chunks.')

        if unavailable is False:
            self.dispatch('guild_available', guild)
        else:
            self.dispatch('guild_join', guild)

    def parse_guild_create(self, data):
        """Handle the GUILD_CREATE gateway event.

        Ignores ``unavailable == True`` payloads (the guild will be sent
        again once it is actually available).  Large guilds may defer their
        guild_join/guild_available dispatch until member chunking finishes.
        """
        unavailable = data.get('unavailable')
        if unavailable is True:
            # joined a guild with unavailable == True so..
            return

        guild = self._get_create_guild(data)

        # check if it requires chunking
        if guild.large:
            if unavailable is False:
                # check if we're waiting for 'useful' READY
                # and if we are, we don't want to dispatch any
                # event such as guild_join or guild_available
                # because we're still in the 'READY' phase. Or
                # so we say.
                try:
                    state = self._ready_state
                    state.launch.clear()
                    state.guilds.append((guild, unavailable))
                except AttributeError:
                    # the _ready_state attribute is only there during
                    # processing of useful READY.
                    pass
                else:
                    return

            # since we're not waiting for 'useful' READY we'll just
            # do the chunk request here if wanted
            if self._fetch_offline:
                asyncio.ensure_future(self._chunk_and_dispatch(
                    guild, unavailable),
                                      loop=self.loop)
                return

        # Dispatch available if newly available
        if unavailable is False:
            self.dispatch('guild_available', guild)
        else:
            self.dispatch('guild_join', guild)

    def parse_guild_sync(self, data):
        """Handle GUILD_SYNC: forward the payload to the cached guild."""
        guild_id = int(data['id'])
        self._get_guild(guild_id)._sync(data)

    def parse_guild_update(self, data):
        """Handle GUILD_UPDATE: apply new guild data and fire guild_update."""
        guild = self._get_guild(int(data['id']))
        if guild is None:
            log.warning(
                'GUILD_UPDATE referencing an unknown guild ID: %s. Discarding.',
                data['id'])
            return

        # Snapshot the old state for the event's "before" argument.
        snapshot = copy.copy(guild)
        guild._from_data(data)
        self.dispatch('guild_update', snapshot, guild)

    def parse_guild_delete(self, data):
        """Handle the GUILD_DELETE gateway event.

        An ``unavailable: true`` payload means a known guild went down
        (dispatch ``guild_unavailable``); otherwise we actually left the
        guild, so its cached messages are purged and ``guild_remove`` fires.
        """
        guild = self._get_guild(int(data['id']))
        if guild is None:
            log.warning(
                'GUILD_DELETE referencing an unknown guild ID: %s. Discarding.',
                data['id'])
            return

        # Fixed: dropped the redundant `and guild is not None` — the guard
        # above already returned for unknown guilds.
        if data.get('unavailable', False):
            # GUILD_DELETE with unavailable being True means that the
            # guild that was available is now currently unavailable
            guild.unavailable = True
            self.dispatch('guild_unavailable', guild)
            return

        # do a cleanup of the messages cache
        self._messages = deque(
            (msg for msg in self._messages if msg.guild != guild),
            maxlen=self.max_messages)

        self._remove_guild(guild)
        self.dispatch('guild_remove', guild)

    def parse_guild_ban_add(self, data):
        """Handle GUILD_BAN_ADD: dispatch member_ban for logging purposes."""
        # GUILD_BAN_ADD arrives before GUILD_MEMBER_REMOVE, so nothing is
        # evicted from the cache here; the event exists mostly so users can
        # listen for bans.
        guild = self._get_guild(int(data['guild_id']))
        if guild is None:
            return
        try:
            user = User(data=data['user'], state=self)
        except KeyError:
            # Payload without a 'user' field: nothing to report.
            return
        banned = guild.get_member(user.id) or user
        self.dispatch('member_ban', guild, banned)

    def parse_guild_ban_remove(self, data):
        """Handle GUILD_BAN_REMOVE: dispatch member_unban for a known guild."""
        guild = self._get_guild(int(data['guild_id']))
        if guild is None or 'user' not in data:
            return
        self.dispatch('member_unban', guild, self.store_user(data['user']))

    def parse_guild_role_create(self, data):
        """Handle GUILD_ROLE_CREATE: cache the role and fire guild_role_create."""
        guild = self._get_guild(int(data['guild_id']))
        if guild is None:
            log.warning(
                'GUILD_ROLE_CREATE referencing an unknown guild ID: %s. Discarding.',
                data['guild_id'])
            return

        new_role = Role(guild=guild, data=data['role'], state=self)
        guild._add_role(new_role)
        self.dispatch('guild_role_create', new_role)

    def parse_guild_role_delete(self, data):
        """Handle GUILD_ROLE_DELETE: drop the role and fire guild_role_delete."""
        guild = self._get_guild(int(data['guild_id']))
        if guild is None:
            log.warning(
                'GUILD_ROLE_DELETE referencing an unknown guild ID: %s. Discarding.',
                data['guild_id'])
            return

        try:
            removed = guild._remove_role(int(data['role_id']))
        except KeyError:
            # Role was not cached; nothing to dispatch.
            return
        self.dispatch('guild_role_delete', removed)

    def parse_guild_role_update(self, data):
        """Handle GUILD_ROLE_UPDATE: apply role data and fire guild_role_update."""
        guild = self._get_guild(int(data['guild_id']))
        if guild is None:
            log.warning(
                'GUILD_ROLE_UPDATE referencing an unknown guild ID: %s. Discarding.',
                data['guild_id'])
            return

        role_data = data['role']
        role = guild.get_role(int(role_data['id']))
        if role is None:
            # Unknown role: silently ignored, matching prior behavior.
            return
        snapshot = copy.copy(role)
        role._update(role_data)
        self.dispatch('guild_role_update', snapshot, role)

    def parse_guild_members_chunk(self, data):
        """Handle GUILD_MEMBERS_CHUNK: merge a batch of members into the cache."""
        guild_id = int(data['guild_id'])
        guild = self._get_guild(guild_id)
        # NOTE(review): assumes the guild is cached; Member(guild=None, ...)
        # below would fail for an unknown guild ID — confirm upstream ordering
        # guarantees the guild exists before chunks arrive.
        members = data.get('members', [])
        for member in members:
            m = Member(guild=guild, data=member, state=self)
            # Keep an existing cached member unless it lacks join metadata.
            existing = guild.get_member(m.id)
            if existing is None or existing.joined_at is None:
                guild._add_member(m)

        log.info('Processed a chunk for %s members in guild ID %s.',
                 len(members), guild_id)
        self.process_listeners(ListenerType.chunk, guild, len(members))

    def parse_guild_integrations_update(self, data):
        """Handle GUILD_INTEGRATIONS_UPDATE: notify listeners for a known guild."""
        guild = self._get_guild(int(data['guild_id']))
        if guild is None:
            log.warning(
                'GUILD_INTEGRATIONS_UPDATE referencing an unknown guild ID: %s. Discarding.',
                data['guild_id'])
            return
        self.dispatch('guild_integrations_update', guild)

    def parse_webhooks_update(self, data):
        """Handle WEBHOOKS_UPDATE: notify listeners of a channel's webhook change."""
        channel = self.get_channel(int(data['channel_id']))
        if channel is None:
            log.warning(
                'WEBHOOKS_UPDATE referencing an unknown channel ID: %s. Discarding.',
                data['channel_id'])
            return
        self.dispatch('webhooks_update', channel)

    def parse_voice_state_update(self, data):
        """Handle VOICE_STATE_UPDATE for both guild channels and private calls."""
        guild = self._get_guild(utils._get_as_snowflake(data, 'guild_id'))
        channel_id = utils._get_as_snowflake(data, 'channel_id')
        if guild is not None:
            # Keep our own voice client pointed at whatever channel we were
            # moved to before updating the member's voice state.
            if int(data['user_id']) == self.user.id:
                voice = self._get_voice_client(guild.id)
                if voice is not None:
                    ch = guild.get_channel(channel_id)
                    if ch is not None:
                        voice.channel = ch

            member, before, after = guild._update_voice_state(data, channel_id)
            if member is not None:
                self.dispatch('voice_state_update', member, before, after)
            else:
                log.warning(
                    'VOICE_STATE_UPDATE referencing an unknown member ID: %s. Discarding.',
                    data['user_id'])
        else:
            # in here we're either at private or group calls
            call = self._calls.get(channel_id)
            if call is not None:
                call._update_voice_state(data)

    def parse_voice_server_update(self, data):
        """Handle VOICE_SERVER_UPDATE: start the voice socket handshake."""
        # Guild voice uses the guild ID as the client key; private/group
        # calls fall back to the channel ID.
        if 'guild_id' in data:
            key_id = int(data['guild_id'])
        else:
            key_id = int(data['channel_id'])

        vc = self._get_voice_client(key_id)
        if vc is not None:
            asyncio.ensure_future(vc._create_socket(key_id, data))

    def parse_typing_start(self, data):
        """Handle TYPING_START: resolve the typing user and dispatch ``typing``."""
        channel, guild = self._get_guild_channel(data)
        if channel is not None:
            member = None
            user_id = utils._get_as_snowflake(data, 'user_id')
            # Resolve the typist differently per channel flavor.
            if isinstance(channel, DMChannel):
                member = channel.recipient
            elif isinstance(channel, TextChannel) and guild is not None:
                member = guild.get_member(user_id)
            elif isinstance(channel, GroupChannel):
                member = utils.find(lambda x: x.id == user_id,
                                    channel.recipients)

            if member is not None:
                # NOTE(review): assumes 'timestamp' is present (epoch seconds);
                # utcfromtimestamp(None) would raise — confirm payload contract.
                timestamp = datetime.datetime.utcfromtimestamp(
                    data.get('timestamp'))
                self.dispatch('typing', channel, member, timestamp)

    def parse_relationship_add(self, data):
        """Handle RELATIONSHIP_ADD: store the relationship and dispatch."""
        key = int(data['id'])
        previous = self.user.get_relationship(key)
        current = Relationship(state=self, data=data)
        self.user._relationships[key] = current
        # An existing entry means this is an update rather than an add.
        if previous is None:
            self.dispatch('relationship_add', current)
        else:
            self.dispatch('relationship_update', previous, current)

    def parse_relationship_remove(self, data):
        """Handle RELATIONSHIP_REMOVE: drop the relationship and dispatch."""
        key = int(data['id'])
        try:
            removed = self.user._relationships.pop(key)
        except KeyError:
            # Unknown relationship; nothing to dispatch.
            return
        self.dispatch('relationship_remove', removed)

    def _get_reaction_user(self, channel, user_id):
        """Resolve a reacting user: guild member for text channels, else a user."""
        in_guild_text_channel = isinstance(channel, TextChannel)
        if in_guild_text_channel:
            return channel.guild.get_member(user_id)
        return self.get_user(user_id)

    def get_reaction_emoji(self, data):
        """Resolve raw reaction data to a cached emoji, a PartialEmoji, or a str."""
        emoji_id = utils._get_as_snowflake(data, 'id')

        if not emoji_id:
            # Unicode emoji are identified by name alone.
            return data['name']

        try:
            return self._emojis[emoji_id]
        except KeyError:
            # Not in our cache (e.g. from a guild we're not in): build a
            # lightweight stand-in from the raw payload.
            return PartialEmoji(animated=data['animated'],
                                name=data['name'],
                                id=emoji_id)

    def _upgrade_partial_emoji(self, emoji):
        """Swap a PartialEmoji for the full cached emoji when possible."""
        key = emoji.id
        if not key:
            # Unicode emoji: the name string is the canonical form.
            return emoji.name
        try:
            return self._emojis[key]
        except KeyError:
            # Not cached; keep the partial object as-is.
            return emoji

    def get_channel(self, id):
        """Look up a channel by ID across private channels and all guilds.

        Returns None when the ID is None or no channel matches.
        """
        if id is None:
            return None

        private = self._get_private_channel(id)
        if private is not None:
            return private

        # Fall back to scanning every guild's channel cache.
        for g in self.guilds:
            found = g.get_channel(id)
            if found is not None:
                return found
        return None

    def create_message(self, *, channel, data):
        """Build a Message bound to this state from a raw payload."""
        message = Message(state=self, channel=channel, data=data)
        return message

    def receive_chunk(self, guild_id):
        """Register and return a future resolved when *guild_id*'s chunk arrives."""
        future = self.loop.create_future()
        chunk_listener = Listener(ListenerType.chunk, future,
                                  lambda s: s.id == guild_id)
        self._listeners.append(chunk_listener)
        return future
Exemple #42
0
from collections import OrderedDict

# OrderedDict preserves insertion order; build the record from pairs.
user = OrderedDict([("name", "admin"), ("age", 23), ("weight", 65)])
print(user)

# Demote "name" to the last position.
user.move_to_end("name")
print(user)

# Promote "name" back to the first position.
user.move_to_end("name", last=False)
print(user)
Exemple #43
0
class CachedStore(BaseStore):
    """
    A thread-safe cached persistent store of the subscriber database.
    Prerequisite: persistent_store need to be thread safe
    """
    def __init__(self, persistent_store, cache_capacity=512, loop=None):
        self._lock = threading.Lock()
        # LRU cache: least recently used entry sits at the front.
        self._cache = OrderedDict()
        self._cache_capacity = cache_capacity
        self._persistent_store = persistent_store
        self._on_ready = OnDataReady(loop=loop)

    def add_subscriber(self, subscriber_data):
        """
        Method that adds the subscriber.

        Raises:
            DuplicateSubscriberError: if the subscriber is already cached.
        """
        sid = SIDUtils.to_str(subscriber_data.sid)
        with self._lock:
            if sid in self._cache:
                raise DuplicateSubscriberError(sid)

            # Persist first so the cache never holds data the store rejected.
            self._persistent_store.add_subscriber(subscriber_data)
            self._cache_put(sid, subscriber_data)
        self._on_ready.add_subscriber(subscriber_data)

    @contextmanager
    def edit_subscriber(self, subscriber_id):
        """
        Context manager to modify the subscriber data.
        """
        with self._lock:
            if subscriber_id in self._cache:
                # Deep copy so the caller mutates a private object until
                # the edit is committed back below.
                data = self._cache_get(subscriber_id)
                subscriber_data = copy.deepcopy(data)
            else:
                subscriber_data = \
                    self._persistent_store.get_subscriber_data(subscriber_id)
            yield subscriber_data
            self._persistent_store.update_subscriber(subscriber_data)
            self._cache_put(subscriber_id, subscriber_data)

    def delete_subscriber(self, subscriber_id):
        """
        Method that deletes a subscriber, if present.
        """
        with self._lock:
            if subscriber_id in self._cache:
                del self._cache[subscriber_id]

            self._persistent_store.delete_subscriber(subscriber_id)

    def delete_all_subscribers(self):
        """
        Method that removes all the subscribers from the store
        """
        with self._lock:
            self._cache_clear()
            self._persistent_store.delete_all_subscribers()

    def resync(self, subscribers):
        """
        Method that should resync the store with the mentioned list of
        subscribers. The resync leaves the current state of subscribers
        intact.

        Args:
            subscribers - list of subscribers to be in the store.
        """
        with self._lock:
            # Drop the entire cache rather than reconciling entry by entry.
            self._cache_clear()
            self._persistent_store.resync(subscribers)
        self._on_ready.resync(subscribers)

    def get_subscriber_data(self, subscriber_id):
        """
        Method that returns the subscriber data for the subscriber.
        """
        with self._lock:
            if subscriber_id in self._cache:
                return self._cache_get(subscriber_id)
            else:
                subscriber_data = \
                    self._persistent_store.get_subscriber_data(subscriber_id)
                self._cache_put(subscriber_id, subscriber_data)
                return subscriber_data

    def list_subscribers(self):
        """
        Method that returns the list of subscribers stored.
        Note: this method is not cached since it's easier to get the whole list
        from persistent store
        """
        return self._persistent_store.list_subscribers()

    def on_ready(self):
        return self._on_ready.event.wait()

    def _cache_get(self, k):
        """
        Get from the LRU cache. Move the last hit entry to the end.
        """
        self._cache.move_to_end(k)
        return self._cache[k]

    def _cache_put(self, k, v):
        """
        Put to the LRU cache. Evict the LRU entry only when inserting a NEW
        key into a full cache.

        Fixed: overwriting an existing key now refreshes its recency and no
        longer evicts an unrelated entry — previously a just-updated (hot)
        entry kept its stale position and an overwrite of an existing key
        needlessly evicted the LRU entry.
        """
        if k in self._cache:
            self._cache.move_to_end(k)
        elif self._cache_capacity == len(self._cache):
            self._cache.popitem(last=False)
        self._cache[k] = v

    def _cache_list(self):
        return list(self._cache.keys())

    def _cache_clear(self):
        self._cache.clear()
Exemple #44
0
class lrucache():
    """A least-recently-used cache backed by an OrderedDict.

    The least recently used entry sits at the front of the underlying
    OrderedDict; the iteration helpers walk from most to least recently
    used without disturbing that order.
    """

    def __init__(self, size):
        self._table = OrderedDict()
        self._capacity = size
        # Route through size() so the capacity assertion applies to the
        # constructor argument as well.
        self.size(size)

    def __len__(self):
        return len(self._table)

    def clear(self):
        """Remove every entry from the cache."""
        self._table.clear()

    def __contains__(self, key):
        return key in self._table

    def peek(self, key):
        """Return the value for *key* without touching LRU order.

        Returns None if the key doesn't exist.
        """
        return self._table.get(key)

    def __getitem__(self, key):
        """Return the value for *key*, marking it most recently used."""
        self._table.move_to_end(key)
        return self._table[key]

    def __setitem__(self, key, value):
        """Store *key* -> *value*, evicting LRU entries if at capacity."""
        table = self._table
        # Replacing an existing key: drop the old slot so the new entry
        # lands at the most-recently-used end.
        if key in table:
            del table[key]

        # Make room before inserting.
        while len(table) >= self._capacity:
            table.popitem(last=False)

        table[key] = value

    def __delitem__(self, key):
        """Remove *key* from the cache."""
        del self._table[key]

    def __iter__(self):
        """Iterate keys from most to least recently used.

        Does not modify the cache order; makes the cache behave like a
        dictionary.
        """
        return reversed(self._table.keys())

    def items(self):
        """Iterate (key, value) pairs, most recently used first.

        Does not modify the cache order.
        """
        return reversed(self._table.items())

    def keys(self):
        """Iterate keys, most recently used first.

        Does not modify the cache order.
        """
        return reversed(self._table.keys())

    def values(self):
        """Iterate values, most recently used first.

        Does not modify the cache order.
        """
        return reversed(self._table.values())

    def size(self, size=None):
        """Get or set the capacity of the cache.

        :param int size: maximum number of elements in the cache

        Shrinking evicts least-recently-used entries first.
        """
        if size is not None:
            assert size > 0
            if size < self._capacity:
                evictable = self._table
                while len(evictable) > size:
                    evictable.popitem(last=False)
            self._capacity = size

        return self._capacity
# defaultdict supplies a fresh list for every missing key, which makes
# grouping a one-liner per row.
worker_dict = defaultdict(list)
for name, uni in workers:
    worker_dict[name].append(uni)

#print(worker_dict)

# OrderedDict remembers insertion order and can reorder entries in place.
o = OrderedDict([('Rod', 6), ('Sam', 4), ('Jen', 10)])

#print(o)

o.move_to_end('Rod')               # 'Rod' becomes the last entry
o.move_to_end('Jen', last=False)   # 'Jen' becomes the first entry
#print(o)
o.popitem()                        # drops the last entry ('Rod')
#print(o)

## Named tuples give tuple fields readable names.

account_x = ('checking', 20.30)
Account = namedtuple('Account', ['name', 'amount'])
account = Account(name='checking', amount=18.50)
#print(account)

# A plain tuple unpacks straight into the named type.
accountNamedTuple = Account(*account_x)
#print(accountNamedTuple)
Exemple #46
0
class Extractor:
    """Pulls batches of messages from input queues and feeds a dispatcher.

    Queues are serviced most-out-of-date first; per-queue timestamps track
    how far each queue has been read.
    """

    def __init__(self, db, dest_dir, fatal_exceptions=False):
        self.db = db
        self.dest_dir = dest_dir
        self.queues = []
        self.dispatcher = Dispatcher(fatal_exceptions=fatal_exceptions)
        self.conn = db.connect()
        self.current_timestamp = very_old_timestamp
        # Maps queue -> timestamp of the newest message seen (or a future
        # "sleep until" time once the queue has caught up).
        self.queue_timestamp = OrderedDict()
        if dest_dir is not None:
            os.makedirs(dest_dir, exist_ok=True)
        self.dispatcher.add_dead_letter_handler(DefaultDeadLetterHandler())

    def add_queue(self, queue):
        """Add an input queue."""
        self.queues.append(queue)
        self.queue_timestamp[queue] = very_old_timestamp
        if self.dest_dir is not None:
            queue.load_state(self.dest_dir)

    def add_handler(self, handler):
        """Add a message handler."""
        self.dispatcher.add_handler(handler)

    def flush(self):
        """Flush all output handlers, and save queue state to disk."""
        self.dispatcher.flush()
        if self.dest_dir is not None:
            for queue in self.queues:
                queue.save_state(self.dest_dir)

    def idle(self):
        """Check whether all available messages have been received.

        This means that run() will not do any further processing until
        new messages are added to the input database.
        """

        # Find the most out-of-date queue.
        q = min(self.queues, key=self.queue_timestamp.get)

        # If the oldest queue timestamp is greater than the current
        # time, then all queues must now be idle.
        if self.queue_timestamp[q] > self.current_timestamp:
            return True

        # Check if this queue is stalled waiting for another queue.
        sq = q.stalling_queue()
        while sq is not None:
            q = sq
            sq = q.stalling_queue()

        # Check whether that queue is idle.
        return (self.queue_timestamp[q] > self.current_timestamp)

    def run(self):
        """Perform some amount of work.

        This will execute a small number of queries (usually only
        one), reading a batch of messages from the most out-of-date
        queue and sending those messages to the attached handlers.
        """

        # Find the most out-of-date queue.
        q = min(self.queues, key=self.queue_timestamp.get)

        # If the oldest queue timestamp is greater than the current
        # timestamp, then *all* queues must now be idle; in that case,
        # ignore timestamps and handle queues in round-robin order.
        if self.queue_timestamp[q] > self.current_timestamp:
            q = next(iter(self.queue_timestamp))
            self.queue_timestamp.move_to_end(q)

        # Fixed: create the cursor *outside* the try block.  Previously,
        # if self.conn.cursor() raised, the finally clause referenced an
        # unbound `cursor`, raising UnboundLocalError and masking the
        # original error.
        cursor = self.conn.cursor()
        try:
            # Check if this queue is stalled (waiting for another
            # queue before it can proceed.)  In that case, the other
            # queue inherits this one's priority.
            origq = q
            sq = q.stalling_queue()
            while sq is not None:
                q = sq
                sq = q.stalling_queue()

            # If the original queue was stalled, and the current queue
            # is up-to-date, then check all queues to update the current
            # time.  This avoids looping indefinitely if the messages
            # we're anticipating never actually show up.
            if q is not origq and q.reached_present():
                self._update_current_time(cursor)

            self._run_queries(q, cursor)
        finally:
            cursor.close()

    def _run_queries(self, queue, cursor):
        """Read one batch of messages from *queue* and push them downstream."""
        parser = queue.next_message_parser(self.db)
        for msg in self.db.get_messages(parser, cursor=cursor):
            ts = queue.message_timestamp(msg)

            # FIXME: should disregard timestamps that are
            # completely absurd (but maybe those should be
            # thrown away at a lower level.)

            # current_timestamp = maximum timestamp of any
            # message we've seen so far
            if ts > self.current_timestamp:
                self.current_timestamp = ts

            # query_time = maximum timestamp of any
            # message we've seen in this queue
            if ts > queue.query_time:
                queue.query_time = ts

            queue.push_message(msg, self.dispatcher)

        # If this queue has reached the present time, put it to
        # sleep for some minimum time period before hitting it
        # again.  The delay time is dependent on the queue type.
        if queue.reached_present():
            queue.query_time = self.current_timestamp
            self.queue_timestamp[queue] = (self.current_timestamp +
                                           queue.idle_delay())
        else:
            self.queue_timestamp[queue] = queue.query_time

    def _update_current_time(self, cursor):
        """Advance current_timestamp past the newest message in any queue."""
        for queue in self.queues:
            parser = queue.final_message_parser(self.db)
            for msg in self.db.get_messages(parser, cursor=cursor):
                ts = queue.message_timestamp(msg)
                if ts > self.current_timestamp:
                    self.current_timestamp = ts
Exemple #47
0
class ObjectHolderActor(WorkerActor):
    """Actor holding data objects on one storage device, spilling the
    oldest unpinned objects to configured spill devices when space runs out.

    Concrete subclasses set ``_storage_device``/``_spill_devices`` and
    implement :meth:`update_cache_status` and :meth:`post_delete`.
    """
    # Storage location managed by this holder; set by concrete subclasses.
    _storage_device = None
    # Devices objects may be spilled to; set by concrete subclasses.
    _spill_devices = None

    def __init__(self, size_limit=0):
        super().__init__()
        # Capacity of this holder in bytes.
        self._size_limit = size_limit

        # (session_id, data_key) -> object; insertion order doubles as the
        # spill candidate order (oldest entries are considered first).
        self._data_holder = OrderedDict()
        # (session_id, data_key) -> size in bytes.
        self._data_sizes = dict()

        # Total bytes currently held.
        self._total_hold = 0
        # (session_id, data_key) -> set of pin tokens; pinned keys are never
        # selected for spilling.
        self._pinned_counter = dict()
        # Keys currently scheduled for spilling.
        self._spill_pending_keys = set()

        self._total_spill = 0
        # Bounds for a single spill request, resolved from worker options
        # in post_create().
        self._min_spill_size = 0
        self._max_spill_size = 0

        self._dispatch_ref = None
        self._status_ref = None
        self._storage_handler = None

    def post_create(self):
        """Resolve actor references, spill-size options and the storage handler."""
        from ..dispatcher import DispatchActor
        from ..status import StatusActor

        super().post_create()
        self.register_actors_down_handler()
        self._dispatch_ref = self.promise_ref(DispatchActor.default_uid())

        # Spill-size options may be absolute byte counts or percentages of
        # the size limit; normalize both forms into byte counts.
        parse_num, is_percent = parse_readable_size(
            options.worker.min_spill_size)
        self._min_spill_size = int(self._size_limit *
                                   parse_num if is_percent else parse_num)
        parse_num, is_percent = parse_readable_size(
            options.worker.max_spill_size)
        self._max_spill_size = int(self._size_limit *
                                   parse_num if is_percent else parse_num)

        # The status actor is optional; tolerate its absence.
        status_ref = self.ctx.actor_ref(StatusActor.default_uid())
        self._status_ref = status_ref if self.ctx.has_actor(
            status_ref) else None

        self._storage_handler = self.storage_client.get_storage_handler(
            self._storage_device.build_location(self.proc_id))

        self.ref().update_cache_status(_tell=True)

    def pre_destroy(self):
        # Drop references to held objects so they can be reclaimed.
        for k in self._data_holder:
            self._data_holder[k] = None

    def update_cache_status(self):
        """Report cache statistics; implemented by concrete subclasses."""
        raise NotImplementedError

    def post_delete(self, session_id, data_keys):
        """Hook invoked after objects are deleted; implemented by subclasses."""
        raise NotImplementedError

    def get_size_limit(self):
        """Return the configured capacity of this holder in bytes."""
        return self._size_limit

    @promise.reject_on_exception
    @log_unhandled
    def spill_size(self, size, multiplier=1, callback=None):
        """Spill unpinned objects until ``size * multiplier`` bytes would fit.

        :param size: number of bytes requested
        :param multiplier: safety factor applied to *size*
        :param callback: promise callback fired once spilling completes
        :raises SpillNotConfigured: no spill devices configured
        :raises SpillSizeExceeded: request larger than the holder capacity
        :raises NoDataToSpill: every held object is pinned or already spilling
        """
        if not self._spill_devices:  # pragma: no cover
            raise SpillNotConfigured

        # Clamp the request into [min_spill_size, max_spill_size]; the
        # capacity check happens before the upper clamp on purpose, so an
        # oversized request is rejected rather than silently reduced.
        request_size = int(size * multiplier)
        request_size = max(request_size, self._min_spill_size)
        if request_size > self._size_limit:
            raise SpillSizeExceeded
        request_size = min(request_size, self._max_spill_size)

        # Unique key identifying this spill operation in the logs.
        spill_ref_key = tokenize((time.time(), size, multiplier))

        logger.debug('Start spilling %d(x%d) bytes in %s. ref_key==%s.',
                     request_size, multiplier, self.uid, spill_ref_key)

        if request_size + self._total_hold > self._size_limit:
            # Walk the holder in insertion (oldest-first) order, collecting
            # unpinned, not-yet-spilling keys until enough space is freed.
            acc_free = 0
            free_keys = []
            for k in self._data_holder.keys():
                if k in self._pinned_counter or k in self._spill_pending_keys:
                    continue
                acc_free += self._data_sizes[k]
                free_keys.append(k)
                self._spill_pending_keys.add(k)
                if request_size + self._total_hold - acc_free <= self._size_limit:
                    break

            if not free_keys:
                logger.warning(
                    'Cannot spill further in %s. Rejected. request=%d ref_key=%s',
                    self.uid, request_size, spill_ref_key)
                raise NoDataToSpill

            logger.debug(
                'Decide to spill %d data keys %r in %s. request=%d ref_key=%s',
                len(free_keys), free_keys, self.uid, request_size,
                spill_ref_key)

            @log_unhandled
            def _release_spill_allocations(key):
                # Once a key has been copied to a spill device, remove it
                # from this holder.
                logger.debug(
                    'Removing reference of data %s from %s when spilling. ref_key=%s',
                    key, self.uid, spill_ref_key)
                self.delete_objects(key[0], [key[1]])

            @log_unhandled
            def _handle_spill_reject(*exc, **kwargs):
                # Copy failed: un-mark the key and re-raise the original
                # (exc_type, exc_value, traceback) triple.
                key = kwargs['session_data_key']
                self._remove_spill_pending(*key)
                raise exc[1].with_traceback(exc[2])

            @log_unhandled
            def _spill_key(key):
                # The key may have been pinned or deleted since selection;
                # skip it in that case.
                if key in self._pinned_counter or key not in self._data_holder:
                    self._remove_spill_pending(*key)
                    return
                logger.debug('Spilling key %s in %s. ref_key=%s', key,
                             self.uid, spill_ref_key)
                return self.storage_client.copy_to(key[0], [key[1]], self._spill_devices) \
                    .then(lambda *_: _release_spill_allocations(key),
                          functools.partial(_handle_spill_reject, session_data_key=key))

            @log_unhandled
            def _finalize_spill(*_):
                # All copies done: evict from plasma, notify the caller and
                # refresh cache statistics.
                logger.debug('Finish spilling %d data keys in %s. ref_key=%s',
                             len(free_keys), self.uid, spill_ref_key)
                self._plasma_client.evict(request_size)
                if callback:
                    self.tell_promise(callback)
                self.update_cache_status()

            promise.all_(_spill_key(k) for k in free_keys).then(_finalize_spill) \
                .catch(lambda *exc: self.tell_promise(callback, *exc, _accept=False))
        else:
            # Enough free capacity already; just ask plasma to evict.
            logger.debug('No need to spill in %s. request=%d ref_key=%s',
                         self.uid, request_size, spill_ref_key)

            self._plasma_client.evict(request_size)
            if callback:
                self.tell_promise(callback)

    @log_unhandled
    def _internal_put_object(self, session_id, data_key, obj, size):
        """Store *obj* under (session_id, data_key), replacing any old entry."""
        try:
            session_data_key = (session_id, data_key)
            if session_data_key in self._data_holder:
                # Replacing: subtract the old size and delete first so the
                # key moves to the (newest) end of the OrderedDict.
                self._total_hold -= self._data_sizes[session_data_key]
                del self._data_holder[session_data_key]

            self._data_holder[session_data_key] = obj
            self._data_sizes[session_data_key] = size
            self._total_hold += size
        finally:
            # Drop the local reference regardless of outcome.
            del obj

    def _finish_put_objects(self, _session_id, data_keys):
        """Log newly registered keys and refresh cache statistics."""
        if logger.getEffectiveLevel() <= logging.DEBUG:  # pragma: no cover
            simplified_keys = sorted(
                set(k[0] if isinstance(k, tuple) else k for k in data_keys))
            logger.debug('Data %r registered in %s. total_hold=%d',
                         simplified_keys, self.uid, self._total_hold)
        self.update_cache_status()

    def _remove_spill_pending(self, session_id, data_key):
        """Un-mark a key scheduled for spilling; no-op if it is not marked."""
        try:
            self._spill_pending_keys.remove((session_id, data_key))
            logger.debug('Spill-pending key (%s, %s) removed in %s',
                         session_id, data_key, self.uid)
        except KeyError:
            pass

    @log_unhandled
    def delete_objects(self, session_id, data_keys):
        """Remove the given keys (and their pins / spill marks) from the holder."""
        actual_removed = []
        for data_key in data_keys:
            session_data_key = (session_id, data_key)

            self._remove_spill_pending(session_id, data_key)

            try:
                del self._pinned_counter[session_data_key]
            except KeyError:
                pass

            if session_data_key in self._data_holder:
                actual_removed.append(data_key)

                data_size = self._data_sizes[session_data_key]
                self._total_hold -= data_size
                del self._data_holder[session_data_key]
                del self._data_sizes[session_data_key]

        self.post_delete(session_id, actual_removed)
        if actual_removed:
            logger.debug('Data %s unregistered in %s. total_hold=%d',
                         actual_removed, self.uid, self._total_hold)
            self.update_cache_status()

    def lift_data_keys(self, session_id, data_keys, last=True):
        """Move keys to the newest (last=True) or oldest end of the spill order."""
        for k in data_keys:
            self._data_holder.move_to_end((session_id, k), last)

    @log_unhandled
    def pin_data_keys(self, session_id, data_keys, token):
        """Pin keys against spilling under *token*; returns the keys pinned.

        :raises PinDataKeyFailed: if any requested key is already being spilled
        """
        spilling_keys = list(k for k in data_keys
                             if (session_id, k) in self._spill_pending_keys)
        if spilling_keys:
            logger.warning('Cannot pin data key %r: under spilling',
                           spilling_keys)
            raise PinDataKeyFailed
        pinned = []
        for k in data_keys:
            session_k = (session_id, k)
            if session_k not in self._data_holder:
                continue
            if session_k not in self._pinned_counter:
                self._pinned_counter[session_k] = set()
            self._pinned_counter[session_k].add(token)
            pinned.append(k)
        logger.debug('Data keys %r pinned in %s', pinned, self.uid)
        return pinned

    @log_unhandled
    def unpin_data_keys(self, session_id, data_keys, token):
        """Remove *token*'s pins from the keys; returns the keys unpinned."""
        unpinned = []
        for k in data_keys:
            session_k = (session_id, k)
            try:
                self._pinned_counter[session_k].remove(token)
                # Drop the counter entry once the last token is removed.
                if not self._pinned_counter[session_k]:
                    del self._pinned_counter[session_k]
                unpinned.append(k)
            except KeyError:
                continue
        if unpinned:
            logger.debug('Data keys %r unpinned in %s', unpinned, self.uid)
        return unpinned

    def dump_keys(self):  # pragma: no cover
        """Return all held (session_id, data_key) pairs; for diagnostics."""
        return list(self._data_holder.keys())
Exemple #48
0
class Buffer:
    """Bounded, insertion-ordered cache of blocks keyed by marker.

    When the configured size is exceeded the oldest entry is evicted. The
    supplied gauge observes the buffer length after every mutation.
    """

    def __init__(self, metric: Gauge, size: int):
        self.buffer = OrderedDict()
        self.size = size
        self.metric = metric

    def __len__(self):
        return len(self.buffer)

    def total_hashes(self) -> int:
        """Total number of hashes across every buffered block."""
        return sum(len(block.hashes) for block in self.buffer.values())

    def hashes_set(self) -> Set[str]:
        """Union of all buffered blocks' hashes."""
        combined = set()
        for block in self.buffer.values():
            combined.update(block.hashes)
        return combined

    def add(self, item: Block) -> None:
        """Insert a block, merging hashes when its marker is already present."""
        marker = item.get_marker()
        if marker in self.buffer:
            self.buffer[marker].hashes.update(item.hashes)
        else:
            self.buffer[marker] = item
        if len(self.buffer) > self.size:
            # Over capacity: evict the oldest entry.
            self.buffer.popitem(False)
        self.metric.observe(len(self.buffer))

    def check_marker(self, marker: str) -> bool:
        """Drop every entry stored before *marker*, keeping the marker itself.

        Returns True when the marker was present in the buffer.
        """
        log.debug(
            f"checking marker {marker} (buffer size = {len(self.buffer)} items)"
        )
        dropped = 0
        found = False
        if marker in self.buffer:
            while self.buffer:
                key, block = self.buffer.popitem(False)
                if key != marker:
                    dropped += 1
                    continue
                # Re-insert the marker entry at the front and stop.
                self.buffer[key] = block
                self.buffer.move_to_end(key, False)
                log.debug(f"removed {dropped} elements before marker...")
                found = True
                break
        self.metric.observe(len(self.buffer))
        return found

    def get_first(self) -> Block:
        """Return the oldest block without removing it."""
        key, block = self.buffer.popitem(False)
        self.buffer[key] = block
        self.buffer.move_to_end(key, False)
        self.metric.observe(len(self.buffer))
        return block

    def __str__(self) -> str:
        body = ",".join(f"{k}={v}" for k, v in self.buffer.items())
        return f"EthBuffer<{body}>"
Exemple #49
0
def main(resources_path):
    """Walk *resources_path*, parse each ``*.resource`` Gherkin file and
    return a nested dict ready for NLP processing.

    Result shape: ``{resource_name: {'background': {'Given': [...]},
    scenario_name: OrderedDict('Given'/'When'/'Then' -> step dicts), ...}}``.
    """
    parser = Parser()

    nlp_ready_resources = {}

    for root, dirs, files in os.walk(resources_path):
        for file_name in files:
            if file_name.endswith('.resource'):
                # Resource name = file stem with '-' and '_' turned into spaces.
                resource = os.path.splitext(basename(file_name))[0]
                resource = ' '.join(resource.split('-'))
                resource = ' '.join(resource.split('_'))
                parsed_resource_file = parser.parse(
                    os.path.join(root, file_name))
                nlp_ready_resources[resource] = {}
                for child in parsed_resource_file['feature']['children']:
                    if child['type'] == 'Background':
                        # Background steps are all collected under 'Given'.
                        nlp_ready_resources[resource]['background'] = {}
                        nlp_ready_resources[resource]['background'][
                            'Given'] = []
                        for step in child['steps']:
                            sentence = step['keyword'] + step['text']
                            nlp_ready_resources[resource]['background'][
                                'Given'].append({'sentence': sentence})
                    elif child['type'] == 'Scenario':
                        # Buckets for the three step kinds, forced into
                        # Given/When/Then iteration order.
                        ordered_step_types = OrderedDict({
                            'Given': [],
                            'When': [],
                            'Then': []
                        })
                        ordered_step_types.move_to_end('When')
                        ordered_step_types.move_to_end('Then')
                        nlp_ready_resources[resource][
                            child['name']] = ordered_step_types
                        in_step = ''
                        for step in child['steps']:
                            data_table = []
                            sentence = step['keyword'] + step['text']
                            # Track the current step kind; steps with other
                            # keywords keep the previously seen kind.
                            if step['keyword'] == 'When ' or step[
                                    'keyword'] == 'Then ' or step[
                                        'keyword'] == 'Given ':
                                in_step = step['keyword'].strip(
                                )  # the keyword carries a trailing space; strip it
                            if 'argument' in step:
                                if step['argument']['type'] == 'DataTable':
                                    data_table = parse_table(step)
                            if not in_step == 'Given':
                                # When/Then steps also record the data table
                                # and (if present) the scenario description.
                                nlp_ready_resources[resource][
                                    child['name']][in_step].append({
                                        'sentence':
                                        sentence,
                                        'data_table':
                                        data_table,
                                        'scenario_name':
                                        child['name']
                                    })
                                if 'description' in child:
                                    nlp_ready_resources[resource][
                                        child['name']][
                                            'Scenario Description'] = child[
                                                'description']
                            else:
                                nlp_ready_resources[resource][
                                    child['name']][in_step].append({
                                        'sentence':
                                        sentence,
                                        'scenario_name':
                                        child['name']
                                    })
    return nlp_ready_resources
## Given an ordered dict: two ways to insert an item at its beginning.

# Method #1: add the item normally, then shift it to the front
# with OrderedDict.move_to_end(last=False).
from collections import OrderedDict

iniordered_dict = OrderedDict([('akshat', '1'), ('nikhil', '2')])

iniordered_dict['manjeet'] = '3'
iniordered_dict.move_to_end('manjeet', last=False)

print(iniordered_dict)

# Method #2: naive approach - rebuild an OrderedDict with the new
# pairs placed before the existing ones.
from collections import OrderedDict

ini_dict1 = OrderedDict([('akshat', '1'), ('nikhil', '2')])
ini_dict2 = OrderedDict([("manjeet", '4'), ("akash", '4')])

merge = OrderedDict()
merge.update(ini_dict2)
merge.update(ini_dict1)

print(merge)
class BufferPool:
    """
    A buffer pool implementation.

    Since the buffer pool is a cache, we do not provide any serialization methods.

    >>> schema = DBSchema('employee', [('id', 'int'), ('age', 'int')])
    >>> bp = BufferPool()
    >>> fm = Storage.FileManager.FileManager(bufferPool=bp)
    >>> bp.setFileManager(fm)

    # Check initial buffer pool size
    >>> len(bp.pool.getbuffer()) == bp.poolSize
    True

    """

    # Default to a 128 MB buffer pool.
    defaultPoolSize = 128 * (1 << 20)

    def __init__(self, **kwargs):
        """Construct a buffer pool.

        Keyword arguments:
        other    : an existing BufferPool to share state with (wins over the rest)
        pageSize : page size in bytes (default: io.DEFAULT_BUFFER_SIZE)
        poolSize : pool capacity in bytes (default: defaultPoolSize)
        """
        other = kwargs.get("other", None)
        if other:
            # Bug fix: the original called self.fromOther(other, **kwargs),
            # which passed 'other' both positionally and by keyword and
            # raised TypeError on every copy-construction.
            self.fromOther(other)

        else:
            self.pageSize = kwargs.get("pageSize", io.DEFAULT_BUFFER_SIZE)
            self.poolSize = kwargs.get("poolSize", BufferPool.defaultPoolSize)

            # One contiguous backing buffer; pages are slices of it.
            self.pool = io.BytesIO(b'\x00' * self.poolSize)
            # pageId -> (offset, page, pinCount); insertion order is LRU order.
            self.pageMap = OrderedDict()
            # Offsets of frames not currently backing a page.
            self.freeList = list(range(0, self.poolSize, self.pageSize))
            self.freeListLen = len(self.freeList)

            self.fileMgr = None

    def fromOther(self, other):
        """Adopt (share, not copy) the state of another buffer pool."""
        self.pageSize = other.pageSize
        self.poolSize = other.poolSize
        self.pool = other.pool
        self.pageMap = other.pageMap
        self.freeList = other.freeList
        self.freeListLen = other.freeListLen
        self.fileMgr = other.fileMgr

    def setFileManager(self, fileMgr):
        """Attach the file manager used to read/write pages on disk."""
        self.fileMgr = fileMgr

    # Basic statistics

    def numPages(self):
        """Total number of page frames in the pool."""
        return math.floor(self.poolSize / self.pageSize)

    def numFreePages(self):
        """Number of frames not currently holding a page."""
        return self.freeListLen

    def size(self):
        """Pool capacity in bytes."""
        return self.poolSize

    def freeSpace(self):
        """Free capacity in bytes."""
        return self.numFreePages() * self.pageSize

    def usedSpace(self):
        """Used capacity in bytes."""
        return self.size() - self.freeSpace()

    # Buffer pool operations

    def hasPage(self, pageId):
        """Return True if the page is resident in the pool."""
        return pageId in self.pageMap

    # Gets a page from the buffer pool if present, otherwise reads it from a heap file.
    # This method returns both the page, as well as a boolean to indicate whether
    # there was a cache hit.
    def getPageWithHit(self, pageId, pinned=False):
        if self.fileMgr:
            if self.hasPage(pageId):
                # Bug fix: refresh the page's LRU position on a hit; the
                # original never did, so hot pages could be evicted first.
                self.pageMap.move_to_end(pageId)
                return (self.getCachedPage(pageId, pinned)[1], True)

            else:
                # Fetch the page from the file system, adding it to the buffer pool
                if not self.freeList:
                    self.evictPage()

                self.freeListLen -= 1
                offset = self.freeList.pop(0)
                # Zero-copy slice of the backing buffer for this frame.
                pageBuffer = self.pool.getbuffer()[offset:offset +
                                                   self.pageSize]
                page = self.fileMgr.readPage(pageId, pageBuffer)

                self.pageMap[pageId] = (offset, page, 1 if pinned else 0)
                self.pageMap.move_to_end(pageId)
                return (page, False)

        else:
            raise ValueError("Uninitalized buffer pool, no file manager found")

    # Wrapper for getPageWithHit, returning only the page.
    def getPage(self, pageId, pinned=False):
        return self.getPageWithHit(pageId, pinned)[0]

    # Returns a triple of offset, page object, and pin count
    # for pages present in the buffer pool.
    def getCachedPage(self, pageId, pinned=False):
        if self.hasPage(pageId):
            if pinned:
                self.incrementPinCount(pageId, 1)
            return self.pageMap[pageId]
        else:
            return (None, None, None)

    # Pins a page.
    def pinPage(self, pageId):
        if self.hasPage(pageId):
            self.incrementPinCount(pageId, 1)

    # Unpins a page.
    def unpinPage(self, pageId):
        if self.hasPage(pageId):
            self.incrementPinCount(pageId, -1)

    # Returns the pin count for a page.
    def pagePinCount(self, pageId):
        if self.hasPage(pageId):
            return self.pageMap[pageId][2]

    # Update the pin counter for a cached page.
    def incrementPinCount(self, pageId, delta):
        (offset, page, pinCount) = self.pageMap[pageId]
        self.pageMap[pageId] = (offset, page, pinCount + delta)

    # Removes a page from the page map, returning it to the free
    # page list without flushing the page to the disk.
    def discardPage(self, pageId):
        if self.hasPage(pageId):
            (offset, _, pinCount) = self.pageMap[pageId]
            if pinCount == 0:
                self.freeList.append(offset)
                self.freeListLen += 1
                del self.pageMap[pageId]

    # Removes a page from the page map, returning it to the free
    # page list. This method also flushes the page to disk.
    def flushPage(self, pageId):
        if self.fileMgr:
            (offset, page, pinCount) = self.getCachedPage(pageId)
            if all(map(lambda x: x is not None, [offset, page, pinCount])):
                # Only unpinned pages give up their frame; dirty pages are
                # written back either way.
                if pinCount == 0:
                    self.freeList.append(offset)
                    self.freeListLen += 1
                    del self.pageMap[pageId]

                if page.isDirty():
                    self.fileMgr.writePage(page)
        else:
            raise ValueError("Uninitalized buffer pool, no file manager found")

    # Evict using LRU policy, considering only unpinned pages.
    # We implement LRU through the use of an OrderedDict, and by moving pages
    # to the end of the ordering every time it is accessed through getPage()
    def evictPage(self):
        if self.pageMap:
            # Find the least-recently-used unpinned page to evict.
            pageToEvict = None
            for (pageId, (_, _, pinCount)) in self.pageMap.items():
                if pinCount == 0:
                    pageToEvict = pageId
                    break

            # Bug fix: compare against None; a valid but falsy pageId
            # (e.g. 0) previously triggered the "no page" error path.
            if pageToEvict is not None:
                self.flushPage(pageToEvict)

            else:
                raise ValueError(
                    "Could not find a page to evict in the buffer pool")

    def clear(self):
        """Flush all dirty pages back to disk."""
        for (pageId, (offset, page, _)) in list(self.pageMap.items()):
            if page.isDirty():
                self.flushPage(pageId)
Exemple #52
0
# NOTE(review): this fragment references names ('items', 'd', 'sep',
# 'defaultdict', 'banana') defined earlier in the original script; it is
# not runnable standalone — verify against the full source.
# items.sort()  # in Python 3.x dict.items() returns a view, not a list, so it has no sort()
items = sorted(items)

for key, value in items:
    print(key, value)  # print key,dict[key]

# OrderedDict: keys keep their insertion order; they are not sorted by key
od = OrderedDict([('a', 1), ('c', 3), ('b', 2)])  # insertion order: a, c, b
print(od)

items = od.items()
items = sorted(items)
for key, value in items:
    print(key, value)  # print key,dict[key]

# Updating an element: a missing key is inserted
od.update({'a': '5'})
print(od)

# Move a key to the end
od.move_to_end('a')
print(od)

# Removing elements
od.pop('a')  # remove a specific key
print(od)
od.popitem()  # ordered removal: from the right end
print(od)
od.clear()  # remove everything
print(od)
print(od)
od = OrderedDict(sorted(d.items(), key=lambda t: t[1]))
print(od)

od = OrderedDict(sorted(d.items(), key=lambda t: len(t[0])))
print(od)

# The 'last' argument controls whether the item is taken from the end or the beginning
print(f'Извлекли: {od.popitem(last=True)}, остались: {od}')

od.popitem(last=False)
print(od)
sep('*')
od.update({'grapes': 6, 'mango': 9})
# move_to_end moves an element to the end
od.move_to_end('banana')
print(od)

# The first time a key k appears in the for ... in loop it is not yet in d,
# and the entry d[k] is created automatically by default_factory, which
# returns an empty list.

s = [('yellow', 1), ('blue', 2), ('yellow', 3), ('blue', 4), ('red', 1)]
d = defaultdict(list)

for k, v in s:
    d[k].append(v)

print(d)
sep('*')
# Using int() as the default_factory makes defaultdict() handy for
# counting things:
Exemple #54
0
class BufferPool:
    """
    A buffer pool implementation.

    Since the buffer pool is a cache, we do not provide any serialization methods.

    >>> schema = DBSchema('employee', [('id', 'int'), ('age', 'int')])
    >>> bp = BufferPool()
    >>> fm = Storage.FileManager.FileManager(bufferPool=bp)
    >>> bp.setFileManager(fm)

    # Check initial buffer pool size
    >>> len(bp.pool.getbuffer()) == bp.poolSize
    True

    """

    # Default to a 10 MB buffer pool.
    defaultPoolSize = 10 * (1 << 20)

    # Buffer pool constructor.
    #
    # Constructors keyword arguments, with defaults if not present:
    # pageSize       : the page size to be used with this buffer pool
    # poolSize       : the size of the buffer pool
    def __init__(self, **kwargs):
        self.pageSize = kwargs.get("pageSize", io.DEFAULT_BUFFER_SIZE)
        self.poolSize = kwargs.get("poolSize", BufferPool.defaultPoolSize)
        self.pool = io.BytesIO(b'\x00' * self.poolSize)

        # Offsets of frames not currently backing a page.
        self.freeList = list(range(0, self.poolSize, self.pageSize))
        # pageId -> (frame_offset, page, pinCount); insertion order is LRU
        # order (oldest first), refreshed on every getPage() access.
        self.pageList = OrderedDict()
        # The file manager is attached later via setFileManager(); initialize
        # the attribute so early accesses fail cleanly, not with AttributeError.
        self.fileMgr = None

    def setFileManager(self, fileMgr):
        """Attach the file manager used to read/write pages on disk."""
        self.fileMgr = fileMgr

    # Basic statistics

    def numPages(self):
        """Total number of page frames in the pool."""
        return math.floor(self.poolSize / self.pageSize)

    def numFreePages(self):
        """Number of frames not currently holding a page."""
        return len(self.freeList)

    def size(self):
        """Pool capacity in bytes."""
        return self.poolSize

    def freeSpace(self):
        """Free capacity in bytes."""
        return self.numFreePages() * self.pageSize

    def usedSpace(self):
        """Used capacity in bytes."""
        return self.size() - self.freeSpace()

    # Buffer pool operations

    def hasPage(self, pageId):
        """Return True if the page is resident in the pool."""
        return (pageId in self.pageList)

    def getPage(self, pageId):
        """Return the page for *pageId*, reading it from disk on a miss.

        Every access increments the page's pin count and refreshes its
        LRU position.
        """
        # if the page is already in the bufferpool, then fetch that page in the frame
        if self.hasPage(pageId):
            # increment the pinning count
            (frame_offset, page, pinCount) = self.pageList[pageId]
            self.pageList[pageId] = (frame_offset, page, pinCount + 1)
            # Bug fix: refresh the LRU position on a hit; the original only
            # did this on misses, so hot pages could be evicted first.
            self.pageList.move_to_end(pageId)
            return self.pageList[pageId][1]
        else:
            # This means the page isn't in cache, then fetch it in file
            if len(self.freeList) == 0:
                self.evictPage()
            frame_offset = self.freeList.pop(0)
            page_buffer = self.pool.getbuffer()[frame_offset:frame_offset +
                                                self.pageSize]
            page = self.fileMgr.readPage(pageId, page_buffer)
            self.pageList[pageId] = (frame_offset, page, 0)
            self.pageList.move_to_end(pageId)
            return page

    # Removes a page from the page map, returning it to the free
    # page list without flushing the page to the disk.
    def discardPage(self, pageId):
        if self.hasPage(pageId):
            (frame_offset, page, pinCount) = self.pageList[pageId]
            self.freeList.append(frame_offset)
            del self.pageList[pageId]

    def flushPage(self, pageId):
        """Free the page's frame, writing it back to disk if dirty."""
        if self.hasPage(pageId):
            (frame_offset, page, pinCount) = self.pageList[pageId]
            self.freeList.append(frame_offset)
            del self.pageList[pageId]

            if page.isDirty():
                self.fileMgr.writePage(page)

    # Evict using LRU policy.
    # We implement LRU through the use of an OrderedDict, and by moving pages
    # to the end of the ordering every time it is accessed through getPage()
    def evictPage(self):
        # Bug fix: pop the FIRST (least recently used) entry. The original
        # popitem() popped the last entry, evicting the most recently used
        # page and inverting the LRU policy.
        (pageId, (frame_offset, page, _)) = self.pageList.popitem(last=False)
        self.freeList.append(frame_offset)

        if page.isDirty():
            self.fileMgr.writePage(page)

    # Flushes all dirty pages
    def clear(self):
        items = list(self.pageList.items())
        for (pageId, (frame_offset, page, pinCount)) in items:
            if page.isDirty():
                self.flushPage(pageId)
Exemple #55
0
class DayEditor(editor.Editor):
    """Editor widget for journal days.

    Keeps a small LRU cache of GtkSource buffers for recently edited days so
    that undo history and cursor position survive switching between days.
    """

    n_recent_buffers = 10  # How many recent buffers to store
    _t2t_highlighting = None
    _style_scheme = None

    def __init__(self, *args, **kwargs):
        editor.Editor.__init__(self, *args, **kwargs)
        self.day = None
        # LRU cache of buffers for recently edited days. A buffer evicted
        # from here loses its undo history and cursor position and will be
        # recreated from scratch on the next visit.
        self.recent_buffers = OrderedDict()

    def _get_t2t_highlighting(self):
        """Lazily load (and cache) the bundled t2t syntax definition."""
        if self._t2t_highlighting is None:
            manager = GtkSource.LanguageManager.get_default()
            paths = manager.get_search_path()
            if filesystem.files_dir not in paths:
                paths.insert(0, filesystem.files_dir)
                manager.set_search_path(paths)
            self._t2t_highlighting = manager.get_language("t2t")
        return self._t2t_highlighting

    def _get_style_scheme(self):
        """Lazily load (and cache) the customised Tango style scheme."""
        if self._style_scheme is None:
            manager = GtkSource.StyleSchemeManager.get_default()
            if filesystem.files_dir not in manager.get_search_path():
                manager.prepend_search_path(filesystem.files_dir)
            self._style_scheme = manager.get_scheme("rednotebook")
        return self._style_scheme

    def _get_buffer(self, key, text):
        """Get an editing buffer for a given item

        If key is in our cache of recently used buffers, its buffer is retrieved
        and text is ignored. Otherwise, a new buffer is constructed with text.
        """
        if key in self.recent_buffers:
            self.recent_buffers.move_to_end(key)
            return self.recent_buffers[key]

        buf = self.recent_buffers[key] = GtkSource.Buffer.new()
        buf.set_style_scheme(self._get_style_scheme())
        buf.set_language(self._get_t2t_highlighting())
        buf.create_tag("highlighter", background="Yellow")
        buf.begin_not_undoable_action()
        buf.set_text(text)
        buf.end_not_undoable_action()

        # Drop the least recently used buffer once over capacity.
        if len(self.recent_buffers) > self.n_recent_buffers:
            self.recent_buffers.popitem(last=False)

        # Only one buffer is added per call, so the eviction above keeps us
        # at most n_recent_buffers. Any new insertion path must re-check the
        # maximum length as well.
        assert len(self.recent_buffers) <= self.n_recent_buffers

        return buf

    def _get_buffer_for_day(self, day):
        """Return the (possibly cached) buffer for a Day object."""
        return self._get_buffer(day.date, day.text)

    def show_day(self, new_day):
        """Display *new_day*, restoring any active search highlight."""
        self.day = new_day
        self.replace_buffer(self._get_buffer_for_day(new_day))
        self.day_text_view.grab_focus()

        if self.search_text:
            # A search is in progress: scroll to and highlight the match.
            GObject.idle_add(self.scroll_to_text, self.search_text)
            GObject.idle_add(self.highlight, self.search_text)
            return

    def show_template(self, title, text):
        """Display a template named *title* with content *text*."""
        self.replace_buffer(self._get_buffer(("template", title), text))
        self.day_text_view.grab_focus()

    def clear_buffers(self):
        """Forget all cached buffers (undo history and cursor positions)."""
        self.recent_buffers.clear()
class NamedElementOnion(Mapping):
    """
    Add layers to an onion-shaped structure. Optionally, inject to a specific layer.
    This structure is iterable, where the outermost layer is first, and innermost is last.
    """

    def __init__(self, init_elements, valid_element=callable):
        # _queue maps name -> element.  Insertion order is innermost-first;
        # __iter__ reverses it so iteration yields outermost-first.
        self._queue = OrderedDict()
        for element in reversed(init_elements):
            if valid_element(element):
                self.add(element)
            else:
                # Entries may also be (element, name) pairs.
                self.add(*element)

    def add(self, element, name=None):
        """Add *element* as the outermost layer under *name* (or itself)."""
        if name is None:
            name = element

        if name in self._queue:
            if name is element:
                raise ValueError("You can't add the same un-named instance twice")
            else:
                raise ValueError("You can't add the same name again, use replace instead")

        self._queue[name] = element

    def inject(self, element, name=None, layer=None):
        """
        Inject a named element to an arbitrary layer in the onion.

        The current implementation only supports insertion at the innermost layer,
        or at the outermost layer. Note that inserting to the outermost is equivalent
        to calling :meth:`add` .
        """
        if not is_integer(layer):
            raise TypeError("The layer for insertion must be an int.")
        elif layer != 0 and layer != len(self._queue):
            raise NotImplementedError(
                "You can only insert to the beginning or end of a %s, currently. "
                "You tried to insert to %d, but only 0 and %d are permitted. " % (
                    type(self),
                    layer,
                    len(self._queue),
                )
            )

        # Capture the size before add() grows the queue: *layer* was validated
        # against the pre-insertion length, so comparing against the post-add
        # length made the outermost-layer case unreachable and raised the
        # AssertionError below.
        queue_len_before_add = len(self._queue)

        self.add(element, name=name)

        if layer == 0:
            if name is None:
                name = element
            # Innermost layer: move the freshly added entry to the front.
            self._queue.move_to_end(name, last=False)
        elif layer == queue_len_before_add:
            # Outermost layer: add() already appended it; nothing to do.
            return
        else:
            raise AssertionError("Impossible to reach: earlier validation raises an error")

    def clear(self):
        self._queue.clear()

    def replace(self, old, new):
        """Replace the element named *old* with *new*; return the old element."""
        if old not in self._queue:
            raise ValueError("You can't replace unless one already exists, use add instead")
        to_be_replaced = self._queue[old]
        if to_be_replaced is old:
            # The element was registered under itself; re-insert *new* (under
            # its own name) in the old slot to preserve layer order.
            self._replace_with_new_name(old, new)
        else:
            self._queue[old] = new
        return to_be_replaced

    def remove(self, old):
        if old not in self._queue:
            raise ValueError("You can only remove something that has been added")
        del self._queue[old]

    def _replace_with_new_name(self, old, new):
        """Swap self-named *old* for self-named *new*, keeping its position."""
        self._queue[new] = new
        found_old = False
        # Rotate every key after *old* (except *new*) to the back so that
        # *new* ends up exactly where *old* used to sit.
        for key in list(self._queue.keys()):
            if not found_old:
                if key == old:
                    found_old = True
                continue
            elif key != new:
                self._queue.move_to_end(key)
        del self._queue[old]

    def __iter__(self):
        # NOTE: iterates elements (values), not names, outermost-first.
        elements = self._queue.values()
        if not isinstance(elements, Sequence):
            elements = list(elements)
        return iter(reversed(elements))

    def __add__(self, other):
        if not isinstance(other, NamedElementOnion):
            raise NotImplementedError("You can only combine with another NamedElementOnion")
        combined = self._queue.copy()
        combined.update(other._queue)
        # _queue maps name -> element, but __init__ expects (element, name)
        # pairs (it unpacks them via add(*pair)).  __init__ also iterates in
        # reverse, so pre-reverse here to preserve the combined layer order.
        # (Previously the raw (name, element) items were passed through,
        # registering each name *as* the element and vice versa.)
        init_elements = [
            (element, name)
            for name, element in reversed(list(combined.items()))
        ]
        return NamedElementOnion(init_elements)

    def __contains__(self, element):
        return element in self._queue

    def __getitem__(self, element):
        return self._queue[element]

    def __len__(self):
        return len(self._queue)

    def __reversed__(self):
        # Innermost-first iteration over elements (values).
        elements = self._queue.values()
        if not isinstance(elements, Sequence):
            elements = list(elements)
        return iter(elements)
Exemple #57
0
 def settings_form_fields(self):
     """Build the settings form fields for this Mollie payment provider.

     Two authentication modes are supported:

     * Mollie Connect (OAuth): chosen when a connect client id is set and
       no API key is configured.  With an access token present, a read-only
       account field, a website-profile picker and a live/test endpoint
       selector are offered; without one, an empty mapping is returned.
     * API key: otherwise a single secret-key field is offered.

     The mode-specific fields are followed by one boolean toggle per Mollie
     payment method plus the parent class's fields, with ``_enabled`` moved
     to the front of the resulting OrderedDict.
     """
     if self.settings.connect_client_id and not self.settings.api_key:
         # Mollie Connect
         if self.settings.access_token:
             fields = [
                 ('connect_org_name',
                  forms.CharField(label=_('Mollie account'),
                                  disabled=True)),
                 ('connect_profile',
                  forms.ChoiceField(label=_('Website profile'),
                                    choices=self.settings.get(
                                        'connect_profiles', as_type=list)
                                    or [])),
                 ('endpoint',
                  forms.ChoiceField(
                      label=_('Endpoint'),
                      initial='live',
                      choices=(
                          ('live', pgettext('mollie', 'Live')),
                          ('test', pgettext('mollie', 'Testing')),
                      ),
                  )),
             ]
         else:
             # Connect is configured but no OAuth token yet: nothing to show.
             return {}
     else:
         # Classic API-key authentication.
         fields = [
             ('api_key',
              forms.CharField(
                  label=_('Secret key'),
                  validators=(MollieKeyValidator(['live_', 'test_']), ),
                  widget=SecretKeyInput(
                      secret_key=self.settings.api_key or ''),
                  # Only required while no key is stored yet — presumably the
                  # stored key is kept when left blank; TODO confirm against
                  # the form's save logic.
                  required=not bool(self.settings.api_key),
              )),
         ]
     # One opt-in checkbox per supported Mollie payment method, appended
     # after the auth fields and before the inherited provider fields.
     d = OrderedDict(fields + [
         ('method_creditcard',
          forms.BooleanField(
              label=_('Credit card'),
              required=False,
          )),
         ('method_bancontact',
          forms.BooleanField(
              label=_('Bancontact'),
              required=False,
          )),
         ('method_banktransfer',
          forms.BooleanField(
              label=_('Bank transfer'),
              required=False,
          )),
         ('method_belfius',
          forms.BooleanField(
              label=_('Belfius Pay Button'),
              required=False,
          )),
         ('method_bitcoin',
          forms.BooleanField(
              label=_('Bitcoin'),
              required=False,
          )),
         ('method_eps', forms.BooleanField(
             label=_('EPS'),
             required=False,
         )),
         ('method_giropay',
          forms.BooleanField(
              label=_('giropay'),
              required=False,
          )),
         ('method_ideal',
          forms.BooleanField(
              label=_('iDEAL'),
              required=False,
          )),
         ('method_inghomepay',
          forms.BooleanField(
              label=_('ING Home’Pay'),
              required=False,
          )),
         ('method_kbc',
          forms.BooleanField(
              label=_('KBC/CBC Payment Button'),
              required=False,
          )),
         ('method_paysafecard',
          forms.BooleanField(
              label=_('paysafecard'),
              required=False,
          )),
         ('method_sofort',
          forms.BooleanField(
              label=_('Sofort'),
              required=False,
          )),
     ] + list(super().settings_form_fields.items()))
     # Keep the master on/off switch at the top of the form.
     d.move_to_end('_enabled', last=False)
     return d
Exemple #58
0
class DnsCache(MutableMapping):
    """An expiring DNS cache backed by an OrderedDict.

    Entries live for ``expire`` seconds.  A single IOLoop timeout walks the
    cache (kept in insertion/refresh order) and evicts every entry whose
    deadline has passed.
    """

    def __init__(self, expire=90):
        self._expire = expire
        self._io_loop = IOLoop.current()
        self._cache = OrderedDict()
        # NOTE(review): CachedTime presumably exposes a cheap .unixtime —
        # confirm against its definition.
        self._cachedtime = CachedTime()
        # Initialise the timeout handle so __delitem__ can test it safely
        # even before any entry has been added (the old code left it unset,
        # which raised AttributeError).
        self._timeout = None

    def __new__(cls, *args, **kwargs):
        # Accept constructor arguments: the old zero-arg signature made
        # DnsCache(expire=...) raise TypeError before __init__ ever ran.
        return object.__new__(cls)

    def __getitem__(self, key):
        return self._cache[key]

    def __setitem__(self, key, value):
        """Insert or refresh *key*; starts the eviction cron on first entry."""
        deadline = self._cachedtime.unixtime + self._expire
        try:
            item = self._cache[key]
            # Refresh in place and move to the back: with a constant expiry,
            # the cache stays ordered by deadline.
            item.deadline = deadline
            item.value = value
            item.times += 1
            self._cache.move_to_end(key)
        except KeyError:
            self._cache[key] = DnsItem(deadline, value)
            if len(self._cache) == 1:
                # First entry: schedule the eviction timer.
                self._timeout = self._io_loop.add_timeout(deadline, self._cron)

    def __delitem__(self, key):
        del self._cache[key]
        if not self._cache and self._timeout is not None:
            # No entries left: cancel the pending eviction timer.
            self._io_loop.remove_timeout(self._timeout)
            self._timeout = None

    def _cron(self):
        """Evict expired entries and reschedule for the next deadline."""
        now = time.time()
        while self._cache:
            key = next(iter(self._cache))
            item = self._cache[key]
            if item.deadline > now:
                # Oldest entry still fresh: sleep until it expires (but at
                # least one second from now).
                next_timeout = max(now + 1, item.deadline)
                self._timeout = self._io_loop.add_timeout(
                    next_timeout, self._cron)
                return
            del self._cache[key]
        # Also reached when the cache was already empty on entry — the old
        # code left a stale self._timeout handle in that case.
        self._timeout = None

    def __iter__(self):
        return iter(self._cache)

    def __len__(self):
        return len(self._cache)

    def __contains__(self, key):
        return key in self._cache

    def keys(self):
        return self._cache.keys()

    def items(self):
        return self._cache.items()

    def values(self):
        return self._cache.values()

    def __repr__(self):
        return repr(self._cache)

    def __str__(self):
        return str(self._cache)
Exemple #59
0
# Counter arithmetic: combine and subtract the two tallies
# (breakfast_count / lunch_counter are defined earlier in the file —
# presumably collections.Counter instances).
summed = breakfast_count + lunch_counter
print("+", summed)
difference = breakfast_count - lunch_counter
print("-", difference)

# collections module, test ordereddict
quotes = dict(
    Curly='Nyuk nyuk!',
    Moe='A wise guy, huh?',
    Larry='Ow!',
)
for name in quotes:
    print("dict", name)

from collections import OrderedDict

# OrderedDict adds explicit reordering on top of plain-dict insertion order.
order_quotes = OrderedDict(quotes)
order_quotes.move_to_end("Curly")  # send Curly to the back

for name in order_quotes:
    print("order", name)


# collections module, test deque
def palindrome(word):
    """Return True if *word* reads the same forwards and backwards."""
    from collections import deque
    letters = deque(word)
    # Peel one character off each end until they disagree or we run out.
    while len(letters) > 1:
        first, last = letters.popleft(), letters.pop()
        if first != last:
            return False
    return True

# Python 3.7+ dict reserve order of insertion !!!
dict1 = {1: 30, 2: 20, 3: 20}

# Sorting Dict: first desc by value then desc by key,
# then asc by key then asc by value.
for key_fn in (lambda kv: (-kv[1], -kv[0]), lambda kv: (kv[0], kv[1])):
    print(sorted(dict1.items(), key=key_fn))

# OrderedDict: same contents, plus explicit reordering helpers.
from collections import OrderedDict

dict2 = OrderedDict(dict1.items())
print(dict2)

# Rotate key 1 to the end, then strip one item off each end.
dict2.move_to_end(1)
print(dict2)

dict2.popitem(last=False)  # pop first item
print(dict2)

dict2.popitem(last=True)  # pop last item
print(dict2)