def handle_encoding(self):
    """Decode the headers and wrap ``self.fh`` in a decoder such that it
    subsequently returns only text.

    The OFX ``ENCODING`` header selects the codec: ``USASCII`` maps to a
    Windows codepage chosen by ``CHARSET`` (default cp1252), and
    ``UNICODE``/``UTF-8`` map to utf-8.  Any other value now falls back
    to ascii instead of raising ``NameError``.
    """
    # The raw headers are bytes; decode them with ascii first so the
    # ENCODING/CHARSET values themselves can be inspected.
    ascii_headers = odict.OrderedDict((
        key.decode('ascii', 'replace'),
        value.decode('ascii', 'replace'),
    ) for key, value in six.iteritems(self.headers))

    enc_type = ascii_headers.get('ENCODING')
    if not enc_type:
        # No encoding specified: use the ascii-decoded headers and
        # decode the body as ascii as well.
        self.headers = ascii_headers
        self.fh = codecs.lookup('ascii').streamreader(self.fh)
        return

    if enc_type == "USASCII":
        cp = ascii_headers.get("CHARSET", "1252")
        encoding = "cp%s" % (cp, )
    elif enc_type in ("UNICODE", "UTF-8"):
        encoding = "utf-8"
    else:
        # BUGFIX: an unrecognised ENCODING previously left `encoding`
        # unbound and this method raised NameError; fall back to ascii.
        encoding = "ascii"

    codec = codecs.lookup(encoding)
    self.fh = codec.streamreader(self.fh)
    # Re-decode the headers with the real encoding.
    self.headers = odict.OrderedDict(
        (key.decode(encoding), value.decode(encoding))
        for key, value in six.iteritems(self.headers))
def is_signature_valid(self):
    """Validate the ``sig`` request parameter against a locally
    calculated signature.

    Signature version 1 signs only a fixed, API-type-specific subset of
    parameters; version 2 signs all received parameters.  Returns True
    when the received signature matches the calculated one.
    """
    try:
        signature_params_to_sign = collections.OrderedDict()
    except AttributeError:
        # Python 2.6: collections has no OrderedDict.
        signature_params_to_sign = ordereddict.OrderedDict()

    if self.get_api_type() == self.API_VC:
        signature_params = ['uid', 'currency', 'type', 'ref']
    elif self.get_api_type() == self.API_GOODS:
        signature_params = ['uid', 'goodsid', 'slength', 'speriod',
                            'type', 'ref']
    else:
        signature_params = ['uid', 'goodsid', 'type', 'ref']

    # BUGFIX: the version-2 default used to be assigned *before* this
    # membership check, so 'sign_version' was always present and the
    # version-1 branch was unreachable.  Inspect the caller-supplied
    # value first, then record which version was actually used.
    if ('sign_version' not in self.parameters
            or int(self.parameters['sign_version']) ==
            self.SIGNATURE_VERSION_1):
        for field in signature_params:
            signature_params_to_sign[field] = (
                self.parameters[field] if field in self.parameters
                else None)
        self.parameters['sign_version'] = self.SIGNATURE_VERSION_1
    else:
        self.parameters['sign_version'] = self.SIGNATURE_VERSION_2
        signature_params_to_sign = self.parameters

    signature_calculated = self.calculate_signature(
        signature_params_to_sign, self.get_secret_key(),
        self.parameters['sign_version'])
    signature = self.parameters['sig'] if 'sig' in self.parameters else None
    return signature == signature_calculated
def flumotion_stats(request):
    """Render 'flumotion.html' with the status of all flumotion servers
    seen within the last ten minutes, merging history rows per server.

    NOTE(review): the nesting below is reconstructed from a
    whitespace-mangled source; in particular whether the history
    filtering runs once per component key or once per row should be
    verified against upstream history.
    """
    ten_mins_ago = datetime.datetime.now() - datetime.timedelta(minutes=10)
    flumotion = models.Flumotion.objects.order_by(
        'type', 'identifier', 'ip',
        '-lastseen').filter(lastseen__gte=ten_mins_ago)
    # Force evaluation of the queryset; the comprehension result is
    # intentionally discarded.
    [(x.identifier, x.lastseen, x.type) for x in flumotion]
    types = set()
    keys = {}
    active_servers = ordereddict.OrderedDict()
    for server in flumotion:
        types.add(server.type)
        if server.type not in keys:
            keys[server.type] = set()
        # A logical server is identified by identifier+ip; rows arrive
        # newest-first, so the first row seen for a key wins.
        key = '%s-%s' % (server.identifier, server.ip)
        # Format the data a bit nicer
        server.full_data = simplejson.loads(server.data)
        # Append to the list of components
        for k in server.full_data['current'].keys():
            keys[server.type].add(k)
        if not key in active_servers:
            active_servers[key] = server
        else:
            # Append this older row's history onto the newest row.
            newest = active_servers[key]
            for k in newest.full_data['current'].keys():
                if k not in server.full_data['history']:
                    continue
                active_servers[key].full_data['history'][k].append((
                    server.full_data['current'][k][0],
                    -1,
                    server.full_data['current'][k][-1],
                ))
                newest.full_data['history'][k] += server.full_data[
                    'history'][k]
                # Filter the history: drop consecutive entries whose
                # first element repeats, keeping original order.
                filtered_history = [('', 0, '')]
                for history in reversed(newest.full_data['history'][k]):
                    if filtered_history[-1][0] != history[0]:
                        filtered_history.append(history)
                newest.full_data['history'][k] = list(
                    reversed(filtered_history[1:]))
    for k in keys:
        keys[k] = list(sorted(keys[k]))
    # NOTE(review): modern Django's render() does not accept
    # context_instance -- confirm the Django version in use.
    return render(request,
                  'flumotion.html',
                  locals(),
                  content_type='text/html',
                  context_instance=template.RequestContext(request))
def get_ordereddict(*args, **kwargs):
    """Return an OrderedDict built from *args/**kwargs.

    Python 2.6 compatibility shim: ``collections`` only gained
    ``OrderedDict`` in 2.7, so fall back to the third-party
    ``ordereddict`` package when it is missing.
    """
    if hasattr(collections, 'OrderedDict'):
        return collections.OrderedDict(*args, **kwargs)
    import ordereddict
    return ordereddict.OrderedDict(*args, **kwargs)
def __init__(self, item_depth=0, item_callback=lambda *args: True,
             xml_attribs=True, attr_prefix='@', cdata_key='#text',
             force_cdata=False, cdata_separator='', postprocessor=None,
             dict_constructor=ordereddict.OrderedDict,
             strip_whitespace=True, namespace_separator=':',
             namespaces=None, force_list=None, comment_key='#comment'):
    """SAX handler state for converting XML into (ordered) dicts.

    Holds the current element path/stack plus all the options that
    control how attributes, cdata, namespaces and comments are mapped
    into the resulting dictionary.
    """
    # Parse-progress state.
    self.path = []
    self.stack = []
    self.data = []
    self.item = None
    self.item_depth = item_depth
    self.item_callback = item_callback
    # Mapping options.
    self.xml_attribs = xml_attribs
    self.attr_prefix = attr_prefix
    self.cdata_key = cdata_key
    self.force_cdata = force_cdata
    self.cdata_separator = cdata_separator
    self.postprocessor = postprocessor
    self.dict_constructor = dict_constructor
    self.strip_whitespace = strip_whitespace
    self.force_list = force_list
    self.comment_key = comment_key
    # Namespace handling.
    self.namespace_separator = namespace_separator
    self.namespaces = namespaces
    self.namespace_declarations = ordereddict.OrderedDict()
def __init__(self, priorities):
    """Create one FIFO queue per priority key, preserving the order of
    ``priorities``.
    """
    try:
        self.queues = collections.OrderedDict()
    except AttributeError:
        # Python 2.6: OrderedDict lives in the third-party package.
        self.queues = ordereddict.OrderedDict()
    for priority in priorities:
        self.queues[priority] = collections.deque()
    # Guards queue access and lets consumers wait for new items.
    self.condition = threading.Condition()
def index(request, template="index"):
    """Render ``<template>.html`` with the per-group configuration.

    The context is passed explicitly (groups/config/default), matching
    the ``monitor`` view, instead of leaking every local variable via
    ``locals()`` (which also exposed the loop variable and the template
    name to the template).
    """
    groups = ordereddict.OrderedDict()
    for group in sorted(CONFIG.groups()):
        groups[group] = CONFIG.config(group)
    return render_to_response(
        '%s.html' % template,
        dict(groups=groups,
             config=CONFIG['config'],
             default=CONFIG['default']))
def to_dict(self):
    """Return dict representation (serialize into dictionary) of the
    class: every public, non-method attribute keyed by name, in the
    (sorted) order produced by ``inspect.getmembers``.
    """
    non_methods = inspect.getmembers(
        self, lambda member: not inspect.ismethod(member))
    return collections.OrderedDict(
        (name, value) for name, value in non_methods
        if not name.startswith("_"))
def __init__(self, controller, health_map):
    """Build the health-report model (a dict subclass) from
    ``health_map``: 'dns', 'storage' and 'providers' sections, each
    entry carrying an 'online' flag ('true'/'false' strings) and a
    self link.
    """
    super(HealthModel, self).__init__()

    dns_map = health_map['dns']
    health_dns = collections.OrderedDict()
    health_dns['online'] = 'true' if dns_map['is_alive'] else 'false'
    health_dns['links'] = link.Model(
        u'{0}/health/dns/{1}'.format(controller.base_url,
                                     dns_map['dns_name']),
        'self')
    self['dns'] = {dns_map['dns_name']: health_dns}

    storage_map = health_map['storage']
    health_storage = collections.OrderedDict()
    health_storage['online'] = ('true' if storage_map['is_alive']
                                else 'false')
    health_storage['links'] = link.Model(
        u'{0}/health/storage/{1}'.format(controller.base_url,
                                         storage_map['storage_name']),
        'self')
    self['storage'] = {storage_map['storage_name']: health_storage}

    self['providers'] = {}
    for provider in health_map['providers']:
        health_provider = collections.OrderedDict()
        health_provider['online'] = ('true' if provider['is_alive']
                                     else 'false')
        health_provider['links'] = link.Model(
            u'{0}/health/provider/{1}'.format(controller.base_url,
                                              provider['provider_name']),
            'self')
        self['providers'][provider['provider_name']] = health_provider
def __init__(self, ofx_data=None):
    """Parse OFX input.

    ofx_data may be raw OFX text, the path of a ``.ofx`` file, or an
    open file-like object.  BUGFIX: a file opened for a path argument
    is now closed via a ``with`` block instead of being leaked.
    """
    super(OfxUtil, self).__init__('OFX')
    self.headers = odict.OrderedDict()
    self.xml = ""
    if ofx_data:
        if isinstance(ofx_data, six.string_types):
            if ofx_data.lower().endswith('.ofx'):
                # A filename: read the file and close it promptly.
                with open(ofx_data) as fh:
                    self.parse(fh.read())
            else:
                # Raw OFX data passed directly as a string.
                self.parse(ofx_data)
        else:
            # A caller-supplied file-like object; do not close it here.
            self.parse(ofx_data.read())
def monitor(request):
    """Render the monitor page with the per-group configuration."""
    group_configs = ordereddict.OrderedDict()
    for name in sorted(CONFIG.groups()):
        group_configs[name] = CONFIG.config(name)
    context = dict(groups=group_configs,
                   config=CONFIG['config'],
                   default=CONFIG['default'])
    return render_to_response('monitor.html', context)
def get_constraints(self, linear=None):
    """Returns an ordered dict of constraint objects.

    linear: obj
        Set to True or False to return linear or nonlinear constraints.
        Default is None, for all constraints.
    """
    eq = self._eq.get_eq_constraints(linear=linear)
    ineq = self._ineq.get_ineq_constraints(linear=linear)
    # list() both sides: on Python 3 dict.items() returns a view, so
    # the old ``items() + items()`` concatenation raises TypeError.
    return ordereddict.OrderedDict(
        list(eq.items()) + list(ineq.items()))
def __init__(self):
    # Completion flags for the four grading stages, kept in
    # stage-number order.
    self.stages = ordereddict.OrderedDict(
        sorted({'1': False, '2': False, '3': False, '4': False}.items()))
    self.timeout = 0  # default: no timeout
    # Parse user arguments
    self.parseArgs()
    # Populate student list if contained in log
    self.logger = logger.Logger(self.baseDir)
    self.studentList = self.logger.getStudents()
def test_update_provider_details(self, provider_details_json,
                                 mock_session, mock_execute):
    """Verify update_provider_details executes the CQL update with the
    correctly serialized provider details.
    """
    # Deserialize the fixture JSON into ProviderDetail objects, the
    # shape the storage API accepts.
    provider_details_dict = {}
    for k, v in provider_details_json.items():
        provider_detail_dict = json.loads(v)
        provider_details_dict[k] = provider_details.ProviderDetail(
            provider_service_id=(
                provider_detail_dict["id"]),
            access_urls=provider_detail_dict["access_urls"],
            domains_certificate_status=provider_detail_dict.get(
                "domains_certificate_status", {}))
    # mock the response from cassandra
    mock_execute.execute.return_value = None
    # this is for update_provider_details unittest code coverage
    # Re-serialize the details the same way the storage layer does, to
    # build the CQL arguments we expect it to pass.
    arg_provider_details_dict = {}
    for provider_name in provider_details_dict:
        the_provider_detail_dict = collections.OrderedDict()
        the_provider_detail_dict["id"] = (
            provider_details_dict[provider_name].provider_service_id)
        the_provider_detail_dict["access_urls"] = (
            provider_details_dict[provider_name].access_urls)
        the_provider_detail_dict["status"] = (
            provider_details_dict[provider_name].status)
        the_provider_detail_dict["name"] = (
            provider_details_dict[provider_name].name)
        the_provider_detail_dict["domains_certificate_status"] = (
            provider_details_dict[provider_name].
            domains_certificate_status.to_dict())
        the_provider_detail_dict["error_info"] = (
            provider_details_dict[provider_name].error_info)
        the_provider_detail_dict["error_message"] = (
            provider_details_dict[provider_name].error_message)
        arg_provider_details_dict[provider_name] = json.dumps(
            the_provider_detail_dict)
    call_args = {
        'project_id': self.project_id,
        'service_id': self.service_id,
        'provider_details': arg_provider_details_dict
    }

    # This is to verify mock has been called with the correct arguments
    def assert_mock_execute_args(*args):
        self.assertEqual(args[0].query_string,
                         services.CQL_UPDATE_PROVIDER_DETAILS)
        self.assertEqual(args[1], call_args)

    mock_execute.execute.side_effect = assert_mock_execute_args
    self.sc.update_provider_details(
        self.project_id, self.service_id, provider_details_dict)
def to_dict(self):
    """Serialize this provider detail into an OrderedDict, preserving
    the field order expected by the storage layer.
    """
    return collections.OrderedDict([
        ("id", self.provider_service_id),
        ("access_urls", self.access_urls),
        ("status", self.status),
        ("name", self.name),
        ("domains_certificate_status",
         self.domains_certificate_status.to_dict()),
        ("error_info", self.error_info),
        ("error_message", self.error_message),
        ("error_class", self.error_class),
    ])
def traffic_monitor_callback(p):
    """Scapy per-packet callback: store each IP packet's src/dst and
    payload as a datum in GPUdb, using the module-level (x, y) counters
    as running coordinates.
    """
    global x, y
    if IP not in p:
        return
    ip_layer = p[IP]
    datum = ordereddict.OrderedDict([
        ('x', x),
        ('y', y),
        ('src', ip_layer.src),
        ('dst', ip_layer.dst),
        ('payload', (str(ip_layer.payload)).encode('utf-8').strip()),
    ])
    encoded_datum = gpudb.encode_datum(type_definition, datum)
    gpudb.do_add(set_id, encoded_datum)
    x += 1
    y += 1
def get_references(self, name):
    """Return references to component `name` in preparation for
    subsequent :meth:`restore_references` call.

    name: string
        Name of component being removed.
    """
    # Keep only objectives that reference the named component,
    # preserving their original order.
    return ordereddict.OrderedDict(
        (oname, obj) for oname, obj in self._objectives.items()
        if name in obj.get_referenced_compnames())
def get_references(self, name):
    """Return references to component `name` in preparation for
    subsequent :meth:`restore_references` call.

    name: string
        Name of component being removed.
    """
    # Keep only parameters that reference the named component,
    # preserving their original order.
    return ordereddict.OrderedDict(
        (pname, param) for pname, param in self._parameters.items()
        if name in param.get_referenced_compnames())
def mimic(self, target):
    """Tries to mimic the target object's constraints.  Target
    constraints that are incompatible with this object are ignored.
    """
    saved_constraints = self._constraints
    self._constraints = ordereddict.OrderedDict()
    try:
        for cname, constraint in target.copy_constraints().items():
            self.add_existing_constraint(constraint, cname)
    except Exception:
        # Roll back to the previous constraint set on any failure.
        self._constraints = saved_constraints
        raise
def __init__(self, enemy_template):
    """A concrete enemy instance generated from ``enemy_template``."""
    self.et = enemy_template
    self.name = ''
    self.template = enemy_template.name
    # Characteristic stats, kept in insertion order.
    self.stats = ordereddict.OrderedDict()
    self.stats_list = []
    self.attributes = {}
    # Abilities and spells.
    self.skills = []
    self.combat_styles = []
    self.folk_spells = []
    self.theism_spells = []
    self.sorcery_spells = []
    self.hit_locations = []
def get_references(self, name):
    """Return references to component `name` in preparation for
    subsequent :meth:`restore_references` call.

    name: string
        Name of component being referenced.
    """
    # Keep only constraints that reference the named component,
    # preserving their original order.
    return ordereddict.OrderedDict(
        (cname, constraint)
        for cname, constraint in self._constraints.items()
        if name in constraint.get_referenced_compnames())
def importGroups(fn):
    """Read a tab-separated groups file and return a mapping of
    group_name -> [sample_filename, ...].

    Uses an OrderedDict when available so group order follows file
    order, falling back to a plain dict.  Fixes a file-handle leak and
    the Python-2-only ``xreadlines()`` / ``string.split`` calls.
    """
    try:
        group_db = collections.OrderedDict()
    except Exception:
        try:
            import ordereddict
            group_db = ordereddict.OrderedDict()
        except Exception:
            group_db = {}
    # Iterate the file directly and make sure the handle is closed.
    with open(fn, 'rU') as fh:
        for line in fh:
            data = ExpressionBuilder.cleanUpLine(line)
            sample_filename, group_number, group_name = data.split('\t')
            if group_name in group_db:
                group_db[group_name].append(sample_filename)
            else:
                group_db[group_name] = [sample_filename]
    return group_db
def __init__(self, cache_path=None, download=True, cache_size=500,
             service="MicrosoftSat", tile_delay=0.3, debug=False,
             max_zoom=19, refresh_age=30 * 24 * 60 * 60):
    """Tile cache manager.

    Picks a per-user cache directory when ``cache_path`` is not given
    (HOME, then LOCALAPPDATA, then the system temp dir), creates it if
    missing, and prepares the in-memory tile cache plus the download
    bookkeeping.
    """
    if cache_path is None:
        try:
            cache_path = os.path.join(os.environ['HOME'], '.tilecache')
        except Exception:
            # No HOME (e.g. Windows): prefer LOCALAPPDATA, otherwise
            # fall back to a temporary directory.
            if 'LOCALAPPDATA' in os.environ:
                cache_path = os.path.join(os.environ['LOCALAPPDATA'],
                                          '.tilecache')
            else:
                import tempfile
                cache_path = os.path.join(tempfile.gettempdir(),
                                          '.tilecache')
    if not os.path.exists(cache_path):
        mp_util.mkdir_p(cache_path)

    self.cache_path = cache_path
    self.max_zoom = max_zoom
    self.min_zoom = 1
    self.download = download
    self.cache_size = cache_size
    self.tile_delay = tile_delay
    self.service = service
    self.debug = debug
    self.refresh_age = refresh_age
    if service not in TILE_SERVICES:
        raise TileException('unknown tile service %s' % service)
    # _download_pending is a dictionary of TileInfo objects
    self._download_pending = {}
    self._download_thread = None
    self._loading = mp_icon('loading.jpg')
    self._unavailable = mp_icon('unavailable.jpg')
    try:
        self._tile_cache = collections.OrderedDict()
    except AttributeError:
        # OrderedDicts in python 2.6 come from the ordereddict module
        # which is a 3rd party package, not in python2.6 distribution
        import ordereddict
        self._tile_cache = ordereddict.OrderedDict()
def decodeDataToExport(self, data, exportProperties):
    """Convert a result-set ``data['records']`` into (headers, rows)
    ready for export.

    exportProperties is a JSON object mapping record field names
    (e.g. ``gf_grant__grant_number``) to display headers; its key
    order is preserved via an OrderedDict hook and defines the column
    order of the export.

    Returns (formatted_headers, result) where result is a list of
    OrderedDicts keyed by display header.

    Raises CriticalError when there are no records or no export
    properties.
    """
    # This is my star code; honestly I did some nice work back then.
    # Five stars for this -- even I am impressed by myself.
    # quickly get the available columns from this resultset data structure
    records = data['records']
    if not records:
        raise CriticalError({'message': "Sorry, no data to export."})
    if not exportProperties:
        raise CriticalError({
            'message': "Sorry, no export properties specified for export."
        })
    result = []
    # Preserve the caller-specified column order.
    exportProperties = json.loads(
        exportProperties,
        object_pairs_hook=ordereddict.OrderedDict
    )  # ast.literal_eval(exportProperties)
    # this will hold the export columns as per the data names
    # e.g gf_grant__grant_number
    exportColumns = exportProperties.keys()
    for record in records:
        d = ordereddict.OrderedDict()
        for entityProperty in exportColumns:
            if entityProperty in record:
                if isinstance(record[entityProperty], Decimal):
                    # Decimals are rounded to 4 places for export.
                    d[exportProperties[entityProperty]] = round(
                        record[entityProperty], 4)
                elif isinstance(record[entityProperty], unicode):
                    # Python 2: encode unicode values as utf-8 bytes.
                    d[exportProperties[entityProperty]] = record[
                        entityProperty].encode('utf-8')
                else:
                    d[exportProperties[entityProperty]] = record[
                        entityProperty]
        result.append(d)
    # now decode the field names aka headers
    headers = exportProperties.values()
    formatted_headers = []
    for header in headers:
        # Line breaks in header labels become spaces.
        h = header.replace("<br/>", " ")
        formatted_headers.append(h)
    return formatted_headers, result
def __init__(self, fh):
    """fh should be a seekable file-like byte stream object."""
    self.headers = odict.OrderedDict()
    self.fh = fh
    # Bail out early when fh cannot be processed.
    if not is_iterable(self.fh):
        return
    if not hasattr(self.fh, "seek"):
        # fh is not a file object, we're doomed.
        return
    # Read headers and set up decoding, restoring the stream position
    # afterwards.
    with save_pos(self.fh):
        self.read_headers()
        self.handle_encoding()
        self.replace_NONE_headers()
def parameter_string(self):
    """Build the canonical '&'-joined, key-sorted parameter string used
    for signature generation.

    Merges query and auth parameters, lowercases the keys, drops any
    existing 'auth_signature', sorts by key and encodes each pair.
    """
    # BUGFIX: the old ``query_dict.items() + (auth_dict.items() or {})``
    # raised TypeError (list + dict) whenever auth_dict was empty.
    param_hash = dict(self.query_dict)
    param_hash.update(self.auth_dict or {})

    # Convert keys to lowercase strings
    params_dict = {}
    for key, value in param_hash.items():
        params_dict[key.lower()] = value

    # Exclude signature from signature generation!
    params_dict.pop('auth_signature', None)

    params_dict = ordereddict.OrderedDict(sorted(params_dict.items()))
    params_list = []
    for key, value in params_dict.items():
        params_list.append(
            QueryEncoder.encode_param_without_escaping(key, value))
    return '&'.join(params_list)
def clean(dieset):
    """Cleans the given dieset combining similar components.

    e.g. ``STR+D10+1d10`` becomes ``STR+2d10``.
    """
    dieset = dieset.replace('+-', '-')
    dieset = dieset.replace('-+', '-')
    dieset = dieset.upper()
    # Change upper-case D to lower case when it is part of a die
    # (1D10 -> 1d10) but not part of a stat name like DEX.
    dieset = re.sub(r'D(\d)', r'd\1', dieset)
    components = re.findall(r"[\+\-]?[\w']+", dieset)
    if components[0][0] not in '+-':
        # Prefix with plus if no prefix
        components[0] = '+' + components[0]
    # Count (dict value) each different component type: die sizes and
    # stat names, in first-seen order.
    new_components = ordereddict.OrderedDict()
    static_int = 0  # cumulative value of static integer components
    for comp in components:
        if 'd' in comp:
            # It's a die, not stat (STR, DEX...)
            amount, die = comp.split('d')
            if amount == '+' or amount == '-':
                # Change '+' to '+1', as in '+D6' > '+1D6'
                amount += '1'
            die = 'd' + die
            if die not in new_components:
                new_components[die] = 0
            new_components[die] += int(amount)
        else:
            # Stat or a static number
            try:
                # A static number
                static_int += int(comp)
            except ValueError:
                # Stat, eg. STR, DEX
                amount = 1 if comp[0] == '+' else -1
                comp = comp[1:]
                if comp not in new_components:
                    new_components[comp] = 0
                new_components[comp] += amount
    out = ''
    for comp, amount in new_components.items():
        if amount == 0:
            continue
        if 'd' in comp:
            # It's a die: always emit an explicit sign.
            amount = str(amount) if amount < 0 else '+' + str(amount)
            out += '%s%s' % (amount, comp)
        else:
            # It's a stat, repeated |amount| times with its sign.
            sign = '+' if amount > 0 else '-'
            out += (sign + comp) * abs(amount)
    if static_int:
        out += (str(static_int) if static_int < 0
                else '+' + str(static_int))
    # BUGFIX: guard against an empty result (all components cancelled
    # out), which previously raised IndexError on out[0].
    if out and out[0] == '+':
        out = out[1:]
    return out
def Create_LHS_parameter_set(nsamples):
    """Generate a Latin-hypercube sample of the model parameters and
    write it (plus the parameter-range file) under ./LHS.
    """
    from SALib.sample import latin_hypercube
    from SALib.util import scale_samples, read_param_file
    import random as rd

    # Fix the random seed (does not affect quasi-random Sobol sampling).
    seed = 1
    np.random.seed(seed)
    rd.seed(seed)

    # Parameter names mapped to their [lower, upper] bounds.
    parameters = ordereddict.OrderedDict()
    parameters['log10m'] = [np.log10(0.001), np.log10(0.1)]
    parameters['lnTe'] = [np.log(np.exp(-8.0) / 3600.0),
                          np.log(np.exp(8.0) / 3600.0)]
    parameters['log10soil'] = [np.log10(1.0), np.log10(2.00)]
    parameters['sdmax'] = [0.1, 2.0]  # dtopmodel

    # Make the output directory if needed.
    if not os.path.exists('LHS'):
        os.mkdir('LHS')

    # Write the parameter-range file.
    param_names = []
    with open('LHS/parameters.txt', 'w') as fp:
        for name in parameters:
            param_names.append(name)
            fp.write('%s %f %f\n' % (name, parameters[name][0],
                                     parameters[name][1]))

    # Read the range file back and generate samples (choose method here).
    pf = read_param_file('LHS/parameters.txt')
    param_values = latin_hypercube.sample(nsamples, pf['num_vars'])
    # Samples are given in range [0, 1] by default; rescale them to the
    # parameter bounds.
    scale_samples(param_values, pf['bounds'])

    # Save parameters to file.
    np.savetxt('LHS/LHS_sampling.txt', param_values, delimiter=' ',
               header=" ".join(param_names))
    return
def seasons(season_url=common.args.url):
    """Build the season menu entries for an NBC show page by scraping
    its carousel markup, with a fallback to the /episodes listing.

    Returns a list of (title, SITE, 'episodes', url, -1, -1) tuples.

    NOTE(review): indentation reconstructed from a whitespace-mangled
    source; verify nesting against upstream before relying on it.
    """
    seasons = []
    base_url = season_url
    season_dict = ordereddict.OrderedDict({})
    # The Tonight Show has its own dedicated scraper.
    if 'the-tonight-show' in season_url:
        seasons = add_show_thetonightshow(season_url)
        return seasons
    has_episodes = False
    video_url = season_url + '/video'
    episode_url = season_url
    # Scan both the /video page and the plain show page for carousels.
    for season_url in (video_url, episode_url):
        season_data = connection.getURL(season_url)
        season_menu = re.compile(
            '<div class="nbc_mpx_carousel.*?" id="(nbc_mpx_carousel_\d+)">\s*<h2.*?>(.*?)</h2>',
            re.DOTALL).findall(season_data)
        for season_id, season_title in season_menu:
            try:
                if 'ALSO' not in season_title:
                    # Strip tags and collapse runs of spaces in the
                    # carousel title.
                    tag = re.compile(r'<.*?>')
                    season_title = tag.sub('', season_title)
                    season_title = re.sub(' +', ' ', season_title)
                    season_title = season_title.strip().title()
                    if not (season_title == 'Full Episodes'
                            and has_episodes):
                        season_node = season_id.split('_')[-1]
                        if season_title not in season_dict.keys():
                            season_dict[season_title] = EPISODES % season_node
                        if 'full episodes' == season_title.lower(
                        ) or 'Season' in season_title:
                            has_episodes = True
            except:
                # NOTE(review): bare except silently skips malformed
                # carousel entries -- consider narrowing.
                pass
    if not has_episodes:
        # No carousel advertised full episodes: fall back to scraping
        # the /episodes listing directly.
        episode_data = connection.getURL(base_url + '/episodes')
        episode_menu = re.compile(
            'src="(.*?)".*?<a href="([^"]*?)" class="watch-now-onion-skin">.*?(\d+) min.*?Season (\d+).*?Episode \d+(\d{2}).*?Air date (\d{2}/\d{2}/\d{2}).*?<div class="episode-title dotdotdot"><a href=".*?">(.*?)</a></div>.*?<p>(.*?)</p>',
            re.DOTALL).findall(episode_data)
        if episode_menu:
            seasons.append(('Full Episodes', SITE, 'episodes',
                            base_url + '/episodes', -1, -1))
    for season_title in season_dict:
        season_url = season_dict[season_title]
        seasons.append((season_title, SITE, 'episodes', season_url,
                        -1, -1))
    return seasons
def update_provider_details(self, project_id, service_id,
                            provider_details):
    """update_provider_details.

    :param project_id
    :param service_id
    :param provider_details
    """
    # Serialize each provider's details to JSON, keyed by provider
    # name, in sorted-name order.
    serialized_details = {}
    for provider_name in sorted(provider_details.keys()):
        detail = provider_details[provider_name]
        payload = collections.OrderedDict()
        payload["id"] = detail.provider_service_id
        payload["access_urls"] = detail.access_urls
        payload["status"] = detail.status
        payload["name"] = detail.name
        payload["domains_certificate_status"] = (
            detail.domains_certificate_status.to_dict())
        payload["error_info"] = detail.error_info
        payload["error_message"] = detail.error_message
        serialized_details[provider_name] = json.dumps(payload)

    args = {
        'project_id': project_id,
        'service_id': uuid.UUID(str(service_id)),
        'provider_details': serialized_details
    }
    # TODO(tonytan4ever): Not sure this returns a list or a single
    # dictionary.
    # Needs to verify after cassandra unittest framework has been added
    # in; if a list, return the first item of the list, if it is a
    # dictionary return the dictionary.
    stmt = query.SimpleStatement(
        CQL_UPDATE_PROVIDER_DETAILS,
        consistency_level=self._driver.consistency_level)
    self.session.execute(stmt, args)