def updateReport(_id):
    """Update an existing report row from the posted form data; respond with its id as JSON."""
    form = AttrDict(request.form)
    # Debug dump of the submitted key/value pairs (same output as before)
    for key in form.keys():
        print("key", key, "value", form[key])
    _id = db.conn.update("report", d=form, primary_key="_id")
    return jsonify(id=_id)
class VLANS:
    '''
    Manage a bunch of VLANs, as a dictionary keyed on VLAN id.

    ``delemiter`` separates individual VLANs and ``range_delemiter`` separates
    range endpoints when rendering to a string (e.g. "1,5,10-20").
    '''

    def __init__(self, vlans=None, delemiter=",", range_delemiter="-"):
        super().__init__()
        self._delemiter = delemiter
        self._range_delemiter = range_delemiter
        self._vlans = AttrDict()
        if vlans:
            self.__iadd__(vlans)

    def copy(self):
        """Return a shallow copy: VLAN objects are shared, the mapping is not."""
        # Bugfix: __add__ called self.copy(), but no copy() was defined anywhere
        # (VLANS has no base class providing one), so __add__ always raised
        # AttributeError.
        tmp = VLANS(delemiter=self._delemiter,
                    range_delemiter=self._range_delemiter)
        tmp._vlans.update(self._vlans)
        return tmp

    def __add__(self, other):
        """Return a new VLANS with the union of both; `other` wins on id clashes."""
        if not isinstance(other, VLANS):
            raise TypeError("Error: Can only handle object of VLANS()")
        tmp = self.copy()
        for vlan in other._vlans.values():
            tmp._vlans[vlan.id] = vlan
        return tmp

    def __iadd__(self, other):
        """Merge a VLANS or a single VLAN into this instance."""
        if isinstance(other, VLANS):
            for vlan in other._vlans.values():
                self._vlans[vlan.id] = vlan
        elif isinstance(other, VLAN):
            self._vlans[other.id] = other
        else:
            raise TypeError("Error: Can only handle object of VLANS() or VLAN() got %s" % type(other))
        return self

    def __str__(self):
        return dict_to_vlan_str(self._vlans, delemiter=self._delemiter,
                                range_delemiter=self._range_delemiter)

    def __repr__(self):
        s = ""
        for vlan in self._vlans.values():
            s += "(%s)" % vlan.to_str()
        return "VLANS(%s)" % s

    def __iter__(self):
        # Bugfix: previously iterated self.__dict__, yielding internal attribute
        # names (_delemiter, _range_delemiter, _vlans) instead of VLAN ids --
        # inconsistent with keys()/values()/items(), which all use _vlans.
        return iter(self._vlans)

    def items(self):
        for item in self._vlans.items():
            yield item

    def keys(self):
        for item in self._vlans.keys():
            yield item

    def values(self):
        for item in self._vlans.values():
            yield item
def _do_sync(self):
    """
    Runs as a separate thread

    Mirror the server's activity list into the local database: update local
    rows whose name/description/active differ, insert activities that are new
    locally, then reload the list and notify listeners via self.sig.
    """
    # Get list of all activities on server
    try:
        srv_activities, tmp = network.request(
            "GET", "%s/api/activity" % sett.server_url, decode=True)
    except network.NetworkException as err:
        log.error("Cannot load list of activities from server %s" % err)
        return
    for srv_activity in srv_activities:
        srv_activity = AttrDict(srv_activity)
        log.debug("Server activity %s" % srv_activity)
        sql = "SELECT * FROM activity WHERE server_id=?"
        local_activity = self.localdb.select_one(sql, (srv_activity["_id"],))
        if local_activity:
            # we have the activity locally, check if changed
            changes = []
            for attr in ['name', "description", "active"]:
                if getattr(local_activity, attr) != getattr(srv_activity, attr):
                    changes.append(attr)
            if len(changes):
                log.debugf(DEBUG_ACTIVITYMGR,
                           "Updating local copy of activity, changed columns %s, %s" %
                           (changes, str(srv_activity).replace("\n", " ")))
                local_activity.name = srv_activity["name"]
                # Bugfix: 'description' was compared in the loop above but never
                # copied here, so a changed description was re-detected (and
                # re-written) on every sync without ever converging.
                local_activity.description = srv_activity["description"]
                local_activity.server_id = srv_activity["_id"]
                local_activity.active = srv_activity["active"]
                try:
                    self.localdb.update("activity", d=local_activity,
                                        primary_key="_id")
                except db.DbException as err:
                    log.error("Cannot update local activity %s" % err)
                    return
        else:
            # new activity
            log.debugf(DEBUG_ACTIVITYMGR,
                       "New activity '%s' on server, saving in local database" %
                       srv_activity.name)
            srv_activity.server_id = srv_activity._id
            srv_activity._id = -1   # let the local db assign a fresh primary key
            try:
                self.localdb.insert("activity", d=srv_activity, primary_key="_id")
            except db.DbException as err:
                log.error("Cannot save new activity in local database %s" % err)
                return
    self._loadList()
    self.sig.emit()
def __init__(self, vlans=None, delemiter=",", range_delemiter="-"):
    """Create an (optionally pre-populated) VLAN collection."""
    super().__init__()
    self._delemiter = delemiter
    self._range_delemiter = range_delemiter
    self._vlans = AttrDict()
    if vlans:
        # Reuse the merge logic for the initial payload
        self += vlans
def media_timestamps(self, game_id, media_id):
    """
    Return an AttrDict of playback timestamps for a game's stream.

    Keys: "S" (absolute broadcast start), "SO" (broadcast start offset),
    per-period start offsets keyed "1".."3", and "O" for overtime periods.
    Returns an empty AttrDict when the schedule has no milestone data.
    (``media_id`` is currently unused but kept for interface compatibility.)
    """
    j = self.schedule(game_id=game_id)
    try:
        milestones = j["dates"][0]["games"][0]["content"]["media"]["milestones"]
    except (KeyError, IndexError, TypeError):
        # Bugfix: was a bare `except:`, which also swallowed KeyboardInterrupt
        # and SystemExit; narrow to the lookup failures that actually occur.
        return AttrDict()
    # Single scan for the broadcast-start milestone (previously two next() scans
    # over the same items for timeAbsolute and timeOffset)
    start = next(m for m in milestones["items"]
                 if m["type"] == "BROADCAST_START")
    timestamps = AttrDict([
        ("S", start["timeAbsolute"]),
        ("SO", int(start["timeOffset"])),
    ])
    timestamps.update(AttrDict([
        (m["period"] if int(m["period"]) <= 3 else "O", int(m["timeOffset"]))
        for m in milestones["items"]
        if m["type"] == "PERIOD_START"
    ]))
    return timestamps
def parse_command_line(commands):
    '''
    Parse command line arguments. For example:

        gramex cmd1 cmd2 --a=1 2 -b x --c --p.q=4

    returns:

        {"_": ["cmd1", "cmd2"], "a": [1, 2], "b": "x", "c": True, "p": {"q": [4]}}

    Values are parsed as YAML. Arguments with '.' are split into subgroups.
    For example, ``gramex --listen.port 80`` returns ``{"listen": {"port": 80}}``.
    '''
    group = '_'
    args = AttrDict({group: []})
    for token in commands:
        if token.startswith('-'):
            # New option: "--a=1" carries its value inline; a bare "--c" is True
            group, raw = token.lstrip('-'), 'True'
            if '=' in group:
                group, raw = group.split('=', 1)
        else:
            # Positional value, appended to the current group
            raw = token
        value = yaml.safe_load(raw)
        # Walk (creating as needed) nested AttrDicts for dotted groups like "p.q"
        *parents, leaf = group.split('.')
        node = args
        for part in parents:
            node = node.setdefault(part, AttrDict())
        # First value (or a bare-flag True) is stored as-is; subsequent values
        # promote the entry to a list and append to it
        if leaf not in node or node[leaf] is True:
            node[leaf] = value
        elif not isinstance(node[leaf], list):
            node[leaf] = [node[leaf], value]
        else:
            node[leaf].append(value)
    return args
def get(self):
    """
    Run the configured SQL (a single statement or a dict of named statements),
    binding URL query parameters to each statement's bind-params, then render
    the result. Work is offloaded to the thread pool when ``self.thread`` is set.
    """
    limit = int(self.getq('limit', [100])[0])
    if isinstance(self.query, dict):
        # Bind all queries and run them in parallel
        args = {
            key: self.getq(key, [''])[0]
            for name, query in self.query.items()
            for key, _bindparams in query._bindparams.items()
        }
        stmts = AttrDict([
            (key, q.bindparams(
                **{k: v for k, v in args.items() if k in q._bindparams}))
            for key, q in self.query.items()
        ])
        if self.thread:
            # Submit every statement before yielding any future, so the
            # queries overlap on the thread pool
            futures = AttrDict([
                (key, gramex.service.threadpool.submit(self.run, stmt, limit))
                for key, stmt in stmts.items()
            ])
            self.result = AttrDict()
            for key, future in futures.items():
                self.result[key] = yield future
        else:
            self.result = AttrDict([(key, self.run(stmt, limit))
                                    for key, stmt in stmts.items()])
        self.renderdatas()
    else:
        # Bind query and run it
        args = {
            key: self.getq(key, [''])[0]
            for key in self.query._bindparams
        }
        stmt = self.query.bindparams(
            **{k: v for k, v in args.items() if k in self.query._bindparams})
        if self.thread:
            self.result = yield gramex.service.threadpool.submit(
                self.run, stmt, limit)
        else:
            self.result = self.run(stmt, limit)
        self.renderdata()
def test_init(self):
    """gramex.init() can register a new URL and re-register it with new kwargs."""
    self.check('/init/new', code=404)
    # Registering twice with different function bodies must serve the latest one
    for val in ('val1', 'val2'):
        gramex.init(app=AttrDict(url=AttrDict(init_new=AttrDict(
            pattern='/init/new',
            handler='FunctionHandler',
            kwargs=AttrDict(function='json.dumps({"key": "%s"})' % val)))))
        self.check('/init/new', text='{"key": "%s"}' % val)
def setup_auth(cls, auth):
    """Configure authorization checks (login URL, condition, membership) for this handler class."""
    # No handler-level auth: fall back to the app-wide default
    if auth is None:
        auth = conf.app.get('auth')
    # `auth: true` is shorthand for an empty auth dict, i.e. auth: {}
    if auth is True:
        auth = AttrDict()
    if isinstance(auth, dict):
        cls._login_url = auth.get('login_url', None)
        cls._on_init_methods.append(cls.authorize)
        cls.permissions = []
        # Optional expression that must evaluate truthy for the request
        if auth.get('condition'):
            cls.permissions.append(build_transform(
                auth['condition'],
                vars=AttrDict(handler=None),
                filename='url:%s.auth.permission' % cls.name))
        # Optional membership check; a single mapping is treated as a 1-list
        memberships = auth.get('membership', [])
        if not isinstance(memberships, list):
            memberships = [memberships]
        if memberships:
            cls.permissions.append(check_membership(memberships))
    elif auth:
        app_log.error('url:%s.auth is not a dict', cls.name)
def test_fn_args_var(self):
    """YAML functions may reference declared vars (=x, =y) as args, bare names,
    attributes, or method calls; each local `transform` mirrors the expected
    compiled function exactly, so their bodies must not be restyled."""
    def transform(x=1, y=2):
        result = max(x, y, 3)
        return result if isinstance(result, GeneratorType) else [result, ]
    vars = AttrDict([('x', 1), ('y', 2)])
    self.check_transform(transform, '''
        function: max
        args:
            - =x
            - =y
            - 3
    ''', vars=vars)
    self.check_transform(transform, 'function: max(x, y, 3)', vars=vars)

    # A bare variable reference returns the variable itself
    def transform(x=1, y=2):
        result = x
        return result if isinstance(result, GeneratorType) else [result, ]
    self.check_transform(transform, 'function: x', vars=vars)

    # Attribute access on a variable
    def transform(x=1, y=2):
        result = x.real
        return result if isinstance(result, GeneratorType) else [result, ]
    self.check_transform(transform, 'function: x.real', vars=vars)

    # Method call on a variable
    def transform(x=1, y=2):
        result = x.conjugate()
        return result if isinstance(result, GeneratorType) else [result, ]
    self.check_transform(transform, 'function: x.conjugate()', vars=vars)

    # Method call with literal arguments
    def transform(x=1, y=2):
        result = x.to_bytes(2, 'big')
        return result if isinstance(result, GeneratorType) else [result, ]
    self.check_transform(transform, 'function: x.to_bytes(2, "big")', vars=vars)
def apply_commands(rule: Dict[str, dict], shapes, data: dict):
    '''
    Apply commands in rule to change shapes using data.

    :arg dict rule: a dict of shape names, and commands to apply on each.
        e.g. ``{"Oval 1": {"fill": "red"}, "Oval 2": {"text": "OK"}}``
    :arg Shapes shapes: a slide.shapes or group.shapes object on which the
        rule should be applied
    :arg dict data: data context for the commands in the rule
    '''
    # Apply every rule to every pattern -- as long as the rule key matches the shape name
    for pattern, spec in rule.items():
        if pattern in rule_cmdlist:
            continue
        shape_matched = False
        for shape in shapes:
            if not fnmatchcase(shape.name, pattern):
                continue
            shape_matched = True
            # Clone all slides into the `clones` list BEFORE applying any command.
            # Ensures that commands applied to the shape don't propagate into its clones
            clones = []
            clone_seq = iterate_on(spec.get('clone-shape', [None]), data)
            parent_clone = data.get('clone', None)
            for i, (clone_key, clone_val) in enumerate(clone_seq):
                if i > 0:
                    # This copies only a shape, group or image. Not table, chart,
                    # media, equation, or zoom. But we don't see a need for these yet.
                    el = copy.deepcopy(shape.element)
                    shape.element.addnext(el)
                    shape = pptx.shapes.autoshape.Shape(el, None)
                clones.append(AttrDict(pos=i, key=clone_key, val=clone_val,
                                       shape=shape, parent=parent_clone))
            # Run commands in the spec on all cloned shapes
            is_group = shape.element.tag.endswith('}grpSp')
            for i, clone in enumerate(clones):
                # Include shape-level `data:`. Add shape, clone as variables
                shape_data = load_data(
                    spec.get('data', {}), _default_key='function',
                    shape=shape, clone=clone,
                    **{k: v for k, v in data.items() if k not in {'shape', 'clone'}})
                for cmd in spec:
                    if cmd in commands.cmdlist:
                        commands.cmdlist[cmd](clone.shape, spec[cmd], shape_data)
                    # Warn on unknown commands. But don't warn on groups -- they
                    # have sub-shapes
                    elif cmd not in special_cmdlist and not is_group:
                        app_log.warn('pptgen2: Unknown command: %s on shape: %s',
                                     cmd, pattern)
                # If the shape is a group, apply spec to each sub-shape
                if is_group:
                    apply_commands(spec, SlideShapes(clone.shape.element, shapes),
                                   shape_data)
        # Warn if the pattern is neither a shape nor a command
        if (not shape_matched and pattern not in special_cmdlist and
                pattern not in commands.cmdlist):
            app_log.warn('pptgen2: No shape matches pattern: %s', pattern)
def setUp(self):
    # Fixture: 50 lorem-ipsum sentences mapped to distinct integers 0-49,
    # listed in alphabetical order of the sentence text.
    self.data = AttrDict([
        ('Adipisci eius dolore consectetur.', 34),
        ('Aliquam consectetur velit dolore', 19),
        ('Amet ipsum quaerat numquam.', 25),
        ('Amet quisquam labore dolore.', 30),
        ('Amet velit consectetur.', 20),
        ('Consectetur consectetur aliquam voluptatem', 23),
        ('Consectetur ipsum aliquam.', 28),
        ('Consectetur sit neque est', 15),
        ('Dolore voluptatem etincidunt sit', 40),
        ('Dolorem porro tempora tempora.', 37),
        ('Eius numquam dolor ipsum', 26),
        ('Eius tempora etincidunt est', 12),
        ('Est adipisci numquam adipisci', 7),
        ('Est aliquam dolor.', 38),
        ('Etincidunt amet quisquam.', 33),
        ('Etincidunt consectetur velit.', 29),
        ('Etincidunt dolore eius.', 45),
        ('Etincidunt non amet.', 14),
        ('Etincidunt velit adipisci labore', 6),
        ('Ipsum magnam velit quiquia', 21),
        ('Ipsum modi eius.', 3),
        ('Labore voluptatem quiquia aliquam', 18),
        ('Magnam etincidunt porro magnam', 39),
        ('Magnam numquam amet.', 44),
        ('Magnam quisquam sit amet.', 27),
        ('Magnam voluptatem ipsum neque', 32),
        ('Modi est ipsum adipisci', 2),
        ('Neque eius voluptatem voluptatem', 42),
        ('Neque quisquam ipsum.', 10),
        ('Neque quisquam neque.', 48),
        ('Non dolore voluptatem.', 41),
        ('Non numquam consectetur voluptatem.', 35),
        ('Numquam eius dolorem.', 43),
        ('Numquam sed neque modi', 9),
        ('Porro voluptatem quaerat voluptatem', 11),
        ('Quaerat eius quiquia.', 17),
        ('Quiquia aliquam etincidunt consectetur.', 0),
        ('Quiquia ipsum sit.', 49),
        ('Quiquia non dolore quiquia', 8),
        ('Quisquam aliquam numquam dolore.', 1),
        ('Quisquam dolorem voluptatem adipisci.', 22),
        ('Sed magnam dolorem quisquam', 4),
        ('Sed tempora modi est.', 16),
        ('Sit aliquam dolorem.', 46),
        ('Sit modi dolor.', 31),
        ('Sit quiquia quiquia non.', 5),
        ('Sit quisquam numquam quaerat.', 36),
        ('Tempora etincidunt quiquia dolor', 13),
        ('Tempora velit etincidunt.', 24),
        ('Velit dolor velit.', 47)])
def get_highlight_attrs(self, highlight):
    """
    Derive timestamp / running time / event type / inning for a highlight by
    matching its GUID against the game's play-by-play events.

    Returns an AttrDict with keys timestamp, running_time, event_type, inning
    (values may be None when no matching play is found).
    """
    timestamp = None
    running_time = None
    event_type = None
    inning = None
    plays = self.plays
    # Bugfix: default to an empty list -- the previous default of None made
    # the keyword scans below raise TypeError whenever "keywordsAll" was absent
    keywords = highlight.get("keywordsAll", [])
    game_start = dateutil.parser.parse(self.game_data["gameDate"])
    guid = highlight.get("guid")
    try:
        play, event = next(
            (p, pe)
            for p in plays
            for pe in p["playEvents"]
            if guid and pe.get("playId", None) == guid)
    except StopIteration:
        play = None
        event = None
    if play:
        event_type = play["result"].get("event", None)
        timestamp = dateutil.parser.parse(play["about"].get(
            "startTime", None)).astimezone(
                pytz.timezone(config.settings.profile.time_zone))
        running_time = timestamp - game_start
        inning = f"{play['about']['halfInning'][:3].title()} {play['about']['inning']}"
    if not event_type:
        if any((k["type"] == "mlbtax" and k["displayName"] == "Interview"
                for k in keywords)):
            event_type = "Interview"
        elif any((k["type"] == "mlbtax" and k["displayName"] == "Managers"
                  for k in keywords)):
            event_type = "Postgame"
        # NOTE(review): this branch repeats the "Managers" test above and is
        # therefore unreachable; it probably meant a different displayName
        # (e.g. a press-conference tag). Confirm against the MLB keyword
        # taxonomy before changing the condition.
        elif any((k["type"] == "mlbtax" and k["displayName"] == "Managers"
                  for k in keywords)):
            event_type = "News Conference"
        else:
            event_type = "Other"
    return AttrDict(
        timestamp=timestamp,
        running_time=running_time,
        event_type=event_type,
        inning=inning
    )
def test_download_html(self):
    """HTML download of one frame and of multiple frames round-trips via pd.read_html."""
    # Note: In Python 2, pd.read_html returns .columns.inferred_type=mixed
    # instead of unicode. So check column type only in PY3, not PY2
    single = gramex.data.download(self.dummy, format='html')
    frames = pd.read_html(io.BytesIO(single), encoding='utf-8')
    afe(frames[0], self.dummy, check_column_type=six.PY3)
    multi = gramex.data.download(
        AttrDict([('dummy', self.dummy), ('sales', self.sales)]), format='html')
    frames = pd.read_html(io.BytesIO(multi), encoding='utf-8')
    afe(frames[0], self.dummy, check_column_type=six.PY3)
    afe(frames[1], self.sales, check_column_type=six.PY3)
def teams(self, sport_code="mlb", season=None): teams_url = ("https://statsapi.web.nhl.com/api/v1/teams" "?{season}".format(season=season if season else "")) # raise Exception(self.session.get(teams_url).json()) with state.session.cache_responses_long(): teams = AttrDict( (team["abbreviation"].lower(), team["id"]) for team in sorted(self.session.get(teams_url).json()["teams"], key=lambda t: t["abbreviation"])) return teams
def ATTRIBUTES(self):
    """Column layout for this table: hidden id, feed, created timestamp and title."""
    def feed_label(f):
        # Feeds render their name when they have one, otherwise "none"
        return f.name if hasattr(f, "name") else "none"
    return AttrDict(
        media_item_id={"hide": True},
        feed={"width": 32, "format_fn": feed_label},
        created={"width": 19},
        title={"width": ("weight", 1), "format_fn": utils.strip_emoji},
    )
def crud(handler):
    """Minimal create/init endpoint over gramex's live configuration; returns gramex.conf."""
    method = handler.path_args[0]
    if method == 'post':
        raw = handler.get_argument('data', {})
        conf = json.loads(raw, object_pairs_hook=AttrDict)
        # Swap in empty paths so init() only sees the posted config,
        # then restore the originals afterwards
        saved_paths = deepcopy(gramex.paths)
        gramex.paths = AttrDict()
        gramex.init(new=conf)
        gramex.paths = saved_paths
    elif method == 'init':
        gramex.init()
    # TODO: generated keys cannot be deleted from gramex.conf
    return gramex.conf
def test_fn_args_var(self):
    """Declared vars (=x, =y) may be passed as YAML args or used in an inline
    expression; `transform` mirrors the compiled function exactly."""
    def transform(x=1, y=2):
        result = max(x, y, 3)
        return result if isinstance(result, GeneratorType) else [result, ]
    vars = AttrDict([('x', 1), ('y', 2)])
    self.check_transform(transform, '''
        function: max
        args:
            - =x
            - =y
            - 3
    ''', vars=vars)
    self.check_transform(transform, 'function: max(x, y, 3)', vars=vars)
def get_stream(self, media):
    """
    Fetch the stream descriptor for a media item from the BAM media service.

    Returns an AttrDict of the response with ``.url`` set to the complete
    stream URL. Raises SGStreamNotFound when the service reports errors.
    """
    headers = {
        "Authorization": self.access_token,
        "User-agent": session.USER_AGENT,
        "Accept": "application/vnd.media-service+json; version=1",
        "x-bamsdk-version": self.BAM_SDK_VERSION,
        "x-bamsdk-platform": self.PLATFORM,
        "origin": "https://www.mlb.com",
    }
    stream_url = self.STREAM_URL_TEMPLATE.format(media_id=media.media_id)
    logger.debug("getting stream %s" % (stream_url))
    payload = self.get(stream_url, headers=headers).json()
    logger.debug("stream response: %s" % (payload))
    if "errors" in payload and len(payload["errors"]):
        raise SGStreamNotFound(payload["errors"])
    stream = AttrDict(payload)
    stream.url = stream["stream"]["complete"]
    return stream
def __init__(self, config_file=None):
    """
    Load deployment config and inventory and prepare node-discovery state.

    :raises UserException: if the config or inventory cannot be loaded
    """
    self.log = logger.getlogger()
    try:
        self.cfg = Config(config_file)
        self.inv = Inventory(None, config_file)
    except UserException as exc:
        self.log.critical(exc)
        # Bugfix: re-raise the original exception. The previous
        # `raise UserException(exc)` wrapped the exception in itself,
        # discarding the traceback and nesting the message; callers still
        # catch the same type.
        raise
    # initialize ipmi list of access info
    self.ran_ipmi = False
    self.bmc_ai = {}
    vlan_ipmi = self.cfg.get_depl_netw_client_vlan(if_type='ipmi')[0]
    vlan_pxe = self.cfg.get_depl_netw_client_vlan(if_type='pxe')[0]
    # Per-VLAN dnsmasq lease files plus the tcpdump capture for the PXE net
    self.dhcp_pxe_leases_file = GEN_PATH + \
        'logs/dnsmasq{}.leases'.format(vlan_pxe)
    self.dhcp_ipmi_leases_file = GEN_PATH + \
        'logs/dnsmasq{}.leases'.format(vlan_ipmi)
    self.tcp_dump_file = GEN_PATH + \
        'logs/tcpdump{}.out'.format(vlan_pxe)
    self.node_table_ipmi = AttrDict()
    self.node_table_pxe = AttrDict()
    self.node_list = []
def __init__(self, provider_id, proxies=None, *args, **kwargs):
    """Set up the provider's HTTP session, persistent cookie jar and proxy state."""
    self.provider_id = provider_id
    self.session = self.SESSION_CLASS()
    self.cookies = LWPCookieJar()
    # First run: create the cookie file so the load() below succeeds
    if not os.path.exists(self.COOKIES_FILE):
        self.cookies.save(self.COOKIES_FILE)
    self.cookies.load(self.COOKIES_FILE, ignore_discard=True)
    self.session.headers = self.HEADERS
    # Internal state bag; proxies stored even when falsy for later inspection
    self._state = AttrDict([("proxies", proxies)])
    if proxies:
        self.proxies = proxies
    self._cache_responses = False
def get_highlight_attrs(self, highlight):
    """
    Match a highlight against the game's play-by-play via its statsEventId
    keyword; return timing/period/strength attributes as an AttrDict
    (all values None when no matching play is found).
    """
    timestamp = None
    running_time = None
    event_type = None
    period = None
    period_time = None
    period_remaining = None
    strength = None
    plays = self.plays
    # NOTE(review): if "keywords" is absent this is None, and the generator
    # scan below would raise TypeError -- presumably highlights always carry
    # keywords; confirm.
    keywords = highlight.get("keywords", None)
    game_start = dateutil.parser.parse(self.game_data["gameDate"])
    try:
        play_id = int(
            next(k["value"] for k in keywords
                 if k["type"] == "statsEventId"))
    except StopIteration:
        play_id = None
    try:
        play = next(p for p in plays
                    if p["about"].get("eventId", None) == play_id)
    except StopIteration:
        play = None
    if play:
        event_type = play["result"].get("event", None)
        # NOTE(review): parse(None) raises if "dateTime" is missing --
        # presumably play-by-play rows always include it; confirm.
        timestamp = dateutil.parser.parse(play["about"].get(
            "dateTime", None)).astimezone(
                pytz.timezone(config.settings.profile.time_zone))
        running_time = timestamp - game_start
        period = play["about"]["ordinalNum"]
        period_time = play["about"]["periodTime"]
        period_remaining = play["about"]["periodTimeRemaining"]
        strength = play["result"].get("strength", {}).get("name", None)
    return AttrDict(
        timestamp=timestamp,
        running_time=running_time,
        event_type=event_type,
        period=period,
        period_time=period_time,
        period_remaining=period_remaining,
        strength=strength,
    )
def test_import_levels(self):
    """Dotted function paths resolve at module, class and builtin level;
    each local `transform` mirrors the compiled function exactly."""
    def transform(_val):
        import six
        result = six.text_type(_val)
        return result if isinstance(result, GeneratorType) else [result, ]
    fn = self.check_transform(transform, 'function: six.text_type')
    eq_(fn(b'abc'), [six.text_type(b'abc')])

    # Unbound method on an imported class, with a bound var argument
    def transform(content):
        import six
        result = six.text_type.__add__(content, '123')
        return result if isinstance(result, GeneratorType) else [result, ]
    fn = self.check_transform(transform, '''
        function: six.text_type.__add__
        args: [=content, '123']
    ''', vars=AttrDict(content=None))
    eq_(fn('abc'), ['abc123'])

    # Builtin method with a dotted var argument
    def transform(handler):
        result = str.endswith(handler.current_user.user, 'ta')
        return result if isinstance(result, GeneratorType) else [result, ]
    fn = self.check_transform(transform, '''
        function: str.endswith
        args: [=handler.current_user.user, 'ta']
    ''', vars=AttrDict(handler=None))
def parse_spec(self, spec):
    """
    Split a media spec into (selection, options): the identifier part selects
    and filters media, the trailing part carries playback options.
    """
    identifier, option_text = MEDIA_SPEC_RE.search(spec).groups()
    try:
        selection, filters, identifier_options = self.parse_identifier(identifier)
        self.apply_identifier(selection, filters, identifier_options)
    except SGIncompleteIdentifier:
        # Partial identifier: nothing selected, no identifier-level options
        selection, identifier_options = None, {}
    merged = AttrDict(identifier_options, **self.parse_options(option_text))
    self.apply_options(merged)
    return (selection, merged)
def init_config(self):
    """Extend the inherited attribute table with a duration column placed just before title."""
    super().init_config()
    attrs = list(self.ATTRIBUTES.items())
    title_idx = next(i for i, entry in enumerate(attrs) if entry[0] == "title")
    duration_col = ("duration", {"width": 8, "align": "right"})
    self.ATTRIBUTES = AttrDict(
        attrs[:title_idx] + [duration_col] + attrs[title_idx:])
def setup_transform(cls, transform):
    """Compile each URL-pattern transform into a callable plus its header/encoding options."""
    cls.transform = {}
    for pattern, trans in transform.items():
        # Compiled function receives `content` and `handler` as variables
        fn = build_transform(
            trans,
            vars=AttrDict((('content', None), ('handler', None))),
            filename='url:%s' % cls.name)
        cls.transform[pattern] = {
            'function': fn,
            'headers': trans.get('headers', {}),
            'encoding': trans.get('encoding'),
        }
def test_run(self):
    # When you call gramex run run-app --dir=dir1 --browser=False, ensure
    # that gramex.init() is run from dir1 and is passed --browser=False.
    # We do that by mocking gramex.init() with check_init()
    result = AttrDict()

    def check_init(**kwargs):
        # Capture the working directory and app options init() was called with
        result.cwd = os.getcwd()
        result.opts = kwargs.get('cmd', {}).get('app', {})

    install(['run-app', self.zip_url], AttrDict())
    with MockGramex(check_init):
        run(['run-app'], AttrDict(dir='dir1', browser=False))
    self.assertEqual(result.cwd, self.appdir('run-app/dir1/'))
    self.assertEqual(result.opts.get('browser'), False)
    self.check_uninstall('run-app')
    # Run with --target
    with MockGramex(check_init):
        run(['run-app-target'], AttrDict(target='.', browser=True))
    self.assertEqual(result.cwd, os.getcwd())
    self.assertEqual(result.opts.get('browser'), True)
    self.check_uninstall('run-app-target', exist_check=False)
def __pos__(self):
    '''+config returns layers merged in order, removing null keys'''
    conf = AttrDict()
    for name, config in self.items():
        if hasattr(config, '__pos__'):
            # NOTE(review): the return value is deliberately(?) discarded --
            # presumably __pos__ refreshes the layer in place (e.g. reloads a
            # file-backed config) before the merge; confirm before "fixing".
            config.__pos__()
        # Later layers overwrite earlier ones
        merge(old=conf, new=config, mode='overwrite')
    # Remove keys where the value is None
    # (walk() is materialized to a list since nodes are mutated while walking)
    for key, value, node in list(walk(conf)):
        if value is None:
            del node[key]
    return conf
class MLBDetailBox(BAMDetailBox):
    # MLB-specific detail box: highlight table class and event-type codes.
    HIGHLIGHT_TABLE_CLASS = MLBHighlightsDataTable
    # Single-letter codes: Hitting, Pitching, defense=F, baserunning=R
    EVENT_TYPES = AttrDict(hitting="H", pitching="P", defense="F",
                           baserunning="R")

    def get_editorial_item(self, editorial):
        # Editorial payloads are keyed by sport; MLB content lives under "mlb"
        return editorial.get("mlb", None)

    def __repr__(self):
        # NOTE(review): intentionally(?) empty -- presumably suppresses noisy
        # widget output somewhere; confirm before giving it a real repr.
        return ""
def update_mgmt_switch_info(self):
    """Update management switch model information and assign class."""
    self.enable_remote = self.ENABLE_REMOTE_CONFIG_MGMT
    self.info_list = []
    self.class_list = []
    for switch in self.inv.yield_switches(self.inv.SwitchType.MGMT):
        self.info_dict = AttrDict()
        self.class_dict = AttrDict()
        self.ipv4 = switch.ip_addr
        self.userid = switch.userid
        self.password = switch.password
        switch_valid = False
        output = self._send_cmd(self.SHOW_VERSION_MTM, 'Query MTM', False)
        # A switch counts as identified if either the prompt-based model match
        # or the MTM-value match succeeds
        switch_valid |= self._set_switch_info_class(
            r'\s+(\S+)\(config\)#', self.MODEL,
            output, self.supported_mgmt_switches)
        switch_valid |= self._set_switch_info_class(
            r'%s:\s+(\S+)\s+' % self.MTM_VALUE, self.MTM_VALUE,
            output, self.supported_mgmt_switches)
        if not switch_valid:
            if self.info_list:
                self.log.error('Unsupported management switch: %s' %
                               self.info_dict)
            else:
                self.log.error('Management switch could not be identified')
            # Identification failure is fatal for the deployment
            sys.exit(1)
    if self.info_list:
        self.inv.update_switch_model_info(self.inv.SwitchType.MGMT,
                                          self.info_list)
        self.inv.update_switch_class(self.inv.SwitchType.MGMT,
                                     self.class_list)
def get(self, path=None):
    """
    Proxy a Twitter REST API call signed with OAuth1, or start the login
    flow when no path is given on a plain GET.
    """
    path = self.kwargs.get('path', path)
    if not path and self.request.method == 'GET':
        yield self.login()
        raise tornado.gen.Return()
    # Flatten multi-valued query args to their first value
    args = {key: val[0] for key, val in self.args.items()}
    params = AttrDict(self.kwargs)
    params['access_key'] = self.get_token('access_key', self.get_from_token)
    params['access_secret'] = self.get_token('access_secret',
                                             self.get_from_token)
    client = oauth1.Client(
        client_key=params['key'],
        client_secret=params['secret'],
        resource_owner_key=params['access_key'],
        resource_owner_secret=params['access_secret'])
    endpoint = params.get('endpoint', 'https://api.twitter.com/1.1/')
    path = params.get('path', path)
    # Sign the full URL (including query args) per OAuth1
    uri, headers, body = client.sign(url_concat(endpoint + path, args))
    http = self.get_auth_http_client()
    response = yield http.fetch(uri, headers=headers, raise_error=False)
    result = yield self.social_response(response)
    self.set_header('Content-Type', 'application/json; charset=UTF-8')
    self.write(result)
def update_inning_dropdown(self, media_id):
    """Rebuild the inning-selection dropdown from the media's timestamps."""
    self.timestamps = state.session.media_timestamps(
        self.game_id, media_id)
    # "S" (the absolute start time) is not a seekable offset -- drop it
    del self.timestamps["S"]
    # Inning keys (T1/B1/...) label themselves; anything else maps to "Start"
    timestamp_map = AttrDict(
        (key if key[0] in "TB" else "Start", key)
        for key in self.timestamps.keys())
    timestamp_map["Live"] = False
    # Default to Live only for a live stream not played from the beginning
    prefer_live = self.live_stream and not self.from_beginning
    self.inning_dropdown = Dropdown(
        timestamp_map, label="Begin playback",
        default=timestamp_map["Live"] if prefer_live else timestamp_map["Start"])
    self.inning_dropdown_placeholder.original_widget = self.inning_dropdown
def metadata(source, tables=None, root=None, merge=True, **kwargs):
    '''
    Return the metadata for the selected source as a Meta.

    ``source`` may be a local file, an http/https/ftp URL (downloaded and
    cached under ``root``) or anything else, which is treated as a SQL URL.
    ``tables`` restricts extraction; ``merge`` de-duplicates column metadata
    of sub-datasets that share an identical column signature.
    '''
    if root is None:
        root = os.path.join(DATA_DIR, '.metadata')
    if not os.path.exists(root):
        os.makedirs(root)
    # Extract base metadata along with commands to extract the data
    tree = Meta(source=source)
    scheme = urlparse(source).scheme
    if os.path.exists(source) or scheme in {'file'}:
        tree.update(metadata_file(source, root, tables))
    elif scheme in {'http', 'https', 'ftp'}:
        target = filename(source, root)
        fetch(source, target)
        tree.update(metadata_file(target, root, tables))
    else:
        tree.format = 'sql'
        tree.update(metadata_sql(source, tables))
    # Extract sub-datasets
    dataset_list = list(datasets(tree))
    for node in tqdm(dataset_list, disable=kwargs.get('tqdm_disable')):
        cmd = node.get('command', [None])
        if cmd[0] in _preview_command:
            try:
                data = _preview_command[cmd[0]](*cmd[1:])
                node.update(metadata_frame(data, **kwargs))
            except Exception as e:
                # Best-effort: record the failure on the node and continue
                node['error'] = str(e)
                logging.exception('Unable to load %s', ':'.join(cmd[1:]))
    # Merge column metadata of common datasets
    if merge:
        for node in datasets(tree):
            if 'datasets' in node:
                sign_lookup = {}
                for data in node.datasets.values():
                    if 'columns' in data:
                        # Datasets with an identical column-name signature
                        # share metadata: later ones just reference the first
                        sign = tuple(col.name for col in data.columns.values())
                        if sign in sign_lookup:
                            data.columns = AttrDict(see=sign_lookup[sign].name)
                        else:
                            sign_lookup[sign] = data
    return tree
def get_app_config(appname, args):
    '''
    Get the stored configuration for appname, and override it with args.
    ``.target`` defaults to $GRAMEXDATA/apps/<appname>.
    '''
    # Layer the command-line args on top of the stored app config
    apps_config['cmd'] = {appname: args}
    merged = (+apps_config).get(appname, {})
    app_config = AttrDict(merged)
    default_target = str(app_dir / app_config.get('target', appname))
    app_config.setdefault('target', default_target)
    app_config.target = os.path.abspath(app_config.target)
    return app_config
def table_crud_api(table):
    """
    JSON API backing the w2ui grid for one table.

    The request body carries a ``cmd`` (get-record, get-records, save-record,
    save-records, delete-records); the JSON response has a ``status`` field
    plus command-specific payload (``record``/``records``/``total``).
    """
    try:
        table_def = table_defs[table]
    except KeyError:
        # Bugfix: a dict lookup raises KeyError, not ValueError -- the 403
        # branch was previously unreachable and unknown tables crashed.
        return "No such table %s\n" % table, 403
    primary_key = table_def.primary_key
    # Column names defined for this table -- used to whitelist client-supplied
    # field names that get spliced into SQL text below
    valid_cols = {col.name for col in table_def.columns}
    data = AttrDict(request.json)
    cmd = data.cmd
    res = AttrDict()
    if cmd == "get-record":
        # get form data
        print("get-record", data)
        if int(data.recid) > 0:
            # Bugfix: was "rows = None", leaving `row` unbound (NameError)
            # whenever the SELECT below raised
            row = None
            sql = "SELECT * FROM %s where %s=%%s" % (table, primary_key)
            values = (data.recid, )
            try:
                row = db.conn.select_one(sql, values)
            except db.conn.dbexception as e:
                set_error(res, e)
            if row is not None:
                res.status = "success"
                res.record = row
            else:
                set_error(res, "No record with id %s" % data.recid)
    elif cmd == "get-records":
        # get list of rows
        print("get-records", data)
        rows = None
        sql = "SELECT * FROM %s" % table
        limit, offset = None, None
        where = []
        values = []
        if 'offset' in data:
            offset = int(data.offset)
        if 'limit' in data:
            limit = int(data.limit)
        if "search" in data:
            for search in data.search:
                # Security: field names come straight from the client and are
                # spliced into the SQL string -- accept only known columns
                if search.field not in valid_cols:
                    continue
                where.append("%s=%%s" % search.field)
                values.append(search.value)
            if where:
                sql += " WHERE " + " OR ".join(where)
        if 'sort' in data:
            order = []
            for field in data.sort:
                # Same whitelist for sort columns; normalize the direction so
                # arbitrary client strings cannot reach the statement
                if field["field"] not in valid_cols:
                    continue
                direction = ("DESC" if str(field["direction"]).lower()
                             .startswith("desc") else "ASC")
                order.append("%s %s" % (field["field"], direction))
            if order:
                sql += " ORDER by " + ", ".join(order)
        if limit is not None:
            sql += " limit %s" % limit
        if offset:
            sql += " offset %s" % offset
        try:
            rows = db.conn.select_all(sql, values)
        except db.conn.dbexception as e:
            set_error(res, e)
        if rows is not None:
            res.status = "success"
            records = []
            for row in rows:
                record = {}
                for col in row:
                    record[col] = row[col]
                records.append(record)
            res.records = records
            # total must reflect the whole table (for grid pagination),
            # not just the page returned above
            sql = "select count(*) from %s" % table
            res.total = db.conn.count(sql)
    elif cmd == "save-record":
        # save form data
        print("save-record", data)
        # convert string to valid python/sql type
        d = AttrDict(data.record.items())
        for col in table_def.columns:
            if col.name == primary_key:
                continue
            if col.type == "checkbox":
                # Accept the usual truthy spellings from the grid. Bugfix:
                # "y" appeared twice; "Y" added for symmetry with "T"/"t".
                d[col.name] = d[col.name] in ["true", "True", "1", "T", "t",
                                              "Y", "y", "yes", 1]
        if int(data.recid) > 0:
            # UPDATE
            d[primary_key] = data.recid
            try:
                db.conn.update(table=table, d=d, primary_key=primary_key)
                res.status = 'success'
            except Exception as e:
                set_error(res, str(e))
        else:
            # INSERT
            # if a value is not included or empty, and there is a default, use default
            for col in table_def.columns:
                if "default" in col:
                    if col.name not in d or d[col.name] == "":
                        d[col.name] = col.default
            try:
                db.conn.insert(table=table, d=d, primary_key=primary_key)
                res.status = 'success'
            except Exception as e:
                set_error(res, str(e))
    elif cmd == "save-records":
        # save all changes from datagrid, can be multiple rows
        print("save-records", data)
        for values in data.changes:
            values[primary_key] = values.pop("recid")
            try:
                db.conn.update(table=table, d=values, primary_key=primary_key)
                res.status = 'success'
            except db.conn.dbexception as e:
                set_error(res, str(e))
                break
    elif cmd == "delete-records":
        for selected in data.selected:
            try:
                sql = "DELETE FROM %s WHERE %s=%%s" % (table, primary_key)
                db.conn.delete(sql, (selected,))
                res.status = 'success'
            except db.conn.dbexception as e:
                set_error(res, str(e))
    else:
        set_error(res, "Unknown cmd from w2ui grid %s" % cmd)
    return jsonify(res)
def reports_monthly():
    """
    Render the monthly report page.

    Query-string parameters: userid, activityid, start (YYYY-MM), debug,
    and action ("+A"/"-A" steps to the next/previous activity by name).
    """
    errors.clear()   # module-level error list shared with the template
    p = AttrDict()
    activities = Activities()
    # parameters, in url
    p.userid = request.args.get("userid", 1, type=int)
    p.activityid = request.args.get("activityid", -1, type=int)
    p.start = request.args.get("start", None)
    p.debug = request.args.get("debug", None)
    p.action = request.args.get("action", "-noaction-")
    p.dstart = MyDateTime()
    try:
        if p.start is not None:
            p.dstart.setFromStr(p.start + "-01 00:00:00")
    except ValueError as e:
        print(e)
        errors.append("Incorrect start date, using todays date")
    # Always snap to the first day of the selected month
    p.dstart.day = 1
    p.prevstart = p.dstart.copy()
    p.prevstart.setFirstDayInMonth(-1)
    p.nowstart = MyDateTime()
    p.prevstart.setFirstDayInMonth(0)
    p.nextstart = p.dstart.copy()
    p.nextstart.setFirstDayInMonth(1)
    # Show filters, at top of screen
    p.param = "&userid=%s&activityid=%s" % (p.userid, p.activityid)
    if p.action == "+A" or p.action == "-A":
        if p.activityid < 0:
            # No current activity: jump to the first/last by name
            if p.action == "+A":
                # get first activity
                sql = "SELECT * FROM activity ORDER BY name LIMIT 1"
            else:
                # get last activity
                sql = "SELECT * FROM activity ORDER BY name DESC LIMIT 1"
            # NOTE(review): this statement has no placeholder yet a parameter
            # tuple is passed -- confirm the db wrapper tolerates extra params
            data = db2.conn.select_one(sql, (p.activityid,))
            if data:
                p.activityid = data._id
        else:
            direction = ">"
            desc = ""
            if p.action == "-A":
                direction = "<"
                desc = "DESC"
            try:
                # get current activity name from id
                sql = "SELECT * FROM activity WHERE _id=%s"
                data = db2.conn.select_one(sql, (p.activityid,))
                if data:
                    # get next/prev activity
                    sql = ("SELECT * FROM activity WHERE name %s %%s "
                           "ORDER BY name %s LIMIT 1" % (direction, desc))
                    try:
                        data2 = db2.conn.select_one(sql, (data.name, ))
                        if data2:
                            print("data2", data2)
                            p.activityid = data2._id
                        else:
                            p.activityid = -1
                    except db2.conn.exception as err:
                        errors.append("db.Error %s" % err)
                else:
                    errors.append("Can't get current activity from activityid %s"
                                  % p.activityid)
            except db2.conn.exception as err:
                errors.append("db.Error: %s" % err)
    if p.userid is None:
        errors.append("Please specify user")
    elif p.activityid is None:
        # Bugfix: message typo "specifiy" corrected
        errors.append("Please specify activity")
    else:
        sql = "SELECT * FROM activity"
        values = []
        if p.activityid > 0:
            sql += " WHERE _id=%s"
            values.append(p.activityid)
        sql += " ORDER BY name"
        try:
            activitylist = db2.conn.select_all(sql, values)
            for activity in activitylist:
                activityMonth = addActivity(
                    activity=activity, userid=p.userid,
                    start=p.dstart, debug=p.debug)
                if len(activityMonth.days) > 0:
                    activities.addActivity(activityMonth)
        except db2.DbException as err:
            # Bugfix: errors.append() was called with two arguments
            # (list.append takes one) and raised TypeError on db failure --
            # format the message instead
            errors.append("Can't load list of activities %s" % err)
    log.debug("p.dstart 2 %s" % p.dstart)
    log.debug("p.prevstart %s" % p.prevstart)
    log.debug("p.nowstart %s" % p.nowstart)
    log.debug("p.nextstart %s" % p.nextstart)
    args = {
        'errors': errors,
        'activities': activities,
        'p': p,
        'htmllib': htmllib,
    }
    return render_template('reports.html', **args)
def table_crud(table):
    """Render the generic CRUD grid page for one of the configured tables."""
    data = AttrDict()
    data.params = ""
    old_net = request.args.get("old_net", None)
    if old_net is not None:
        data.params = "?old_net=1"
    if table not in table_defs:
        return "Table %s is not available" % table
    datagrid = Datagrid(name="grid1", htmldiv="grid1", table=table)
    table_def = table_defs[table]
    columns = table_def.columns
    sortdata = table_def.sortdata
    data.primary_key = table_def.primary_key
    data.title = table_def.title
    data.table = table
    data.url = "/table/crud/%s" % table
    # Column descriptors for the w2ui grid
    data.columns = []
    for column in columns:
        col = AttrDict()
        col.field = column.name
        col.caption = column.title
        col.size = "30%"
        col.sortable = True
        col.type = column.type
        data.columns.append(col)
    # Initial sort order comes from the first configured sort column
    data.sortdata = [{
        'field': sortdata[0]['name'],
        'direction': sortdata[0]['direction'],
    }]
    return render_template('table_crud.html', columns=columns,
                           data=data, datajson=json.dumps(data),
                           datagrid=datagrid)
def __init__(self, *args, **kwargs):
    # Initialize both bases of the AttrDict+Counter mixin explicitly, then
    # shield Counter's most_common from attribute-style key access.
    AttrDict.__init__(self, *args, **kwargs)
    Counter.__init__(self)
    self.__exclude_keys__ |= {'most_common'}
def __init__(self, default_factory, *args, **kwargs):
    # Initialize both bases of the AttrDict+defaultdict mixin explicitly,
    # then shield default_factory from attribute-style key access.
    AttrDict.__init__(self, *args, **kwargs)
    defaultdict.__init__(self, default_factory)
    self.__exclude_keys__ |= {'default_factory'}