import string

# AutoHotkey (AHK v1) script template used to automate Disney+ playback in
# Chrome: open $sUrl, position the player window, double-click to start
# playback, wait $sLength milliseconds, then close the player window.
# $sUrl / $sLength are filled in via string.Template substitution.
# NOTE(review): screen coordinates (640x480 window, click at 590,397) are
# hard-coded — presumably tuned for one specific display; verify before reuse.
DTemplate = string.Template("""
Run, chrome.exe $sUrl
Sleep, 5000
SetTitleMatchMode 2
WinMove, Disney+ | Video Player - Google Chrome, , 0, 0, 640,480
; Sleep, 6000
Click, 590,397
Sleep 100
Click, 590,397
Sleep, $sLength
SetTitleMatchMode 2
IfWinExist Disney+ | Video Player - Google Chrome
WinClose
Sleep, 5000
""")
def register_function(function, arg_count, group, usesgeometry=False,
                      referenced_columns=[QgsFeatureRequest.ALL_ATTRIBUTES], **kwargs):
    """
    Register a Python function to be used as an expression function.

    Functions should take (values, feature, parent) as args:

    Example:
        def myfunc(values, feature, parent):
            pass

    They can also shortcut naming feature and parent args by using *args
    if they are not needed in the function.

    Example:
        def myfunc(values, *args):
            pass

    Functions should return a value compatible with QVariant.

    Eval errors can be raised using parent.setEvalErrorString("Error message")

    :param function: the Python callable to expose as an expression function
    :param arg_count: number of arguments, or the string "auto" to derive it
        from the function's signature
    :param group: expression-builder group the function is listed under
    :param usesgeometry: True if the function accesses feature geometry
    :param referenced_columns: attribute columns required by the function
        (NOTE: shared default list, kept for interface compatibility; it is
        never mutated here)
    :return: the registered QgsExpressionFunction wrapper, or None when an
        existing function of the same name could not be unregistered
    """

    class QgsPyExpressionFunction(QgsExpressionFunction):

        def __init__(self, func, name, args, group, helptext='', usesGeometry=True,
                     referencedColumns=QgsFeatureRequest.ALL_ATTRIBUTES, expandargs=False):
            QgsExpressionFunction.__init__(self, name, args, group, helptext)
            self.function = func
            self.expandargs = expandargs
            self.uses_geometry = usesGeometry
            self.referenced_columns = referencedColumns

        def func(self, values, context, parent, node):
            # Bridge from the QGIS expression engine to the Python callable.
            feature = None
            if context:
                feature = context.feature()

            try:
                # Functions whose last *named* argument is 'context' also
                # receive the expression context. Guard against callables
                # declared with only *args (empty named-args list).
                named_args = inspect.getargspec(self.function).args
                wants_context = bool(named_args) and named_args[-1] == 'context'

                if self.expandargs:
                    values.append(feature)
                    values.append(parent)
                    if wants_context:
                        values.append(context)
                    return self.function(*values)
                else:
                    if wants_context:
                        # BUG FIX: the result used to be discarded here and the
                        # function was then invoked a second time WITHOUT the
                        # context argument; now the context call's result is
                        # returned directly.
                        return self.function(values, feature, parent, context)
                    return self.function(values, feature, parent)
            except Exception as ex:
                parent.setEvalErrorString(str(ex))
                return None

        def usesGeometry(self, node):
            return self.uses_geometry

        def referencedColumns(self, node):
            return self.referenced_columns

    helptemplate = string.Template("""<h3>$name function</h3><br>$doc""")
    name = kwargs.get('name', function.__name__)
    helptext = kwargs.get('helpText') or function.__doc__ or ''
    helptext = helptext.strip()
    expandargs = False

    if arg_count == "auto":
        # Work out the number of args we need.
        # Number of function args - 2. The last two args are always feature, parent.
        args = inspect.getargspec(function).args
        number = len(args)
        arg_count = number - 2
        if args and args[-1] == 'context':
            arg_count -= 1
        expandargs = True

    register = kwargs.get('register', True)
    if register and QgsExpression.isFunctionName(name):
        # Replace a previously registered function of the same name; bail out
        # (returning None) when it cannot be removed.
        if not QgsExpression.unregisterFunction(name):
            msgtitle = QCoreApplication.translate("UserExpressions", "User expressions")
            msg = QCoreApplication.translate(
                "UserExpressions",
                "The user expression {0} already exists and could not be unregistered.").format(name)
            QgsMessageLog.logMessage(msg + "\n", msgtitle, Qgis.Warning)
            return None

    function.__name__ = name
    helptext = helptemplate.safe_substitute(name=name, doc=helptext)
    f = QgsPyExpressionFunction(function, name, arg_count, group, helptext,
                                usesgeometry, referenced_columns, expandargs)

    # This doesn't really make any sense here but does when used from a decorator context
    # so it can stay.
    if register:
        QgsExpression.registerFunction(f)
    return f
class BaseQuery(object):
    """
    The base class for queries.
    Performs a simple DB query.
    """

    # Default query; subclasses override with their own string.Template.
    query = string.Template("""
        SELECT emplid
        FROM ps_stdnt_car_term
        FETCH FIRST 10 ROWS ONLY
        """)
    # Per-subclass default values for the query's template arguments.
    default_arguments = {}
    filename = "query"
    # Shared, class-level logger; replace via set_logger().
    logger = DefaultLog()

    @classmethod
    def set_logger(cls, logger):
        """Install a logger shared by ALL BaseQuery subclasses."""
        BaseQuery.logger = logger

    def __init__(self, db, input_clean_function=no_function,
                 output_clean_function=no_function, query_args=None):
        """
        db - a PEP-249 compliant DB connection.
        input_clean_function - a function to convert arguments into
            database-friendly strings
        output_clean_function - a function to sanitize db output
        query_args - an object containing the query's arguments
            (FIX: default changed from a shared mutable ``{}`` to ``None``;
            behavior for callers is unchanged)
        """
        self.db_connection = db

        # merge the query_args against the argument defaults
        temp_args = copy.deepcopy(self.default_arguments)
        temp_args.update(query_args or {})

        # run every argument through the input clean function
        self.arguments = {arg: input_clean_function(value)
                          for arg, value in temp_args.items()}

        self.output_clean_function = output_clean_function

    @property
    def complete_query(self):
        """The query text with all template arguments substituted."""
        return self.query.substitute(self.arguments)

    def result(self):
        """
        Perform the query, and return the result as a Table object.

        Side effects: sets ``self.elapsed_time`` (seconds) and
        ``self.rows_fetched``; logs progress via the class logger.
        """
        BaseQuery.logger.log("With arguments: " + str(self.arguments))
        BaseQuery.logger.log("Running query: \n" + str(self.complete_query))

        cursor = self.db_connection.cursor()
        start_time = time.time()
        cursor.execute(self.complete_query)
        self.elapsed_time = time.time() - start_time
        BaseQuery.logger.log(str(self.elapsed_time) + " seconds")

        results_table = Table()
        # cursor.description yields one 7-item sequence per column; item 0
        # is the column name (PEP 249).
        for col in cursor.description:
            results_table.append_column(col[0])

        row = cursor.fetchone()
        while row:
            results_table.append_row(
                [self.output_clean_function(i) for i in row])
            row = cursor.fetchone()

        self.rows_fetched = len(results_table)
        BaseQuery.logger.log(str(self.rows_fetched) + " rows fetched")
        return results_table

    def __hash__(self):
        # NOTE(review): __hash__ is defined without a matching __eq__, so two
        # instances with the same complete_query hash alike but still compare
        # by identity. Left as-is because adding __eq__ would change observable
        # behavior for existing callers — confirm intent before "fixing".
        return hash(self.complete_query)
def _drawReciboCaixa(self, boletoDados):
    """Render the "Recibo do Caixa" (cashier receipt) section of the boleto.

    Fills the "recibo_caixa.html" template with the fields of ``boletoDados``
    and appends the resulting markup to ``self.html``.

    :param boletoDados: object holding the data of the boleto to be filled
        in. Must be a subclass of :class:`pyboleto.data.BoletoData`
    :type boletoDados: :class:`pyboleto.data.BoletoData`
    """
    tpl = string.Template(self._load_template("recibo_caixa.html"))
    tpl_data = {}

    # Header
    tpl_data["logo_img"] = ""
    if boletoDados.logo_image:
        tpl_data["logo_img"] = self._load_image(boletoDados.logo_image)
    tpl_data["codigo_dv_banco"] = boletoDados.codigo_dv_banco
    tpl_data["linha_digitavel"] = boletoDados.linha_digitavel

    # Body
    data_vencimento = boletoDados.data_vencimento
    tpl_data["data_vencimento"] = data_vencimento.strftime("%d/%m/%Y")

    # value is unicode in data.py
    if isinstance(boletoDados.local_pagamento, six.text_type):
        # NOTE(review): under Python 3 this produces a bytes object, which the
        # template would render as b'...' — presumably a Python 2 remnant;
        # verify against the supported Python versions.
        tpl_data["local_pagamento"] = boletoDados.local_pagamento.encode("utf-8")
    else:
        tpl_data["local_pagamento"] = boletoDados.local_pagamento
    tpl_data["cedente"] = boletoDados.cedente
    tpl_data["agencia_conta_cedente"] = boletoDados.agencia_conta_cedente
    data_documento = boletoDados.data_documento
    tpl_data["data_documento"] = data_documento.strftime("%d/%m/%Y")
    tpl_data["numero_documento"] = boletoDados.numero_documento
    tpl_data["especie_documento"] = boletoDados.especie_documento
    tpl_data["aceite"] = boletoDados.aceite
    data_process = boletoDados.data_processamento
    tpl_data["data_processamento"] = data_process.strftime("%d/%m/%Y")
    tpl_data["nosso_numero_format"] = boletoDados.format_nosso_numero()
    tpl_data["carteira"] = boletoDados.carteira
    tpl_data["especie"] = boletoDados.especie
    tpl_data["quantidade"] = boletoDados.quantidade
    valor = self._formataValorParaExibir(boletoDados.valor)
    tpl_data["valor"] = valor
    valor_doc = self._formataValorParaExibir(boletoDados.valor_documento)
    tpl_data["valor_documento"] = valor_doc

    # Payment instructions: one <p> per instruction line
    tpl_data["instrucoes"] = ""
    for instrucao in boletoDados.instrucoes:
        tpl_data["instrucoes"] += "<p>{0}</p>".format(instrucao)

    # Footer: payer ("sacado") info, one <p> per line
    tpl_data["sacado_info"] = ""
    for linha_sacado in boletoDados.sacado:
        tpl_data["sacado_info"] += "<p>{0}</p>".format(linha_sacado)

    # Bar code (interleaved 2 of 5)
    tpl_data["barcode"] = self._codigoBarraI25(boletoDados.barcode)

    self.html += tpl.substitute(tpl_data)
class GeditWindowDecorator(IPreferencesMonitor):
    """
    This class
     - manages the GeditTabDecorators
     - hooks the plugin actions as menu items and tool items
     - installs side and bottom panel views
    """

    _log = getLogger("GeditWindowDecorator")

    # ui definition template for hooking tools in gedit's ui
    _tool_ui_template = string.Template("""<ui>
        <menubar name="MenuBar">
            <menu name="ToolsMenu" action="Tools">
                <placeholder name="ToolsOps_1">$items</placeholder>
            </menu>
        </menubar>
    </ui>""")

    def __init__(self, window):
        self._window = window

        self._preferences = Preferences()
        self._preferences.register_monitor(self)

        #
        # initialize context object
        #
        self._window_context = WindowContext(self, EDITOR_SCOPE_VIEWS)

        # the order is important!
        self._init_actions()
        self._init_tool_actions()
        self._init_views()
        self._init_tab_decorators()

        # FIXME: find another way to save a document
        self._save_action = self._ui_manager.get_action("/MenuBar/FileMenu/FileSaveMenu")

        #
        # listen to tab signals
        #
        self._signal_handlers = [
            self._window.connect("tab_added", self._on_tab_added),
            self._window.connect("tab_removed", self._on_tab_removed),
            self._window.connect("active_tab_changed", self._on_active_tab_changed),
            self._window.connect("destroy", self._on_window_destroyed)
        ]

    def _init_views(self):
        """
        Initialize the view bookkeeping structures and install the
        always-present ToolView in the bottom panel.
        """
        # selection states for each TabDecorator
        self._selected_bottom_views = {}
        self._selected_side_views = {}

        # currently hooked editor-scope views
        self._side_views = []
        self._bottom_views = []

        # currently hooked window-scope views
        self._window_side_views = []
        self._window_bottom_views = []

        # caches window-scope View instances
        self._views = {}

        #
        # init the ToolView, it's always present
        #
        # TODO: position is ignored
        #
        tool_view = ToolView(self._window_context)
        self._views["ToolView"] = tool_view
        self._window.get_bottom_panel().add_item(tool_view, tool_view.label, tool_view.icon)
        #self._window_bottom_views.append(tool_view)

        # update window context
        self._window_context.window_scope_views = self._views

    def _init_actions(self):
        """
        Merge the plugin's UI definition with the one of gedit and
        hook the actions
        """
        self._ui_manager = self._window.get_ui_manager()
        self._action_group = gtk.ActionGroup("LaTeXPluginActions")

        # create action instances, hook them and build up some
        # hash tables
        self._action_objects = {}       # name -> Action object
        self._action_extensions = {}    # extension -> action names

        for clazz in ACTIONS:
            action = clazz()
            action.hook(self._action_group, self._window_context)
            self._action_objects[clazz.__name__] = action
            for extension in action.extensions:
                if extension in self._action_extensions.keys():
                    self._action_extensions[extension].append(clazz.__name__)
                else:
                    self._action_extensions[extension] = [clazz.__name__]

        # merge ui
        self._ui_manager.insert_action_group(self._action_group, -1)
        self._ui_id = self._ui_manager.add_ui_from_string(UI)

        # hook the toolbar
        self._toolbar = self._ui_manager.get_widget("/LaTeXToolbar")
        self._toolbar.set_style(gtk.TOOLBAR_BOTH_HORIZ)

        self._main_box = self._window.get_children()[0]
        self._main_box.pack_start(self._toolbar, False)
        self._main_box.reorder_child(self._toolbar, 2)

        #self._toolbar.show()
        self._toolbar.hide()

    def _init_tab_decorators(self):
        """
        Look for already open tabs and create decorators for them
        """
        self._tab_decorators = {}
        self._active_tab_decorator = None
        active_view = self._window.get_active_view()
        views = self._window.get_views()
        for view in views:
            tab = gedit.tab_get_from_document(view.get_buffer())
            decorator = self._create_tab_decorator(tab, init=True)
            if view is active_view:
                self._active_tab_decorator = decorator

        self._log.debug("_init_tab_decorators: initialized %s decorators" % len(views))

        if len(views) > 0 and not self._active_tab_decorator:
            self._log.warning("_init_tab_decorators: no active decorator found")

    def _init_tool_actions(self):
        """
        - Load defined Tools
        - create and init ToolActions from them
        - hook them in the window UI
        - create a map from extensions to lists of ToolActions
        """
        # add a MenuToolButton with the tools menu to the toolbar afterwards
        # FIXME: this is quite hacky
        menu = gtk.Menu()

        # this is used for enable/disable actions by name
        # None stands for every extension
        self._tool_action_extensions = { None : [] }

        self._tool_action_group = gtk.ActionGroup("LaTeXPluginToolActions")

        items_ui = ""

        self._action_handlers = {}

        i = 1               # counting tool actions
        accel_counter = 1   # counting tool actions without custom accel

        for tool in self._preferences.tools:
            # hopefully unique action name
            name = "Tool%sAction" % i

            # update extension-tool mapping
            for extension in tool.extensions:
                try:
                    self._tool_action_extensions[extension].append(name)
                except KeyError:
                    # extension not yet mapped
                    self._tool_action_extensions[extension] = [name]

            # create action
            action = ToolAction(tool)
            gtk_action = gtk.Action(name, action.label, action.tooltip, action.stock_id)
            # bind `action` as an extra argument so each handler activates its
            # own ToolAction
            self._action_handlers[gtk_action] = gtk_action.connect("activate", lambda gtk_action, action: action.activate(self._window_context), action)

            if not tool.accelerator is None and len(tool.accelerator) > 0:
                # TODO: validate accelerator!
                self._tool_action_group.add_action_with_accel(gtk_action, tool.accelerator)
            else:
                self._tool_action_group.add_action_with_accel(gtk_action, "<Ctrl><Alt>%s" % accel_counter)
                accel_counter += 1

            # add to MenuToolBar menu
            # FIXME: GtkWarning: gtk_accel_label_set_accel_closure: assertion `gtk_accel_group_from_accel_closure (accel_closure) != NULL' failed
            menu.add(gtk_action.create_menu_item())

            # add UI definition
            items_ui += """<menuitem action="%s" />""" % name

            i += 1

        tool_ui = self._tool_ui_template.substitute({"items" : items_ui})

        self._ui_manager.insert_action_group(self._tool_action_group, -1)
        self._tool_ui_id = self._ui_manager.add_ui_from_string(tool_ui)

        # add a MenuToolButton with the tools menu to the toolbar
        self._menu_tool_button = gtk.MenuToolButton(gtk.STOCK_CONVERT)
        self._menu_tool_button.set_menu(menu)
        self._menu_tool_button.show_all()
        self._toolbar.insert(self._menu_tool_button, -1)

    def save_file(self):
        """
        Trigger the 'Save' action

        (used by ToolAction before tool run)
        """
        self._save_action.activate()

    def _on_tools_changed(self):
        # FIXME: tools reload doesn't work
        # UPDATE: should work now

        # see IPreferencesMonitor._on_tools_changed
        self._log.debug("_on_tools_changed")

        # remove tool actions and ui
        self._ui_manager.remove_ui(self._tool_ui_id)
        for gtk_action in self._action_handlers:
            gtk_action.disconnect(self._action_handlers[gtk_action])
            self._tool_action_group.remove_action(gtk_action)
        self._ui_manager.remove_action_group(self._tool_action_group)

        # remove MenuToolButton
        self._toolbar.remove(self._menu_tool_button)

        # re-init tool actions
        self._init_tool_actions()

        # re-adjust action states
        self.adjust(self._active_tab_decorator)

    def activate_tab(self, file):
        """
        Activate the GeditTab containing the given File
        or open a new tab for it (this is called by the WindowContext)

        @param file: a File object
        """
        for tab, tab_decorator in self._tab_decorators.iteritems():
            if tab_decorator.file and tab_decorator.file == file:
                self._window.set_active_tab(tab)
                return

        # not found, open file in a new tab...
        uri = file.uri
        self._log.debug("GeditWindow.create_tab_from_uri(%s)" % uri)
        if gedit.utils.uri_is_valid(uri):
            self._window.create_tab_from_uri(file.uri, gedit.encoding_get_current(), 1, False, True)
        else:
            self._log.error("gedit.utils.uri_is_valid(%s) = False" % uri)

    def disable(self):
        """
        Called if there are no more tabs after tab_removed
        """
        self._toolbar.hide()

        # disable all actions
        for name in self._action_objects.iterkeys():
            self._action_group.get_action(name).set_visible(False)

        # disable all tool actions
        for l in self._tool_action_extensions.values():
            for name in l:
                self._tool_action_group.get_action(name).set_sensitive(False)

        # remove all side views
        side_views = self._window_side_views + self._side_views
        for view in side_views:
            self._window.get_side_panel().remove_item(view)
            if view in self._side_views:
                self._side_views.remove(view)
            if view in self._window_side_views:
                self._window_side_views.remove(view)

        # remove all bottom views
        bottom_views = self._window_bottom_views + self._bottom_views
        for view in bottom_views:
            self._window.get_bottom_panel().remove_item(view)
            if view in self._bottom_views:
                self._bottom_views.remove(view)
            if view in self._window_bottom_views:
                self._window_bottom_views.remove(view)

    def adjust(self, tab_decorator):
        """
        Adjust actions and views according to the currently active TabDecorator
        (the file type it contains)

        Called by
         * _on_active_tab_changed()
         * GeditTabDecorator when the Editor instance changes
        """

        # TODO: improve and simplify this!

        extension = tab_decorator.extension

        self._log.debug("---------- ADJUST: %s" % (extension))

        # FIXME: a hack again...
        # the toolbar should hide when it doesn't contain any visible items
        latex_extensions = self._preferences.get("LatexExtensions", ".tex").split(" ")
        show_toolbar = self._preferences.get_bool("ShowLatexToolbar", True)
        if show_toolbar and extension in latex_extensions:
            self._toolbar.show()
        else:
            self._toolbar.hide()

        #
        # adjust actions
        #
        # FIXME: we always get the state of the new decorator after tab change
        # but we need to save the one of the old decorator
        #

        # FIXME: we are dealing with sets so saving the index as selection state
        # is nonsense

        # disable all actions
        for name in self._action_objects:
            self._action_group.get_action(name).set_visible(False)

        # disable all tool actions
        for l in self._tool_action_extensions.values():
            for name in l:
                self._tool_action_group.get_action(name).set_sensitive(False)

        # enable the actions for all extensions
        for name in self._action_extensions[None]:
            self._action_group.get_action(name).set_visible(True)

        # enable the actions registered for the extension
        if extension:
            try:
                for name in self._action_extensions[extension]:
                    self._action_group.get_action(name).set_visible(True)
            except KeyError:
                pass

        # enable the tool actions that apply for all extensions
        for name in self._tool_action_extensions[None]:
            self._tool_action_group.get_action(name).set_sensitive(True)

        # enable the tool actions that apply for this extension
        if extension:
            try:
                for name in self._tool_action_extensions[extension]:
                    self._tool_action_group.get_action(name).set_sensitive(True)
            except KeyError:
                pass

        #
        # save selection state
        #
        self._selected_bottom_views[tab_decorator] = self._get_selected_bottom_view()
        self._selected_side_views[tab_decorator] = self._get_selected_side_view()

        #
        # adjust editor-scope views
        #

        # determine set of side/bottom views BEFORE
        before_side_views = set(self._side_views)
        before_bottom_views = set(self._bottom_views)

        # determine set of side/bottom views AFTER
        after_side_views = set()
        after_bottom_views = set()

        if tab_decorator.editor:
            editor_views = self._window_context.editor_scope_views[tab_decorator.editor]
            for id, view in editor_views.iteritems():
                if isinstance(view, BottomView):
                    after_bottom_views.add(view)
                elif isinstance(view, SideView):
                    after_side_views.add(view)
                else:
                    raise RuntimeError("Invalid view type: %s" % view)

        # remove BEFORE.difference(AFTER)
        for view in before_side_views.difference(after_side_views):
            self._window.get_side_panel().remove_item(view)
            self._side_views.remove(view)
        for view in before_bottom_views.difference(after_bottom_views):
            self._window.get_bottom_panel().remove_item(view)
            self._bottom_views.remove(view)

        # add AFTER.difference(BEFORE)
        for view in after_side_views.difference(before_side_views):
            self._window.get_side_panel().add_item(view, view.label, view.icon)
            self._side_views.append(view)
        for view in after_bottom_views.difference(before_bottom_views):
            self._window.get_bottom_panel().add_item(view, view.label, view.icon)
            self._bottom_views.append(view)

        #
        # adjust window-scope views
        #

        # determine set of side/bottom views BEFORE
        before_window_side_views = set(self._window_side_views)
        before_window_bottom_views = set(self._window_bottom_views)

        # determine set of side/bottom views AFTER
        after_window_side_views = set()
        after_window_bottom_views = set()

        try:
            for id, clazz in WINDOW_SCOPE_VIEWS[extension].iteritems():
                # find or create View instance
                view = None
                try:
                    view = self._views[id]
                except KeyError:
                    # lazily construct and cache the window-scope view
                    view = clazz.__new__(clazz)
                    clazz.__init__(view, self._window_context)
                    self._views[id] = view

                if isinstance(view, BottomView):
                    after_window_bottom_views.add(view)
                elif isinstance(view, SideView):
                    after_window_side_views.add(view)
                else:
                    raise RuntimeError("Invalid view type: %s" % view)
        except KeyError:
            self._log.debug("No window-scope views for this extension")

        # remove BEFORE.difference(AFTER)
        for view in before_window_side_views.difference(after_window_side_views):
            self._window.get_side_panel().remove_item(view)
            self._window_side_views.remove(view)
        for view in before_window_bottom_views.difference(after_window_bottom_views):
            self._window.get_bottom_panel().remove_item(view)
            self._window_bottom_views.remove(view)

        # add AFTER.difference(BEFORE)
        for view in after_window_side_views.difference(before_window_side_views):
            self._window.get_side_panel().add_item(view, view.label, view.icon)
            self._window_side_views.append(view)
        for view in after_window_bottom_views.difference(before_window_bottom_views):
            self._window.get_bottom_panel().add_item(view, view.label, view.icon)
            self._window_bottom_views.append(view)

        #
        # update window context
        #
        self._window_context.window_scope_views = self._views

        #
        # restore selection state
        #
        self._set_selected_bottom_view(self._selected_bottom_views[tab_decorator])
        self._set_selected_side_view(self._selected_side_views[tab_decorator])

        # update latex_previews
        latex_previews = self._window_context.latex_previews
        if latex_previews != None and latex_previews.is_shown(tab_decorator.tab):
            latex_previews.reparent(tab_decorator.tab)

    def _get_selected_bottom_view(self):
        # NOTE(review): relies on gedit's internal panel widget hierarchy —
        # fragile across gedit versions.
        notebook = self._window.get_bottom_panel().get_children()[0].get_children()[0]
        assert type(notebook) is gtk.Notebook
        return notebook.get_current_page()

    def _get_selected_side_view(self):
        notebook = self._window.get_side_panel().get_children()[1]
        assert type(notebook) is gtk.Notebook
        return notebook.get_current_page()

    def _set_selected_bottom_view(self, view):
        notebook = self._window.get_bottom_panel().get_children()[0].get_children()[0]
        assert type(notebook) is gtk.Notebook
        self._log.debug("_set_selected_bottom_view: %s" % view)
        notebook.set_current_page(view)

    def _set_selected_side_view(self, view):
        notebook = self._window.get_side_panel().get_children()[1]
        assert type(notebook) is gtk.Notebook
        self._log.debug("_set_selected_side_view: %s" % view)
        notebook.set_current_page(view)

    def _on_tab_added(self, window, tab):
        """
        A new tab has been added

        @param window: gedit.Window object
        @param tab: gedit.Tab object
        """
        self._log.debug("tab_added")

        if tab in self._tab_decorators:
            self._log.warning("There is already a decorator for tab %s" % tab)
            return

        self._create_tab_decorator(tab)

    def _on_tab_removed(self, window, tab):
        """
        A tab has been closed

        @param window: GeditWindow
        @param tab: the closed GeditTab
        """
        self._log.debug("tab_removed")

        # properly remove the latex preview, if any
        latex_previews = self._window_context.latex_previews
        if latex_previews != None and latex_previews.is_shown(tab):
            latex_previews.hide(tab)

        # As we don't call GeditWindowDecorator.adjust() if the new
        # tab is not the active one (for example, when opening several
        # files at once, see GeditTabDecorator._adjust_editor()),
        # it may happen that self._selected_side_views[tab] is not set.
        if self._tab_decorators[tab] in self._selected_side_views:
            del self._selected_side_views[self._tab_decorators[tab]]
        if self._tab_decorators[tab] in self._selected_bottom_views:
            del self._selected_bottom_views[self._tab_decorators[tab]]

        self._tab_decorators[tab].destroy()
        if self._active_tab_decorator == self._tab_decorators[tab]:
            self._active_tab_decorator = None
        del self._tab_decorators[tab]

        if len(self._tab_decorators) == 0:
            # no more tabs
            self.disable()

    def _on_active_tab_changed(self, window, tab):
        """
        The active tab has changed

        @param window: the GeditWindow
        @param tab: the activated GeditTab
        """
        self._log.debug("active_tab_changed")

        if tab in self._tab_decorators.keys():
            decorator = self._tab_decorators[tab]
        else:
            # (on gedit startup 'tab-changed' comes before 'tab-added')
            # remember: init=True crashes the plugin here!
            decorator = self._create_tab_decorator(tab)

        self._active_tab_decorator = decorator

        # adjust actions and views
        self.adjust(decorator)

    def _create_tab_decorator(self, tab, init=False):
        """
        Create a new GeditTabDecorator for a GeditTab
        """
        decorator = GeditTabDecorator(self, tab, init)
        self._tab_decorators[tab] = decorator
        return decorator

    def _on_window_destroyed(self, object):
        """
        The gtk.Window received the 'destroy' signal as a gtk.Object
        """
        self._log.debug("received 'destroy'")
        self.destroy()

    def destroy(self):
        # save preferences and stop listening
        self._preferences.save()
        self._preferences.remove_monitor(self)

        # destroy tab decorators
        self._active_tab_decorator = None
        for tab in self._tab_decorators:
            self._tab_decorators[tab].destroy()
        self._tab_decorators = {}

        # disconnect from tab signals
        for id in self._signal_handlers:
            self._window.disconnect(id)
        del self._signal_handlers

        # remove all views
        self.disable()

        # destroy all window scope views
        # (the editor scope views are destroyed by the editor)
        for i in self._window_context.window_scope_views:
            self._window_context.window_scope_views[i].destroy()
        self._window_context.window_scope_views = {}

        # remove toolbar
        self._toolbar.destroy()

        # remove tool actions
        self._ui_manager.remove_ui(self._tool_ui_id)
        for gtk_action in self._action_handlers:
            gtk_action.disconnect(self._action_handlers[gtk_action])
            self._tool_action_group.remove_action(gtk_action)
        self._ui_manager.remove_action_group(self._tool_action_group)

        # remove actions
        self._ui_manager.remove_ui(self._ui_id)
        for clazz in self._action_objects:
            self._action_objects[clazz].unhook(self._action_group)
        self._ui_manager.remove_action_group(self._action_group)

        # unreference the gedit window
        del self._window

        # destroy the window context
        self._window_context.destroy()
        del self._window_context

    def __del__(self):
        self._log.debug("Properly destroyed %s" % self)
import string
import unittest

import integrationtest_support

import yadt_status_answer

# Template for a single host's YADT status document; $host / $host_fqdn are
# substituted per test host.
# BUG FIX: the original template was not valid JSON — the "services" list
# contained a bare key:value pair ("service":{...}) plus a trailing comma.
# NOTE(review): wrapped as a list of one-key objects; confirm against the
# consumer's expected "services" schema.
STATUS_TEMPLATE = string.Template("""
{
    "hostname":"$host",
    "fqdn":"$host_fqdn",
    "current_artefacts":[
        "yit/0:0.0.1",
        "yat/0:0.0.7"
    ],
    "next_artefacts":{
        "foo/0:0.0.0":"yit/0:0.0.1",
        "yat/0:0.0.8":"yat/0:0.0.7"
    },
    "services":[
        {"service":{
            "needs_services": ["service://foo/readonly"]
        }}
    ]
}
""")


class Test(integrationtest_support.IntegrationTestSupport):

    def test(self):
        self.write_target_file('it01.domain')
def create_proxyauth_extension(self, proxy_host, proxy_port,
                               proxy_username, proxy_password,
                               scheme='http', plugin_path=None):
    """Build a Chrome extension (zip) that sets a fixed proxy and answers
    proxy-auth challenges with the given credentials.

    args:
        proxy_host (str): domain or ip address, ie proxy.domain.com
        proxy_port (int): port
        proxy_username (str): auth username
        proxy_password (str): auth password
    kwargs:
        scheme (str): proxy scheme, default http
        plugin_path (str): absolute path of the extension

    return str -> plugin_path
    """
    import string
    import zipfile

    if plugin_path is None:
        # NOTE(review): hard-coded home directory kept for backward
        # compatibility — consider tempfile.gettempdir() instead.
        plugin_path = '/home/hanye/vimm_chrome_proxyauth_plugin_%s.zip' % int(
            datetime.datetime.now().timestamp() * 1e3)

    manifest_json = """
    {
        "version": "1.0.0",
        "manifest_version": 2,
        "name": "Chrome Proxy",
        "permissions": [
            "proxy",
            "tabs",
            "unlimitedStorage",
            "storage",
            "<all_urls>",
            "webRequest",
            "webRequestBlocking"
        ],
        "background": {
            "scripts": ["background.js"]
        },
        "minimum_chrome_version":"22.0.0"
    }
    """

    # BUG FIX: the original template hard-coded "******" for the auth
    # credentials, so the username/password passed to substitute() were
    # silently ignored and authentication could never succeed.
    background_js = string.Template("""
    var config = {
        mode: "fixed_servers",
        rules: {
            singleProxy: {
                scheme: "${scheme}",
                host: "${host}",
                port: parseInt(${port})
            },
            bypassList: ["foobar.com"]
        }
    };

    chrome.proxy.settings.set({value: config, scope: "regular"}, function() {});

    function callbackFn(details) {
        return {
            authCredentials: {
                username: "${username}",
                password: "${password}"
            }
        };
    }

    chrome.webRequest.onAuthRequired.addListener(
        callbackFn,
        {urls: ["<all_urls>"]},
        ['blocking']
    );
    """).substitute(
        host=proxy_host,
        port=proxy_port,
        username=proxy_username,
        password=proxy_password,
        scheme=scheme,
    )

    # write both files into the extension archive
    with zipfile.ZipFile(plugin_path, 'w') as zp:
        zp.writestr("manifest.json", manifest_json)
        zp.writestr("background.js", background_js)
    self.plugin_path = plugin_path

    return plugin_path
def genHelp_(lang, q, outfile):
    # Generate a static XHTML help page for one language's input scheme.
    # Python 2 code: uses `print` statements, `unicode`, and nevow "stan" tags
    # (T.div[...] syntax). `q` provides getSchemeHelp(); `prettyXHTML`,
    # `nevow`, `T` and `string` come from the enclosing module.
    print "Attempting to generate", outfile, "... ",
    help, examples = q.getSchemeHelp()

    def exLister(context, data):
        # nevow renderer: one table row per (input, output, note) example
        for input, output, note in data:
            context.tag[T.tr[T.td(class_="input")[input.encode('utf-8')],
                             T.td(class_="output")[output.encode('utf-8')],
                             T.td(class_="note")[note.encode('utf-8')]]]
        return context.tag

    def processEx(e):
        # Render an example list as a heading + bullet list; empty -> None
        if len(e) < 1:
            return
        return "Examples", T.ul[map(lambda x: T.li[x.encode('utf-8')], e)]

    def processTR(l):
        # One table row of (symbol, transliteration) cell pairs; empty -> None
        if len(l) < 1:
            return
        return T.tr[map(lambda x: T.td[x[0].encode('utf-8'), T.em[x[1].encode('utf-8')]], l)]

    def processTable(t):
        # Render a scheme table; accepts either a list of rows or one row
        if len(t) < 1:
            return
        # If list of lists
        if type(t[0]) == type([]):
            return T.table(cellspacing="3", cellpadding="4")[map(lambda x: processTR(x), t)]
        else:
            return T.table(cellspacing="3", cellpadding="4")[processTR(t)]

    def helpLister(context, data):
        # nevow renderer: heading, notes/examples, then the scheme table for
        # each help section
        ret = []
        for label, noteex, eachList in help:
            ret.append(T.h4[label.encode('utf-8')])
            for i in noteex:
                if type(i) == type("") or type(i) is unicode:
                    ret.append(T.p(class_="note")[i.encode('utf-8')])
                else:
                    ret.append(processEx(i))
            ret.append(processTable(eachList))
        return ret

    # assemble the page body
    t = T.div(id="help")[
        T.p(class_="heading")["QuillPad allows users to type freely without having to follow any strict typing rules. While QuillPad predicts most words correctly, there may be a few cases where the desired word may not appear in the predicted options. Such words can be composed by entering the words in an ITRANS like scheme."],
        T.p(style="font-size: 12px")["The following examples demonstrate how to write words using the ITRANS like scheme"],
        T.table(cellspacing="3", cellpadding="4")[
            T.thead[T.th["Input"], T.th["Output"], T.th],
            T.tbody(render=exLister, data=examples)],
        T.h3["Scheme Tables"],
        T.div(render=helpLister, data=help)]

    # flatten into the page template and write the output file
    ts = open('help_template.html').read()
    f = open(outfile, "w")
    lang = lang[0].upper() + lang[1:]
    f.write(
        string.Template(ts).substitute(lang=lang, content=prettyXHTML(
            nevow.flat.flatten(t))))
    f.close()
    print "done"
template_h = string.Template(header + """\ #ifndef CONTENT_BROWSER_DEVTOOLS_PROTOCOL_DEVTOOLS_PROTOCOL_DISPATCHER_H_ #define CONTENT_BROWSER_DEVTOOLS_PROTOCOL_DEVTOOLS_PROTOCOL_DISPATCHER_H_ #include <utility> #include "content/browser/devtools/protocol/devtools_protocol_client.h" namespace content { class DevToolsProtocolDispatcher; namespace devtools { extern const char kProtocolVersion[]; bool IsSupportedProtocolVersion(const std::string& version); template<typename T> base::Value* CreateValue(const T& param) { return new base::FundamentalValue(param); } template<class T> base::Value* CreateValue(scoped_ptr<T>& param) { return param.release(); } template<class T> base::Value* CreateValue(scoped_refptr<T> param) { return param->ToValue().release(); } template<typename T> base::Value* CreateValue(const std::vector<T> param) { base::ListValue* result = new base::ListValue(); for (auto& item : param) { result->Append(CreateValue(item)); } return result; } template<> base::Value* CreateValue(const std::string& param); ${types}\ } // namespace devtools class DevToolsProtocolDispatcher { public: using CommandHandler = base::Callback<bool(DevToolsCommandId, scoped_ptr<base::DictionaryValue>)>; explicit DevToolsProtocolDispatcher(DevToolsProtocolDelegate* notifier); ~DevToolsProtocolDispatcher(); CommandHandler FindCommandHandler(const std::string& method); ${setters}\ private: using Response = DevToolsProtocolClient::Response; using CommandHandlers = std::map<std::string, CommandHandler>; ${methods}\ DevToolsProtocolDelegate* notifier_; DevToolsProtocolClient client_; CommandHandlers command_handlers_; ${fields}\ }; } // namespace content #endif // CONTENT_BROWSER_DEVTOOLS_PROTOCOL_DEVTOOLS_PROTOCOL_DISPATCHER_H_ """)
def get_changesets(self, start=None, end=None, start_date=None, end_date=None, branch_name=None, reverse=False):
    """
    Returns iterator of ``GitChangeset`` objects from start to end (both
    are inclusive), in ascending date order (unless ``reverse`` is set).

    :param start: changeset ID, as str; first returned changeset
    :param end: changeset ID, as str; last returned changeset
    :param start_date: if specified, changesets with commit date less than
      ``start_date`` would be filtered out from returned set
    :param end_date: if specified, changesets with commit date greater than
      ``end_date`` would be filtered out from returned set
    :param branch_name: if specified, changesets not reachable from given
      branch would be filtered out from returned set
    :param reverse: if ``True``, returned generator would be reversed
      (meaning that returned changesets would have descending date order)

    :raise BranchDoesNotExistError: If given ``branch_name`` does not
      exist.
    :raise ChangesetDoesNotExistError: If changeset for given ``start`` or
      ``end`` could not be found.
    """
    # Validate the branch before spawning any subprocess work.
    if branch_name and branch_name not in self.branches:
        raise BranchDoesNotExistError("Branch '%s' not found" % branch_name)
    # Check for an empty repo up front so we don't spawn subprocess
    # commands that would fail anyway.
    if self._empty:
        raise EmptyRepositoryError("There are no changesets yet")
    # %H in the format means (full) commit hash; hashes are retrieved
    # in ascending date order.
    cmd_template = 'log --date-order --reverse --pretty=format:"%H"'
    cmd_params = {}
    if start_date:
        cmd_template += ' --since "$since"'
        cmd_params['since'] = start_date.strftime('%m/%d/%y %H:%M:%S')
    if end_date:
        cmd_template += ' --until "$until"'
        cmd_params['until'] = end_date.strftime('%m/%d/%y %H:%M:%S')
    if branch_name:
        cmd_template += ' $branch_name'
        cmd_params['branch_name'] = branch_name
    else:
        # No branch given: fall back to the configured revision filter
        # (e.g. --all) so all reachable revisions are listed.
        rev_filter = settings.GIT_REV_FILTER
        cmd_template += ' %s' % (rev_filter)
    # string.Template is used (not %-formatting) because the git pretty
    # format itself contains '%H'.
    cmd = string.Template(cmd_template).safe_substitute(**cmd_params)
    revs = self.run_git_command(cmd)[0].splitlines()
    # Narrow the full revision list down to the [start, end] window.
    start_pos = 0
    end_pos = len(revs)
    if start:
        _start = self._get_revision(start)
        try:
            start_pos = revs.index(_start)
        except ValueError:
            # start not in the (possibly date/branch-filtered) list;
            # keep the default position.
            pass
    if end is not None:
        _end = self._get_revision(end)
        try:
            end_pos = revs.index(_end)
        except ValueError:
            pass
    if None not in [start, end] and start_pos > end_pos:
        raise RepositoryError('start cannot be after end')
    # Make the end bound inclusive for the slice below.
    if end_pos is not None:
        end_pos += 1
    revs = revs[start_pos:end_pos]
    if reverse:
        revs = reversed(revs)
    return CollectionGenerator(self, revs)
def genHelp(lang, q, outfile):
    # Generate the per-language ITRANS-scheme help page as HTML and write it
    # to `outfile` using the 'help_template.html' template.
    # NOTE: Python 2 code (print statements, `unicode`, str.encode on text).
    print "Attempting to generate", outfile, "... ",
    help, examples = q.getSchemeHelp()
    # Build the page body piece by piece into `s`.
    s = '<div id="help">\n'
    s += '<p class="heading">QuillPad allows users to type freely without having to follow any strict typing rules. While QuillPad predicts most words correctly, there may be a few cases where the desired word may not appear in the predicted options. Such words can be composed by entering the words in an ITRANS like scheme.</p>\n'
    s += '<p style="font-size: 12px">The following examples demonstrate how to write words using the ITRANS like scheme</p>\n'
    s += '<table class="big" cellspacing="3" cellpadding="4">\n'
    s += '<thead>\n<th>Input</th><th>Output</th><th></th></thead>\n'
    for (input, output, note) in examples:
        # NOTE(review): `outLine` already ends with '</tr>' and another
        # '</tr>' is appended below, producing '...</tr></tr>' — looks like
        # a doubled closing tag; confirm against the rendered page.
        outLine = '<td class="input">%s</td> <td class="output">%s</td> <td class="note">%s</td></tr>' % ( input, output.encode('utf-8'), note.encode('utf-8'))
        s += '<tr>' + outLine + '</tr>'
    s += "</table>\n"
    def processTR(l):
        # Render one list of (symbol, transliteration) pairs as a table row.
        if len(l) < 1: return
        return "<tr>\n%s</tr>\n" % (reduce( lambda x, y: x + '<td class="special">' + y[0].encode('utf-8') + " <em>(" + y[1].encode('utf-8') + ")</em></td>", l, ""))
    def processTable(t):
        # Render a scheme table; `t` may be a flat list or a list of rows.
        if len(t) < 1: return
        out = '<table class="big" cellspacing="3" cellpadding="4">\n'
        # If list of lists
        if type(t[0]) == type([]):
            for i in t:
                out += processTR(i)
        else:
            out += processTR(t)
        return out + '</table>\n'
    def processEx(e):
        # Render a list of example strings as an HTML bullet list.
        if len(e) < 1: return
        return "Examples<ul>\n%s</ul>\n" % (reduce( lambda x, y: x + "<li>" + y.encode('utf-8') + "</li>", e, ""))
    s += '<h3>Scheme Table</h3>\n'
    for (label, noteex, eachList) in help:
        s += '<h4>%s</h4>\n' % label.encode('utf-8')
        for i in noteex:
            # Plain strings are notes; anything else is an example list.
            if type(i) == type("") or type(i) is unicode:
                s += '<p class="note">%s</p>' % (i.encode('utf-8'))
            else:
                s += processEx(i)
        s += processTable(eachList)
    s += "</div>\n"
    print "done"
    # Substitute the generated body into the shared page template.
    ts = open('help_template.html').read()
    f = open(outfile, "w")
    lang = lang[0].upper() + lang[1:]
    f.write(string.Template(ts).substitute(lang=lang,
        content=s))
    f.close()
def testConvertSliceMetricsToProtoFromLegacyStrings(self):
    """Legacy flat metric keys (value + lower/upper bound keys) are folded into bounded_value protos."""
    slice_key = _make_slice_key('age', 5, 'language', 'english', 'price', 0.3)
    # Plain float metric plus legacy per-key bounds for AUPRC and AUC.
    slice_metrics = {
        'accuracy': 0.8,
        metric_keys.AUPRC: 0.1,
        metric_keys.lower_bound_key(metric_keys.AUPRC): 0.05,
        metric_keys.upper_bound_key(metric_keys.AUPRC): 0.17,
        metric_keys.AUC: 0.2,
        metric_keys.lower_bound_key(metric_keys.AUC): 0.1,
        metric_keys.upper_bound_key(metric_keys.AUC): 0.3
    }
    # $auc / $auprc are substituted so the expected proto uses the same key
    # strings as metric_keys.
    expected_metrics_for_slice = text_format.Parse(
        string.Template(""" slice_key { single_slice_keys { column: 'age' int64_value: 5 } single_slice_keys { column: 'language' bytes_value: 'english' } single_slice_keys { column: 'price' float_value: 0.3 } } metrics { key: "accuracy" value { double_value { value: 0.8 } } } metrics { key: "$auc" value { bounded_value { lower_bound { value: 0.1 } upper_bound { value: 0.3 } value { value: 0.2 } methodology: RIEMANN_SUM } } } metrics { key: "$auprc" value { bounded_value { lower_bound { value: 0.05 } upper_bound { value: 0.17 } value { value: 0.1 } methodology: RIEMANN_SUM } } }""").substitute(auc=metric_keys.AUC, auprc=metric_keys.AUPRC),
        metrics_for_slice_pb2.MetricsForSlice())
    got = metrics_plots_and_validations_writer.convert_slice_metrics_to_proto(
        (slice_key, slice_metrics),
        [post_export_metrics.auc(), post_export_metrics.auc(curve='PR')])
    self.assertProtoEquals(expected_metrics_for_slice, got)
def testConvertSliceMetricsToProtoMetricsRanges(self):
    """ValueWithTDistribution metrics convert to bounded_value + confidence_interval protos."""
    slice_key = _make_slice_key('age', 5, 'language', 'english', 'price', 0.3)
    # 'accuracy' carries a t-distribution sample (mean, stddev, dof,
    # unsampled value); AUC/AUPRC use legacy bound keys.
    slice_metrics = {
        'accuracy': types.ValueWithTDistribution(0.8, 0.1, 9, 0.8),
        metric_keys.AUPRC: 0.1,
        metric_keys.lower_bound_key(metric_keys.AUPRC): 0.05,
        metric_keys.upper_bound_key(metric_keys.AUPRC): 0.17,
        metric_keys.AUC: 0.2,
        metric_keys.lower_bound_key(metric_keys.AUC): 0.1,
        metric_keys.upper_bound_key(metric_keys.AUC): 0.3
    }
    # The expected bounds for 'accuracy' (0.5737843 / 1.0262157) are the
    # precomputed t-distribution confidence interval for the sample above.
    expected_metrics_for_slice = text_format.Parse(
        string.Template(""" slice_key { single_slice_keys { column: 'age' int64_value: 5 } single_slice_keys { column: 'language' bytes_value: 'english' } single_slice_keys { column: 'price' float_value: 0.3 } } metrics { key: "accuracy" value { bounded_value { value { value: 0.8 } lower_bound { value: 0.5737843 } upper_bound { value: 1.0262157 } methodology: POISSON_BOOTSTRAP } confidence_interval { lower_bound { value: 0.5737843 } upper_bound { value: 1.0262157 } t_distribution_value { sample_mean { value: 0.8 } sample_standard_deviation { value: 0.1 } sample_degrees_of_freedom { value: 9 } unsampled_value { value: 0.8 } } } } } metrics { key: "$auc" value { bounded_value { lower_bound { value: 0.1 } upper_bound { value: 0.3 } value { value: 0.2 } methodology: RIEMANN_SUM } } } metrics { key: "$auprc" value { bounded_value { lower_bound { value: 0.05 } upper_bound { value: 0.17 } value { value: 0.1 } methodology: RIEMANN_SUM } } }""").substitute(auc=metric_keys.AUC, auprc=metric_keys.AUPRC),
        metrics_for_slice_pb2.MetricsForSlice())
    got = metrics_plots_and_validations_writer.convert_slice_metrics_to_proto(
        (slice_key, slice_metrics),
        [post_export_metrics.auc(), post_export_metrics.auc(curve='PR')])
    self.assertProtoEquals(expected_metrics_for_slice, got)
def create_container(ci_file_path, *args):
    # Build a docker image that emulates a single GitLab CI job locally:
    # resolve the requested stage:job from the CI yml, render a Dockerfile
    # from a template, and `docker build` it with the repo root as context.
    ci_state = load_ci_file(ci_file_path)
    ci_jobs = ci_stages_and_jobs(ci_state)
    stage,name = extract_stage_job_from_cmdline(*args)
    if not stage in ci_jobs:
        print('Unable to find stage: ', stage)
        print('Valid stages are:', list(ci_jobs.keys()))
        exit(1)
    if not name in ci_jobs[stage]:
        print('Unable to find job: ', name)
        print('Valid jobs are:', ci_jobs[stage])
        exit(1)
    # We now have the relevant subset of the yml fully expanded into a
    # single definition.
    subset = subset_yml(ci_state, stage, name)
    runner_name = stage+":"+name
    runner = subset[runner_name]
    src_dir = get_root_dir()
    # Turn the job's variables into NAME="value" pairs for the ENV line.
    gitlab_env = [ k + '="' + v + '"' for k,v in runner['variables'].items()]
    # Propagate any https/http proxy info from the host.
    if os.getenv('http_proxy'):
        gitlab_env = [ 'http_proxy=' + os.getenv('http_proxy') ] + gitlab_env
    if os.getenv('https_proxy'):
        gitlab_env = [ 'https_proxy=' + os.getenv('https_proxy') ] + gitlab_env
    # The script and before_script could be anywhere; later (more specific)
    # locations win because they overwrite earlier assignments.
    # NOTE(review): if neither key appears in any location, `before_script`
    # / `script` stay unbound and the substitute() below raises NameError —
    # confirm the yml always defines them.
    script_search_locations = [ci_state, subset, runner]
    for loc in script_search_locations:
        if 'before_script' in loc:
            before_script = loc['before_script']
        if 'script' in loc:
            script = loc['script']
    docker_template = string.Template(''' FROM $image ENV GITLAB_CI=1 \ GITLAB_CI_EMULATION=1 \ CI_PROJECT_DIR=. \ CI_JOB_NAME=$job_name #Copy all of this project to the src directory COPY . /src ENV $gitlab_env WORKDIR /src #Let git fix issues from copying across OS (such as windows EOL) #Note that this will remove any changes not committed.
RUN echo "$before_script || true" >> /setup-gitlab-env.sh && \ echo "$script || true" >> /run-gitlab-stage.sh && \ git reset --hard && \ bash /setup-gitlab-env.sh ''')
    docker_content = docker_template.substitute(image=runner['image'], job_name='local-build'+runner_name, src_dir=src_dir, gitlab_env= " ".join(gitlab_env), before_script=" && ".join(before_script), script=" && ".join(script))
    # Write the rendered Dockerfile out to a temp file (delete=False so the
    # docker CLI can read it by name; removed in the finally below).
    docker_file = tempfile.NamedTemporaryFile(delete=False)
    docker_file.write(bytes(docker_content, 'utf-8'))
    docker_file.close()
    # Now run docker and build this image with a name equal to the ci name,
    # and the docker context set to the git repo root dir so we can copy the
    # current project src automagically.
    try:
        subprocess_call_docker(['build', '-f', docker_file.name, '-t', runner_name, src_dir], cwd=src_dir)
    except subprocess.CalledProcessError:
        print('Unable to build the docker image for: ', runner_name)
        exit(1)
    finally:
        # Remove the temp Dockerfile whether or not the build succeeded.
        os.remove(docker_file.name)
def readLicenseFile(filename):
    """Build a compiled regular expression that validates a license.

    The raw license text is regex-escaped so its content matches literally,
    then the escaping of ``$`` is undone so that ``string.Template``
    placeholders survive and can be filled in from ``LICESE_FILE_VALUES``
    (which hold regular-expression fragments for the variable parts).
    """
    raw_text = readFile(filename)
    escaped = re.escape(raw_text)
    # Un-escape '$' so Template placeholders remain substitutable.
    template_text = escaped.replace('\\$', '$')
    pattern = string.Template(template_text).substitute(**LICESE_FILE_VALUES)
    return re.compile(pattern)
# --- Parrots evaluation driver (flat script) ---
# Loads a model definition and a session-config template, substitutes run
# parameters, then runs one forward pass of the 'val' flow.
model_file = "./model.yaml"
param_file = "/mnt/gv7/16winter/16winter/ijcai/resnet101/model.parrots"
# Values substituted into the session-config template below.
mapping = dict(
    gpu='2:4',
    bs=8 * 2,
)
# Read the model file.
with open(model_file) as fin:
    model_text = fin.read()
# Read the session config template.
# NOTE(review): `session_file` is not defined in this chunk — presumably
# assigned earlier in the file; verify before running standalone.
with open(session_file, 'r') as fcfg:
    cfg_templ = fcfg.read()
cfg_templ = string.Template(cfg_templ)
cfg_text = cfg_templ.substitute(mapping)
# Create the model from YAML text.
model = dnn.Model.from_yaml_text(model_text)
# Create and set up the session.
session = dnn.Session.from_yaml_text(model, cfg_text)
session.setup()
# Load pretrained parameters for the 'val' flow.
session.flow('val').load_param(param_file)
with session.flow('val') as f:
    f.forward()
# Code-generation template for a complete polynomial-series class (old
# numpy.polynomial-style `polytemplate`). The $name / $nick / $domain
# placeholders are substituted by the caller, and REL_IMPORT is patched via
# the trailing str.replace. The template body is quoted source text, not
# live code, so it is left byte-identical.
polytemplate = string.Template(''' from __future__ import division REL_IMPORT polyutils as pu import numpy as np class $name(pu.PolyBase) : """A $name series class. Parameters ---------- coef : array_like $name coefficients, in increasing order. For example, ``(1, 2, 3)`` implies ``P_0 + 2P_1 + 3P_2`` where the ``P_i`` are a graded polynomial basis. domain : (2,) array_like Domain to use. The interval ``[domain[0], domain[1]]`` is mapped to the interval ``$domain`` by shifting and scaling. Attributes ---------- coef : (N,) array $name coefficients, from low to high. domain : (2,) array_like Domain that is mapped to ``$domain``. Class Attributes ---------------- maxpower : int Maximum power allowed, i.e., the largest number ``n`` such that ``p(x)**n`` is allowed. This is to limit runaway polynomial size. domain : (2,) ndarray Default domain of the class. Notes ----- It is important to specify the domain for many uses of graded polynomial, for instance in fitting data. This is because many of the important properties of the polynomial basis only hold in a specified interval and thus the data must be mapped into that domain in order to benefit. Examples -------- """ # Limit runaway size. T_n^m has degree n*2^m maxpower = 16 # Default domain domain = np.array($domain) # Don't let participate in array operations. Value doesn't matter. 
__array_priority__ = 0 def __init__(self, coef, domain=$domain) : [coef, domain] = pu.as_series([coef, domain], trim=False) if len(domain) != 2 : raise ValueError("Domain has wrong number of elements.") self.coef = coef self.domain = domain def __repr__(self): format = "%s(%s, %s)" coef = repr(self.coef)[6:-1] domain = repr(self.domain)[6:-1] return format % ('$name', coef, domain) def __str__(self) : format = "%s(%s, %s)" return format % ('$nick', str(self.coef), str(self.domain)) # Pickle and copy def __getstate__(self) : ret = self.__dict__.copy() ret['coef'] = self.coef.copy() ret['domain'] = self.domain.copy() return ret def __setstate__(self, dict) : self.__dict__ = dict # Call def __call__(self, arg) : off, scl = pu.mapparms(self.domain, $domain) arg = off + scl*arg return ${nick}val(arg, self.coef) def __iter__(self) : return iter(self.coef) def __len__(self) : return len(self.coef) # Numeric properties. def __neg__(self) : return self.__class__(-self.coef, self.domain) def __pos__(self) : return self def __add__(self, other) : """Returns sum""" if isinstance(other, self.__class__) : if np.all(self.domain == other.domain) : coef = ${nick}add(self.coef, other.coef) else : raise PolyDomainError() else : try : coef = ${nick}add(self.coef, other) except : return NotImplemented return self.__class__(coef, self.domain) def __sub__(self, other) : """Returns difference""" if isinstance(other, self.__class__) : if np.all(self.domain == other.domain) : coef = ${nick}sub(self.coef, other.coef) else : raise PolyDomainError() else : try : coef = ${nick}sub(self.coef, other) except : return NotImplemented return self.__class__(coef, self.domain) def __mul__(self, other) : """Returns product""" if isinstance(other, self.__class__) : if np.all(self.domain == other.domain) : coef = ${nick}mul(self.coef, other.coef) else : raise PolyDomainError() else : try : coef = ${nick}mul(self.coef, other) except : return NotImplemented return self.__class__(coef, self.domain) def 
__div__(self, other): # set to __floordiv__ /. return self.__floordiv__(other) def __truediv__(self, other) : # there is no true divide if the rhs is not a scalar, although it # could return the first n elements of an infinite series. # It is hard to see where n would come from, though. if isinstance(other, self.__class__) : if len(other.coef) == 1 : coef = div(self.coef, other.coef) else : return NotImplemented elif np.isscalar(other) : coef = self.coef/other else : return NotImplemented return self.__class__(coef, self.domain) def __floordiv__(self, other) : """Returns the quotient.""" if isinstance(other, self.__class__) : if np.all(self.domain == other.domain) : quo, rem = ${nick}div(self.coef, other.coef) else : raise PolyDomainError() else : try : quo, rem = ${nick}div(self.coef, other) except : return NotImplemented return self.__class__(quo, self.domain) def __mod__(self, other) : """Returns the remainder.""" if isinstance(other, self.__class__) : if np.all(self.domain == other.domain) : quo, rem = ${nick}div(self.coef, other.coef) else : raise PolyDomainError() else : try : quo, rem = ${nick}div(self.coef, other) except : return NotImplemented return self.__class__(rem, self.domain) def __divmod__(self, other) : """Returns quo, remainder""" if isinstance(other, self.__class__) : if np.all(self.domain == other.domain) : quo, rem = ${nick}div(self.coef, other.coef) else : raise PolyDomainError() else : try : quo, rem = ${nick}div(self.coef, other) except : return NotImplemented return self.__class__(quo, self.domain), self.__class__(rem, self.domain) def __pow__(self, other) : try : coef = ${nick}pow(self.coef, other, maxpower = self.maxpower) except : raise return self.__class__(coef, self.domain) def __radd__(self, other) : try : coef = ${nick}add(other, self.coef) except : return NotImplemented return self.__class__(coef, self.domain) def __rsub__(self, other): try : coef = ${nick}sub(other, self.coef) except : return NotImplemented return 
self.__class__(coef, self.domain) def __rmul__(self, other) : try : coef = ${nick}mul(other, self.coef) except : return NotImplemented return self.__class__(coef, self.domain) def __rdiv__(self, other): # set to __floordiv__ /. return self.__rfloordiv__(other) def __rtruediv__(self, other) : # there is no true divide if the rhs is not a scalar, although it # could return the first n elements of an infinite series. # It is hard to see where n would come from, though. if len(self.coef) == 1 : try : quo, rem = ${nick}div(other, self.coef[0]) except : return NotImplemented return self.__class__(quo, self.domain) def __rfloordiv__(self, other) : try : quo, rem = ${nick}div(other, self.coef) except : return NotImplemented return self.__class__(quo, self.domain) def __rmod__(self, other) : try : quo, rem = ${nick}div(other, self.coef) except : return NotImplemented return self.__class__(rem, self.domain) def __rdivmod__(self, other) : try : quo, rem = ${nick}div(other, self.coef) except : return NotImplemented return self.__class__(quo, self.domain), self.__class__(rem, self.domain) # Enhance me # some augmented arithmetic operations could be added here def __eq__(self, other) : res = isinstance(other, self.__class__) \ and len(self.coef) == len(other.coef) \ and np.all(self.domain == other.domain) \ and np.all(self.coef == other.coef) return res def __ne__(self, other) : return not self.__eq__(other) # # Extra numeric functions. # def degree(self) : """The degree of the series. Notes ----- .. versionadded:: 1.5.0 """ return len(self) - 1 def cutdeg(self, deg) : """Truncate series to the given degree. Reduce the degree of the $name series to `deg` by discarding the high order terms. If `deg` is greater than the current degree a copy of the current series is returned. This can be useful in least squares where the coefficients of the high degree terms may be very small. 
Parameters ---------- deg : non-negative int The series is reduced to degree `deg` by discarding the high order terms. The value of `deg` must be a non-negative integer. Returns ------- new_instance : $name New instance of $name with reduced degree. Notes ----- .. versionadded:: 1.5.0 """ return self.truncate(deg + 1) def convert(self, domain=None, kind=None) : """Convert to different class and/or domain. Parameters ---------- domain : {None, array_like} The domain of the new series type instance. If the value is is ``None``, then the default domain of `kind` is used. kind : {None, class} The polynomial series type class to which the current instance should be converted. If kind is ``None``, then the class of the current instance is used. Returns ------- new_series_instance : `kind` The returned class can be of different type than the current instance and/or have a different domain. Examples -------- Notes ----- Conversion between domains and class types can result in numerically ill defined series. """ if kind is None : kind = $name if domain is None : domain = kind.domain return self(kind.identity(domain)) def mapparms(self) : """Return the mapping parameters. The returned values define a linear map ``off + scl*x`` that is applied to the input arguments before the series is evaluated. The of the map depend on the domain; if the current domain is equal to the default domain ``$domain`` the resulting map is the identity. If the coeffients of the ``$name`` instance are to be used separately, then the linear function must be substituted for the ``x`` in the standard representation of the base polynomials. Returns ------- off, scl : floats or complex The mapping function is defined by ``off + scl*x``. 
Notes: ------ If the current domain is the interval ``[l_1, r_1]`` and the default interval is ``[l_2, r_2]``, then the linear mapping function ``L`` is defined by the equations: L(l_1) = l_2 L(r_1) = r_2 """ return pu.mapparms(self.domain, $domain) def trim(self, tol=0) : """Remove small leading coefficients Remove leading coefficients until a coefficient is reached whose absolute value greater than `tol` or the beginning of the series is reached. If all the coefficients would be removed the series is set to ``[0]``. A new $name instance is returned with the new coefficients. The current instance remains unchanged. Parameters ---------- tol : non-negative number. All trailing coefficients less than `tol` will be removed. Returns ------- new_instance : $name Contains the new set of coefficients. """ return self.__class__(pu.trimcoef(self.coef, tol), self.domain) def truncate(self, size) : """Truncate series to length `size`. Reduce the $name series to length `size` by discarding the high degree terms. The value of `size` must be a positive integer. This can be useful in least squares where the coefficients of the high degree terms may be very small. Parameters ---------- size : positive int The series is reduced to length `size` by discarding the high degree terms. The value of `size` must be a positive integer. Returns ------- new_instance : $name New instance of $name with truncated coefficients. """ isize = int(size) if isize != size or isize < 1 : raise ValueError("size must be a positive integer") if isize >= len(self.coef) : return self.__class__(self.coef, self.domain) else : return self.__class__(self.coef[:isize], self.domain) def copy(self) : """Return a copy. A new instance of $name is returned that has the same coefficients and domain as the current instance. Returns ------- new_instance : $name New instance of $name with the same coefficients and domain. 
""" return self.__class__(self.coef, self.domain) def integ(self, m=1, k=[], lbnd=None) : """Integrate. Return an instance of $name that is the definite integral of the current series. Refer to `${nick}int` for full documentation. Parameters ---------- m : non-negative int The number of integrations to perform. k : array_like Integration constants. The first constant is applied to the first integration, the second to the second, and so on. The list of values must less than or equal to `m` in length and any missing values are set to zero. lbnd : Scalar The lower bound of the definite integral. Returns ------- integral : $name The integral of the series using the same domain. See Also -------- `${nick}int` : similar function. `${nick}der` : similar function for derivative. """ off, scl = self.mapparms() if lbnd is None : lbnd = 0 else : lbnd = off + scl*lbnd coef = ${nick}int(self.coef, m, k, lbnd, 1./scl) return self.__class__(coef, self.domain) def deriv(self, m=1): """Differentiate. Return an instance of $name that is the derivative of the current series. Refer to `${nick}der` for full documentation. Parameters ---------- m : non-negative int The number of integrations to perform. Returns ------- derivative : $name The derivative of the series using the same domain. See Also -------- `${nick}der` : similar function. `${nick}int` : similar function for integration. """ off, scl = self.mapparms() coef = ${nick}der(self.coef, m, scl) return self.__class__(coef, self.domain) def roots(self) : """Return list of roots. Return ndarray of roots for this series. See `${nick}roots` for full documentation. Note that the accuracy of the roots is likely to decrease the further outside the domain they lie. See Also -------- `${nick}roots` : similar function `${nick}fromroots` : function to go generate series from roots. 
""" roots = ${nick}roots(self.coef) return pu.mapdomain(roots, $domain, self.domain) def linspace(self, n=100): """Return x,y values at equally spaced points in domain. Returns x, y values at `n` equally spaced points across domain. Here y is the value of the polynomial at the points x. This is intended as a plotting aid. Paramters --------- n : int, optional Number of point pairs to return. The default value is 100. Returns ------- x, y : ndarrays ``x`` is equal to linspace(self.domain[0], self.domain[1], n) ``y`` is the polynomial evaluated at ``x``. .. versionadded:: 1.5.0 """ x = np.linspace(self.domain[0], self.domain[1], n) y = self(x) return x, y @staticmethod def fit(x, y, deg, domain=None, rcond=None, full=False, w=None) : """Least squares fit to data. Return a `$name` instance that is the least squares fit to the data `y` sampled at `x`. Unlike ${nick}fit, the domain of the returned instance can be specified and this will often result in a superior fit with less chance of ill conditioning. See ${nick}fit for full documentation of the implementation. Parameters ---------- x : array_like, shape (M,) x-coordinates of the M sample points ``(x[i], y[i])``. y : array_like, shape (M,) or (M, K) y-coordinates of the sample points. Several data sets of sample points sharing the same x-coordinates can be fitted at once by passing in a 2D-array that contains one dataset per column. deg : int Degree of the fitting polynomial domain : {None, [beg, end], []}, optional Domain to use for the returned $name instance. If ``None``, then a minimal domain that covers the points `x` is chosen. If ``[]`` the default domain ``$domain`` is used. The default value is $domain in numpy 1.4.x and ``None`` in later versions. The ``'[]`` value was added in numpy 1.5.0. rcond : float, optional Relative condition number of the fit. Singular values smaller than this relative to the largest singular value will be ignored. 
The default value is len(x)*eps, where eps is the relative precision of the float type, about 2e-16 in most cases. full : bool, optional Switch determining nature of return value. When it is False (the default) just the coefficients are returned, when True diagnostic information from the singular value decomposition is also returned. w : array_like, shape (M,), optional Weights. If not None the contribution of each point ``(x[i],y[i])`` to the fit is weighted by `w[i]`. Ideally the weights are chosen so that the errors of the products ``w[i]*y[i]`` all have the same variance. The default value is None. .. versionadded:: 1.5.0 Returns ------- least_squares_fit : instance of $name The $name instance is the least squares fit to the data and has the domain specified in the call. [residuals, rank, singular_values, rcond] : only if `full` = True Residuals of the least-squares fit, the effective rank of the scaled Vandermonde matrix and its singular values, and the specified value of `rcond`. For more details, see `linalg.lstsq`. See Also -------- ${nick}fit : similar function """ if domain is None : domain = pu.getdomain(x) elif domain == [] : domain = $domain xnew = pu.mapdomain(x, domain, $domain) res = ${nick}fit(xnew, y, deg, w=w, rcond=rcond, full=full) if full : [coef, status] = res return $name(coef, domain=domain), status else : coef = res return $name(coef, domain=domain) @staticmethod def fromroots(roots, domain=$domain) : """Return $name object with specified roots. See ${nick}fromroots for full documentation. See Also -------- ${nick}fromroots : equivalent function """ if domain is None : domain = pu.getdomain(roots) rnew = pu.mapdomain(roots, domain, $domain) coef = ${nick}fromroots(rnew) return $name(coef, domain=domain) @staticmethod def identity(domain=$domain) : """Identity function. If ``p`` is the returned $name object, then ``p(x) == x`` for all values of x. 
Parameters: ----------- domain : array_like The resulting array must be if the form ``[beg, end]``, where ``beg`` and ``end`` are the endpoints of the domain. Returns: -------- identity : $name object """ off, scl = pu.mapparms($domain, domain) coef = ${nick}line(off, scl) return $name(coef, domain) '''.replace('REL_IMPORT', rel_import))
def env(m={}):
    """
    Assemble environment information needed for correct operation.
    In particular, ensure that directories containing binaries are included
    in PATH.

    :param m: extra environment overrides merged in (applied last).
    :return: dict of str -> str, safe to pass to subprocess.Popen.
    """
    # NOTE(review): mutable default `m={}` is only read, never mutated,
    # below, so it is harmless here — but `m=None` would be conventional.
    e = os.environ.copy()
    e.update(_env_ext)
    e.update(m)
    roots = [os.path.normpath(s) for s in gs.lst(e.get('GOPATH', '').split(psep), e.get('GOROOT', ''))]
    # Derive GS_GOPATH from the working dir, falling back to the last
    # active Go file.
    e['GS_GOPATH'] = gs_gopath(gs.getwd(), roots) or gs_gopath(gs.attr('last_active_go_fn', ''), roots)
    # Expand user-configured env values against the environment built so
    # far (safe_substitute leaves unknown placeholders intact).
    uenv = gs.setting('env', {})
    for k in uenv:
        try:
            uenv[k] = string.Template(uenv[k]).safe_substitute(e)
        except Exception as ex:
            gs.println('%s: Cannot expand env var `%s`: %s' % (NAME, k, ex))
    e.update(uenv)
    # Re-apply caller overrides so they win over user settings.
    e.update(m)
    if e['GS_GOPATH'] and gs.setting('use_gs_gopath') is True:
        e['GOPATH'] = e['GS_GOPATH']
    # For custom values of GOPATH, installed binaries via go install
    # will go into the "bin" dir of the corresponding GOPATH path.
    # Therefore, make sure these paths are included in PATH.
    add_path = [bin_dir()]
    for s in gs.lst(e.get('GOROOT', ''), e.get('GOPATH', '').split(psep)):
        if s:
            s = os.path.join(s, 'bin')
            if s not in add_path:
                add_path.append(s)
    gobin = e.get('GOBIN', '')
    if gobin and gobin not in add_path:
        add_path.append(gobin)
    # Preserve the existing PATH entries after our additions.
    for s in e.get('PATH', '').split(psep):
        if s and s not in add_path:
            add_path.append(s)
    # Common fallback locations for the Go toolchain, per platform.
    if gs.os_is_windows():
        l = [
            '~\\bin',
            '~\\go\\bin',
            'C:\\Go\\bin',
        ]
    else:
        l = [
            '~/bin',
            '~/go/bin',
            '/usr/local/go/bin',
            '/usr/local/opt/go/bin',
            '/usr/local/bin',
            '/usr/bin',
        ]
    for s in l:
        s = os.path.expanduser(s)
        if s not in add_path:
            add_path.append(s)
    e['PATH'] = psep.join(add_path)
    # Convenience variables describing the active file/dir for commands.
    fn = gs.attr('active_fn', '')
    wd = gs.getwd()
    e.update({
        'PWD': wd,
        '_wd': wd,
        '_dir': os.path.dirname(fn),
        '_fn': fn,
        '_vfn': gs.attr('active_vfn', ''),
        '_nm': fn.replace('\\', '/').split('/')[-1],
    })
    if not e.get('GOPATH'):
        gp = os.path.expanduser('~/go')
        e['GOPATH'] = gp
        _print('GOPATH is not set... setting it to the default: %s' % gp)
    # Ensure no unicode objects leak through. The reason is twofold:
    # * On Windows, Python 2.6 (used by Sublime Text) subprocess.Popen
    #   can only take bytestrings as environment variables in the
    #   "env" parameter. Reference:
    #       https://github.com/DisposaBoy/GoSublime/issues/112
    #       http://stackoverflow.com/q/12253014/1670
    # * Avoids issues with networking too.
    clean_env = {}
    for k, v in e.items():
        try:
            clean_env[gs.astr(k)] = gs.astr(v)
        except Exception as ex:
            gs.println('%s: Bad env: %s' % (NAME, ex))
    return clean_env
background-color: #3366cc; width: 100%; height: 6px; } //--></style> </head> <body> <div class=header><span>Message</span></div> <div><blockquote> <H1>$banner</H1> $detail </blockquote></div> <div class=foot><span></span></div> </body></html> ''' MESSAGE_TEMPLATE = string.Template(MESSAGE_TEMPLATE).substitute def message_html(title, banner, detail=''): return MESSAGE_TEMPLATE(title=title, banner=banner, detail=detail) def spawn_later(seconds, target, *args, **kwargs): def wrap(*args, **kwargs): sleep(seconds) try: target(*args, **kwargs) except Exception as e: logging.warning('%s.%s 错误:%s', target.__module__, target.__name__, e)
def expand_here_template(content, here=None):
    """Replace the ``__HERE__`` placeholder in *content* with *here*.

    Returns *content* unchanged when either argument is empty/None.
    Unknown placeholders are left intact (``safe_substitute``).
    """
    if not (here and content):
        return content
    mapping = {"__HERE__": here}
    return string.Template(content).safe_substitute(mapping)
def testSerializeMetricsRanges(self):
    """Metrics carrying confidence intervals and AUC/AUPRC bound keys must
    serialize into bounded_value protos with the expected methodology."""
    slice_key = _make_slice_key('age', 5, 'language', 'english', 'price', 0.3)
    # The lower/upper bounds travel as separate metric keys next to the
    # point estimate; the serializer folds each triple into one
    # bounded_value message.
    slice_metrics = {
        'accuracy': types.ValueWithConfidenceInterval(0.8, 0.7, 0.9),
        metric_keys.AUPRC: 0.1,
        metric_keys.lower_bound_key(metric_keys.AUPRC): 0.05,
        metric_keys.upper_bound_key(metric_keys.AUPRC): 0.17,
        metric_keys.AUC: 0.2,
        metric_keys.lower_bound_key(metric_keys.AUC): 0.1,
        metric_keys.upper_bound_key(metric_keys.AUC): 0.3
    }
    # $auc / $auprc are substituted so the expected proto stays in sync
    # with the metric-key constants used by the code under test.
    expected_metrics_for_slice = text_format.Parse(
        string.Template("""
        slice_key {
          single_slice_keys {
            column: 'age'
            int64_value: 5
          }
          single_slice_keys {
            column: 'language'
            bytes_value: 'english'
          }
          single_slice_keys {
            column: 'price'
            float_value: 0.3
          }
        }
        metrics {
          key: "accuracy"
          value {
            bounded_value {
              value {
                value: 0.8
              }
              lower_bound {
                value: 0.7
              }
              upper_bound {
                value: 0.9
              }
              methodology: POISSON_BOOTSTRAP
            }
          }
        }
        metrics {
          key: "$auc"
          value {
            bounded_value {
              lower_bound {
                value: 0.1
              }
              upper_bound {
                value: 0.3
              }
              value {
                value: 0.2
              }
              methodology: RIEMANN_SUM
            }
          }
        }
        metrics {
          key: "$auprc"
          value {
            bounded_value {
              lower_bound {
                value: 0.05
              }
              upper_bound {
                value: 0.17
              }
              value {
                value: 0.1
              }
              methodology: RIEMANN_SUM
            }
          }
        }""").substitute(auc=metric_keys.AUC, auprc=metric_keys.AUPRC),
        metrics_for_slice_pb2.MetricsForSlice())
    # Serialize, then round-trip through the wire format before comparing.
    got = metrics_and_plots_evaluator._serialize_metrics(
        (slice_key, slice_metrics),
        [post_export_metrics.auc(),
         post_export_metrics.auc(curve='PR')])
    self.assertProtoEquals(
        expected_metrics_for_slice,
        metrics_for_slice_pb2.MetricsForSlice.FromString(got))
# Python 2/3 compatibility from __future__ import (absolute_import, division, print_function, unicode_literals) # ~ from builtins import * import codecs import string import re import nltk from rstconverter.rs3.rs3tree import RSTTree MULTISAT_RELNAME = 'MONONUC-MULTISAT' RSTSEGMENT_TEMPLATE = string.Template( """\\rstsegment{$segment}""") # \rstsegment{Foo} NUC_TEMPLATE = string.Template("""{}{$nucleus}""") SAT_TEMPLATE = string.Template("""{$relation}{$satellite}""") MULTINUC_TEMPLATE = string.Template( """\multirel{$relation}$nucleus_segments""") RSTLATEX_TREE_RE = re.compile("\\\(dirrel|multirel)") class RSTLatexFileWriter(object): def __init__(self, tree, output_filepath=None): self.tree = tree self.rstlatextree = rsttree2rstlatex(tree) if output_filepath is not None:
def _get_simple_reduction_kernel(
        name, block_size, reduce_type, params, identity,
        pre_map_expr, reduce_expr, post_map_expr,
        type_preamble, input_expr, output_expr, preamble, options):
    """Instantiate, compile and return a generic CUDA reduction kernel.

    The CUDA source below is a string.Template; every ${...} placeholder is
    filled from the corresponding argument, then the module is compiled via
    carray.compile_with_cache and the named __global__ function is returned.
    Two code paths exist in the kernel: a plain loop when the (power-of-two
    padded) output size exceeds the 256-wide shared-memory tree, and a
    shared-memory tree reduction otherwise.
    """
    if identity is None:
        # An empty identity makes "_type_reduce(${identity})" a
        # default-constructed value in the generated C++.
        identity = ''
    module_code = string.Template('''
${type_preamble}
${preamble}
#define REDUCE(a, b) (${reduce_expr})
#define POST_MAP(a) (${post_map_expr})
typedef ${reduce_type} _type_reduce;
extern "C" __global__ void ${name}(${params}) {
  if (_out_clp2_size > 256) {
    CUPY_FOR(_i, _out_ind.size()) {
      _type_reduce _s = _type_reduce(${identity});
      for (int _j = _i, _J = 0;
           _j < _in_ind.size();
           _j += _out_ind.size(), _J++) {
        _in_ind.set(_j);
        ${input_expr}
        _type_reduce _a = ${pre_map_expr};
        _s = REDUCE(_s, _a);
      }
      _out_ind.set(_i);
      ${output_expr}
      POST_MAP(_s);
    }
  } else {
    extern __shared__ _type_reduce _sdata_raw[];
    _type_reduce *_sdata = _sdata_raw;
    int _tid = threadIdx.x;
    _sdata[_tid] = _type_reduce(${identity});
    unsigned int _i = _tid % _out_clp2_size;
    if (_i >= _out_ind.size()) return;
    _type_reduce _s = _type_reduce(${identity});
    int _J_offset = _tid / _out_clp2_size;
    int _j_offset = _J_offset * _out_ind.size();
    int _J_stride = ${block_size} / _out_clp2_size;
    int _j_stride = _J_stride * _out_ind.size();
    for (int _j = _i + _j_offset, _J = _J_offset;
         _j < _in_ind.size();
         _j += _j_stride, _J += _J_stride) {
      _in_ind.set(_j);
      ${input_expr}
      _type_reduce _a = ${pre_map_expr};
      _s = REDUCE(_s, _a);
    }
    _sdata[_tid] = _s;
    __syncthreads();
    if (_tid >= 256) return;
    _sdata[_tid] = REDUCE(_sdata[_tid], _sdata[_tid + 256]);
    __syncthreads();
    if (_out_clp2_size <= 128) {
      _sdata[_tid] = REDUCE(_sdata[_tid], _sdata[_tid + 128]);
      __syncthreads();
      if (_out_clp2_size <= 64) {
        _sdata[_tid] = REDUCE(_sdata[_tid], _sdata[_tid + 64]);
        __syncthreads();
        if (_out_clp2_size <= 32) {
          _sdata[_tid] = REDUCE(_sdata[_tid], _sdata[_tid + 32]);
          if (_out_clp2_size <= 16) {
            _sdata[_tid] = REDUCE(_sdata[_tid], _sdata[_tid + 16]);
            if (_out_clp2_size <= 8) {
              _sdata[_tid] = REDUCE(_sdata[_tid], _sdata[_tid + 8]);
              if (_out_clp2_size <= 4) {
                _sdata[_tid] = REDUCE(_sdata[_tid], _sdata[_tid + 4]);
                if (_out_clp2_size <= 2) {
                  _sdata[_tid] = REDUCE(_sdata[_tid], _sdata[_tid + 2]);
                  if (_out_clp2_size <= 1) {
                    _sdata[_tid] = REDUCE(_sdata[_tid], _sdata[_tid + 1]);
                  }
                }
              }
            }
          }
        }
      }
    }
    _s = _sdata[_tid];
    if (_tid >= _out_ind.size()) return;
    _out_ind.set(_i);
    ${output_expr}
    POST_MAP(_s);
  }
}''').substitute(
        name=name,
        block_size=block_size,
        reduce_type=reduce_type,
        params=params,
        identity=identity,
        reduce_expr=reduce_expr,
        pre_map_expr=pre_map_expr,
        post_map_expr=post_map_expr,
        type_preamble=type_preamble,
        input_expr=input_expr,
        output_expr=output_expr,
        preamble=preamble)
    # compile_with_cache memoizes compilation keyed on source and options.
    module = carray.compile_with_cache(module_code, options)
    return module.get_function(name)
def get_templates(self, default_job_template_name, default_shell_template_name, default_runtime_config_template_name):
    """
    Work out the three templates used to launch each task, from the run-time
    arguments supplied to tardis:

    * job template: used to create a job file for the scheduler (e.g. slurm),
      for each job to be launched
    * shell template: used to create a wrapper shell (i.e. run1.sh, run2.sh
      etc), for each task
    * runtime config template: used to generate source to be included in the
      wrapper shell - i.e. in run1.sh, run2.sh etc.

    For each of the three the user may either
      a) name one of the inbuilt, hard-coded (in tutils.py) templates, or
      b) supply the name of a file containing a custom template, or
      c) supply neither, in which case a default is used (when one exists).
    Specifying both a) and b) is an error.

    :param default_job_template_name: fallback inbuilt job template name (or None)
    :param default_shell_template_name: fallback inbuilt shell template name (or None)
    :param default_runtime_config_template_name: fallback inbuilt runtime config name (or None)
    :return: (job_template, shell_script_template, runtime_config_template),
        each a string.Template, or all None when no options are set
    :raises tutils.tardisException: on conflicting/missing template settings or
        a missing named job template file
    """

    def _read_template_file(filename):
        # Read a template file in one go. The original used the Python 2-only
        # string.join(file(...), "") idiom, which also leaked the file handle;
        # a with-block is Python 3 compatible and closes it deterministically.
        with open(filename, "r") as template_file:
            return template_file.read()

    (job_template, shell_script_template, runtime_config_template) = (None, None, None)

    if self.controller.options is not None:
        # figure out a job template from the options. (You can specify one of
        # the inbuilt templates by name, or supply a file containing a custom
        # template)
        job_template_name = self.controller.options.get("job_template_name", None)
        job_template_filename = self.controller.options.get("jobtemplatefile", None)

        # use default if there is one and it's needed
        if default_job_template_name is not None:
            if job_template_name is None and job_template_filename is None:
                # use the default job template
                job_template_name = default_job_template_name

        # check we have at least a named template or a template file, but not both
        if job_template_name is not None and job_template_filename is not None:
            raise tutils.tardisException("error both job_template_name (%s) and job_template_filename (%s) defined - only define one of these"%(job_template_name,job_template_filename) )
        elif job_template_name is None and job_template_filename is None:
            raise tutils.tardisException("error neither job_template_name nor job_template_filename are defined (and no default available")

        if job_template_name is not None:
            job_template = tutils.getTemplateContent(self.controller.options, job_template_name, logWriter=self.logWriter)
        else:
            if not os.path.isfile(job_template_filename):
                raise tutils.tardisException("error job template file %s not found"%job_template_filename )
            job_template = _read_template_file(job_template_filename)

        if job_template is None:
            raise tutils.tardisException("hpcJob: Error job template is null after templating")

        job_template = string.Template(job_template)

        # figure out a shell template from the options. (Same naming rules as
        # above; note there is deliberately no isfile() pre-check here — a
        # missing file surfaces as the open() error.)
        shell_template_name = self.controller.options.get("shell_template_name", None)
        shell_template_filename = self.controller.options.get("shelltemplatefile", None)

        if shell_template_name is None and shell_template_filename is None:
            # use the default local shell template
            shell_template_name = default_shell_template_name

        if shell_template_name is not None and shell_template_filename is not None:
            raise tutils.tardisException("error both shell_template_name (%s) and shell_template_filename (%s) defined - only define one of these"%(shell_template_name,shell_template_filename) )

        if shell_template_name is not None:
            shell_script_template = tutils.getTemplateContent(self.controller.options, shell_template_name, logWriter=self.logWriter)
        else:
            shell_script_template = _read_template_file(shell_template_filename)

        if shell_script_template is None:
            raise tutils.tardisException("hpcJob : Error shell template is null after templating")

        shell_script_template = string.Template(shell_script_template)

        # figure out run-time configuration code (You can specify one of the
        # inbuilt configs by name, or supply a file containing a custom config)
        runtime_config_template_name = self.controller.options.get("runtime_config_name", None)
        runtime_config_template_filename = self.controller.options.get("runtimeconfigsourcefile", None)

        # Use default if available and needed. Note this logic means that if
        # you supply a run-time config, then the default will not be used - so
        # if for example the default loads a base env, then if you supply your
        # own, you will need to explicitly load the base before doing your own.
        # This is based on the assumption that it's easier to do than to undo.
        if default_runtime_config_template_name is not None:
            if runtime_config_template_name is None and runtime_config_template_filename is None:
                # use the default - for example this might load a default conda
                # env or load a default module (site dependent)
                runtime_config_template_name = default_runtime_config_template_name

        # don't want both named, and a file
        if runtime_config_template_name is not None and runtime_config_template_filename is not None:
            raise tutils.tardisException("error both runtime_config_template_name (%s) and runtime_config_template_filename (%s) defined - only define one of these"%(runtime_config_template_name,runtime_config_template_filename) )

        if runtime_config_template_name is not None:
            runtime_config_template = tutils.getTemplateContent(self.controller.options, runtime_config_template_name, logWriter=self.logWriter)
        else:
            runtime_config_template = _read_template_file(runtime_config_template_filename)

        if runtime_config_template is None:
            raise tutils.tardisException("hpcJob : Error config template is null after templating")

        runtime_config_template = string.Template(runtime_config_template)

    return (job_template, shell_script_template, runtime_config_template)
def testSerializeMetrics(self):
    """Plain float metrics plus AUC/AUPRC bound keys must serialize into
    double_value and bounded_value protos respectively."""
    slice_key = _make_slice_key('age', 5, 'language', 'english', 'price', 0.3)
    # Bounds travel as separate metric keys next to the point estimate;
    # the serializer folds each triple into one bounded_value message.
    slice_metrics = {
        'accuracy': 0.8,
        metric_keys.AUPRC: 0.1,
        metric_keys.lower_bound(metric_keys.AUPRC): 0.05,
        metric_keys.upper_bound(metric_keys.AUPRC): 0.17,
        metric_keys.AUC: 0.2,
        metric_keys.lower_bound(metric_keys.AUC): 0.1,
        metric_keys.upper_bound(metric_keys.AUC): 0.3
    }
    # $auc / $auprc are substituted so the expected proto stays in sync
    # with the metric-key constants used by the code under test.
    expected_metrics_for_slice = text_format.Parse(
        string.Template("""
        slice_key {
          single_slice_keys {
            column: 'age'
            int64_value: 5
          }
          single_slice_keys {
            column: 'language'
            bytes_value: 'english'
          }
          single_slice_keys {
            column: 'price'
            float_value: 0.3
          }
        }
        metrics {
          key: "accuracy"
          value {
            double_value {
              value: 0.8
            }
          }
        }
        metrics {
          key: "$auc"
          value {
            bounded_value {
              lower_bound {
                value: 0.1
              }
              upper_bound {
                value: 0.3
              }
              value {
                value: 0.2
              }
            }
          }
        }
        metrics {
          key: "$auprc"
          value {
            bounded_value {
              lower_bound {
                value: 0.05
              }
              upper_bound {
                value: 0.17
              }
              value {
                value: 0.1
              }
            }
          }
        }""").substitute(auc=metric_keys.AUC, auprc=metric_keys.AUPRC),
        metrics_for_slice_pb2.MetricsForSlice())
    # Serialize, then round-trip through the wire format before comparing.
    got = serialization._serialize_metrics(
        (slice_key, slice_metrics),
        [post_export_metrics.auc(),
         post_export_metrics.auc(curve='PR')])
    self.assertProtoEquals(
        expected_metrics_for_slice,
        metrics_for_slice_pb2.MetricsForSlice.FromString(got))
cleanname = re.sub('\W+', '', classname) # Output file base name for HTML help builder. htmlhelp_basename = cleanname + 'doc' # -- Options for LaTeX output --------------------------------------------- front_cover_image = 'front_cover' back_cover_image = 'back_cover' front_cover_image_path = os.path.join('_static', front_cover_image + '.png') back_cover_image_path = os.path.join('_static', back_cover_image + '.png') latex_additional_files = [front_cover_image_path, back_cover_image_path] template = string.Template(open('preamble.tex').read()) latex_contents = r""" \frontcoverpage \contentspage """ backcover_latex_contents = r""" \backcoverpage """ latex_elements = { 'papersize': 'letterpaper', 'pointsize': '10pt',
def templated_query(self, context=None):
    """Return this query, expanding ``$``-placeholders from *context*
    when the query is templated.

    Falls back to the raw query when no context is supplied or the query
    is not templated; unknown placeholders are left intact
    (safe_substitute).
    """
    if not context or not self.is_templated:
        return self.query
    values = context.get_values()
    return string.Template(self.query).safe_substitute(values)
def outputPage(cfg):
    """Render template.html with every configuration value and print the
    resulting HTML page to stdout.

    :param cfg: configuration object whose attributes mirror the config-file
        sections referenced in the comments below.
    """
    # Read the page template; the with-block guarantees the handle is closed
    # even if read() raises (the original open/read/close leaked on error).
    with open("template.html", "r") as in_file:
        text_template = in_file.read()
    html_template = string.Template(text_template)

    # Build the substitution mapping. Keys must match the $placeholders in
    # template.html; values come from the cfg sections noted below.
    d = dict(location_latitude=cfg.location_latitude)
    #[General]
    d.update(offline=globalvars.offline)
    d.update(station_name=cfg.station_name)
    d.update(config_web_server=cfg.config_web_server)
    d.update(set_system_time_from_ntp_server_at_startup=cfg.
             set_system_time_from_ntp_server_at_startup)
    d.update(set_time_at_boot=cfg.set_time_at_boot)
    d.update(ntp_server=cfg.ntp_server)
    d.update(reboot_at=cfg.reboot_at)
    d.update(shutdown_at=cfg.shutdown_at)
    d.update(shutdown_hour_before_sunset=cfg.shutdown_hour_before_sunset)
    d.update(location_longitude=cfg.location_longitude)
    d.update(location_altitude=cfg.location_altitude)
    d.update(wifi_reset_if_down=cfg.wifi_reset_if_down)
    d.update(config_web_server_port=cfg.config_web_server_port)
    d.update(wind_speed_units=cfg.wind_speed_units)
    # NOTE(review): set_time_at_boot is set twice (also above); harmless
    # duplicate, kept for template-key parity.
    d.update(set_time_at_boot=cfg.set_time_at_boot)
    d.update(ntp_url=cfg.ntp_url)
    d.update(disable_hdmi=cfg.disable_hdmi)
    # [Dongle]
    d.update(usedongle=cfg.usedongle)
    d.update(AlwaysOnInternet=cfg.AlwaysOnInternet)
    d.update(UseDongleNet=cfg.UseDongleNet)
    d.update(operator=cfg.operator)
    d.update(dongledataport=cfg.dongleDataPort)
    d.update(dongleaudioport=cfg.dongleAudioPort)
    d.update(donglectrlport=cfg.dongleCtrlPort)
    #[Security]
    d.update(SMSPwd=cfg.SMSPwd)
    #[DataLogging]
    d.update(logdata=cfg.logdata)
    d.update(serverfile=cfg.serverfile)
    #[Upload]
    d.update(upload_data=cfg.upload_data)
    d.update(upload_folder=cfg.upload_folder)
    # [Sensors]
    d.update(sensor_type=cfg.sensor_type)
    d.update(use_wind_sensor=cfg.use_wind_sensor)
    d.update(number_of_measure_for_wind_dir_average=cfg.
             number_of_measure_for_wind_dir_average)
    d.update(windspeed_offset=cfg.windspeed_offset)
    d.update(windspeed_gain=cfg.windspeed_gain)
    d.update(windmeasureinterval=cfg.windmeasureinterval)
    d.update(use_bmp085=cfg.use_bmp085)
    d.update(use_bme280=cfg.use_bme280)
    d.update(use_tmp36=cfg.use_tmp36)
    d.update(use_dht=cfg.use_dht)
    d.update(dht_type=cfg.dht_type)
    d.update(
        number_of_measure_for_wind_trend=cfg.number_of_measure_for_wind_trend)
    d.update(wind_trend_limit=cfg.wind_trend_limit)
    d.update(number_of_measure_for_wind_average_gust_calculation=cfg.
             number_of_measure_for_wind_average_gust_calculation)
    d.update(solarsensor=cfg.solarsensor)
    d.update(uvsensor=cfg.uvsensor)
    d.update(external_sensor_path=cfg.external_sensor_path)
    d.update(anemometer_pin=cfg.anemometer_pin)
    # [mcp3002]
    d.update(mcp3002_spiDev=cfg.mcp3002_spiDev)
    # [LoRa]
    d.update(use_LoRa=cfg.use_LoRa)
    d.update(LoRa_spiDev=cfg.LoRa_spiDev)
    d.update(LoRa_frequency=cfg.LoRa_frequency)
    d.update(LoRa_power=cfg.LoRa_power)
    d.update(LoRa_ID=cfg.LoRa_ID)
    d.update(LoRa_BW=cfg.LoRa_BW)
    d.update(LoRa_CR=cfg.LoRa_CR)
    d.update(LoRa_SF=cfg.LoRa_SF)
    d.update(LoRa_mode=cfg.LoRa_mode)
    # [Sensor_PCE-FWS20]
    d.update(set_system_time_from_WeatherStation=cfg.
             set_system_time_from_WeatherStation)
    # [Sensor Serial]
    d.update(sensor_serial_port=cfg.sensor_serial_port)
    # [Sensor_NEVIO8-16]
    # [RFM01]
    d.update(rfm01_frequenzy=cfg.rfm01_frequenzy)
    d.update(rfm01_band=cfg.rfm01_band)
    d.update(rfm01_lna=cfg.rfm01_lna)
    d.update(rfm01_rssi=cfg.rfm01_rssi)
    # [RTL-SDR]
    d.update(rtlsdr_frequency=cfg.rtlsdr_frequency)
    d.update(rtlsdr_bdl=cfg.rtlsdr_bdl)
    d.update(rtlsdr_ppm=cfg.rtlsdr_ppm)
    d.update(rtlsdr_timesync=cfg.rtlsdr_timesync)
    #[WebCam]
    d.update(webcamDevice1=cfg.webcamDevice1)
    d.update(webcamDevice2=cfg.webcamDevice2)
    d.update(webcamLogo=cfg.webcamLogo)
    d.update(sendImagesToServer=cfg.sendImagesToServer)
    d.update(WebCamInterval=cfg.WebCamInterval)
    d.update(webcamdevice1captureresolution=cfg.webcamdevice1captureresolution)
    d.update(webcamdevice2captureresolution=cfg.webcamdevice2captureresolution)
    d.update(webcamdevice1finalresolution=cfg.webcamdevice1finalresolution)
    d.update(webcamdevice2finalresolution=cfg.webcamdevice2finalresolution)
    d.update(sendallimagestoserver=cfg.sendallimagestoserver)
    d.update(delete_images_on_sd=cfg.delete_images_on_sd)
    d.update(captureprogram=cfg.captureprogram)
    #[Camera]
    d.update(usecameradivice=cfg.usecameradivice)
    d.update(cameradivicefinalresolution=cfg.cameradivicefinalresolution)
    d.update(gphoto2options=cfg.gphoto2options)
    d.update(gphoto2options_Night=cfg.gphoto2options_Night)
    d.update(reset_usb=cfg.reset_usb)
    d.update(clear_all_sd_cards_at_startup=cfg.clear_all_sd_cards_at_startup)
    d.update(start_camera_number=cfg.start_camera_number)
    d.update(gphoto2_capture_image_and_download=cfg.
             gphoto2_capture_image_and_download)
    d.update(use_camera_resetter=cfg.use_camera_resetter)
    d.update(camera_resetter_normaly_on=cfg.camera_resetter_normaly_on)
    d.update(on_off_camera=cfg.on_off_camera)
    #[CameraPI]
    d.update(use_cameraPI=cfg.use_cameraPI)
    d.update(cameraPI_day_settings=cfg.cameraPI_day_settings)
    d.update(cameraPI_night_settings=cfg.cameraPI_night_settings)
    # [ftp]
    d.update(ftpserver=cfg.ftpserver)
    d.update(ftpserverDestFolder=cfg.ftpserverDestFolder)
    d.update(ftpserverLogin=cfg.ftpserverLogin)
    d.update(ftpserverPassowd=cfg.ftpserverPassowd)
    d.update(
        use_thread_for_sending_to_server=cfg.use_thread_for_sending_to_server)
    # [Radio]
    d.update(useradio=cfg.useradio)
    d.update(radiointerval=cfg.radiointerval)
    d.update(radio_verbosity=cfg.radio_verbosity)
    # [Mail]
    d.update(gmail_user=cfg.gmail_user)
    d.update(gmail_pwd=cfg.gmail_pwd)
    d.update(mail_to=cfg.mail_to)
    d.update(use_mail=cfg.use_mail)
    d.update(mail_ip=cfg.mail_ip)
    # [SMS]
    d.update(send_IP_by_sms=cfg.send_IP_by_sms)
    d.update(number_to_send=cfg.number_to_send)
    #[WeatherUnderground]
    d.update(WeatherUnderground_logdata=cfg.WeatherUnderground_logdata)
    d.update(WeatherUnderground_ID=cfg.WeatherUnderground_ID)
    d.update(WeatherUnderground_password=cfg.WeatherUnderground_password)
    #[CWOP]
    d.update(CWOP_logdata=cfg.CWOP_logdata)
    d.update(CWOP_ID=cfg.CWOP_ID)
    d.update(CWOP_password=cfg.CWOP_password)
    #[WindFinder]
    d.update(WindFinder_logdata=cfg.WindFinder_logdata)
    d.update(WindFinder_ID=cfg.WindFinder_ID)
    d.update(WindFinder_password=cfg.WindFinder_password)
    #[PWS]
    d.update(PWS_logdata=cfg.PWS_logdata)
    d.update(PWS_ID=cfg.PWS_ID)
    d.update(PWS_password=cfg.PWS_password)
    #[DNS Exit]
    d.update(use_DNSExit=cfg.use_DNSExit)
    d.update(DNSExit_uname=cfg.DNSExit_uname)
    d.update(DNSExit_pwd=cfg.DNSExit_pwd)
    d.update(DNSExit_hname=cfg.DNSExit_hname)
    #[IP CAM]
    d.update(IPCamInterval=cfg.IPCamInterval)
    d.update(IPCamCfg=cfg.IPCamCfg)
    d.update(IPCamIP1=cfg.IPCamIP1)
    d.update(IPCamUS1=cfg.IPCamUS1)
    d.update(IPCamPW1=cfg.IPCamPW1)
    d.update(IPCamSN1=cfg.IPCamSN1)
    d.update(IPCamIP2=cfg.IPCamIP2)
    d.update(IPCamUS2=cfg.IPCamUS2)
    d.update(IPCamPW2=cfg.IPCamPW2)
    d.update(IPCamSN2=cfg.IPCamSN2)
    d.update(IPCamZZZ=cfg.IPCamZZZ)
    d.update(IPCamPosN=cfg.IPCamPosN)
    d.update(IPCamPosNE=cfg.IPCamPosNE)
    d.update(IPCamPosE=cfg.IPCamPosE)
    d.update(IPCamPosSE=cfg.IPCamPosSE)
    d.update(IPCamPosS=cfg.IPCamPosS)
    d.update(IPCamPosSW=cfg.IPCamPosSW)
    d.update(IPCamPosW=cfg.IPCamPosW)
    d.update(IPCamPosNW=cfg.IPCamPosNW)
    #[LAYOUT]
    d.update(LayColorTBC=cfg.LayColorTBC)
    d.update(LayColorTTC=cfg.LayColorTTC)
    d.update(LayColorBBC=cfg.LayColorBBC)
    d.update(LayColorBTC=cfg.LayColorBTC)

    # safe_substitute leaves any placeholder with no value untouched
    # instead of raising KeyError.
    html = html_template.safe_substitute(d)
    print(html)
def __init__(self, config, story):
    """Initialise the HTML writer: run the shared BaseStoryWriter setup,
    then define the string.Template fragments that are concatenated to
    produce the single-file HTML output."""
    BaseStoryWriter.__init__(self, config, story)

    # Document head + story header; ${output_css} is inlined into <style>.
    self.HTML_FILE_START = string.Template('''<!DOCTYPE html>
<html>
<head>
<meta charset="UTF-8">
<title>${title} by ${author}</title>
<style type="text/css">
${output_css}
</style>
</head>
<body>
<h1><a href="${storyUrl}">${title}</a> by ${authorHTML}</h1>
''')

    # Optional cover image block.
    self.HTML_COVER = string.Template('''
<img src="${coverimg}" alt="cover" />
''')

    # Title page: a table of label/value metadata rows.
    self.HTML_TITLE_PAGE_START = string.Template('''
<table class="full">
''')

    self.HTML_TITLE_ENTRY = string.Template('''
<tr><td><b>${label}:</b></td><td>${value}</td></tr>
''')

    self.HTML_TITLE_PAGE_END = string.Template('''
</table>
''')

    # Table of contents: entries link to per-chapter "section${index04}"
    # anchors emitted by HTML_CHAPTER_START below.
    self.HTML_TOC_PAGE_START = string.Template('''
<a name="TOCTOP"><h2>Table of Contents</h2></a>
<p>
''')

    self.HTML_TOC_ENTRY = string.Template('''
<a href="#section${index04}">${chapter}</a><br />
''')

    self.HTML_TOC_PAGE_END = string.Template('''
</p>
''')

    # Chapter wrapper; ${index04} is presumably a zero-padded (4-digit)
    # chapter index supplied by the base writer — TODO confirm.
    self.HTML_CHAPTER_START = string.Template('''
<a name="section${index04}"><h2>${chapter}</h2></a>
''')

    # No markup needed at the end of a chapter.
    self.HTML_CHAPTER_END = string.Template('')

    self.HTML_FILE_END = string.Template('''
</body>
</html>''')
import string

# Demonstrate string.Template: substitute() raises KeyError for any
# placeholder missing from the mapping, while safe_substitute() leaves
# unknown placeholders in place.
values = {'var': 'foo'}

t = string.Template("$var is here but $missing is not provided")

try:
    # $missing has no mapping, so substitute() raises KeyError.
    # (Label typo fixed: was 'substitue() :'.)
    print('substitute():', t.substitute(values))
except KeyError as err:
    print('ERROR:', str(err))

# safe_substitute() never raises; unknown placeholders survive verbatim.
print('safe_substitute():', t.safe_substitute(values))