def test_patch_instance():
    """Patching a single instance must not leak into sibling instances."""
    Foo, PatchedFoo = simple_classes()
    patched = Foo("val", True)
    untouched = Foo("val", True)

    patch(patched, PatchedFoo)

    # The patched instance exposes the overridden behaviour and the new method.
    assert patched.compute_string() == "(val)_patched"
    assert not patched.compute_bool()
    patched.added_method()

    # A sibling instance of the same class keeps stock behaviour and has no
    # added_method at all.
    assert untouched.compute_string() == "(val)"
    assert untouched.compute_bool()
    with pytest.raises(Exception):
        untouched.added_method()

    unpatch(patched)

    # After unpatching, the first instance is indistinguishable from stock.
    assert patched.compute_string() == "(val)"
    assert patched.compute_bool()
    with pytest.raises(Exception):
        patched.added_method()
def test_patch_class_before_construction():
    """Instances built after a class-level patch see the patched behaviour."""
    Foo, PatchedFoo = simple_classes()

    # Patch first, construct second.
    patch(Foo, PatchedFoo)
    instance = Foo("val", True)

    assert instance.compute_string() == "(val)_patched"
    assert not instance.compute_bool()
    instance.added_method()
def test_patch_class_after_construction():
    """A class-level patch retroactively affects pre-existing instances."""
    Foo, PatchedFoo = simple_classes()
    instance = Foo("val", True)

    # Before the patch: stock behaviour, no added_method.
    assert instance.compute_string() == "(val)"
    assert instance.compute_bool()
    with pytest.raises(Exception):
        instance.added_method()

    patch(Foo, PatchedFoo)

    # The already-constructed instance now shows the patched behaviour.
    assert instance.compute_string() == "(val)_patched"
    assert not instance.compute_bool()
    instance.added_method()
def ready(self):
    """App start-up hook.

    Pre-compiles each configured subdomain's regex/callback, and — only when
    EMULATE is enabled — monkeypatches Django's request plumbing and injects
    the dynamic-subdomains URLs into every configured urlconf.
    """
    for x in app_settings.SUBDOMAINS:
        # We add a literal period to the end of every pattern to avoid rather
        # unwieldy escaping in every definition.
        x['_regex'] = re.compile(r'%s(\.|$)' % x['regex'])
        # Resolve the dotted-path callback once, up front.
        x['_callback'] = import_string(x['callback'])

    # Everything below is emulation-only wiring; bail out otherwise.
    if not app_settings.EMULATE:
        return

    # Replace HttpRequest.get_host and RequestFactory.generic with the
    # module's own implementations (defined elsewhere in this file).
    monkeypatch.patch(HttpRequest__get_host, HttpRequest, 'get_host')
    monkeypatch.patch(RequestFactory__generic, RequestFactory, 'generic')

    # Inject our URLs
    for x in app_settings.SUBDOMAINS:
        urlconf_module = import_string(x['urlconf'])
        # Copy-then-extend so a shared/immutable urlpatterns is not mutated
        # in place.
        urlconf_module.urlpatterns = list(urlconf_module.urlpatterns) + [
            url(r'^_/subdomains/',
                include('dynamic_subdomains.urls',
                        namespace='dynamic-subdomains')),
        ]
def test_multiple_call():
    """Repeated patch() calls do not stack: a single unpatch() restores the
    original behaviour, and extra unpatch() calls are harmless no-ops."""
    Foo, PatchedFoo = simple_classes()
    instance = Foo("val", True)

    # Apply the same patch three times in a row.
    for _ in range(3):
        patch(Foo, PatchedFoo)

    assert instance.compute_string() == "(val)_patched"
    assert not instance.compute_bool()
    instance.added_method()

    # One unpatch is enough to fully restore the class.
    unpatch(Foo)
    assert instance.compute_string() == "(val)"
    assert instance.compute_bool()
    with pytest.raises(Exception):
        instance.added_method()

    # A redundant unpatch leaves the class untouched.
    unpatch(Foo)
    assert instance.compute_string() == "(val)"
    assert instance.compute_bool()
    with pytest.raises(Exception):
        instance.added_method()
def ready(self):
    """App start-up hook (duplicate of the variant above, differing only in
    call formatting).

    Pre-compiles each configured subdomain's regex/callback, and — only when
    EMULATE is enabled — monkeypatches Django's request plumbing and injects
    the dynamic-subdomains URLs into every configured urlconf.
    """
    for x in app_settings.SUBDOMAINS:
        # We add a literal period to the end of every pattern to avoid rather
        # unwieldy escaping in every definition.
        x['_regex'] = re.compile(r'%s(\.|$)' % x['regex'])
        # Resolve the dotted-path callback once, up front.
        x['_callback'] = import_string(x['callback'])

    # Everything below is emulation-only wiring; bail out otherwise.
    if not app_settings.EMULATE:
        return

    # Replace HttpRequest.get_host and RequestFactory.generic with the
    # module's own implementations (defined elsewhere in this file).
    monkeypatch.patch(HttpRequest__get_host, HttpRequest, 'get_host')
    monkeypatch.patch(RequestFactory__generic, RequestFactory, 'generic')

    # Inject our URLs
    for x in app_settings.SUBDOMAINS:
        urlconf_module = import_string(x['urlconf'])
        # Copy-then-extend so a shared/immutable urlpatterns is not mutated
        # in place.
        urlconf_module.urlpatterns = list(urlconf_module.urlpatterns) + [
            url(
                r'^_/subdomains/',
                include('dynamic_subdomains.urls',
                        namespace='dynamic-subdomains')),
        ]
def test_force_repatch():
    """A second patch() is ignored unless the force flag is passed."""
    Foo, PatchedFoo = simple_classes()

    class OtherPatch:
        def compute_string(self):
            return "second"

    instance = Foo("val", True)

    patch(Foo, PatchedFoo)
    assert instance.compute_string() == "(val)_patched"

    # Re-patching without force is a no-op: the first patch stays in effect.
    patch(Foo, OtherPatch)
    assert instance.compute_string() == "(val)_patched"

    # Forcing replaces the earlier patch with the new one.
    patch(Foo, OtherPatch, True)
    assert instance.compute_string() == "second"

    # Unpatching restores the pristine class regardless of patch history.
    unpatch(Foo)
    assert instance.compute_string() == "(val)"
# NOTE(review): fragment — the enclosing generator's `def` header is not
# visible in this chunk; presumably a Category._parseCategory replacement
# given the patch table at the bottom. Indentation reconstructed.
if not startFrom:
    startFrom = 0
# NOTE(review): `ns` appears unused in the visible code — confirm upstream.
ns = self.site().category_namespaces()
catsdone = []
# Worklist of (category, remaining recursion depth-or-flag) pairs.
catstodo = [(self, recurse)]
# Get subcats and articles
for (cat, recurselevel) in catstodo:
    # An int recursion level counts down; any other truthy value
    # (e.g. True) means "recurse without limit".
    if type(recurselevel) == type(1):
        newrecurselevel = recurselevel - 1
    else:
        newrecurselevel = recurselevel
    catsdone.append(cat)
    wikipedia.output("Getting [[%s]] from %s..."
                     % (cat.title(), cat.site().dbName()))
    for page in toolserver.Generators.getCategoryMembers(cat, startFrom):
        if type(page) == catlib.Category:
            # Queue unseen subcategories while recursion depth remains;
            # yield the subcategory either way.
            if recurselevel and page not in catsdone:
                catstodo.append((page, newrecurselevel))
            yield catlib.SUBCATEGORY, page.title()
        else:
            yield catlib.ARTICLE, page.title()
# Get supercats
for supercat in toolserver.Generators.getCategories(self):
    yield catlib.SUPERCATEGORY, supercat.title()

# Module-level: install the generator above as catlib's category parser.
patches = {"catlib.Category._parseCategory": "_catlib_Category__parseCategory"}
monkeypatch.patch(patches, globals(), locals())
# specific language governing permissions and limitations # under the License. import logging import os import re try: from elftools.elf.elffile import ELFFile except ImportError as e: # Handle pre-python2.7s' lack of collections.OrderedDict, which we include in # impala-python as ordereddict.OrderedDict. if 'cannot import name OrderedDict' == str(e): import monkeypatch from ordereddict import OrderedDict monkeypatch.patch(OrderedDict, 'collections', 'OrderedDict') from elftools.elf.elffile import ELFFile else: raise e LOG = logging.getLogger('tests.common.environ') # See if Impala is running with legacy aggregations and/or hash joins. This is kind of a # hack. It would be better to poll Impala whether it is doing so. test_start_cluster_args = os.environ.get("TEST_START_CLUSTER_ARGS", "") old_agg_regex = "enable_partitioned_aggregation=false" old_hash_join_regex = "enable_partitioned_hash_join=false" USING_OLD_AGGS_JOINS = re.search(old_agg_regex, test_start_cluster_args) is not None or \ re.search(old_hash_join_regex, test_start_cluster_args) is not None
def _patch(cls, *args, **argd):
    """Lazy-import trampoline for :func:`monkeypatch.patch`.

    The first call imports ``monkeypatch``, installs the real ``patch``
    function on the class in place of this placeholder, and delegates the
    current call to it; subsequent calls therefore bypass this trampoline
    entirely.
    """
    import monkeypatch

    real_patch = monkeypatch.patch
    # Replace the placeholder so future calls skip the import machinery.
    cls._patch = staticmethod(real_patch)
    return real_patch(*args, **argd)
# NOTE(review): fragment — the top of the enclosing generator is not visible
# in this chunk. Indentation reconstructed.
        rev['revision'], rev['date'], rev['user'])
    yield (rev['date'], rev['user'],
           self.getEditPage(get_redirect=True, oldid=rev['revision'])[0])

@monkeypatch.bak
def _wikipedia_Page_fullVersionHistory(self):
    """Materialize the full revision history as a list of entries."""
    return [entry for entry in VersionHistoryGenerator(self)]

# Map of dotted target names -> local replacement function names; applied
# in bulk by monkeypatch.patch below.
patches = {
    "wikipedia.Site.dbName": "_wikipedia_Site_dbName",
    "wikipedia.Page.getEditPage": "_wikipedia_Page_getEditPage",
    "wikipedia.Page.latestRevision": "_wikipedia_Page_latestRevision",
    "wikipedia.Page.exists": "_wikipedia_Page_exists",
    "wikipedia.Page.isRedirectPage": "_wikipedia_Page_isRedirectPage",
    "wikipedia.Page.isEmpty": "_wikipedia_Page_isEmpty",
    "wikipedia.Page.botMayEdit": "_wikipedia_Page_botMayEdit",
    "wikipedia.Page.getReferences": "_wikipedia_Page_getReferences",
    "wikipedia.Page.interwiki": "_wikipedia_Page_interwiki",
    "wikipedia.Page.categories": "_wikipedia_Page_categories",
    "wikipedia.Page.linkedPages": "_wikipedia_Page_linkedPages",
    "wikipedia.Page.imagelinks": "_wikipedia_Page_imagelinks",
    "wikipedia.Page.templates": "_wikipedia_Page_templates",
    "wikipedia.Page.templatePages": "_wikipedia_Page_templatePages",
    "wikipedia.Page.getVersionHistory": "_wikipedia_Page_getVersionHistory",
    "wikipedia.Page.fullVersionHistory": "_wikipedia_Page_fullVersionHistory"
}
monkeypatch.patch(patches, globals(), locals())
import monkeypatch

from .views import serve

# Replace django.contrib.staticfiles.views.serve with this package's own
# serve view.
monkeypatch.patch(serve, 'django.contrib.staticfiles.views', 'serve')
# NOTE(review): fragment — the top of the enclosing function (presumably
# reverse_with_debugging) is not visible in this chunk. Indentation
# reconstructed.
            raise NoReverseMatch(str(e) + (" Possible match: %s"
                % (self.reverse_dict[lookup_view],)))
        else:
            # Show only the reverse-map keys of the same kind (callable vs
            # named) as the failed lookup, to keep the message readable.
            if callable(lookup_view):
                raise NoReverseMatch(str(e) + "\n" + ("No such key %s in %s\n\n"
                    % (lookup_view,
                       [k for k in self.reverse_dict.keys() if callable(k)]))
                    + ("Complete reverse map: %s\n"
                       % pp.pformat(self.reverse_dict)))
            else:
                raise NoReverseMatch(str(e) + "\n" + ("No such key %s in %s\n"
                    % (lookup_view,
                       [k for k in self.reverse_dict.keys() if not callable(k)]))
                    + ("Complete reverse map: %s\n"
                       % pp.pformat(self.reverse_dict)))

if '_reverse_with_prefix' in dir(RegexURLResolver):
    # support for Django 1.4:
    patch(RegexURLResolver, '_reverse_with_prefix', reverse_with_debugging)

@after(RegexURLResolver, '_populate')
def populate_reverse_dict_with_module_function_names(self):
    """Post-_populate hook: also index reverse entries under the callback's
    dotted "module.function" name."""
    from django.utils.translation import get_language
    language_code = get_language()
    reverse_dict = self._reverse_dict[language_code]
    for pattern in reversed(self.url_patterns):
        if not isinstance(pattern, RegexURLResolver):
            # import pdb; pdb.set_trace()
            for reverse_item in reverse_dict.getlist(pattern.callback):
                function_name = "%s.%s" % (pattern.callback.__module__,
                                           pattern.callback.__name__)
                reverse_dict.appendlist(function_name, reverse_item)

# NOTE(review): class body continues beyond this chunk.
class FieldlineWithCustomReadOnlyField(object):