def view_thing(self, e):
    """Redirect the current request to its 'views/syntax/' subpage."""
    base = e.request.url
    # normalize so the appended path segment is well-formed
    if not base.endswith('/'):
        base = base + '/'
    target = base + 'views/syntax/'
    logger.debug('Redirect to ' + target)
    raise HTTPFound(target)
def get_next_unescaped_appearance(s, d1, search_from, next_char_not_word=False):
    """
    Returns the index of the next occurrence of the delimiter `d1` in `s`
    at or after `search_from`, skipping occurrences preceded by a backslash.

    If next_char_not_word is True, also skips matches whose following
    character satisfies can_be_used_in_command() (i.e. the delimiter is
    part of a longer command word).

    Raises NotFound if no acceptable occurrence exists.

    NOTE(review): when a match occurs at index 0, `s[maybe - 1]` reads
    `s[-1]` (the last character) — verify that a leading match cannot be
    misclassified as escaped.
    """
    while True:
        if not d1 in s[search_from:]:
            # print('nope, no %r in s[%s:] = %r' % (d1,search_from, s[search_from:]))
            # print('cannot find %r in s o f len = %s starting from %s' % (d1, len(s), search_from))
            raise NotFound()
        maybe = s.index(d1, search_from)
        if s[maybe - 1] == '\\':
            # escaped occurrence: log (only for 'space' delimiters) and keep scanning
            if 'space' in d1:
                w = Where(s, maybe, maybe + len(d1))
                msg = 'Skipping escaped sequence:\n\n' + w.__str__()
                logger.debug(msg)
            # print('found escaped match of %r (prev chars = %r)' % (d1, s[:maybe]))
            search_from = maybe + 1
        else:
            assert s[maybe:].startswith(d1)
            # character right after the delimiter; 'o' is a dummy word
            # character used when the delimiter ends the string
            nextchar_i = maybe + len(d1)
            nextchar = s[nextchar_i] if nextchar_i < len(s) else 'o'
            if next_char_not_word and can_be_used_in_command(nextchar):
                #print('skipping because nextchar = %r' % nextchar)
                search_from = maybe + 1
                continue
            # print('found %r at %r ' % (d1, s[maybe:]))
            return maybe
def subfloat_replace(args, opts):
    """
    Renders a subfloat as a <figure class="subfloat"> HTML element.

    args[0] is the figure contents (a string); opts[0] is the caption,
    possibly containing a "fig:" or "subfig:" label, or None.
    A label is synthesized from the MD5 of the contents if none is found.
    """
    contents = args[0]
    caption = opts[0]
    check_isinstance(contents, str)
    label = None
    if caption is not None:
        # extract an embedded label, accepting either prefix
        caption, label = get_s_without_label(caption, labelprefix="fig:")
        if label is None:
            caption, label = get_s_without_label(caption, labelprefix="subfig:")
        if label is not None and not label.startswith('subfig:'):
            msg = 'Subfigure labels should start with "subfig:"; found %r.' % (
                label)
            label = 'sub' + label
            msg += 'I will change to %r.' % label
            logger.debug(msg)
    # we need to make up an ID
    if label is None:
        label = 'subfig:' + get_md5(contents)
    idpart = ' id="%s"' % label
    if caption is None:
        caption = 'no subfloat caption'
    return '<figure class="subfloat"%s>%s<figcaption>%s</figcaption></figure>' % (
        idpart, contents, caption)
def disk_events_from_list_append(disk_map, view, _id, who, name, value):
    """
    Translates an in-memory 'list append' event into the corresponding
    disk event(s), according to how `disk_map` lays the list out on disk.

    Returns a list containing a single disk event (a file-create, or a
    group of events creating a directory hierarchy).
    Raises NotImplementedError for layout hints other than HintDir.
    """
    logger.debug('list append to %s for value %s' % (name, value))
    view_parent = get_view_node(view, name)
    schema_parent = view_parent._schema
    check_isinstance(schema_parent, SchemaList)
    hint = disk_map.get_hint(schema_parent)
    if isinstance(hint, HintDir):
        # materialize the appended value according to the list's element schema
        sub = disk_map.create_hierarchy_(schema_parent.prototype, value)
        dirname = disk_map.dirname_from_data_url_(view._schema, name)
        # the new element is keyed by its index (current list length)
        next_index = len(view_parent._data)
        filename = hint.filename_for_key(str(next_index))
        if isinstance(sub, ProxyFile):
            # single file: one file-create event
            contents = sub.contents
            disk_event = disk_event_file_create(_id, who, dirname, filename,
                                                contents)
            return [disk_event]
        elif isinstance(sub, ProxyDirectory):
            # create hierarchy: bundle all creation events into one group
            events = list(
                disk_events_for_creating(_id, who, sub,
                                         tuple(dirname) + (filename, )))
            e = disk_event_disk_event_group(_id, who, events=events)
            return [e]
        else:
            assert False
    else:
        raise NotImplementedError(hint)
def write_data_to_file(data, filename, quiet=False):
    """
    Writes the data to the given filename.
    If the data did not change, the file is not touched.

    :param data: the string to write
    :param filename: destination path (expanded via expand_all)
    :param quiet: if True, suppress debug logging
    :raises ValueError: if data is not a string, or filename is
        suspiciously long (probably swapped arguments)
    """
    from mcdp import logger
    if not isinstance(data, str):
        msg = 'Expected "data" to be a string, not %s.' % type(data).__name__
        raise ValueError(msg)
    if len(filename) > 256:
        msg = 'Invalid argument filename: too long. Did you confuse it with data?'
        raise ValueError(msg)
    filename = expand_all(filename)
    make_sure_dir_exists(filename)
    if os.path.exists(filename):
        # FIX: close the handle explicitly instead of leaking it
        with open(filename) as f:
            current = f.read()
        if current == data:
            # skip the (noisy) log for generated asset files
            if not 'assets' in filename:
                if not quiet:
                    logger.debug('already up to date %s' % friendly_path(filename))
            return
    with open(filename, 'w') as f:
        f.write(data)
    if not quiet:
        size = '%.1fMB' % (len(data) / (1024 * 1024))
        logger.debug('Written %s to: %s' % (size, friendly_path(filename)))
def get_ext_for_mime(mime):
    """
    Returns the file extension (without the dot) for the given MIME type.

    Known ambiguous types are resolved via an explicit table; anything else
    is delegated to mimetypes.guess_extension().

    :raises Exception: if no extension can be guessed.
    """
    # removed a dead `if False:` debug branch that warned about 'image/jpg'
    known = {
        'image/svg+xml': 'svg',
        'image/jpeg': 'jpg',
        'image/jpg': 'jpg',  # technically wrong MIME, but seen in the wild
        'text/plain': 'txt',
        'image/png': 'png',
        'application/pdf': 'pdf',
    }
    if mime in known:
        return known[mime]
    suffix = mimetypes.guess_extension(mime)
    if not suffix:
        raise Exception('Cannot guess extension for MIME %r.' % mime)
    # comes with leading dot
    assert suffix.startswith('.')
    ext = suffix[1:]
    # fix some problems
    if ext == 'svgz':
        ext = 'svg'
    return ext
def disk_events_from_hash_set(disk_map, view, _id, who, name, value):
    """
    Translates a 'set the whole hash/dict' operation into a sequence of
    equivalent per-key dict set/del events, then maps each of those to
    disk events. Returns the flattened list of disk events.
    """
    v = view.get_descendant(name)
    from .memdata_events import event_dict_setitem, event_dict_delitem
    # let's break it down to delete keys and add keys
    equiv_events = []
    # let's work on a copy of the data
    data = deepcopy(view._data)
    # delete the ones that should not be there
    for k in v:
        if not k in value:
            logger.debug('Deleting element k = %r' % k)
            e = event_dict_delitem(name=name, key=k, _id=_id, who=who)
            # simulate del data[k]
            equiv_events.append(e)
    # add the ones that are new or changed
    for k in value:
        if (not k in v) or (v[k] != value[k]):
            logger.debug('Setting element k = %r' % k)
            e = event_dict_setitem(name=name, key=k, value=value[k], _id=_id,
                                   who=who)
            # NOTE(review): this assigns the entire mapping, not value[k];
            # looks like it should be `data[k] = value[k]`. However `data`
            # is never read after this point, so it has no visible effect
            # — confirm before changing.
            data[k] = value
            equiv_events.append(e)
    # translate each simulated memdata event into disk events
    de = []
    for e in equiv_events:
        des = disk_events_from_data_event(disk_map, view._schema, view._data, e)
        de.extend(des)
    return de
def view_exception(self, exc, request): request.response.status = 500 # Internal Server Error if hasattr(request, 'context'): if isinstance(request.context, Resource): logger.debug(context_display_in_detail(request.context)) compact = (DPSemanticError, DPSyntaxError) if isinstance(exc, compact): s = exc.__str__() else: s = traceback.format_exc(exc) self.note_exception(exc, request=request) u = unicode(s, 'utf-8') logger.error(u.encode('utf8')) root = self.get_root_relative_to_here(request) res = { 'exception': u, # 'url_refresh': url_refresh, 'root': root, 'static': root + '/static' } return res
def extract_img_to_file_(soup, savefile, tagname, attrname):
    """
    Extracts inline base64 'data:' payloads from the given tags and saves
    them to files via the `savefile` callback.

    :param soup: BeautifulSoup tree (modified in place)
    :param savefile: callable (filename, data) -> url to use in the attribute
    :param tagname: tag selector to scan (e.g. 'img')
    :param attrname: attribute holding the data URI (e.g. 'src')
    """
    n = 0
    tot = 0
    for tag in soup.select(tagname):
        tot += 1
        src = tag[attrname]
        if not src.startswith('data:'):
            continue
        mime, data = get_mime_data_from_base64_string(src)
        # choose a basename: prefer the tag's id, otherwise derive from MD5
        if tag.has_attr('id'):
            basename = tag['id']
        else:
            md5 = get_md5(data)
            basename = 'data-from-%s-%s' % (tagname, md5)
        # Guess extension
        ext = get_ext_for_mime(mime)
        filename = basename + '.' + ext
        # (removed dead reassignment `src = "%s" % filename`; src was
        # never read after this point)
        # ask what URL we should be using
        use_src = savefile(filename, data)
        check_isinstance(use_src, str)
        tag[attrname] = use_src
        n += 1
    logger.debug(('extract_img_to_file: extracted %d/%d images from %r tags, '
                  ' attribute %r.') % (n, tot, tagname, attrname))
def load_spec(self, repo_name, shelf_name, library_name, spec_name,
              thing_name, context):
    """
    Loads, parses and evaluates the named spec, with two memoization layers:

    - self.evaluated: keyed by the full (repo, shelf, library, spec, thing)
      coordinates; caches the final evaluated result.
    - self.parsing_cache: keyed by (spec_name, source text); caches the
      parse tree, since parsing depends only on the source string.
    """
    db_view = self.db_view
    key0 = (repo_name, shelf_name, library_name, spec_name, thing_name)
    if not key0 in self.evaluated:
        x = get_source(db_view, repo_name, shelf_name, library_name,
                       spec_name, thing_name)
        source = x['data']
        # We can do the parsing only once. It only depends on the string
        # and nothing else
        key = (spec_name, source)
        if not key in self.parsing_cache:
            # NOTE: time.clock() is deprecated in Python 3 (this is Py2 code)
            t0 = time.clock()
            self.parsing_cache[key] = \
                self.parse_source(spec_name, source, context)
            t1 = time.clock()
            dms = 1000 * (t1 - t0)
            self.parsing_cache_time_ms[key] = dms
            logger.warn('Parsing %s: %s ms' % (thing_name, dms))
        else:
            dms = self.parsing_cache_time_ms[key]
            logger.debug('Parsing %s: saved %s' % (thing_name, dms))
        parsed = self.parsing_cache[key]
        # evaluation may depend on context, so it is cached per key0 only
        parse_eval = specs[spec_name].parse_eval
        res = parse_eval(parsed, context)
        self.evaluated[key0] = res
    return self.evaluated[key0]
def applies(self, dp1, dp2):
    """
    Decides whether this simplification rule applies to the pair (dp1, dp2):
    dp1 must be a Mux with coords [()], dp2 a DPLoop2 whose inner series
    starts with a Mux.
    """
    # first must be Mux
    if not isinstance(dp1, Mux):
        return False
    # second must be Loop
    from mcdp_dp.dp_loop2 import DPLoop2
    if not isinstance(dp2, DPLoop2):
        return False
    # the first one inside Loop must be Mux, otherwise it
    # doesn't simplify
    inner = unwrap_series(dp2.dp1)
    if not isinstance(inner[0], Mux):
        return False
    if dp1.coords != [()]:
        msg = 'Could not implement simplification' \
            ' for dp1.coords = {}'.format(dp1.coords)
        logger.debug(msg)
        return False
    return True
def mcdplib_run_make(mcdplib):
    """Runs `make clean all` inside the given library directory."""
    makefile = os.path.join(mcdplib, 'Makefile')
    assert os.path.exists(makefile)
    cwd = mcdplib
    cmd = ['make', 'clean', 'all']
    # do not use too many resources
    on_circle = 'CIRCLECI' in os.environ
    if not on_circle:
        # parallelize only outside of CI
        cmd.append('-j')
    from system_cmd.meat import system_cmd_result
    logger.debug('$ cd %s' % cwd)
    env = os.environ.copy()
    if all_disabled():
        env['DISABLE_CONTRACTS'] = '1'
        msg = ('Disabling contracts in environment by adding '
               'DISABLE_CONTRACTS=%r.' % env['DISABLE_CONTRACTS'])
        logger.debug(msg)
    system_cmd_result(cwd, cmd,
                      display_stdout=True,
                      display_stderr=True,
                      raise_on_error=True,
                      env=env)
def __call__(self, data_event):
    """
    Applies one data event end-to-end: records it, derives the disk events,
    applies those to the working directory, and commits the result to git
    with the event's author information (or 'system' if none).
    """
    from mcdp_hdb.disk_map_disk_events_from_data_events import disk_events_from_data_event
    from mcdp_hdb.disk_events import apply_disk_event_to_filesystem
    s = yaml_dump(data_event)
    logger.debug('Event #%d:\n%s' % (len(self.data_events), indent(s, '> ')))
    self.data_events.append(data_event)
    # translate the in-memory event into one or more filesystem events
    disk_events = disk_events_from_data_event(disk_map=self.disk_map,
                                              schema=self.view._schema,
                                              data_rep=self.view._data,
                                              data_event=data_event)
    for disk_event in disk_events:
        logger.debug('Disk event:\n%s' % yaml_dump(disk_event))
        wd = self.repo.working_dir
        apply_disk_event_to_filesystem(wd, disk_event, repo=self.repo)
    # commit message is the YAML serialization of the original event
    message = yaml_dump(data_event)
    who = data_event['who']
    if who is not None:
        actor = who['actor']
        host = who['host']
        instance = who['instance']
    else:
        # anonymous/system events get synthesized identity fields
        actor = 'system'
        host = host_name()
        instance = 'unspecified'
    author = Actor(actor, '%s@%s' % (actor, instance))
    committer = Actor(instance, '%s@%s' % (instance, host))
    _commit = self.repo.index.commit(message, author=author,
                                     committer=committer)
def logout(self, request):
    """Drop the user's session and redirect back to the referring page."""
    logger.info('logging out')
    headers = forget(request)
    logger.debug('headers: %s' % headers)
    # fall back to the site root when there is no referrer
    destination = request.referrer
    if destination is None:
        destination = self.get_root_relative_to_here(request)
    raise HTTPFound(location=destination, headers=headers)
def go(self):
    """
    Entry point for the mcdp-solve command line tool: unpacks all options
    and delegates to solve_main().

    Raises ValueError if no model name is given, UserError if the model
    name contains an extension or a path separator.
    """
    logger.setLevel(logging.DEBUG)
    options = self.get_options()
    if not options.contracts:
        logger.debug('Disabling PyContrats. Use --contracts to enable.')
        disable_all()
    # expecting a number of implementations implies computing them
    if options.expect_nimp is not None:
        options.imp = True
    params = options.get_extra()
    if len(params) < 1:
        raise ValueError('Please specify model name.')
    model_name = params[0]
    # drop the extension
    if '.mcdp' in model_name or '/' in model_name:
        msg = 'The model name should not contain extension or /.'
        raise UserError(msg)
    max_steps = options.max_steps
    _exp_advanced = options.advanced
    expect_nres = options.expect_nres
    lower = options.lower
    upper = options.upper
    out_dir = options.out
    # everything after the model name is a query
    query_strings = params[1:]
    intervals = options.intervals
    imp = options.imp
    expect_nimp = options.expect_nimp
    make = options.make
    # --make implies computing implementations
    if make:
        imp = True
    plot = options.plot
    do_movie = options.movie
    expect_res = None
    config_dirs = options.config_dirs.split(":")
    maindir = options.maindir
    if options.cache:
        if out_dir is None:
            out_dir = 'out-mcdp_solve'
        cache_dir = os.path.join(out_dir, '_cached', 'solve')
    else:
        cache_dir = None
    solve_main(logger, config_dirs, maindir, cache_dir, model_name, lower,
               upper, out_dir, max_steps, query_strings, intervals,
               _exp_advanced, expect_nres, imp, expect_nimp, plot, do_movie,
               expect_res, make)
def enumerate_test_libraries():
    """ Libraries on which we need to run tests.
        Returns list of (bigpath, short_name, path) """
    librarian = get_test_librarian()
    found = []
    libraries = librarian.get_libraries()
    for short in list(libraries):
        data = libraries[short]
        path = data['path']
        # a library opts out of testing by containing this marker file
        f = os.path.join(path, '.mcdp_test_ignore')
        if os.path.exists(f):
            continue
        found.append(short)
    # (i, n): this is worker i out of n parallel test workers
    i, n = get_test_index()
    if n == 1:
        uselibs = found
    else:
        assert n > 1
        # 0 only gets the basic tests
        if i == 0:
            return []
        else:
            # workers 1..n-1 split the libraries into n-1 buckets
            n_effective = n - 1
            i_effective = i - 1
            assert 0 <= i_effective < n_effective
            uselibs = []
            buckets = [[] for _ in range(n_effective)]
            for j, libname in enumerate(found):
                #do = j % n_effective == i_effective
                # assign contiguous slices of `found` to buckets
                which = int(math.floor((float(j) / len(found)) * n_effective))
                assert 0 <= which < n_effective, (j, which, n_effective)
                buckets[which].append(libname)
                # do = math.floor((j - 1) / n_effective) == i_effective
            for libname in found:
                do = libname in buckets[i_effective]
                if do:
                    uselibs.append(libname)
                    s = 'will do'
                else:
                    s = 'skipped because of parallelism'
                logger.debug('%20s: %s' % (libname, s))
            # sanity check: the buckets partition `found` exactly
            ntot = sum(len(_) for _ in buckets)
            assert ntot == len(found)
    return uselibs
def needs_remake(src, target):
    """Returns True if `target` is missing or older than `src` (by mtime)."""
    if not os.path.exists(target):
        logger.debug('Target does not exist: %s' % target)
        return True
    src_is_newer = os.path.getmtime(target) < os.path.getmtime(src)
    if src_is_newer:
        logger.debug('Source is newer.')
    return src_is_newer
def go(self):
    """
    Entry point for the web application command line tool.

    If --config is given, reads all options from the [app:main] section of
    the .ini file (command-line options are then ignored); otherwise uses
    the command-line options directly. Then starts the web server.
    """
    options = self.get_options()
    if options.config is not None:
        logger.info('Reading configuration from %s' % options.config)
        logger.warn('Other options from command line will be ignored. ')
        parser = RawConfigParser()
        parser.read(options.config)
        sections = parser.sections()
        logger.info('sections: %s' % sections)
        s = 'app:main'
        if not s in sections:
            msg = 'Could not find section "%s": available are %s.' % (
                s, format_list(sections))
            msg += '\n file %s' % options.config
            raise Exception(msg)  # XXX
        settings = dict((k, parser.get(s, k)) for k in parser.options(s))
        # only keys with this prefix configure mcdp_web itself;
        # they are removed from `settings` (delete=True)
        prefix = 'mcdp_web.'
        mcdp_web_settings = get_only_prefixed(settings, prefix, delete=True)
        #  mcdp_web_settings = {}
        #         for k,v in list(settings.items()):
        #             if k.startswith(prefix):
        #                 mcdp_web_settings[k[len(prefix):]] = v
        #                 del settings[k]
        options = parse_mcdpweb_params_from_dict(mcdp_web_settings)
        logger.debug('Using these options: %s' % options)
    else:
        logger.info('No configuration .ini specified (use --config).')
        settings = {}
    wa = WebApp(options, settings=settings)
    # Write warning messages now
    wa.get_authomatic_config()
    msg = """Welcome to PyMCDP!

To access the interface, open your browser at the address

    http://localhost:%s/

Use Chrome, Firefox, or Opera - Internet Explorer is not supported.
""" % options.port
    logger.info(msg)
    if options.delete_cache:
        pass  # XXX: warning deprecated
        # logger.info('Deleting cache...')
        # wa._refresh_library(None)
    wa.serve(port=options.port)
def view_forbidden(self, request):
    """
    Renders the 403 page, offering login options.

    Populates the template dictionary with the exception details, the
    original URL (for redirecting back after login), and the configured
    third-party login providers.
    """
    # if using as argument, context is the HTTPForbidden exception
    context = request.context
    e = Environment(context, request)
    logger.error('forbidden url: %s' % request.url)
    logger.error('forbidden referrer: %s' % request.referrer)
    logger.error('forbidden exception: %s' % request.exception.message)
    logger.error('forbidden result: %s' % request.exception.result)
    request.response.status = 403
    config = self.get_authomatic_config()
    # Bug! this must be front-facing
    # translate the internal URL to the public-facing one if both are set
    url_internal = request.url
    if self.options.url_base_internal is not None:
        url_external = url_internal.replace(self.options.url_base_internal,
                                            self.options.url_base_public)
    else:
        url_external = url_internal
    logger.debug('next_location:\n internal: %s\n external: %s' %
                 (url_internal, url_external))
    config['next_location'] = url_external
    res = {}
    res['request_exception_message'] = request.exception.message
    res['request_exception_result'] = request.exception.result
    # path_qs The path of the request, without host but with query string
    res['came_from'] = request.path_qs
    res['referrer'] = request.referrer
    res['login_form'] = self.make_relative(request, URL_LOGIN)
    res['url_logout'] = self.make_relative(request, URL_LOGOUT)
    res['root'] = e.root
    res['static'] = e.root + '/static'  # XXX DRY
    # NOTE(review): this iterates the authomatic config dict's keys as
    # provider names; 'next_location' was just inserted into `config`
    # above — confirm get_authomatic_config() returns a fresh dict each
    # call, otherwise it may appear here as a bogus provider.
    providers = self.get_authomatic_config()
    other_logins = {}
    for x in providers:
        other_logins[x] = e.root + '/authomatic/' + x
    res['other_logins'] = other_logins
    if context is not None:
        res['context_detail'] = context_display_in_detail(context)
        logger.error(res['context_detail'])
    else:
        res['context_detail'] = 'no context provided'
    if e.username is not None:
        # res['error'] = ''
        res['user_struct'] = e.user
    else:
        res['error'] = 'You need to login to access this resource.'
        res['user_struct'] = None
    return res
def cndp_abstract(ndp):
    """
    Abstracts the composite NDP: uses the loop-based abstraction when the
    connection graph contains cycles, and the plain dpgraph otherwise.
    """
    from .connection import get_connection_multigraph
    graph = get_connection_multigraph(ndp.get_connections())
    cycles = list(simple_cycles(graph))
    if cycles:
        logger.debug('cndp_abstract: %d cycles' % len(cycles))
        return cndp_abstract_loop2(ndp)
    return dpgraph_making_sure_no_reps(ndp.context)
def with_library_cache_dir(library, prefix='with_library_cache_dir'):
    """
    Context manager: points `library` at a fresh temporary cache directory.

    The directory is removed on clean exit. On error it is kept (for
    debugging) and the exception is propagated to the caller.

    :param library: object supporting use_cache_dir()
    :param prefix: prefix for the temporary directory name
    """
    mcdp_tmp_dir = get_mcdp_tmp_dir()
    tmpdir = tempfile.mkdtemp(dir=mcdp_tmp_dir, prefix=prefix)
    library.use_cache_dir(tmpdir)
    try:
        yield
    except:
        # FIX: previously the exception was swallowed here (`pass`),
        # suppressing errors raised inside the `with` block. Keep the
        # directory for inspection, but re-raise.
        logger.debug('Keeping %s' % tmpdir)
        raise
    else:
        shutil.rmtree(tmpdir)
def write_file_encoded_as_utf8(filename, data):
    """
    Writes the bytestring `data` (assumed UTF-8 encoded; Python 2 `str`)
    to `filename` as UTF-8 text, creating parent directories as needed.
    """
    check_isinstance(data, str)
    d = os.path.dirname(filename)
    if not os.path.exists(d):  # pragma: no cover
        os.makedirs(d)
    u = unicode(data, 'utf-8')
    # FIX: codecs.open() defaults to read mode; the original call opened
    # the file for reading and f.write(u) would raise IOError.
    with codecs.open(filename, 'w', encoding='utf-8') as f:
        f.write(u)
    logger.debug('Written %s' % filename)
def get_test_librarian():
    """
    Creates a Librarian over the bundled test repository.

    Honors two environment variables:
    - MCDPConstants.ENV_TEST_LIBRARIES: comma-separated whitelist;
      all other libraries are removed.
    - MCDPConstants.ENV_TEST_LIBRARIES_EXCLUDE: comma-separated blacklist.

    :raises ValueError: if the bundled data folder is missing or contains
        suspiciously few libraries.
    """
    package = dir_from_package_name('mcdp_data')
    folder = os.path.join(package, 'bundled.mcdp_repo')
    if not os.path.exists(folder):
        raise_desc(ValueError, 'Test folders not found.', folder=folder)
    librarian = Librarian()
    librarian.find_libraries(folder)
    libraries = librarian.libraries
    n = len(libraries)
    if n <= 1:
        msg = 'Expected more libraries.'
        raise_desc(ValueError, msg, folder, libraries=libraries)
    orig = list(libraries)
    vname = MCDPConstants.ENV_TEST_LIBRARIES
    if vname in os.environ:
        use = os.environ[vname].split(",")
        logger.debug('environment variable %s = %s' % (vname, use))
        logger.info('Because %s is set, I will use only %s instead of %s.' %
                    (vname, use, orig))
        for _ in orig:
            if not _ in use:
                del libraries[_]
    else:
        pass
        #logger.debug('environment variable %s is unset' % vname)
    vname2 = MCDPConstants.ENV_TEST_LIBRARIES_EXCLUDE
    if vname2 in os.environ:
        exclude = os.environ[vname2].split(',')
        logger.debug('environment variable %s = %s' % (vname2, exclude))
    else:
        exclude = []
        # logger.debug('environment variable %s is unset' % vname2)
    if exclude:
        for a in exclude:
            if not a in libraries:
                msg = '%s = %s but %r is not a library.' % (vname2, exclude, a)
                logger.error(msg)
            else:
                # FIX: previously logged the env-var name (vname2) instead
                # of the library actually being excluded.
                logger.info('Excluding %s' % a)
                del libraries[a]
    return librarian
def add_lib_by_path(self, path):
    """
    Registers the library found at `path`. Seeing the same library name
    twice from the same path is fine (logged); from a different path it
    is an error.
    """
    short, data = self._load_entry(path)
    if short in self.libraries:
        previous = self.libraries[short]
        if previous['path'] != data['path']:
            # same name, conflicting locations: refuse
            msg = 'I already know library "%s".\n' % short
            msg += 'Current entry path: %s\n' % data['path']
            msg += 'Previous entry path: %s\n' % previous['path']
            raise_desc(ValueError, msg)
        else:
            msg = 'Reached library "%s" twice (path = %s).' % (short, path)
            logger.debug(msg)
    self.libraries[short] = data
def subwith(name_, s):
    """
    Parses the HTML fragment `s`, renames its <pre> to <code>, and wraps it
    in a new <pre class="syntax_highlight {name_}"> element, which replaces
    the original node in the document.
    """
    result = bs(s.encode('utf8'))
    result.name = 'div'
    pre = result.find('pre')
    pre.name = 'code'
    Pre = Tag(name='pre')
    add_class(Pre, 'syntax_highlight')
    add_class(Pre, name_)
    Pre.append(pre)
    try:
        # NOTE(review): `code` is not defined in this function; it must be
        # a closure variable from an enclosing scope (this looks like a
        # nested helper) — confirm, otherwise this is a NameError.
        code.parent.replace_with(Pre)
    except:
        logger.debug(str(code.parent))
        raise
def view_authomatic_(self, config, e):
    """
    Handles one step of the OAuth login flow for the provider named in the
    URL context. Returns the WebOb response; on success delegates to
    handle_auth_success(), on provider error shows an error page.
    """
    response = Response()
    provider_name = e.context.name
    logger.info('using provider %r' % provider_name)
    if not provider_name in config:
        msg = 'I got to the URL for provider %r even though it is not in the config.' % provider_name
        raise ValueError(msg)
    authomatic = Authomatic(config=config, secret='some random secret string')
    url_base_public = self.options.url_base_public
    url_base_internal = self.options.url_base_internal
    # FIX: the original compared `url_base_public is None` with itself,
    # so this consistency check could never trigger.
    if not ((url_base_public is None) == (url_base_internal is None)):
        msg = 'Only one of url_base_public and url_base_internal is specified.'
        raise Exception(msg)
    result = authomatic.login(
        MyWebObAdapter(e.request, response, url_base_internal,
                       url_base_public), provider_name)
    if not result:
        # login procedure not finished yet (redirects etc.)
        return response
    # If there is result, the login procedure is over and we can write to response.
    response.write('<a href="..">Home</a>')
    if result.error:
        # Login procedure finished with an error.
        msg = result.error.message
        return self.show_error(e, msg, status=500)
    elif result.user:
        # OAuth 2.0 and OAuth 1.0a provide only limited user data on login,
        # We need to update the user to get more info.
        result.user.update()
        s = "user info: \n"
        for k, v in result.user.__dict__.items():
            s += '\n %s : %s' % (k, v)
        logger.debug(s)
        next_location = config.get('next_location', e.root)
        handle_auth_success(self, e, provider_name, result, next_location)
    # just regular login
    return response
def add_where_information(where):
    """ Adds where field to DPSyntaxError or DPSemanticError thrown by code. """
    # NOTE: `active` is hardcoded True, so both `if not active` branches
    # below are currently dead code.
    active = True
    if not active:
        logger.debug('Note: Error tracing disabled in add_where_information().')
    if not active:
        mcdp_dev_warning('add_where_information is disabled')
        yield
    else:
        try:
            yield
        except MCDPExceptionWithWhere as e:
            mcdp_dev_warning('add magic traceback handling here')
            # re-raise annotated with `where`, preserving the original traceback
            _, _, tb = sys.exc_info()
            raise_with_info(e, where, tb)
def parse_ndp_filename(filename, context=None):
    """ Reads the file and returns as NamedDP.
        The exception are annotated with filename.

        NOTE: uses the Python 2 three-argument raise syntax to re-raise
        the annotated exception with the original traceback.
    """
    with open(filename) as f:
        contents = f.read()
    try:
        return parse_ndp(contents, context)
    except MCDPExceptionWithWhere as e:
        active = True
        if active:
            # http://stackoverflow.com/questions/1350671/inner-exception-with-traceback-in-python
            # annotate the exception with the filename, then re-raise
            # preserving the original traceback
            e = e.with_filename(filename)
            raise type(e), e.args, sys.exc_info()[2]
        else:  # pragma: no cover
            logger.debug('Deactivated trace in parse_ndp_filename().')
            raise
def use_cache_dir(self, cache_dir):
    """
    Enables caching in `cache_dir` after probing that it is writable;
    on any failure, disables caching (self.cache_dir = None).
    """
    try:
        if not os.path.exists(cache_dir):
            os.makedirs(cache_dir)
        # probe writability by creating and removing a scratch file
        probe = os.path.join(cache_dir, 'touch')
        if os.path.exists(probe):
            os.unlink(probe)
        with open(probe, 'w') as f:
            f.write('touch')
        os.unlink(probe)
    except Exception:
        logger.debug('Cannot write to folder %r. Not using caches.' % cache_dir)
        self.cache_dir = None
    else:
        self.cache_dir = cache_dir
def substitute_task_marker_p(p, sub, klass):
    """
    Removes every occurrence of the marker `sub` from the text nodes of
    `p`, adding the CSS class `klass` to `p` whenever a marker is found.
    """
    try:
        for node in p.descendants:
            if not isinstance(node, NavigableString):
                continue
            text = node.string
            if sub not in text:
                continue
            add_class(p, klass)
            replacement = NavigableString(text.replace(sub, ''))
            node.replaceWith(replacement)
    except AttributeError as e:
        # a bug with bs4
        msg = 'Bug with descendants: %s' % e
        logger.debug(msg)