def __pow__(self, other):
    """
    Returns the proxied function raised at an integer power.

    :param other: Non-negative integer to raise the proxied with
    :return: Composable computing the proxied function composed with
        itself ``other`` times
    :raises ValueError: if *other* is a negative integer
    :raises TypeError: if *other* is not an integer
    """
    if isinstance(other, int) and other >= 0:
        # PERF FIX: build the composition once, at construction time. The
        # original rebuilt it (and re-triggered __mul__'s logging) on every
        # single call of the returned function.
        # Compose the proxied function at least once, so other == 0
        # deliberately behaves like other == 1 (as in the original).
        composed = self
        for _ in range(other - 1):
            composed *= self

        def anon_pow_times(*args):
            """
            Applies the pre-computed self-composition.

            :param args: The args used for the original function
            :return: Proxied function multiplied as many times as other
            """
            return composed(*args)

        log_info("{0} raised {1} times".format(self.func.__name__, other))
        return Composable(anon_pow_times)
    elif isinstance(other, int) and other < 0:
        log_error("Raised {0} w/ negative int".format(self.func.__name__))
        raise ValueError("Composable can only be raised w/ positive int")
    log_error("Attempt to raise {0} with type {1}".format(
        self.func.__name__, type(other)))
    raise TypeError("Composable instance can only be raised w/ integers")
def _run_xpath(xpath):
    """
    Evaluate *xpath* against the current twill browser page.

    :param xpath: XPath expression to run over the page HTML
    :return: All matched results serialized and joined with newlines,
        or "" when nothing matched
    :raises TwillException: if the expression is not valid XPath
    """
    _, twill_locals = get_twill_glocals()
    browser = get_browser()
    html = browser.get_html()
    tree = lxml.html.document_fromstring(html)
    try:
        results = tree.xpath(xpath)
    except XPathEvalError:
        err_msg = "Invalid xpath expression: '%s'" % xpath
        log_error(err_msg)
        raise TwillException(err_msg)
    # XXX we aggregate all the values together and warn when there is more
    # than one result
    if results:
        if len(results) > 1:
            log_warn("xpath '%s' found multiple results: using all of them"
                     % xpath)
        result = "\n".join(lxml.html.tostring(r) for r in results)
    else:
        # BUG FIX: the original format string had no % argument, so the log
        # message printed a literal '%s' instead of the failing expression.
        log_error("xpath '%s' found no results" % xpath)
        result = ""
    # in case we want to cache it at some point
    twill_locals["__xpath_result__"] = result
    twill_locals["__xpath_expr__"] = xpath
    return result
def handle_exception(msg, e):
    """
    Log a caught exception and dump the current twill page HTML for
    post-mortem debugging (to stdout or to options.dump_file).

    :param msg: Description of the failure
        (NOTE(review): unused within this span — presumably consumed by
        code that follows this fragment elsewhere in the file; confirm)
    :param e: The caught exception
    """
    maybe_print_stack()
    log_error("Caught exception at %s" % time.strftime('%Y-%m-%d %H:%M:%S',
                                                       time.localtime()))
    html = twill.get_browser().get_html()
    if options.dump_file == '-':
        # '-' means dump the HTML straight to stdout (Python 2 print)
        print html
    else:
        dump_file_name = os.path.expanduser(options.dump_file)
        try:
            if html is not None:
                if options.show_error_in_browser:
                    # If we are showing it in the browser, lets get the
                    # paths right (even if if means changing the HTML a
                    # little)
                    base_href = '\n<!-- added by flunc: --><base href="%s">' % twill.get_browser().get_url()
                    match = re.search('<head.*?>', html, re.I|re.S)
                    if match:
                        # Inject right after the opening <head> tag
                        html = html[:match.end()] + base_href + html[match.end():]
                    else:
                        html = base_href + html
                f = open(dump_file_name, 'wb')
                f.write(html)
                f.close()
                log_info("saved error html to: %s" % dump_file_name)
        # NOTE(review): Python 2 except syntax; this rebinds parameter `e`
        except IOError, e:
            log_warn("Unable to save error HTML to: %s" % dump_file_name)
def def_inspect(module):
    """
    Scans a module's functions with inspect, returns their signatures, if any.

    :param module: Module, or name of module to be imported
    :return: String detailing the signature of each function in the module
    :raises ImportError: if the named module cannot be imported
    """
    # Take the __name__ attr of the module if the argument is not a string
    mod_name = module if isinstance(module, str) else module.__name__
    log_info("Attempting to inspect functions in module {}".format(mod_name))
    try:
        imported = import_module(str(mod_name))
    except ImportError as e:
        log_error(e)
        # BUG FIX: re-raise the original exception rather than wrapping it
        # in a new ImportError, which discarded the original traceback.
        raise
    result = []
    # For each (function name, function object) tuple retrieved...
    for func, _obj in inspect.getmembers(imported, inspect.isfunction):
        # BUG FIX: inspect.formatargspec was deprecated in 3.5 and removed
        # in Python 3.11; inspect.signature renders the same "(...)" text.
        func_txt = str(inspect.signature(getattr(imported, func)))
        result.append("'def {0}{1}'".format(func, func_txt))
    return "\n".join(result)
def __mul__(self, other):
    """
    Return the composition of proxied and another function.

    :param other: Composable or plain function to compose with
    :return: Composition of the proxied function and the passed object
    :raises TypeError: if *other* is neither a Composable nor a function
    """
    # Normalize the operand to a plain callable, remembering which log
    # template applies; reject unsupported operand types up front.
    if type(other) is Composable:
        inner = other.func
        template = "{0} multiplied by Composable obj {1}"
    elif type(other) is types.FunctionType:
        inner = other
        template = "{0} multiplied by function {1}"
    else:
        log_error("Attempt to multiply {0} by {1}".format(
            self.func.__name__, other))
        raise TypeError("Illegal operands for multiplication")

    def anon(x):
        return self.func(inner(x))

    log_info(template.format(self.func.__name__, other))
    return Composable(anon)
def timetable(request):
    """
    Django view: show a player's match timetable for a tournament.

    Expects GET parameters 'tid' (tournament id, required) and 'license'
    (player license). Marks each listed match as seen by the player.
    """
    debug(request.__dict__)
    # NOTE(review): `license` shadows the builtin of the same name
    license = request.GET.get('license')
    tournament_id = request.GET.get('tid')
    if tournament_id:
        tournament = Tournament.objects.get(id=tournament_id)
    else:
        str_error = '------There is no tournament on the request ------\n'
        log_error(str_error + str(request.__dict__))
        raise Exception(str_error)
    player = None
    if license:
        # Queryset fetched only as an existence check...
        player = Player.objects.filter(license=license)
    if license and player:
        # ...then re-queried to obtain the actual Player instance
        player = Player.objects.get(license=license)
        debug(Match.objects.all())
        # Collect matches where the player appears on either side
        matches = list(Match.objects.filter(player1=player,
                                            tournament=tournament))
        matches = matches + list(Match.objects.filter(player2=player,
                                                      tournament=tournament))
        matches.sort(key=lambda x: x.match_time)
        # Flag which slot the player occupies so each match records that
        # its timetable was seen.
        # NOTE(review): player3/player4 are checked here although the
        # querysets above only filter on player1/player2 — presumably
        # doubles partners; confirm against the Match model.
        for p in matches:
            if p.player1 == player:
                p.player1_saw_timetable = True
            elif p.player2 == player:
                p.player2_saw_timetable = True
            elif p.player3 == player:
                p.player3_saw_timetable = True
            elif p.player4 == player:
                p.player4_saw_timetable = True
            p.save()
        template = ''
        # NOTE(review): request.is_ajax() was deprecated in Django 3.1
        if request.is_ajax():
            template = 'includes/timetable.html'
        else:
            template = 'timetable.html'
        return render(request, template, {'player': player,
                                          'matches': matches,
                                          'tournament': tournament})
    elif request.is_ajax():
        return render(request, 'includes/error.html')
    else:
        return redirect(reverse('home',
                                kwargs={'tournament':tournament.name})
                        + '?error=True')
def find(self, key, recursive=False):
    """
    Locates a node and returns the data associated with it.

    :param key: Node identifier
    :param recursive: True when called internally on a subtree; suppresses
        the KeyError / None translation done only at the top-level call
    :return: _data associated with the node
    :raises KeyError: if no node with *key* exists (top-level calls only)
    """
    # result keeps the value returned by each recursive use of find()
    result = None
    if self.key == key:
        # Key was found, save this data
        result = self._data
        if not result:
            # Sentinel distinguishing "key present, no data" from "absent"
            result = _NoDataNode()
        log_info("Found value {0} for node {1}".format(result, key))
    # PERF FIX: the original iterated subtree.walk() and re-ran the same
    # recursive find() once per walked node, making the search accidentally
    # quadratic; a single recursive call returns the identical result
    # (walk() is presumed side-effect free — confirm).
    if not result and self.left:
        result = self.left.find(key, True)
    if not result and self.right:
        result = self.right.find(key, True)
    if isinstance(result, _NoDataNode) and not recursive:
        # The key was found, but it had no data associated
        log_info("Search done, no data inside node {0}".format(key))
        return None
    elif not result and not recursive:
        # No key was found
        msg = "No node with key '{0}' was found".format(key)
        log_error(msg)
        raise KeyError(msg)
    return result
def insert(self, key, data=None):
    """
    Inserts a new element into the tree in the correct position.

    :param key: Object to be used as the identifier/key of this node
    :param data: Payload to store alongside the key
    :raises ValueError: if *key* already exists in the tree
    """
    # Duplicates are rejected outright
    if key == self.key:
        msg = "Attempt to insert a duplicate key node"
        log_error(msg)
        raise ValueError(msg)
    # Pick the subtree by comparison, then recurse or attach a new leaf
    side = 'left' if key < self.key else 'right'
    child = getattr(self, side)
    if child:
        child.insert(key, data)
    else:
        setattr(self, side, Tree(key, data))
def destroy_posts(projurl):
    """delete all blog posts in a project"""
    log_warn("(wordpress) Deleting posts")
    url = '%s/blog/wp-admin/edit.php' % projurl

    def delete_links():
        # Scrape the wp-admin edit page for per-post "Delete" links
        # (reaches into twill's private _browser attribute)
        go(url)
        return [link.url for link in get_browser()._browser.links()
                if link.text == 'Delete']

    try:
        for link in delete_links():
            go('%s/blog/wp-admin/%s' % (projurl, link))
    # Deliberate best-effort: any failure is re-raised in verbose mode,
    # otherwise logged and swallowed
    except:
        if options.verbose:
            raise
        log_error("Error removing posts from '%s'" % projurl)
    # Sanity check: no Delete links should remain afterwards
    assert not delete_links()
def zope_delobject(container, obj, admin_user, admin_pw):
    """
    Delete *obj* from *container* on a Zope server via XML-RPC.

    :param container: Name of the Zope container holding the object
    :param obj: Id of the object to delete
    :param admin_user: Admin username embedded in the XML-RPC URL
    :param admin_pw: Admin password embedded in the XML-RPC URL
    """
    # use the 'cleanup_base_url', which may be different than the base_url
    base_url = get_twill_var('cleanup_base_url')
    prepath = get_twill_var('prepath')
    log_warn("(zope) Deleting %s from %s on %s" % (obj, container, base_url))
    scheme, uri = urllib.splittype(base_url)
    host, path = urllib.splithost(uri)
    if prepath is not None:
        path = prepath + path
    # Credentials go into the URL so the XML-RPC client uses basic auth
    auth_url = "%s://%s:%s@%s%s/" % (scheme, admin_user, admin_pw, host,
                                     path)
    portal = XMLRPCServer(auth_url)
    try:
        getattr(portal, container).manage_delObjects([obj])
    # NOTE(review): Python 2 except syntax
    except Fault, e:
        # A "does not exist" fault is fine: the object was already gone
        ignorable = '%s does not exist' % obj
        if str(e).count(ignorable):
            log_warn("(zope) can't delete %s/%s/%s, it didn't exist"
                     % (uri, container, obj))
        elif options.verbose:
            raise
        else:
            log_error("Error removing '%s' from '%s': %s"
                      % (obj, container, str(e)))
def main(self):
    """
    CLI entry point: parse arguments, then dispatch on the sub-command.

    'version' prints the program version; 'list' prints the spec file's
    sources and patches; 'get' downloads them.

    :return: 0 on success, 1 on download/file-exists failure
        (exits with status 2 on spec evaluation errors)
    """
    argparser = self.get_arg_parser()
    args = self.args = argparser.parse_args(sys.argv[1:])
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    log_debug("args: {}".format(args))
    if not getattr(args, 'cmd'):
        # No sub-command given: show usage, fall through to return 0
        argparser.print_usage()
    elif args.cmd == 'version':
        print("{prog} {version}".format(
            prog=sys.argv[0], version=version))
    else:
        # Evaluate the spec file into a fully-expanded intermediate copy
        parsed_spec_path = os.path.join(
            self.tmpdir, "rpmspectool-" + os.path.basename(
                self.args.specfile.name))
        spechandler = RPMSpecHandler(
            self.tmpdir, args.specfile, parsed_spec_path)
        try:
            specfile_res = spechandler.eval_specfile(self.args.define)
        except RPMSpecEvalError as e:
            specpath, returncode, stderr = e.args
            # Only show the intermediate path in debug mode
            if args.debug:
                errmsg = _(
                    "Error parsing intermediate spec file '{specpath}'.")
            else:
                errmsg = _("Error parsing intermediate spec file.")
            print(errmsg.format(specpath=specpath), file=sys.stderr)
            if args.verbose:
                print(
                    _("RPM error:\n{stderr}").format(stderr=stderr),
                    file=sys.stderr)
            sys.exit(2)
        sources, patches = self.filter_sources_patches(
            args, specfile_res['sources'], specfile_res['patches'])
        if args.cmd == 'list':
            for prefix, what in (
                    ("Source", sources), ("Patch", patches)):
                for i in sorted(what):
                    print("{}{}: {}".format(prefix, i, what[i]))
        elif args.cmd == 'get':
            # --sourcedir wins over an explicit target directory
            if getattr(args, 'sourcedir'):
                where = specfile_res['srcdir']
            else:
                where = getattr(args, 'directory')
            for what in sources, patches:
                for i in sorted(what):
                    url = what[i]
                    # Only remote entries are downloadable
                    if is_url(url):
                        try:
                            download(
                                url, where=where, dry_run=args.dry_run,
                                insecure=args.insecure, force=args.force)
                        except DownloadError as e:
                            log_error(e.args[0])
                            return 1
                        except FileExistsError as e:
                            log_error("{}: {}".format(e.args[1], getattr(
                                e, 'filename2', e.filename)))
                            return 1
    return 0
def subscribe(self, subscriber):
    """
    Register *subscriber* with this Publisher.

    :param subscriber: Object to be notified of future publications
    :raises ValueError: if *subscriber* is already registered
    """
    if subscriber not in self.subscribers:
        self.subscribers.append(subscriber)
        return
    log_error("Attempt to re-sub an existing subscriber to Publisher")
    raise ValueError("Multiple subscriptions are not allowed")
def unsubscribe(self, subscriber):
    """
    Remove *subscriber* from this Publisher.

    :param subscriber: Previously registered subscriber to remove
    :raises ValueError: if *subscriber* was never registered
    """
    if subscriber in self.subscribers:
        self.subscribers.remove(subscriber)
        return
    log_error("Can't unsub non-existing subscriber from Publisher")
    raise ValueError("Can only unsubscribe subscriber")
def main(self):
    """
    CLI entry point: parse arguments (with shell completion via
    argcomplete), then dispatch on the sub-command.

    NOTE(review): near-duplicate of another main() in this file that lacks
    the argcomplete call — consider consolidating.

    :return: 0 on success, 1 on download/file-exists failure
        (exits with status 2 on spec evaluation errors)
    """
    argparser = self.get_arg_parser()
    # Enable shell tab-completion before parsing
    argcomplete.autocomplete(argparser)
    args = self.args = argparser.parse_args(sys.argv[1:])
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    log_debug("args: {}".format(args))
    if not getattr(args, 'cmd'):
        # No sub-command given: show usage, fall through to return 0
        argparser.print_usage()
    elif args.cmd == 'version':
        print("{prog} {version}".format(
            prog=sys.argv[0], version=version))
    else:
        # Evaluate the spec file into a fully-expanded intermediate copy
        parsed_spec_path = os.path.join(
            self.tmpdir, "rpmspectool-" + os.path.basename(
                self.args.specfile.name))
        spechandler = RPMSpecHandler(
            self.tmpdir, args.specfile, parsed_spec_path)
        try:
            specfile_res = spechandler.eval_specfile(self.args.define)
        except RPMSpecEvalError as e:
            specpath, returncode, stderr = e.args
            # Only show the intermediate path in debug mode
            if args.debug:
                errmsg = _(
                    "Error parsing intermediate spec file '{specpath}'.")
            else:
                errmsg = _("Error parsing intermediate spec file.")
            print(errmsg.format(specpath=specpath), file=sys.stderr)
            if args.verbose:
                print(
                    _("RPM error:\n{stderr}").format(stderr=stderr),
                    file=sys.stderr)
            sys.exit(2)
        sources, patches = self.filter_sources_patches(
            args, specfile_res['sources'], specfile_res['patches'])
        if args.cmd == 'list':
            for prefix, what in (
                    ("Source", sources), ("Patch", patches)):
                for i in sorted(what):
                    print("{}{}: {}".format(prefix, i, what[i]))
        elif args.cmd == 'get':
            # --sourcedir wins over an explicit target directory
            if getattr(args, 'sourcedir'):
                where = specfile_res['srcdir']
            else:
                where = getattr(args, 'directory')
            for what in sources, patches:
                for i in sorted(what):
                    url = what[i]
                    # Only remote entries are downloadable
                    if is_url(url):
                        try:
                            download(
                                url, where=where, dry_run=args.dry_run,
                                insecure=args.insecure, force=args.force)
                        except DownloadError as e:
                            log_error(e.args[0])
                            return 1
                        except FileExistsError as e:
                            log_error("{}: {}".format(e.args[1], getattr(
                                e, 'filename2', e.filename)))
                            return 1
    return 0
def move_files(origin, files, destination, id_order, custom_preorder,
               numbering, removing, lowercase=False, duplicate=False,
               replace_files=False):
    """
    Moves or duplicates files from one directory to another.

    :param origin: Directory from which the files are moved/duplicated
    :param files: Filenames of the items to be moved
    :param destination: Pathname to contain the specified files
    :param id_order: Pre-ordering (0: alphabetical, 1: inverse alpha,
        2: creation order, 3: string of numbers before/after pattern)
    :param custom_preorder: Tuple of pair (Int before/after pattern,
        String pattern)
    :param numbering: Tuple of pattern (Boolean to actually rename with
        numbers, number of digits for the renaming, 0: after 1: before the
        custom pattern, string containing the custom pattern)
    :param removing: Tuple containing boolean to determine if to replace
        or not and the characters to be removed from the filenames
    :param lowercase: Boolean for whether to transform filenames to
        lowercase
    :param duplicate: Boolean for whether to move or duplicate the files
    :param replace_files: Boolean for whether to overwrite in destination
    :return: None
    :raises NoSelectedFiles: if *files* is empty
    """
    # ------ UTILITY CLOSURES ---------
    def replace_lower(string):
        """
        Modifies the final name of a file according to the 'replace
        characters' and 'lowercase' options.

        :param string: String to be modified
        :return: Modified string
        """
        return_string = string
        # BUG FIX: the original restarted from the unmodified input on
        # every pass of this loop, so only the *last* character listed in
        # removing[1] was actually removed; it also returned "" when
        # removal was combined with lowercase but removing[1] was empty.
        # Accumulating into return_string fixes both.
        if removing[0] and removing[1]:
            for character in removing[1]:
                return_string = return_string.replace(character, "")
        if lowercase:
            return_string = return_string.lower()
        return return_string

    def move_file(origin_name, destination_name):
        """
        Moves or duplicates a file into a destination directory.

        Deletes a file in the destination if replace_files is True.
        Otherwise, avoids the operation in case the filename already
        exists.

        :param origin_name: File's original name
        :param destination_name: Name of the file in its new directory
        :return: None
        """
        # Take care of removing or lowercase transformation if desired
        destination_name = replace_lower(destination_name) if removing[0] \
            or lowercase else destination_name
        # Create the pathname of the file in its new directory
        final_pathname = os.path.join(destination, destination_name)
        # If files should be replaced, get rid of any file that already
        # has the same name in the destination folder
        if replace_files and os.path.exists(final_pathname):
            log_info("Removing {} from {} as it will be overwritten".format(
                destination_name, destination))
            os.remove(final_pathname)
        # If the files should not be deleted from the original folder,
        # use shutil_copyfile to move the file; don't try if file exists
        if duplicate and not os.path.exists(final_pathname):
            shutil_copyfile(os.path.join(origin, origin_name),
                            final_pathname)
        # Else, if the files are to be moved and the filename doesn't
        # already exist in the destination folder, move with os' rename
        elif not os.path.exists(final_pathname):
            os.rename(os.path.join(origin, origin_name), final_pathname)
        # NOTE(review): this logs "moved" even when both branches above
        # were skipped because the destination already existed — confirm.
        log_info("File {} in {} moved with name {} to {}".format(
            origin_name, origin, destination_name, destination))
    # ------ END OF UTILITY CLOSURES ---------

    # Start a new logging session
    start_logging()
    if not files:
        # No files were checked; log error and abort
        log_error("Attempted to move files, but no origin files checked.")
        raise NoSelectedFiles("No files selected to move in origin folder")

    # Check pre-order to apply before moving/renaming the files
    if id_order == 0:
        # Pre-order alphabetically
        original_files = sort_list(files)
    elif id_order == 1:
        # Pre-order reverse alphabetically
        original_files = sort_list(files, rev=True)
    elif id_order == 2:
        # Order by creation time: build (creation time, filename) tuples
        c_times = [(localtime(os.stat(os.path.join(origin, f))[ST_CTIME]),
                    f) for f in files]
        # Sort filenames by creation time
        original_files = sort_list(c_times, pairs=True)
    else:
        # Pre-order by a number found after/before a pattern in the names
        pattern = custom_preorder[1]
        list_to_order = []
        for f in files:
            if not pattern:
                # No pattern given: maybe the whole stem is just a number.
                # Take the name up to its extension suffix period.
                try:
                    file_tuple = (int(f[:f.find(".")]), f)
                except ValueError:
                    # This file doesn't follow what we expected;
                    # improvise with a simple 0 in the first tuple value
                    file_tuple = (0, f)
            elif custom_preorder[0]:
                # Number runs from the start of the name to the pattern
                try:
                    file_tuple = (int(f[:f.find(pattern)]), f)
                except ValueError:
                    # The file does not follow the pattern, improvise
                    file_tuple = (0, f)
            else:
                # Number runs from the pattern to the period before the
                # file extension
                try:
                    file_tuple = (int(f[f.find(pattern) + len(pattern):
                                        f.find(".")]), f)
                except ValueError:
                    file_tuple = (0, f)
            list_to_order.append(file_tuple)
        original_files = sort_list(list_to_order, pairs=True)

    # If the new filenames will follow a prefixed/suffixed number pattern
    # then create their names according to the ordered original_files
    if numbering[0]:
        try:
            # Create a format string with the number of digits
            digits = "{{:0{}d}}".format(int(numbering[1]))
        except ValueError:
            # BUG FIX: the fallback was "{{:04d}}", whose doubled braces
            # made digits.format(i) yield the literal text "{:04d}"
            # instead of a zero-padded number; use the plain form.
            digits = "{:04d}"
        for i, f in enumerate(original_files):
            ext_period = f.rfind(".")
            # If the filename has no extension, nothing else is appended.
            # NOTE(review): f[cut_from:] drops the period itself, so
            # "name.txt" becomes e.g. "0001txt" — confirm this is intended.
            cut_from = ext_period + 1 if ext_period > 0 else len(f)
            # If the user checked the numeration checkbox but inserted no
            # pattern, maybe they just want the number to be the filename
            if not numbering[3]:
                move_file(f, "{}{}".format(digits.format(i), f[cut_from:]))
            # If the numbering should go after the custom pattern, the
            # name is: custom pattern, digits, extension
            elif numbering[2] == 0:
                move_file(f, "{}{}{}".format(numbering[3], digits.format(i),
                                             f[cut_from:]))
            # But if the numbering should go before the custom pattern,
            # the new name is: digits, custom pattern, extension
            else:
                move_file(f, "{}{}{}".format(digits.format(i), numbering[3],
                                             f[cut_from:]))
    # Else there is no special numbering pattern to rename the files with
    else:
        # Move or duplicate the files with their same original name
        # (although maybe with lowercase and replace options, if desired)
        for f in original_files:
            move_file(f, f)
def onerror(func, path, exc_info):
    """
    Error callback (e.g. for shutil.rmtree): log the failure and continue.

    :param func: The function that raised (unused here)
    :param path: Path that could not be removed
    :param exc_info: Exception info describing the failure
    """
    message = "Couldn't remove '{}': {}".format(path, exc_info)
    log_error(message)
def bad_syntax_response(s: socket, request: str):
    """
    Report a syntactically invalid *request* back to the client on *s*.

    :param s: Connected socket to answer on
    :param request: The raw request text that failed to parse
    """
    log_error(code=ERROR, request=request, reason=BAD_SYNTAX)
    reply = ERROR + ": " + BAD_SYNTAX
    construct_response(s=s, data=reply, success=False)
def die(message, parser=None):
    """
    Log *message* as an error, optionally print usage, and terminate.

    :param message: Error to log (converted to str)
    :param parser: Optional argparse parser whose usage is printed
    """
    message = str(message)
    log_error(message)
    if parser is not None:
        parser.print_usage()
    # BUG FIX: exit with a nonzero status on the error path; the original
    # called sys.exit(0), signalling success to shells and CI pipelines.
    sys.exit(1)
# little) base_href = '\n<!-- added by flunc: --><base href="%s">' % twill.get_browser().get_url() match = re.search('<head.*?>', html, re.I|re.S) if match: html = html[:match.end()] + base_href + html[match.end():] else: html = base_href + html f = open(dump_file_name, 'wb') f.write(html) f.close() log_info("saved error html to: %s" % dump_file_name) except IOError, e: log_warn("Unable to save error HTML to: %s" % dump_file_name) if e.args: log_error("%s (%s)" % (msg,e.args[0])) else: log_error(msg) if options.show_error_in_browser: if options.dump_file == '-': log_warn("Web browser view is not supported when dumping error html to standard out.") else: try: log_info("Launching web browser...") import webbrowser path = os.path.abspath(os.path.expanduser(options.dump_file)) webbrowser.open('file://' + path) except: maybe_print_stack() log_error("Unable to open current HTML in webbrowser")