def download_list(l, options, logger: Logger, index=1, list_count=1):
    """Download every URL in *l* with youtube-dl using *options*.

    URLs whose requested format is unavailable are retried without a
    format restriction and the result is converted with ffmpeg.

    Args:
        l: list of video URLs.
        options: youtube-dl option dict; "keep_file" and (on fallback)
            "format" are consumed by this function.
        logger: fileloghelper Logger used for progress output.
        index: 1-based index of this list within a larger job (log text only).
        list_count: total number of lists in the job (log text only).
    """
    import subprocess  # local import: only needed for the ffmpeg fallback

    videos_without_format_avail = []
    video_format = None
    keep_file = options.get("keep_file", False)
    options.pop("keep_file", None)
    verbose = options.get("verbose", False)
    with youtube_dl.YoutubeDL(options) as ydl:
        # enumerate() instead of l.index(i): correct with duplicate URLs
        # and avoids an O(n) scan per item.
        for pos, url in enumerate(l):
            logger.debug(
                f"[{str(pos + 1)}/{str(index + 1)}/{str(list_count)}] downloading {url}",
                verbose)
            try:
                ydl.download([url])
            except DownloadError:
                # The original wrapped the whole loop, so the first failure
                # aborted every remaining download; collect and continue.
                videos_without_format_avail.append(url)
                if video_format is None:
                    video_format = options["format"]
                    del options["format"]
    if videos_without_format_avail:
        with youtube_dl.YoutubeDL(options) as ydl:
            for url in videos_without_format_avail:
                ydl.download([url])
                # Pick the last video file in sorted cwd listing -- inherited
                # heuristic for "the file just downloaded"; TODO confirm.
                video_name = None
                for entry in sorted(os.listdir()):
                    if utils.is_video(entry):
                        video_name = entry
                if video_name is None:
                    continue  # nothing to convert (was the "notafile.txt" sentinel)
                # subprocess list args instead of an os.system shell string:
                # immune to quoting / shell-injection issues in filenames.
                subprocess.run([
                    "ffmpeg", "-i", video_name,
                    removeExtension(video_name) + "." + video_format,
                ])
                if not keep_file:
                    os.remove(video_name)
def tab(header_and_content: dict, logger: Logger = None):
    """Render *header_and_content* as a table, print it and return it.

    Keys become column headers; values become one row (or, when the first
    value is itself a list, a list of rows).  When *logger* is given the
    table is also written to the debug log.
    """
    headers = list(header_and_content.keys())
    data = list(header_and_content.values())
    # isinstance instead of type() ==; guard empty dict (was IndexError).
    if data and not isinstance(data[0], list):
        data = [data]
    out = tabulate.tabulate(data, headers)
    if logger is not None:
        logger.debug("\n" + out)
    print(out)
    return out
def setup(logger: Logger, debug: bool):
    """Populate the module-level code<->country lookup tables.

    Fetches the country list from api.covid19api.com and fills
    code_to_country and country_to_code.  When *debug* is true, prints a
    table of all supported countries with their population if known.

    Raises:
        requests.HTTPError: if the API responds with an error status.
    """
    # timeout so a hung API call cannot block forever; raise_for_status
    # fails loudly instead of crashing later on unexpected JSON.
    r = requests.get("https://api.covid19api.com/countries", timeout=30)
    r.raise_for_status()
    for country in r.json():
        code_lower = country["ISO2"].lower()
        country_name_lower = country["Country"].lower()
        code_to_country[code_lower] = country_name_lower
        country_to_code[country_name_lower] = code_lower
    logger.debug("Updated list of supported countries", True)
    if debug:
        print(
            tabulate.tabulate(
                [[code, name, population_sizes.get(code, "N/A")]
                 for code, name in code_to_country.items()],
                ("Code", "Name", "Population")))
def main(options, logger=Logger()):
    """Copy freshly produced videos and mp3s into the user's media folders.

    Files already present at the destination are skipped.

    NOTE(review): ``logger=Logger()`` is a mutable default shared across
    calls; kept for interface compatibility.
    """
    logger.context = "transfer.py"
    VERBOSE = options.get("verbose", False)
    if __name__ == "__main__":
        utils.log_header(logger, utils.DESCRIPTION, VERBOSE)
    home = str(Path.home())

    video_dir = home + "/" + utils.videosPath()
    # hoisted: destination was re-listed on every loop iteration before;
    # a set also makes membership tests O(1) instead of `.__contains__` scans
    existing_videos = set(os.listdir(video_dir))
    for name in utils.getVideos():
        if name not in existing_videos:
            logger.plain("copying to (home)/" + utils.videosPath() + ": " + str(name),
                         VERBOSE)
            shutil.copyfile("videos/" + name, video_dir + "/" + name)

    music_dir = home + "/" + utils.musicPath()
    existing_music = set(os.listdir(music_dir))
    for name in utils.getMP3s():
        if name not in existing_music:
            logger.plain("copying to (home)/" + utils.musicPath() + ": " + str(name),
                         VERBOSE)
            shutil.copyfile("music/mp3/" + name, music_dir + "/" + name)

    logger.success("Copied all pending files.", not options.get("silent", False))
    logger.save()
def main():
    """Entry point: parse CLI args, load the target module and hand it off to the runner."""
    logger = Logger(context="ModuleInit")
    args = parse_args()
    verbose = args.get("verbose", False)
    logger.debug(str(args), verbose)
    module = get_module(args.get("file"))
    config = getattr(module, "CONFIG")
    logger.debug(str(config), verbose)
    runner.main(module, config, logger, verbose)
def main(options, logger=Logger()):
    """Convert downloaded videos to mp3 and embed extracted thumbnails.

    NOTE(review): ``logger=Logger()`` is a mutable default shared across
    calls; kept for interface compatibility.
    """
    VERBOSE = options.get("verbose", False)
    logger.context = "convert.py"
    if __name__ == "__main__":
        utils.log_header(logger, DESCRIPTION, VERBOSE)
    # Fetch the bitrate once: the original logged "(error)" as a fallback
    # but then actually used 320, so the log could contradict the value used.
    bitrate = options.get("bitrate", 320)
    logger.debug("Bitrate: " + str(bitrate), VERBOSE)
    silent = options.get("silent", False)
    utils.convertFiles(bitrate, silent)
    logger.plain(
        "Converted files. Now extracting thumbnails from videos...", VERBOSE)
    utils.extractThumbnails(silent)
    logger.plain(
        "Extracted thumbnails from videos, now writing them to mp3-files...",
        VERBOSE)
    utils.writeThumbnails()
    logger.success("All files converted.", not silent)
    logger.save()
def do_iterations(world, iterations, logger=None, config=None):
    """Run *iterations* simulation steps on *world*, collecting per-step data.

    Args:
        world: simulation World with .creatures (dict species -> list),
            .food_count, .food_count_before and .act(iteration, config).
        iterations: number of steps to run.
        logger: optional fileloghelper Logger; one is created when omitted.
        config: optional config dict passed through to world.act.

    Returns:
        dict mapping each species name (plus "food" and "population") to a
        list with one value per iteration.  Also writes raw_data.csv.
    """
    if config is None:  # avoid the shared mutable default argument
        config = {}
    if logger is None:
        logger = Logger("runner_do_iterations", "iterations", autosave=True)
    else:
        logger.context = "iterations"
    raw_data = {species: [] for species in world.creatures}
    raw_data["food"] = []
    raw_data["population"] = []
    csv_lines = []  # NOTE(review): never filled -- string logging is commented out
    try:
        species_count = {
            "iteration": 0,
            "food_count": world.food_count,
            "food_count_before": world.food_count_before,
            "population": 0
        }
        mystr = "iteration, food count, food count before, population"
        for species in world.creatures:
            # was len(species): that is the length of the species *name*,
            # not the number of creatures of that species
            species_count[species] = len(world.creatures[species])
            mystr += ", " + species
        species_vs = fileloghelper.VarSet(species_count)
        print(mystr)
        for iteration in range(iterations):
            world.act(iteration, config)
            # array logging for matplotlib
            population = 0
            for species in world.creatures:
                n_of_animals_in_species = len(world.creatures[species])
                raw_data[species].append(n_of_animals_in_species)
                population += n_of_animals_in_species
            raw_data["food"].append(world.food_count_before)
            raw_data["population"].append(population)
            species_vs.set("iteration", iteration)
            for species in world.creatures:
                # same len(species) bug fixed here
                species_vs.set(species, len(world.creatures[species]))
            species_vs.set("food_count", world.food_count)
            species_vs.set("food_count_before", world.food_count_before)
            species_vs.set("population", population)
            species_vs.print_variables()
    except (KeyboardInterrupt, KeyError):
        # a Ctrl-C or a missing key simply ends the run; data so far is kept
        # (the former `except Exception as e: raise e` was a no-op and is gone)
        pass
    finally:
        logger.save()
        # context manager guarantees the csv file is closed
        with open("raw_data.csv", "w") as f:
            header = "timestamp, " + ", ".join(raw_data)
            f.write(header + "\n")
            f.writelines(csv_lines)
    return raw_data
def main(module, config, logger=None, verbose=False):
    """Build a World from *module*'s creature classes per *config* and run it.

    Args:
        module: module object containing the creature classes.
        config: nested config dict ("World", "Creatures", "iterations").
        logger: optional fileloghelper Logger; one is created when omitted.
        verbose: forward verbose flag for debug logging.
    """
    if logger is None:
        logger = Logger("runner_main.log", "runner.py > main", autosave=True)
    else:
        logger.context = "runner.py > main()"
    classes = utils.get_classes(module)
    errors = utils.parse_config(config, classes)
    for err in errors:
        # isinstance instead of type() ==; NOTE(review): assumes ConfigError
        # and PropertyError are sibling classes -- confirm the hierarchy.
        if isinstance(err, ConfigError):
            logger.error("ConfigError: " + str(err), True)
        elif isinstance(err, PropertyError):
            logger.warning("PropertyError: " + str(err), True)
    # was config.get("World") without default: crashed with AttributeError
    # when the section was absent, unlike the {} defaults used further down
    world_cfg = config.get("World", {})
    w = World(initial_food=world_cfg.get("initial_food", 100),
              food_function=world_cfg.get("food_function", functions.base))
    for cls in classes:
        name = cls.__name__
        w.creatures[name] = []
        n_of_class = world_cfg.get("initial", {}).get("n_" + name, 1)
        # hoisted: this double lookup used to be repeated for every attribute
        creature_cfg = config.get("Creatures", {}).get(name, {})
        for _ in range(n_of_class):
            c = cls(name=name,
                    species=name,
                    reproduction_chance=creature_cfg.get("reproduction_chance", 0.1),
                    death_chance=creature_cfg.get("death_chance", 0.1),
                    speed=creature_cfg.get("speed", 1),
                    death_age=creature_cfg.get("death_age", 100),
                    age_increments=creature_cfg.get("age_increments", 0.1),
                    energy=creature_cfg.get("energy", 100))
            w.creatures[name].append(c)
        logger.debug(
            "Added " + str(n_of_class) + " instances of " + name +
            " to the world", verbose)
    data = do_iterations(w, config.get("iterations"), logger, config)
    logger.save()
    display_data(data)
from http.server import HTTPServer, BaseHTTPRequestHandler import json import urllib.parse as parse from fileloghelper import Logger from download import download_list from utils import removeExtension, get_extension, get_filetype logger = Logger("server.log", "ytHelperServer", True, True) logger.header(True, True, "THE YT SERVER", 8) class ytHelperHandler(BaseHTTPRequestHandler): def do_GET(self): logger.debug("GET - " + self.path) parsed_url = parse.urlparse(self.path) if parsed_url.path == "/": content = open("index.html", "r").read() self.send_response(200) self.send_header("Content-type", "text/html") self.send_header("Content-Length", len(content)) self.end_headers() self.wfile.write(bytes(content, "utf-8")) return True elif parsed_url.path == "/get": token = parse.parse_qs(parsed_url.query).get("token", "")[0] with open("config.json", "r") as f: config = json.loads(f.read()) urls = config["urls"] try: tokendict = urls[token] except KeyError:
import re import json from hashlib import sha256 import datetime from fileloghelper import Logger logger = Logger("utils.log", "utils", True, True) def get_server_params(name: str = None, description: str = None, repo_url: str = None, main_status: int = None, components: dict = None, password: str = None): """check args for user errors and return components: str (in json format), encrypted_password: str, djoined: str""" if name == None: name = "validname" if description == None: description = "" if repo_url == None: repo_url = "https://valid.test" if main_status == None: main_status = 0 if components == None: components = {} if password == None: password = "******" if len(name) > 20: raise ValueError("name argument too long (max 20)")
import datetime import random from hashlib import sha3_512 import pymysql.cursors from fileloghelper import Logger from pymysql.err import IntegrityError, OperationalError from base import mailservice, utils from base.utils import (EmailAlreadyLinked, InvalidParameterError, TaskDoesNotExist, UserAlreadyExists, UserAuthenticationObject, UserDoesNotExist, authenticate, exec_sql, file_paths) DESCRIPTION = "The base of helix, the wrapper to write to the central database" logger = Logger(file_paths["repo"]+"logs/base.log", "base", autosave=True) logger.header(True, True, DESCRIPTION, 0, False) def delete_all_tasks(user: UserAuthenticationObject): """Delete all tasks. No undo!""" userobj = get_user(user) # raises UserDoesNotExist if so exec_sql(f"DELETE FROM tasks WHERE user='******'username')}'") logger.debug(f"Deleted all tasks from user '{userobj.get('username')}'") def add_task(user: UserAuthenticationObject, title: str, description: str = "", due_date: str = "", deadline: str = ""): logger.debug(f"Adding task {title}") date_created = datetime.datetime.now().isoformat()[:-7] title = title.replace("'", "\\'").replace('"', '\\"') description = description.replace("'", "\\'").replace('"', '\\"')
def main(options, logger=None):
    """Download everything for the current query and move videos to videos/.

    Args:
        options: dict of CLI options (verbose, silent, ...).
        logger: optional fileloghelper Logger; a default one is created
            when omitted.
    """
    if logger is None:
        logger = Logger("download.log", "download.py", True, True)
    logger.context = "download.py"  # set once; was assigned twice before
    try:
        query = utils.getQuery()
        if __name__ == "__main__":
            utils.log_header(logger, DESCRIPTION, options.get("verbose", False))
        verbose = options.get("verbose", False)
        logger.debug("Options:", verbose)
        for key in options:
            logger.debug(key + ": " + str(options[key]), verbose)
        download_query(query, options, logger)
        # move all downloaded videos in videos directory
        repoDir = utils.repoPath()
        for f in os.listdir(repoDir):
            if utils.is_video(f):
                # NOTE(review): rename uses paths relative to the cwd, not
                # repoDir -- works only when cwd == repoDir; confirm.
                os.rename(f, "videos/" + f)
        logger.success("All files downloaded.",
                       not options.get("silent", False))
    except Exception as e:
        logger.handle_exception(e)
        raise  # bare raise preserves the original traceback
import serverly.err import serverly.plugins import serverly.stater import serverly.statistics import uvicorn from fileloghelper import Logger from serverly import default_sites from serverly.objects import Request, Response, StaticSite from serverly.utils import * description = "A really simple-to-use HTTP-server" address = ("localhost", 8080) name = "serverly" version = "0.4.6" logger = Logger("serverly.log", "serverly", False, True) logger.header(True, True, description, fileloghelper_version=True, program_version="serverly v" + version) error_response_templates = {} https_redirect_url: str = None async def _read_body(receive): """ Read and return the entire body from an incoming ASGI message. http://www.uvicorn.org/#http-scope """ body = b'' more_body = True while more_body:
def exec_sql(sql_command="SELECT * FROM tasks", verbose=True,
             logger: Logger = None):
    """Execute *sql_command* on the stater database.

    Returns the fetched rows for SELECT statements, otherwise commits and
    returns None.  If sql_command is not specified, all tasks are returned.

    Raises:
        AuthenticationError: when the database connection is refused.

    NOTE(review): the credentials below are redaction placeholders
    ('******') from source scrubbing -- kept byte-identical.
    """
    result = None
    if isinstance(logger, Logger):  # was: logger != None and type(logger) == Logger
        logger.set_context("exec_sql")
    elif verbose:
        logger = Logger("exec_sql.log", "exec_sql", True, True)
    try:
        connection = pymysql.connect(host='localhost',
                                     user='******',
                                     password='******',
                                     db='stater',
                                     charset='utf8mb4',
                                     cursorclass=pymysql.cursors.DictCursor)
    except OperationalError as e:
        # guard: logger is still None when verbose is False and none was given
        if logger is not None:
            logger.handle_exception(e)
        raise AuthenticationError("Access denied to database")
    if verbose and logger is not None:
        logger.success("connected to database", False)
        logger.debug("executing SQL-Command: " + sql_command)
    try:
        with connection.cursor() as cursor:
            cursor.execute(sql_command)
            if "SELECT" in sql_command:
                result = cursor.fetchall()
            else:
                connection.commit()
        if verbose and logger is not None:
            logger.debug("SQL query returned: " + str(result))
    finally:
        # close the connection even when execute() raises (was leaked before);
        # the old `return result` inside a finally also swallowed exceptions
        try:
            connection.close()
            if verbose and logger is not None:
                logger.success("Shut down connection to database", False)
        except Exception as e:
            if isinstance(logger, Logger):
                logger.handle_exception(e)
            else:
                print(e)
    return result
status-system of server as well as submodules --- - 0: no errors - 1: partially constrained - 2: major/complete fail - 3: offline """ from fileloghelper import Logger import pymysql.cursors from pymysql.err import OperationalError from Stater.utils import get_server_params, encrypt from Stater.err import * import json logger = Logger("base.log", "register", True, True) logger.header(True, True, "Stater - base", 0, True, "moonshine") def exec_sql(sql_command="SELECT * FROM tasks", verbose=True, logger: Logger = None): """Execute sql_command on database and return None or whatever is returned from the database. If sql_command is not specified, all tasks will get returned""" result = None if logger != None and type(logger) == Logger: logger.set_context("exec_sql") elif verbose == True: logger = Logger("exec_sql.log", "exec_sql", True, True) try: connection = pymysql.connect(host='localhost', user='******', password='******', db='stater',
try: query = utils.getQuery() logger.context = "download.py" if __name__ == "__main__": utils.log_header(logger, DESCRIPTION, options.get("verbose", False)) logger.debug("Options:", options.get("verbose", False)) for i in options: logger.debug(i + ": " + str(options[i]), options.get("verbose", False)) download_query(query, options, logger) # move all downloaded videos in videos directory repoDir = utils.repoPath() fs = os.listdir(repoDir) for f in fs: if utils.is_video(f): os.rename(f, "videos/" + f) logger.success("All files downloaded.", not options.get("silent", False)) except Exception as e: logger.handle_exception(e) raise e if __name__ == "__main__": logger = Logger("download.log", "download.py", False, True) logger.header(True, True, DESCRIPTION, 0, True) main(parse_args(), logger)
"--pop-percent", "--pop-%", "-p-%", "-p%", "-pp", action="store_true", help="Plot % of active cases of population") args = p.parse_args() if args.all: args.active = True args.recovered = True args.deaths = True args.population_percent = True logger = Logger("log", autosave=True) if not args.summary and not args.summary_only and not ( args.active or args.recovered or args.deaths or args.population_percent): logger.warning( "No output specified (active/recovered etc.). Use the -h option to get more information." ) exit(0) manager = DataManager(logger, args.countries, True) if args.summary_only: manager.load_summary() print_summary() exit(0)
dest="keep_files", action="store_true", help= "when wished format is not availabe, it will be automatically converted by default. This keeps the old format, too." ) parser.add_argument("--filename", "--fname", nargs="?", type=str, help="filename to save the file under") arguments = parser.parse_args() args = vars(arguments) VERBOSE = args.get("verbose", False) log = Logger("workflow.log", "workflow.py", True, True) utils.log_header(log, DESCRIPTION, VERBOSE) try: log.debug("Debug mode activated.", VERBOSE) log.debug("Args: ", VERBOSE) for i in args: log.debug(i + ": " + str(args[i]), VERBOSE) if not args.get("test", False): download.main(args, logger=log) if args.get("facerec", None) != None and not args.get("test", False): if "all" in args.get("facerec", []): # pass all videos from utils.getVideos() to facerec facerec.main({"files": utils.getVideos()}) else: # pass all videos from args to facerec
from json.decoder import JSONDecodeError import yagmail from fileloghelper import Logger try: import base import base.statistics from base.utils import (InvalidParameterError, UserAuthenticationObject, exec_sql, file_paths, ranstr, session_length) except ImportError: if __name__ != "__main__": raise ImportError("Unable to import necessary modules.") from utils import file_paths logger = Logger("logs/mailservive.log", "mailservice", True, True) REGISTRATION_TEMPLATE = """Hi ${username}, thanks for signing up to Helix! When did you? ${djoined} Please click this link to complete your registration and activate your account: ${link} """ VERIFICATION_TEMPLATE = """Hi ${username}, you recently changed your email address. Therefore it needs to be verified so we know, you're who you pretend 😉. To do, please open this link: ${link} Thanks! """ with open(file_paths["config.json"], "r") as f: mail = json.loads(f.read()) yag = yagmail.SMTP(mail["email"]["email"], password=mail["email"]["password"]) def send_email(TO, CONTENT, SUBJECT="", ATTACHMENTS=None, verbose=True):