def check_arguments(args: dict):
    """Validate CLI arguments, load the IP/UA databases and, when an
    Elasticsearch API address is given, probe/prepare the ES index.

    Side effects: rebinds the module globals ``logger``, ``myIPArray`` and
    ``myUAArray``; prints a startup banner; terminates the process (via
    ``exitProgram``) when the user-agent database file is missing.

    Raises:
        ValueError: when args["num"] is non-positive or >= 500000.
    """
    logger_configurator()
    global logger
    logger = logging.getLogger(__name__)
    print("________________________________")
    print("")
    print("- Script générateur de logz -")
    print("________________________________")
    print("")
    print("Script debuté le " + get_dateNow())
    # Reject a non-positive document count.
    if args["num"] <= 0:
        raise ValueError(
            "Je suis hors competence pour realiser votre demande irrationnelle ...\n Par example vous pourriez posez la question a siri ou Alexa :)"
        )
    # Cap a single run at 500 000 documents; --infinite is the intended
    # mode for larger volumes.
    if args["num"] >= 500000:
        raise ValueError(
            "Je suis desole mais je ne veux pas realiser une demande aussi grande\n passez en mode --infinite ou alors voyez ca avec le super calculateur de Google :)"
        )
    print("------------------")
    print("- Chargement Bdd -")
    print("------------------")
    # Handle the IP database file.
    # NOTE(review): chrono_start_load_bdds is only referenced by the
    # commented-out timing print further down.
    chrono_start_load_bdds = get_dateNow()
    if not file_exist_check(myPath + myCsvFile):
        # CSV missing: check that the zip archive exists (exits otherwise),
        # then extract the CSV from it.
        file_exist_check_ifNot_exit(myPath + myZipFile)
        extract_a_specified_file_from_zip_archive(myPath + myZipFile, myCsvFile, myPath + myCsvFile)
        # Presumably extraction yields a directory named like the CSV:
        # rename it, pull the CSV out, then remove it — TODO confirm against
        # files_handler.extract_a_specified_file_from_zip_archive.
        rename_directory(myPath + myCsvFile, myPath + myCsvFile + ".old")
        move_file(myPath + myCsvFile + ".old/" + myCsvFile, myPath + myCsvFile)
        remove_directory(myPath + myCsvFile + ".old")
    # Map the IP database CSV file to an in-memory array (module global).
    global myIPArray
    myIPArray = map_csv2array(myPath + myCsvFile)
    # Handle the user-agent database file.
    if file_exist_check(myPath + myUaFile):
        global myUAArray
        myUAArray = map_ua_csv2array(myPath + myUaFile)
    else:
        print("Bdd handler: fichier non trouvé : " + myUaFile)
        exitProgram()
    #print("Bdd handler: temps pour charger les bases : " + str( calculat_elapsed_time( get_dateNow(), chrono_start_load_bdds ) ))
    # When an Elasticsearch endpoint is provided, probe it and verify the
    # pipeline / template / ILM policy before creating the index.
    if args["esapiip"] != "":
        print(" ")
        print("---------------")
        print("- Elastic Api -")
        print("---------------")
        pingHost(args["esapiip"])
        es_getSrvResponse(args["esapiip"])
        es_getSrvColorStatus(args["esapiip"])
        es_getSrvVersion(args["esapiip"])
        es_check_existing_pipeline(args["esapiip"])
        es_check_existing_template(args["esapiip"])
        es_check_ilm_policy_exist(args["esapiip"])
        es_create_new_index(args["esapiip"])
def get_log(myIPArray: list, myUAArray: list):
    """Build one randomly generated fake "tweet" log entry.

    Args:
        myIPArray: rows of the IP/location database (from map_csv2array).
        myUAArray: user-agent entries (from map_ua_csv2array).

    Returns:
        tuple: (plain-text log line, URL-encoded log line, JSON document
        string).
    """
    name = get_name()
    messagetweet = randchoice(MESSAGE_TWEET)
    row = get_randomRow(myIPArray)
    ip_fake = get_fakeIp(row)
    # 8-tuple from the IP database row; the 7th element is unused here.
    country_short, country_long, region, town, longitude, latitude, _, time_zone = get_location_from_IP_Row(row)
    hashTags, hashTagsArray = gen_Hashtags()
    uu_id = get_uuid()
    age = get_age()
    dateNow = get_dateNow()
    userAgent = ua_get_user_agent(myUAArray)

    # Shared field layout: the file form quotes the tweet message, the URL
    # form leaves it raw; everything else is identical.
    head = [ip_fake, dateNow, name, hashTags]
    tail = [str(age), uu_id, country_short, country_long, region, town,
            longitude, latitude, time_zone, userAgent]
    singleLogToLogFile = " ".join(head + ['"' + messagetweet + '"'] + tail)
    singleLogToUrl = stringParserToUrl.quote(
        " ".join(head + [messagetweet] + tail))

    # NOTE: the "hasTags" key spelling is kept as-is for compatibility with
    # the existing ES mapping/consumers.
    singleLogToJson = json.dumps({
        "ip_address": ip_fake,
        "dateTime": dateNow,
        "user": name,
        "hasTags": hashTagsArray,
        "message_tweet": messagetweet,
        "age": age,
        "uuid": uu_id,
        "userAgent": userAgent
    })
    return singleLogToLogFile, singleLogToUrl, singleLogToJson
def main(**kwargs):
    """Entry point: parse CLI options, then generate and dispatch fake logs.

    An already-built ``args`` dict may be passed through **kwargs to bypass
    argparse; otherwise arguments come from the command line.
    """
    logger.info("Debut de script de generateur de logz")
    # parse command line arguments
    parser = ArgumentParser(description="Generate log.")
    parser.add_argument("--num", metavar="NUMBER", type=int, default=1, help="change number of logs generated")
    parser.add_argument("--no_print", default=False, action="store_true", help="prevent printing logss to terminal")
    parser.add_argument("--fname", metavar="FILE NAME", default="", help="save output in a text file")
    parser.add_argument("--speed_gen", metavar="NUMBER", type=int, default=1, help="set speed log generation")
    parser.add_argument("--errlog", metavar="FILE NAME", default="", help="save output log text file")
    parser.add_argument("--infinite", default=False, action="store_true", help="infinit log")
    parser.add_argument("--webip", default="", type=str, help="target Ip to send logz")
    parser.add_argument("--no_pause", default=False, action="store_true", help="prevent pause printing logs to terminal")
    parser.add_argument("--esapiip", default="", type=str, help="elastic api ip address")
    parser.add_argument("--loglvl", default="ERROR", type=str, help="logger level (DEBUG, INFO, WARNING, ERROR)")
    # get args
    if "args" in kwargs:
        args = kwargs["args"]
    else:
        args = vars(parser.parse_args())
    try:
        check_arguments(args)
    except ValueError as error:
        print("Error: " + str(error))
        # NOTE(review): prefer sys.exit(1) over the site builtin exit().
        exit(1)
    # Informational banner for the Elasticsearch part.
    if args["esapiip"] != "":
        # Also serves as the start time for the total-injection chrono
        # printed at the end of the script.
        chrono_end_inject_docs = get_dateNow()
        if args["infinite"] == True:
            print(
                "es_api: Injection sans fin de documents, seul ctrl +c pour quitter."
            )
        else:
            print("es_api: Injection de(s) " + str(args["num"]) + " log(s)")
            if args["num"] > 100:
                print("es_api: Bulk_mode actif")
                print("es_api: Génération en Ram des " + str(args["num"]) + " documents")
    # Fixed-count generation loop.
    for _ in range(args["num"]):
        output_text, output_text_url, output_json = get_log(
            myIPArray, myUAArray)
        # NOTE(review): `args == ""` is always False (args is a dict), so
        # this condition reduces to: num == 1 and not infinite.
        if (args == "" or args["num"] == 1) and args["infinite"] == False:
            if not args["no_pause"]:
                randomPause(args["speed_gen"])
            if not args["no_print"]:
                print(output_text)
            if args["fname"] != "":
                log2File(output_text, args["fname"])
            if args["esapiip"] != "":
                # Single-document form: no bulk counters.
                es_add_document(args["esapiip"], output_json)
            if args["webip"] != "":
                web_post_document(args["webip"], output_text_url, error_log_file_path)
        elif args["num"] > 1:
            if not args["no_pause"]:
                randomPause(args["speed_gen"])
            if not args["no_print"]:
                print(output_text)
            if args["fname"] != "":
                log2File(output_text, args["fname"])
            if args["esapiip"] != "":
                # Bulk form: pass the total count and the current index `_`.
                es_add_document(args["esapiip"], output_json, int(args["num"]), _)
            if args["webip"] != "":
                web_post_document(args["webip"], output_text_url, error_log_file_path)
    # Endless generation: only Ctrl+C (or an exception) leaves this loop.
    if args["infinite"] == True:
        while True:
            output_text, output_text_url, output_json = get_log(
                myIPArray, myUAArray)
            if not args["no_pause"]:
                randomPause(args["speed_gen"])
            if not args["no_print"]:
                print(output_text)
            if args["fname"] != "":
                log2File(output_text, args["fname"])
            if args["webip"] != "":
                web_post_document(args["webip"], output_text_url, error_log_file_path)
            if args["esapiip"] != "":
                #es_add_document(args["esapiip"], output_json)
                es_add_document(args["esapiip"], output_json, 1, 1)
    # -------------------------------------------------
    # End-of-script messages.
    if args["esapiip"] != "":
        print(
            "es_api: Temps total d'injection du/des " + str(args["num"]) +
            " doc(s) => " +
            str(calculat_elapsed_time(get_dateNow(), chrono_end_inject_docs)))
        #print("es_api: index : " + es_get_index_name_datenow() + " count = " + str( es_count_of_given_indexName(args["esapiip"], es_get_index_name_datenow()) ) )
    if args["num"] >= 1:
        print("\n==============================================")
        print("Script terminé à : " + get_dateNow())
        print("Temps: " + str(calculat_elapsed_time(get_dateNow(), startScript)) + " écoulé pour " + str(args["num"]) + " logs générés")
        print("Log level = " + get_logLevel_from_file())
        print("==============================================")
def exitProgram():
    """Announce the termination time, then stop the process via SystemExit."""
    import sys
    # Tell the user when the program stopped before raising SystemExit.
    timestamp = get_dateNow()
    print("Sortie du programme : " + timestamp)
    sys.exit()
# --- Project-local helpers --------------------------------------------------
from log_generator.dateTime_handler import get_dateNow, calculat_elapsed_time, addZero
from log_generator.files_handler import file_exist_check, check_sizeFileNotNull, extract_a_specified_file_from_zip_archive, rename_directory, move_directory, remove_directory, file_exist_check_ifNot_exit, move_file
from log_generator.ip_handler import map_csv2array
from log_generator.files_logs_handler import log2File
# NOTE(review): get_log is imported here but a get_log is also defined in
# this file — one shadows the other depending on definition order; verify.
from log_generator.logz_handler import get_log
from log_generator.network_handler import pingHost
from log_generator.es_handler import es_getSrvColorStatus, es_getSrvResponse, es_getSrvVersion, es_check_existing_pipeline, es_check_existing_template, es_get_index_name_datenow, es_count_of_given_indexName
from log_generator.ua_handler import map_ua_csv2array
from log_generator.es_handler import es_add_document, es_get_index_shard_number, es_create_new_index, es_check_ilm_policy_exist
from log_generator.webSrv_handler import web_post_document
from log_generator.logger_handler import logger_configurator, logLevel_Converter, check_exist_log_level, create_file_log_level, get_logLevel_from_file
# --- Standard library -------------------------------------------------------
from random import choice as randchoice
import random, time, uuid, shutil, logging
# --------------------------------------
# Module-level configuration and state.
startScript = get_dateNow()  # script start timestamp (used for elapsed time)
myPath = "./log_generator/"  # base directory of the bundled data files
myCsvFile = "IP2LOCATION-LITE-DB11.CSV"  # IP/location database
myZipFile = "IP2LOCATION-LITE-DB11.CSV.ZIP"  # archive containing the CSV
lineNumber: int = 0
myUaFile = "list_of_comon_user_agent.txt"  # user-agent database
error_log_file_path = "./error_httpLogFile.txt"  # HTTP error log destination
# The IP and user-agent arrays are initialised here and rebound (as
# globals) inside "check_arguments".
myIPArray = []
myUAArray = []
logger = logging.getLogger(__name__)
# (definition continues beyond this chunk)
def check_arguments(args: dict):