def __init__(self):
    """Initialise the sentiment-analysis module and announce readiness."""
    super(SentimentAnalysis, self).__init__()

    # Path of the lexicon used for sentiment scoring.
    loader = ConfigLoader.ConfigLoader()
    self.sentiment_lexicon_file = loader.get_config_str("Directories", "sentiment_lexicon_file")

    self.time1 = time.time()

    # Delay (seconds) between two processed messages.
    self.pending_seconds = 1

    # Report the module state to the shared logger.
    self.redis_logger.info(f"Module {self.module_name} initialized")
def run(self, userNowUsingLanguage: str, mainWareHouse: wareHouse):
    """Reload the configuration and wait for the user to press Enter."""
    lang = mainWareHouse.languagesContents[userNowUsingLanguage]
    reload_msgs = lang["commandsMessage"]["reloadConfig"]

    print(reload_msgs["reloading_TipsMessage"])
    ConfigLoader.LoadConfig(mainWareHouse)

    # Pause so the user can read the result before the menu returns.
    print(reload_msgs["reloadComplete_TipsMessage"])
    input(lang["globalMessageTips"]["anyKeyContinue_TipsMessage"])
def __init__(self, pymisp):
    """Bind a PyMISP client and prepare the daily event to push into."""
    self.pymisp = pymisp
    self.currentID_date = None
    self.eventID_to_push = self.get_daily_event_id()

    # Read the duplicate threshold once, then drop the loader reference.
    loader = ConfigLoader.ConfigLoader()
    self.maxDuplicateToPushToMISP = loader.get_config_int("ailleakObject", "maxDuplicateToPushToMISP")
    loader = None

    self.attribute_to_tag = None
def __init__(self, id):
    """Load user *id*, falling back to the anonymous account when unknown."""
    loader = ConfigLoader.ConfigLoader()
    self.r_serv_db = loader.get_redis_conn("ARDB_DB")
    loader = None

    # Only ids registered in the 'user:all' hash are accepted.
    known_user = self.r_serv_db.hexists('user:all', id)
    self.id = id if known_user else "__anonymous__"
def __init__(self):
    """Load AlphaVantage credentials and reset per-key usage counters."""
    config = ConfigLoader.configImport("config")
    self.AlphaVantageAccounts = config.getAccountsByAPI("AlphaVantage")
    self.AlphaVantageKey = config.getAPIKey("AlphaVantage")
    self.numKeys = len(self.AlphaVantageKey)

    # One zeroed counter per API key, indexed 0..numKeys-1.
    key_indices = range(self.numKeys)
    self.DailyUsage = dict.fromkeys(key_indices, 0)
    self.MinuteUsage = dict.fromkeys(key_indices, 0)
    self.Time = dict.fromkeys(key_indices, 0)

    self.quit = False
    self.acctIndex = 0
    self.loadDailyUsage()
def eraseFileInFolder(filename):
    """Delete *filename* from today's dated video directory.

    The directory is ./<VIDEO.Directory>/<dd-mm-YYYY>.

    :param filename: base name of the file to remove
    :return: True when the file existed and was removed, False otherwise
    """
    today = datetime.now()
    dt_string = today.strftime("%d-%m-%Y")
    directory = './' + cfgLoader.getINIConfiguration()['VIDEO']['Directory'] + '/' + dt_string
    if checkFileInFolder(filename, directory):
        # BUG FIX: was 'directoty' — a NameError raised whenever the file
        # actually existed and removal was attempted.
        os.remove(directory + '/' + filename)
        return True
    else:
        return False
def __init__(self):
    """Set up the URL-statistics module."""
    super(ModuleStats, self).__init__()

    # Seconds to wait between two processed messages.
    self.pending_seconds = 20

    # Describe the module in the logs.
    self.redis_logger.info("Makes statistics about valid URL")

    # REDIS connection used to store trending stats.
    self.r_serv_trend = ConfigLoader.ConfigLoader().get_redis_conn("ARDB_Trending")
def initGlobalFuncs(configFilename):
    """Load the config file, then initialise logging and auth cookie ids."""
    global Config, DB, AuthSessionCOOKIEID, AuthAdminSessionCOOKIEID

    Config = ConfigLoader.ConfigLoader(configFilename)

    # Resolve the logging settings before handing them to setLogging().
    log_dir = Config.getSettingStr('LOGGING_DIR', './logs')
    log_name = Config.getSettingStr('HTTP_LOGNAME', 'httpdaemon.log')
    level_name = Config.getSettingStr('LOGGING_LEVEL', 'debug').lower()
    setLogging(logFilename=log_dir + '/' + log_name,
               logLevel=LOGLEVELS[level_name],
               logSize=Config.getSettingValue('LOGGING_SIZE', '((1024 * 100) * 100)'),
               logNum=Config.getSettingValue('LOGGING_NO', '10'))

    # Cookie/session authentication identifiers.
    AuthSessionCOOKIEID = Config.getSettingStr('AUTH_USER_COOKIEID', 'UserAuthID')
    AuthAdminSessionCOOKIEID = Config.getSettingStr('AUTH_ADMIN_COOKIEID', 'AdminAuthID')
def __init__(self, new_version):
    """Record the target version and read the currently installed one."""
    self.version = new_version
    self.start_time = time.time()

    self.config = ConfigLoader.ConfigLoader()
    self.r_serv = self.config.get_redis_conn("ARDB_DB")

    # Versions look like 'vX.Y'; strip the leading 'v' before parsing.
    self.f_version = float(self.version[1:])
    stored_version = self.r_serv.get('ail:version')
    # No stored version means a fresh install: treat it as 0.
    self.current_f_version = float(stored_version[1:]) if stored_version else 0
def main(argv=None):
    """Build the Tk window, load the config and display the theme plugins."""
    config = conf.ConfigLoader()
    root = tk.Tk()
    config.load()
    viv.__api_config = config

    root.title("Vivaldi Theme Loader")

    # Discover plugin modules under the configured theme folder.
    plugin_dir = config.option["vivaldiThemeFolder"] + "/plugins"
    plugins = plugin.get_modules_from(plugin_dir)
    ModuleDisplay.display(root, plugins)

    root.mainloop()
    return 0
def __init__(self, domain, type, port=80):
    """Describe a crawled domain and resolve its on-disk storage paths.

    :param domain: domain name being tracked
    :param type: 'onion', 'regular' or 'i2p'
    :param port: service port, defaults to 80
    """
    config_loader = ConfigLoader.ConfigLoader()
    self.r_serv_onion = config_loader.get_redis_conn("ARDB_Onion")
    self.r_serv_metadata = config_loader.get_redis_conn("ARDB_Metadata")
    # Absolute pastes folder, with a trailing '/'.
    self.PASTES_FOLDER = os.path.join(
        os.environ['AIL_HOME'],
        config_loader.get_config_str("Directories", "pastes")) + '/'
    self.domain = domain
    self.type = type
    self.port = port
    self.tags = {}
    if type == 'onion' or type == 'regular':
        self.paste_directory = os.path.join(
            os.environ['AIL_HOME'],
            config_loader.get_config_str("Directories", "pastes"))
        self.paste_crawled_directory = os.path.join(
            self.paste_directory,
            config_loader.get_config_str("Directories", "crawled"))
        self.paste_crawled_directory_name = config_loader.get_config_str(
            "Directories", "crawled")
        self.screenshot_directory = os.path.join(
            os.environ['AIL_HOME'],
            config_loader.get_config_str("Directories", "crawled_screenshot"))
        self.screenshot_directory_screenshot = os.path.join(
            self.screenshot_directory, 'screenshot')
    elif type == 'i2p':
        # NOTE(review): for i2p BOTH directories point at 'crawled_screenshot';
        # paste_directory looks copy-pasted — confirm it is intentional.
        self.paste_directory = os.path.join(
            os.environ['AIL_HOME'],
            config_loader.get_config_str("Directories", "crawled_screenshot"))
        self.screenshot_directory = os.path.join(
            os.environ['AIL_HOME'],
            config_loader.get_config_str("Directories", "crawled_screenshot"))
    else:
        ## TODO: # FIXME: add error
        pass
    config_loader = None
def recordCameras():
    """Start one recording thread per camera listed in the CSV file."""
    csv_info = cfgLoader.getCSVFile()
    if not csv_info['result']:
        print('file not found.')
        return

    sched.startCameraRecording()
    camera_rows = fileManager.readCameraCSV(csv_info['path'])

    workers = []
    for row in camera_rows.itertuples():
        # Columns: row[1]=store, row[2]=shoppingCenter,
        #          row[3]=cameraID, row[4]=cameraIP.
        worker = threading.Thread(target=startRecording,
                                  args=(row[1], row[2], row[3], row[4]))
        workers.append(worker)
        worker.start()
def __init__(self, p_path):
    """Load a paste from disk and pre-compute its metadata.

    :param p_path: absolute path, or path relative to the pastes folder
    """
    config_loader = ConfigLoader.ConfigLoader()
    self.cache = config_loader.get_redis_conn("Redis_Queues")
    self.store = config_loader.get_redis_conn("Redis_Data_Merging")
    self.store_metadata = config_loader.get_redis_conn("ARDB_Metadata")
    self.PASTES_FOLDER = os.path.join(
        os.environ['AIL_HOME'],
        config_loader.get_config_str("Directories", "pastes"))
    # Accept both relative and absolute paths; keep both forms.
    if self.PASTES_FOLDER not in p_path:
        self.p_rel_path = p_path
        self.p_path = os.path.join(self.PASTES_FOLDER, p_path)
    else:
        self.p_path = p_path
        self.p_rel_path = p_path.replace(self.PASTES_FOLDER + '/', '', 1)
    self.p_name = os.path.basename(self.p_path)
    # Size in KiB, rounded to 2 decimals.
    self.p_size = round(os.path.getsize(self.p_path) / 1024.0, 2)
    # NOTE(review): the first from_buffer call is immediately overwritten;
    # it looks redundant (possibly a libmagic warm-up) — confirm before removing.
    self.p_mime = magic.from_buffer("test", mime=True)
    self.p_mime = magic.from_buffer(self.get_p_content(), mime=True)
    # Assuming that the paste will alway be in a day folder which is itself
    # in a month folder which is itself in a year folder.
    # /year/month/day/paste.gz
    var = self.p_path.split('/')
    self.p_date = Date(var[-4], var[-3], var[-2])
    self.p_date_path = os.path.join(var[-4], var[-3], var[-2], self.p_name)
    self.p_source = var[-5]
    self.supposed_url = 'https://{}/{}'.format(
        self.p_source.replace('_pro', ''), var[-1].split('.gz')[0])
    # Lazily-computed attributes; presumably filled later by accessor
    # methods — verify against callers.
    self.p_encoding = None
    self.p_hash_kind = {}
    self.p_hash = {}
    self.p_langage = None
    self.p_nb_lines = None
    self.p_max_length_line = None
    self.array_line_above_threshold = None
    self.p_duplicate = None
    self.p_tags = None
def __init__(self, splash_url, type, crawler_options, date, requested_mode, url, domain, port, cookies, original_item, *args, **kwargs):
    """Configure a crawl: target, date bucket, output dirs and Redis logging.

    :param splash_url: URL of the Splash rendering service
    :param type: domain type (stored as domain_type)
    :param crawler_options: dict with 'user_agent', 'png' and 'har' entries
    :param date: dict with 'date_day' (YYYYMMDD), 'date_month' and 'epoch'
    """
    self.splash_url = splash_url
    self.domain_type = type
    self.requested_mode = requested_mode
    self.original_item = original_item
    self.root_key = None
    self.start_urls = url
    self.domains = [domain]
    self.port = str(port)
    # YYYYMMDD -> YYYY/MM/DD, used to bucket the output by day.
    date_str = '{}/{}/{}'.format(date['date_day'][0:4],
                                 date['date_day'][4:6],
                                 date['date_day'][6:8])
    self.full_date = date['date_day']
    self.date_month = date['date_month']
    self.date_epoch = int(date['epoch'])
    self.user_agent = crawler_options['user_agent']
    self.png = crawler_options['png']
    self.har = crawler_options['har']
    self.cookies = cookies
    config_section = 'Crawler'
    self.p = Process(config_section)
    self.item_dir = os.path.join(
        self.p.config.get("Directories", "crawled"), date_str)
    config_loader = ConfigLoader.ConfigLoader()
    self.har_dir = os.path.join(
        config_loader.get_files_directory('har'), date_str)
    config_loader = None
    self.r_serv_log_submit = redis.StrictRedis(
        host=self.p.config.get("Redis_Log_submit", "host"),
        port=self.p.config.getint("Redis_Log_submit", "port"),
        db=self.p.config.getint("Redis_Log_submit", "db"),
        decode_responses=True)
    # NOTE(review): root_key is assigned None twice — the second is redundant.
    self.root_key = None
def main():
    """Interactive entry point: ask which loan simulation to run, then run it.

    Prompts (in French) until a choice in 1..5 is entered, builds the matching
    simulator and prints it when compute() succeeds.
    """
    configs = ConfigLoader()

    ret = None
    while ret not in [1, 2, 3, 4, 5]:
        msgs = []
        msgs.append("Quelle type de simulation voulez-vous lancer ? :")
        msgs.append("1- Capacité maximale de remboursement")
        msgs.append("2- Nombre d'années nécessaire suivant un salaire")
        msgs.append("3- Mensualité nécessaire suivant un nombre d'années et un salaire")
        msgs.append("4- Nombre d'années nécessaire suivant un salaire et des dépenses additionnelles")
        msgs.append("5- Mensualité nécessaire suivant un nombre d'années, un salaire et des dépenses additionnelles")
        msgs.append("")
        ret = input("\n".join(msgs))
        try:
            ret = int(ret)
        # FIX: was a bare 'except:' — also caught KeyboardInterrupt, making
        # Ctrl-C unable to leave the prompt loop. int() on a str raises ValueError.
        except ValueError:
            ret = None

    try:
        if ret == 1:
            simulation = MaxCapacitySimulator(configs)
        elif ret == 2:
            simulation = PretSimulator(configs, False, False)
        elif ret == 3:
            simulation = PretSimulator(configs, False, True)
        elif ret == 4:
            simulation = PretSimulator(configs, True, False)
        elif ret == 5:
            simulation = PretSimulator(configs, True, True)
    # FIX: was a bare 'except:'; keep the silent-abort behaviour but only
    # for ordinary exceptions, not SystemExit/KeyboardInterrupt.
    except Exception:
        return

    if simulation.compute():
        print(simulation)
def loadDataFromConfig():
    """Populate the module-level connection settings from the config file."""
    import ConfigLoader

    global deviceName, serverIP, serverPort, serverUrl

    # loadConfig() yields the device name plus the server endpoint pair.
    loaded = ConfigLoader.loadConfig()
    deviceName, serverIP, serverPort = loaded
    serverUrl = createServerUrl()
def Include(filename):
    """Load an additional configuration file into the KickStart config module."""
    ConfigLoader.loadConfigFile(filename, KickStart.CONFIG_MODULE)
def __init__(self, p_path):
    """Load a tweet paste from disk and pre-compute its metadata.

    Resolves *p_path* against the configured pastes folder, using
    ConfigLoader when available and falling back to reading
    packages/config.cfg directly otherwise.

    :param p_path: absolute path, or path relative to the pastes folder
    :raises Exception: when neither ConfigLoader nor config.cfg is usable
    """

    def _resolve_paths(pastes_folder):
        # Normalise p_path against the pastes folder, keeping the
        # relative, absolute and '//'-cleaned forms.
        # (Deduplicated: this logic previously appeared twice, once per
        # config-loading strategy.)
        if pastes_folder not in p_path:
            self.p_rel_path = p_path
            self.p_path = os.path.join(pastes_folder, p_path)
        else:
            self.p_path = p_path
            self.p_rel_path = p_path.replace(pastes_folder + '/', '', 1)
        self.t_clean_path = p_path.replace('//', '/', 1)

    try:
        config_loader = ConfigLoader.ConfigLoader()
        self.PASTES_FOLDER = os.path.join(
            os.environ['AIL_HOME'],
            config_loader.get_config_str("Directories", "pastes"))
        _resolve_paths(self.PASTES_FOLDER)
    # FIX: was a bare 'except:', which also swallowed SystemExit and
    # KeyboardInterrupt.
    except Exception:
        # Fallback: read packages/config.cfg directly.
        configfile = os.path.join(os.environ['AIL_BIN'], 'packages/config.cfg')
        if not os.path.exists(configfile):
            # FIX: the original message was built with backslash line
            # continuations, embedding runs of indentation spaces in the text.
            raise Exception('Unable to find the configuration file. '
                            'Did you set environment variables? '
                            'Or activate the virtualenv.')
        cfg = configparser.ConfigParser()
        cfg.read(configfile)
        self.PASTES_FOLDER = os.path.join(
            os.environ['AIL_HOME'], cfg.get("Directories", "pastes"))
        _resolve_paths(self.PASTES_FOLDER)
    #---
    # From here on, only the cleaned path is used.
    self.p_path = self.t_clean_path
    self.p_name = os.path.basename(self.p_path)
    # Size in KiB, rounded to 2 decimals.
    self.p_size = round(os.path.getsize(self.p_path) / 1024.0, 2)
    #self.p_mime = magic.from_buffer("test", mime=True)
    #self.p_mime = magic.from_buffer(self.get_p_content(), mime=True)

    # Assuming that the paste will always be in a day folder which is itself
    # in a month folder which is itself in a year folder:
    # /year/month/day/paste.gz
    var = self.p_path.split('/')
    self.p_date = Date(var[-4], var[-3], var[-2])
    self.p_date_path = os.path.join(var[-4], var[-3], var[-2], self.p_name)
    self.p_encoding = None
    self.p_hash_kind = {}
    self.p_hash = {}
    self.p_langage = None
    self.p_nb_lines = None
    self.p_max_length_line = None
    self.array_line_above_threshold = None
    self.p_duplicate = None
    self.p_tags = None

    #
    # PROVIDER - SEARCH
    # -------------------
    # Sources harvested via TwitterMon are prefixed with '[TM]-'.
    tmpTweetSource = var[-5]
    cleanTweetSource = tmpTweetSource
    publisherOr = "UND"
    # Idiom fix: startswith() instead of find(...) == 0 (same behaviour).
    if str(tmpTweetSource).startswith("[TM]-"):
        cleanTweetSource = tmpTweetSource[5:]
        publisherOr = "TwitterMon"
    self.t_TweetSource = cleanTweetSource
    self.publisherOr = publisherOr

    #
    # TWEET DATA
    # -------------------
    # Raw tweet payload and parsed fields; presumably populated later by
    # the parsing pass — confirm against callers.
    self.t_TweetRaw = None
    self.t_TweetUser = None
    self.t_TweetDate = None
    self.t_TweetRetweets = 0
    self.t_TweetFavorites = 0
    self.t_TweetText = None
    self.t_TweetTextTransEN = None
    self.t_TweetGeo = None
    self.t_TweetMentions = None
    self.t_TweetHashTags = None
    self.t_TweetId = None
    self.t_TweetPermalink = None
    self.t_TweetLang = None
    self.t_TweetLangGuess = None
    self.t_TweetEmojis = None
    # Sentiment scores (pos/neg/neu/compound split).
    self.t_SentPos = None
    self.t_SentNeg = None
    self.t_SentNeu = None
    self.t_SentCompound = None
    self.t_SentCompoundNeg = None
    self.t_SentCompoundPos = None
        })
        #print(f"data:{json_data}\n\n")
        # Server-sent-events frame: "data:<payload>\n\n", one per second.
        yield "data:" + json_data + "\n\n"
        time.sleep(1)
    return Response(generate_sensor_data(), mimetype='text/event-stream')

#-----------------------------------------------------------------------------
class LoginForm(FlaskForm):
    """ Form to handle the user login."""
    username = StringField('Username', validators=[DataRequired()])
    password = PasswordField('Password', validators=[DataRequired()])
    remember_me = BooleanField('Remember Me')
    submit = SubmitField('Sign In')

#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
if __name__ == '__main__':
    # Load the user/password store and attach the XAKA sensor serial reader,
    # then start the Flask application in the foreground.
    gv.iUserMgr = loader.ConfigLoader(gv.USER_PWD, mode='r', filterChars=('#', '', '\n'))
    gv.iCommReader = xcomm.XAKAsensorComm(gv.DE_COMM, simuMd=gv.gSimulationMode)
    gv.iCommReader.setSerialComm(searchFlag=True)
    print('Start the web server.')
    application.run(debug=False, threaded=True)
    # application.run(host= "0.0.0.0", debug=False, threaded=True) # use 0.0.0.0 if we want access the web from other computer.
    print('Finished')
def Include(filename):
    """Evaluate *filename* as configuration inside KickStart's config module."""
    ConfigLoader.loadConfigFile(filename, KickStart.CONFIG_MODULE)
#!/usr/bin/python import os import re import sqlite3 as lite import subprocess import time import ConfigLoader import dbaccess config = ConfigLoader.getConfig() library_root = config['LibraryRoots'][0] movie_filetypes = config['VideoFileExtensions'] movie_minsize = config['MinMovieSize'] max_search_depth = config['MaxSearchDepth'] con = dbaccess.connect() def dropLibrary(): cur = con.cursor() cur.execute("Drop table if exists library") con.commit() def createLibrary(): cur = con.cursor() cur.execute("CREATE TABLE IF NOT EXISTS library (id INTEGER PRIMARY KEY, path TEXT , basename, size integer, modified integer, added integer,fff text)") cur.execute(" create unique index path on library (path)")
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111, USA. # # ######################################################################## import ConfigLoader CONFIG_MODULE = 'SkunkWeb.Configuration' CONFIG_STRING = """ from SkunkWeb.ConfigAdditives import Location, Host, Port, Scope, IP, UNIXPath from SkunkWeb.constants import * """ #preload the config namespace stuff ConfigLoader.loadConfigString(CONFIG_STRING, "<initconfig>", CONFIG_MODULE) ######################################################################## # $Log: KickStart.py,v $ # Revision 1.1.1.1.2.1 2001/10/16 03:27:15 smulloni # merged HEAD (basically 3.1.1) into dev3_2 # # Revision 1.2 2001/10/02 02:35:34 smulloni # support for scoping on unix socket path; very serious scope bug fixed. # # Revision 1.1.1.1 2001/08/05 14:59:37 drew_csillag # take 2 of import # # # Revision 1.8 2001/07/09 20:38:40 drew # added licence comments
from Commands.HelpDocument import HelpDocument
from Commands.ReloadConfig import ReloadConfig
from Commands.OpenConfig import OpenConfig

# Initialise the core objects (shared state plus print/input formatters).
mainWareHouse = WareHouse.wareHouse()
mainPrintControler = PrintSheep.PrintFormat()
mainInputControler = InputSheep.InputFormat()

# Initialise the command plugins.
HelpDocumentPlugin = HelpDocument()
ReloadConfigPlugin = ReloadConfig()
OpenConfigPlugin = OpenConfig()

# Read the configuration file into the warehouse.
ConfigLoader.LoadConfig(mainWareHouse)

# Remember the default working directory.
mainWareHouse.defaultWorkDir = os.getcwd()

# Announce the language in use.
if mainWareHouse.globalSittings["userLanguage"] == "En":
    mainWareHouse.userUsingLanguage = "En"
    print(" * $ User now use English(%s)" % mainWareHouse.globalSittings["userLanguage"])
elif mainWareHouse.globalSittings["userLanguage"] == "Ch_Sp":
    mainWareHouse.userUsingLanguage = "Ch_Sp"
    print(" * $ 用户现在使用的是简体中文(%s)" % mainWareHouse.globalSittings["userLanguage"])
def render_GET(self, request):
    """Serve the '/do' JSON API; otherwise fall back to static file serving.

    Python 2 / Twisted handler: returns the response body as a string.
    """
    self.numberRequests += 1
    request.setHeader("content-type", "text/plain")
    request.setHeader("Access-Control-Allow-Origin", "*")
    args = request.args
    print request.path
    # Anything that is not the '/do' API is treated as a relative file path.
    filepath = re.sub(r'^/', '', request.path)
    print filepath
    if request.path == '/do':
        if 'action' in args:
            action = args['action'][0]
            print action
            if action == 'getLibrary':
                # Dump the whole library table as JSON.
                con = dbaccess.connect()
                con.row_factory = lite.Row
                cur = con.cursor()
                cur.execute("select * from library")
                rows = map(dbaccess.dict_from_row, cur.fetchall())
                return json.dumps(rows)
            if action == 'openFile':
                fileId = args['fileId'][0]
                con = dbaccess.connect()
                cur = con.cursor()
                cur.execute("select path from library where id = ?", [fileId])
                path = cur.fetchone()[0];
                # Open the video with the configured external application
                # (macOS 'open -a <app> <path>').
                result = subprocess.check_output(
                    ['open', '-a', ConfigLoader.getConfig()['openVideosWith'], path])
                return 'opened'
            if action == 'getConfigSchemaJSON':
                return ConfigLoader.getConfigSchemaJSON()
            if action == 'saveConfig':
                jscfg = args['newConfigJSON'][0]
                print jscfg
                config = json.loads(jscfg)
                ConfigLoader.saveConfig(config)
                return 'saved'
            if action == 'getFilesInPath':
                # Directory listing for the config UI's file picker.
                path = args['path'][0]
                if os.path.isdir(path):
                    files = os.listdir(path)
                    fdata = []
                    for fpath in files:
                        # Skip hidden entries.
                        if fpath.startswith('.'):
                            continue
                        thispath = os.path.join(path, fpath)
                        type = 'file'
                        if pathsplit.isapp(thispath):
                            type = 'app'
                        elif os.path.isdir(thispath):
                            type = 'dir'
                        readable = os.access(thispath, os.R_OK)
                        fdata.append({
                            'name': fpath,
                            'type': type,
                            'readable': readable
                        })
                    return json.dumps(fdata)
                else:
                    return 'not a directory'
            if action == 'splitPath':
                path = args['path'][0]
                return json.dumps(pathsplit.os_path_split_asunder(path))
            else:
                return 'WTF'
        else:
            return '{}'
    elif os.path.exists(filepath):
        # Static file serving with If-Modified-Since support.
        request.setHeader("content-type", mimetypes.guess_type(filepath)[0])
        if request.setLastModified(os.path.getmtime(filepath)) == http.CACHED:
            return ''
        else:
            f = file(filepath)
            return f.read()
    else:
        request.setResponseCode(404)
        return 'Unknown command'
    # NOTE(review): unreachable — every branch above returns.
    return "I am request #" + str(self.numberRequests) + "\n" + request.uri
from urllib.parse import urlparse
from pyfaup.faup import Faup

# interact with splash_crawler API
import requests
requests.packages.urllib3.disable_warnings(
    requests.packages.urllib3.exceptions.InsecureRequestWarning)

sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'core/'))
import screen

# Redis connections used by the crawler.
config_loader = ConfigLoader.ConfigLoader()
r_serv_metadata = config_loader.get_redis_conn("ARDB_Metadata")
r_serv_onion = config_loader.get_redis_conn("ARDB_Onion")
r_cache = config_loader.get_redis_conn("Redis_Cache")
config_loader = None

# load crawler config
config_loader = ConfigLoader.ConfigLoader(config_file='crawlers.cfg')
#splash_manager_url = config_loader.get_config_str('Splash_Manager', 'splash_url')
#splash_api_key = config_loader.get_config_str('Splash_Manager', 'api_key')
config_loader = None

# Shared URL parser instance.
faup = Faup()

def generate_uuid():
# $Id$ # Time-stamp: <01/05/03 18:32:41 smulloni> ######################################################################## import ConfigLoader CONFIG_MODULE='SkunkWeb.Configuration' CONFIG_STRING=""" from SkunkWeb.constants import * from SkunkWeb.ConfigAdditives import * """ #preload the config namespace stuff ConfigLoader.loadConfigString(CONFIG_STRING, "<initconfig>", CONFIG_MODULE) ######################################################################## # $Log: KickStart.py,v $ # Revision 1.4 2003/05/01 20:45:55 drew_csillag # Changed license text # # Revision 1.3 2002/03/30 20:05:27 smulloni # added Include directive for sw.conf; fixed IP bug (was being clobbered in sw.conf) # # Revision 1.2 2001/10/02 02:35:34 smulloni # support for scoping on unix socket path; very serious scope bug fixed. # # Revision 1.1.1.1 2001/08/05 14:59:37 drew_csillag
import re
import redis

from pyfaup.faup import Faup
from pubsublogger import publisher

from Helper import Process
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages'))
import Item
sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/'))
import ConfigLoader
import regex_helper

## LOAD CONFIG ##
config_loader = ConfigLoader.ConfigLoader()
server_cred = config_loader.get_redis_conn("ARDB_TermCred")
server_statistics = config_loader.get_redis_conn("ARDB_Statistics")

# Credential-detection thresholds from the [Credential] config section.
minimumLengthThreshold = config_loader.get_config_int(
    "Credential", "minimumLengthThreshold")
criticalNumberToAlert = config_loader.get_config_int("Credential",
                                                     "criticalNumberToAlert")
minTopPassList = config_loader.get_config_int("Credential", "minTopPassList")
config_loader = None
## -- ##

import signal

# Hard cap (seconds) on regex execution per item.
max_execution_time = 30
import datetime
import os, glob, time
import cv2
import threading
import scheduler as sched
import APIConsumer as api
import CloudStorageFunctions as cloudStorage
import manageFiles as fileManager
import ConfigLoader as cfgLoader
import Logger as log

# INI configuration shared by the functions in this module.
configFile = cfgLoader.getINIConfiguration()

def videoUploader(saveDirectory, videoData, videoResponse):
    """Upload a finished recording to cloud storage and mark it ready.

    On any failure, notifies the admin by email instead of raising.
    """
    try:
        cloudStorage.upload_blob('streamed-videos', saveDirectory, videoData['filename'])
        print("actualizando data.")
        api.updateVideoStatusReady(videoResponse['id'], videoData['filename'], videoData['videoNumber'], videoData['store'], videoData['shoppingCenter'])
    # NOTE(review): bare except — hides ALL failures (including programming
    # errors); consider narrowing to the cloud/API exception types.
    except:
        print("Error de conexion.")
        print("Contacte con el admin F")
        log.sendEmailCloud(videoData['videoNumber'], videoData['store'], videoData['shoppingCenter'], videoData['cameraIP'], videoData['cameraID'])
    else:
        print("Borrando video...")