def post(self, project, file):
    '''upload a text/yaml configuration file to a project'''
    # Guard the reserved "conf" pseudo-project, consistent with the other
    # file endpoints in this API which all compare against "conf".
    # NOTE(review): the original tested `project != "project"`, which both
    # blocked a project literally named "project" and allowed writes into
    # the reserved "conf" area — assumed typo, confirm against callers.
    if (project != "conf"):
        args = parsers.yaml_parser.parse_args()
        filecontent = args['yaml']
        if (allowed_conf_file(file)):
            # Validate that the payload parses as YAML before touching disk.
            try:
                config.ordered_load(filecontent)
            except:
                api.abort(400, {file: {"saved": "ko - " + err()}})
            try:
                pfile = os.path.join(
                    config.conf["global"]["projects"][project]["path"], file)
                with open(pfile, 'w') as f:
                    f.write(filecontent.encode("utf-8", 'ignore'))
                response = {file: {"saved": "ok"}}
                # Re-read the configuration so the validator output below
                # reflects the file just written.
                config.read_conf()
                response[file]["yaml_validator"] = config.conf[
                    "global"]["projects"][project]["files"][file]
                return response
            except:
                api.abort(400, {file: {"saved": "ko - " + err()}})
        else:
            # Extension not in the configured whitelist.
            api.abort(403)
    else:
        api.abort(403)
def put(self, project):
    '''create a project

    Refuses the reserved name "conf" and duplicates; otherwise builds the
    project skeleton on disk and seeds the creator as project admin.'''
    if (project == "conf"):
        api.abort(403)
    elif project in list(config.conf["global"]["projects"].keys()):
        api.abort(400, 'project "{}" already exists'.format(project))
    else:
        try:
            dirname = os.path.join(
                config.conf["global"]["paths"]["projects"], project)
            creds_file = os.path.join(dirname, 'creds.yml')
            # Project skeleton: root plus recipes/ and datasets/ folders.
            os.mkdir(dirname)
            for sub in ('recipes', 'datasets'):
                os.mkdir(os.path.join(dirname, sub))
            # Seed credentials: the current user becomes admin.
            groups = {
                'groups': {
                    str(project): {
                        'projects': {
                            str(project): {
                                'admin': str(current_user.name)
                            }
                        }
                    }
                }
            }
            with open(creds_file, 'w') as f:
                yaml.dump(groups, f)
            config.read_conf()
            return {"message": "{} successfully created".format(project)}
        except:
            api.abort(400, err())
def get(self):
    '''get configuration

    Reloads the configuration from disk and returns the "global" section;
    any failure yields an error payload instead of an HTTP error.'''
    try:
        config.read_conf()
        conf_global = config.conf["global"]
        return conf_global
    except:
        return {"error": "problem while reading conf"}
def post(self):
    '''login api with user and hash'''
    config.read_conf()
    # Short-circuit: when authentication is disabled, log in as "admin".
    try:
        if (config.conf["global"]["api"]["no_auth"] == True):
            login_user(User("admin"))
            return {"user": str(current_user.name)}
    except:
        pass
    # Pull credentials out of the JSON body.
    try:
        payload = request.get_json(force=True)
        user = payload['user']
        password = payload['password']
    except:
        api.abort(403, {"error": err()})
    # Login and validate the user.
    # user should be an instance of your `User` class
    try:
        candidate = User(user)
        if not candidate.check_password(password):
            api.abort(403)
        login_user(candidate, remember=True)
        return {"user": str(current_user.name)}
    except:
        api.abort(403)
def get(self, project):
    '''get configuration files of a project

    404s when the project is not configured.'''
    config.read_conf()
    projects = config.conf["global"]["projects"]
    if project not in projects:
        api.abort(404)
    return projects[project]
def __init__(self, name=None, social_id=None, email=None, provider=None):
    '''Build a User from the configured "users" section.

    Two paths:
    - no social_id: plain lookup by name, retrying once after a config
      reload (presumably to pick up a freshly-added user — TODO confirm);
    - social_id given (OAuth-style login): if the user is unknown, it is
      created in-memory and persisted into the provider's credentials file.
    '''
    if social_id == None:
        try:
            Configured.__init__(self, "users", name)
        except:
            # Lookup failed: reload configuration and retry once.
            config.read_conf()
            Configured.__init__(self, "users", name)
    else:
        try:
            Configured.__init__(self, "users", str(name))
        except:
            # Unknown social user: register it in the in-memory conf ...
            u = {
                'social_id': str(social_id),
                'provider': str(provider),
                'password': None
            }
            if email != None:
                u['email'] = str(email)
            config.conf['users'][str(name)] = u
            Configured.__init__(self, "users", str(name))
            # ... then persist every user of this provider into
            # security/<provider>.yml.
            creds_file = os.path.join(
                config.conf["global"]["paths"]["conf"],
                'security', provider + '.yml')
            provider_users = {
                'users': {
                    user: config.conf["users"][user]
                    for user in config.conf["users"]
                    if (('provider' in list(config.conf["users"][user].keys())
                         ) and
                        (config.conf["users"][user]['provider'] == provider))
                }
            }
            with open(creds_file, 'w') as f:
                # Round-trip through JSON to strip OrderedDict/custom types
                # before YAML serialization.
                yaml.dump(yaml.safe_load(json.dumps(provider_users)), f)
    # Optional attributes: fall back to sensible defaults when the
    # configured entry does not carry them.
    try:
        self.display_name = self.conf["display_name"]
    except:
        self.display_name = self.name
    try:
        self.password = self.conf["password"]
    except:
        self.password = None
    try:
        self.email = self.conf["email"]
    except:
        self.email = email
    try:
        self.social_id = self.conf["social_id"]
    except:
        self.social_id = social_id
    try:
        self.provider = self.conf["provider"]
    except:
        self.provider = None
def get(self):
    '''get json of all configured datasets

    Only datasets whose project the current user may read are returned.'''
    config.read_conf()
    datasets = config.conf["datasets"]
    return {
        name: datasets[name]
        for name in datasets
        if check_rights(current_user, datasets[name]["project"], "read")
    }
def put(self, recipe, action):
    '''test, run or stop recipe

    ** action ** possible values are :
    - ** test ** : test recipe on sample data
    - ** run ** : run the recipe
    - ** stop ** : stop a running recipe (soft kill : it may take some time to really stop)
    '''
    config.read_conf()
    if (action == "test"):
        # Execute the recipe on sample data in a child process; results
        # come back through a shared manager dict.
        try:
            callback = config.manager.dict()
            r = Recipe(recipe)
            r.init(test=True, callback=callback)
            r.set_job(Process(target=thread_job, args=[r]))
            r.start_job()
            r.join_job()
            r.df = r.callback["df"]
            r.log = r.callback["log"]
            r.errors = r.callback["errors"]
        except:
            return {"data": [{"result": "failed"}],
                    "log": "Ooops: {}".format(err())}
        if isinstance(r.df, pd.DataFrame):
            df = r.df.fillna("")
            if (r.df.shape[0] == 0):
                return {"data": [{"result": "empty"}],
                        "log": r.callback["log"]}
            try:
                return jsonify({"data": df.T.to_dict().values(),
                                "log": r.callback["log"]})
            except:
                # Fallback: stringify cells that are not JSON-serializable.
                df = df.applymap(lambda x: unicode(x))
                return jsonify({"data": df.T.to_dict().values(),
                                "log": r.callback["log"]})
        else:
            return {"data": [{"result": "empty"}], "log": r.callback["log"]}
    elif (action == "run"):
        # run recipe (gives a job)
        try:
            if (recipe in list(config.jobs.keys())):
                status = config.jobs[recipe].job_status()
                if (status == "up"):
                    # Already running: don't start a second job.
                    return {"recipe": recipe, "status": status}
        except:
            api.abort(403)
        config.jobs[recipe] = Recipe(recipe)
        config.jobs[recipe].init()
        config.jobs[recipe].set_job(
            Process(target=thread_job, args=[config.jobs[recipe]]))
        config.jobs[recipe].start_job()
        return {"recipe": recipe, "status": "new job"}
    elif (action == "stop"):
        try:
            if (recipe in list(config.jobs.keys())):
                # BUGFIX: the original wrote Process(stop_job()), which
                # called stop_job synchronously and handed its *return
                # value* to Process as the `group` argument, then started
                # an empty process. Pass the method itself as `target`.
                thread = Process(target=config.jobs[recipe].stop_job)
                thread.start()
                return {"recipe": recipe, "status": "stopping"}
            # BUGFIX: the original fell through and returned None (empty
            # 200) for an unknown recipe.
            api.abort(404)
        except:
            api.abort(404)
    else:
        # BUGFIX: unsupported action used to return None; 404 explicitly.
        api.abort(404)
def get(self):
    '''get list of all configured users

    Admins see the full user table; everyone else only gets their own
    name plus the list of other user names.'''
    config.read_conf()
    if not check_rights(current_user, "$admin", "read"):
        return {
            "me": str(current_user.name),
            "others": list(config.conf["users"].keys())
        }
    return config.conf["users"]
def delete(self, project, file):
    '''delete a text/yaml configuration file from project

    The reserved "conf" pseudo-project cannot be modified; unknown files
    yield 404.'''
    # BUGFIX: the original had no else branches, silently returning None
    # (an empty 200) for project == "conf" or an unknown file.
    if (project == "conf"):
        api.abort(403)
    if (file not in config.conf["global"]["projects"][project]["files"]):
        api.abort(404)
    try:
        pfile = os.path.join(
            config.conf["global"]["projects"][project]["path"], file)
        os.remove(pfile)
        # Reload so the removed file disappears from the configuration.
        config.read_conf()
        return jsonify({"conf": project, "file": file, "status": "removed"})
    except:
        api.abort(403)
def get(self):
    '''get all configured elements

    Lists all configured elements of the backend, as described in the
    yaml files :
    - global configuration
    - projects :
      - datasets
      - recipes'''
    try:
        config.read_conf()
        full_conf = config.conf["global"]
        return full_conf
    except:
        return {"error": "problem while reading conf"}
def main():
    # Entry point of this (Python 2) socket server: binds a listening
    # socket, then accepts clients forever, spawning one thread per client.
    global thread_count
    global all_sockets
    global server_config
    server_config = config.read_conf()
    listen_ip = "127.0.0.1"
    listen_port = 1234
    number_connections = 2  # listen() backlog passed to open_conn
    server_connection = open_conn(listen_ip, listen_port, number_connections)
    while True:
        (clientsocket, address) = server_connection.accept()
        print "up"
        all_sockets.append(clientsocket)
        try:
            # Each client is served by cl_socket_thread with a unique name.
            thread.start_new_thread(cl_socket_thread, (
                "Thread-" + str(thread_count),
                clientsocket,
                address,
            ))
            thread_count += 1
        except:
            print "Failed to create thread"
    # NOTE(review): unreachable — the accept loop above never terminates.
    server_connection.close()
    return 0
def get(self, project, file):
    '''get a text/yaml configuration file from project

    Streams the raw file content as text/plain; every failure path
    (unknown project or file, unreadable file) maps to a 404.'''
    try:
        config.read_conf()
        known_files = config.conf["global"]["projects"][project]["files"]
        if file not in known_files:
            api.abort(404)
        path = os.path.join(
            config.conf["global"]["projects"][project]["path"], file)
        with open(path) as fh:
            return Response(fh.read(), mimetype="text/plain")
    except:
        api.abort(404)
def get(self, dataset):
    '''get json of a configured dataset

    Returns the dataset's configuration, enriched (best effort) with the
    resolved connector type; 404 when unknown, 500 on read failure.'''
    config.read_conf()
    if dataset not in config.conf["datasets"].keys():
        api.abort(404)
    try:
        response = dict(config.conf["datasets"][dataset])
        try:
            ds = Dataset(dataset)
            response["type"] = ds.connector.type
        except:
            # Connector resolution is optional — keep the bare config.
            pass
        return response
    except:
        api.abort(500)
def put(self, project):
    '''create a project

    Rejects the reserved name "conf" and existing projects; otherwise
    creates the on-disk skeleton and reloads the configuration.'''
    if (project == "conf"):
        api.abort(403)
    elif project in config.conf["global"]["projects"].keys():
        api.abort(400, 'project "{}" already exists'.format(project))
    else:
        try:
            dirname = os.path.join(
                config.conf["global"]["paths"]["projects"], project)
            # Skeleton: project root plus recipes/ and datasets/.
            os.mkdir(dirname)
            for sub in ('recipes', 'datasets'):
                os.mkdir(os.path.join(dirname, sub))
            config.read_conf()
            return {"message": "{} successfully created".format(project)}
        except:
            api.abort(400, err())
def login(self):
    # Assemble DB credentials: start from conf.conf's [database] section,
    # override with whatever the user typed into the UI fields, then
    # hand the merged dict to mysql_login.
    conf = read_conf("conf.conf", "database")
    if self.ui.login_field.text():
        #print "username -", self.ui.login_field.text()
        conf["username"] = str(self.ui.login_field.text())
    if self.ui.password_field.text():
        conf["password"] = str(self.ui.password_field.text())
    # NOTE(review): prints the credentials (incl. password) to stdout —
    # debug leftover, consider removing.
    print conf
    self.mysql_login(conf)
def delete(self, project):
    '''delete a project

    "conf" is protected (403); unknown projects 404. Deletion failures are
    reported in the response body rather than as HTTP errors.'''
    if (project == "conf"):
        api.abort(403)
    if project not in config.conf["global"]["projects"].keys():
        api.abort(404)
    response = {project: "not deleted"}
    try:
        dirname = os.path.join(
            config.conf["global"]["paths"]["projects"], project)
        shutil.rmtree(dirname)
        response[project] = "deleted"
    except:
        response[project] = "deletion failed - " + err()
    config.read_conf()
    # response["yaml_validator"]=config.conf["global"]["projects"][project]
    return response
def wrapped(*args, **kwargs):
    # Authorization wrapper around an API handler `f`: resolves which
    # project the call targets, then checks `right` for current_user.
    # Bypass everything when authentication is globally disabled.
    try:
        if config.conf["global"]["api"]["no_auth"] == True:
            return f(*args, **kwargs)
    except:
        pass
    # Resolve the target project: explicit override first, then the
    # route's `project` kwarg if present.
    if (override_project != None):
        project = override_project
    else:
        try:
            project = kwargs['project']
        except:
            project = None
    # Optional route kwargs used as fallbacks for project resolution.
    try:
        dataset = kwargs['dataset']
    except:
        dataset = None
    try:
        recipe = kwargs['recipe']
    except:
        recipe = None
    config.read_conf()
    if current_user is None:
        api.abort(401)
    # No explicit project: derive it from the dataset's (preferred) or
    # the recipe's configuration; give up with 401 otherwise.
    if project is None:
        if dataset is None:
            if recipe is None:
                api.abort(401)
            else:
                try:
                    project = config.conf["recipes"][recipe]["project"]
                except:
                    api.abort(401)
        else:
            try:
                project = config.conf["datasets"][dataset]["project"]
            except:
                api.abort(401)
    # Final rights check on the resolved project.
    if (check_rights(current_user, project, right) == False):
        api.abort(401)
    return f(*args, **kwargs)
def get(self):
    '''get all configured elements

    Lists all configured elements of the backend, as described in the
    yaml files :
    - global configuration
    - projects :
      - datasets
      - recipes

    Admins get everything; other users only see projects they can read.'''
    try:
        config.read_conf()
        if check_rights(current_user, "$admin", "read"):
            return config.conf["global"]
        projects = config.conf["global"]["projects"]
        visible = {
            name: projects[name]
            for name in projects
            if check_rights(current_user, name, "read")
        }
        return {"projects": visible}
    except:
        return {"error": err()}
def post(self, project):
    '''(KO) import a zipped project'''
    # BUGFIX: the original guarded on an undefined name `directory`,
    # raising NameError on every call; the guard is on the project name,
    # matching the other endpoints that protect the reserved "conf" area.
    if (project != "conf"):
        response = {"upload_status": {}}
        args = parsers.conf_parser.parse_args()
        for file in args['file']:
            if (allowed_conf_file(file.filename)):
                try:
                    file.save(os.path.join(
                        config.conf["global"]["paths"]["conf"][project],
                        secure_filename(file.filename)))
                    response["upload_status"][file.filename] = "ok"
                except:
                    response["upload_status"][file.filename] = err()
            else:
                response["upload_status"][
                    file.filename] = "extension not allowed"
        # Reload so the validator output reflects the uploaded files.
        config.read_conf()
        response["yaml_validator"] = config.conf[
            "global"]["projects"][project]
        return response
    else:
        api.abort(403)
def __init__(self, conf_file, daemon=False, log_file="mega_activity.log"):
    # Bootstrap: load configuration, wire the logger and the Mega client,
    # build the account (compte) list, then log each account in.
    print("Begin Initialisation ...")
    self.logger = Logger(log_file)
    self.conf_file = conf_file
    self.conf = config.read_conf(conf_file)
    self.mega = m.Mega(self.conf["mega"], self.logger)
    self.comptes = []
    for compte in self.conf["comptes"]:
        self.comptes.append(c.Compte(compte))
    # NOTE(review): the span below is corrupted in the source — a secret
    # was scrubbed and replaced by `******`, destroying the original
    # control flow between the master-password prompt and the daemon
    # branch (note the dead code after the bare `return`). Restore from
    # version control before editing.
    passwd = SHA256.new( getpass("Enter MasterPassword : "******"Initialisation Success")
    for compte in self.comptes:
        self.mega._do_default_(compte, compte.decrypt_passwd(passwd, True))
    return
    for compte in self.comptes:
        self.mega.login(compte, compte.decrypt_passwd(passwd, True))
        self.mega.logout()
    print("Initialisation Success")
def get(self):
    '''get all roles'''
    # Refresh the in-memory configuration before serving the roles table.
    config.read_conf()
    roles = config.conf["roles"]
    return roles
def put(self, dataset, action): '''action = validation : configure the frontend to point to this dataset''' import config config.init() config.read_conf() if (action == "validation"): if (not(dataset in config.conf["datasets"].keys())): return api.abort(404, {"dataset": dataset, "status": "dataset not found"}) if not("validation" in config.conf["datasets"][dataset].keys()): return api.abort(403, {"dataset": dataset, "status": "validation not allowed"}) if ((config.conf["datasets"][dataset]["validation"] == True) | (isinstance(config.conf["datasets"][dataset]["validation"], OrderedDict))): try: props = {} try: cfg = deepupdate(config.conf["global"]["validation"], config.conf[ "datasets"][dataset]["validation"]) except: cfg = config.conf["global"]["validation"] for conf in cfg.keys(): configfile = os.path.join(config.conf["global"]["paths"][ "validation"], secure_filename(conf + ".json")) dic = { "domain": config.conf["global"]["api"]["domain"], "es_proxy_path": config.conf["global"]["api"]["es_proxy_path"], "dataset": dataset } props[conf] = replace_dict(cfg[conf], dic) print conf print {"dataset": dataset, "status": "to validation", "props": props} return {"dataset": dataset, "status": "to validation", "props": props} except: return api.abort(500, {"dataset": dataset, "status": "error: " + err()}) else: return api.abort(403, {"dataset": dataset, "status": "validation not allowed"}) elif (action == "search"): if (not(dataset in config.conf["datasets"].keys())): return api.abort(404, {"dataset": dataset, "status": "dataset not found"}) if not("search" in config.conf["datasets"][dataset].keys()): return api.abort(403, {"dataset": dataset, "status": "search not allowed"}) if ((config.conf["datasets"][dataset]["search"] == True) | (isinstance(config.conf["datasets"][dataset]["search"], OrderedDict))): try: props = {} try: cfg = deepupdate(config.conf["global"]["search"], config.conf[ "datasets"][dataset]["search"]) except: cfg = config.conf["global"]["search"] for 
config in cfg.keys(): configfile = os.path.join(config.conf["global"]["paths"][ "search"], secure_filename(config + ".json")) dic = { "domain": config.conf["global"]["api"]["domain"], "es_proxy_path": config.conf["global"]["api"]["es_proxy_path"], "dataset": dataset } props[config] = replace_dict(cfg[config], dic) # with open(configfile, 'w') as outfile: # json.dump(props[config],outfile,indent=2) return {"dataset": dataset, "status": "to search", "props": props} except: return api.abort(500, {"dataset": dataset, "status": "error: " + err()}) else: return api.abort(403, {"dataset": dataset, "status": "search not allowed"}) else: api.abort(404)
def get(self):
    '''get json of all configured datasets'''
    # Reload so the listing reflects any on-disk changes.
    config.read_conf()
    datasets = config.conf["datasets"]
    return datasets
def get(self):
    '''get json of all configured connectors'''
    # Reload so the listing reflects any on-disk changes.
    config.read_conf()
    connectors = config.conf["connectors"]
    return connectors
def allowed_upload_file(filename=None):
    # A data file is accepted when its extension appears in the global
    # "data_extensions" whitelist (case-insensitive).
    return '.' in filename and \
        filename.rsplit('.', 1)[1].lower() in config.conf[
            "global"]["data_extensions"]


def allowed_conf_file(filename=None):
    # A configuration file is accepted when its extension appears in the
    # global "recipe_extensions" whitelist (case-insensitive).
    return '.' in filename and \
        filename.rsplit('.', 1)[1].lower() in config.conf[
            "global"]["recipe_extensions"]


# Application bootstrap: load the configuration, build the Flask app,
# and wrap it for reverse-proxy deployments (X-Forwarded-* headers).
config.init()
config.read_conf()

app = Flask(__name__)
app.wsgi_app = ProxyFix(app.wsgi_app)
api = Api(app, version="0.1", title="matchID API",
          description="API for data matching developpement")
app.config['APPLICATION_ROOT'] = config.conf["global"]["api"]["prefix"]


# NOTE(review): the definition below is truncated in this view — its
# docstring is cut mid-sentence and never closed.
@api.route('/conf/', endpoint='conf')
class Conf(Resource):
    def get(self):
        '''get all configured elements Lists all configured elements of the backend, as described in the yaml files : - global configuration
def get(self):
    '''get all groups'''
    # Refresh the in-memory configuration before serving the groups table.
    config.read_conf()
    groups = config.conf["groups"]
    return groups
from sagemaker.processing import ScriptProcessor
from sagemaker.processing import ProcessingInput, ProcessingOutput
from sagemaker.workflow.steps import ProcessingStep, TrainingStep
from sagemaker.workflow.properties import PropertyFile
from sagemaker.workflow.pipeline import Pipeline
from sagemaker.workflow.step_collections import RegisterModel
from sagemaker.tensorflow import TensorFlow
from sagemaker.inputs import TrainingInput
from sagemaker.model_metrics import MetricsSource, ModelMetrics
from sagemaker.workflow.conditions import ConditionGreaterThanOrEqualTo
from sagemaker.workflow.condition_step import ConditionStep, JsonGet

from config import read_conf

# Pipeline-wide settings (image URIs, instance types, IAM role) come from
# the project configuration file.
conf = read_conf("../config_bitti.ini")

# data augmentation step
# the data set that was collected is small, just around fifty images;
# furthermore, all the magazines are placed with the same top-down view
# with labels horizontally aligned. We are going to change that.
magazine_augmentor = ScriptProcessor(
    image_uri=str(conf.processing_turicreate_uri),
    command=["python3"],
    instance_type=conf.processing_instance_type,
    instance_count=conf.processing_instance_count,
    #env={"RotationAngle": "15"},  # TODO: add configurability
    base_job_name="script-magazine-augmentation",
    role=conf.role)

# NOTE(review): truncated in this view — the ProcessingStep arguments
# continue beyond this chunk.
step_data_augmentation = ProcessingStep(
def __init__(self):
    # Main window constructor: loads config (forcing the user to supply a
    # valid one), then builds the whole search/download GUI.
    # Load our configuration
    self.config = config.read_conf()
    self.lastdir = os.getcwd()
    self.dldr_instances = []
    self.results = []
    # Force User to input a valid config
    if not (self.config["temp_dir"] and self.config["rtmpdump_path"] and
            self.config["ffmpeg_path"]):
        conf_dlg = config.ConfDlg(self.config)
        if conf_dlg.ShowModal() == wx.ID_OK:
            self.config = conf_dlg.getconf()
            conf_dlg.Destroy()
        else:
            # User refused to configure: the app cannot run — bail out.
            conf_dlg.Destroy()
            fuckedup_dlg = wx.MessageDialog(
                None,
                _("hometape can not work without a valid configuration."),
                _("Exiting"),
                wx.OK | wx.ICON_ERROR
            )
            fuckedup_dlg.ShowModal()
            fuckedup_dlg.Destroy()
            sys.exit()
    # All the GUI stuff
    wx.Frame.__init__(self, None, title="hometape")
    # Menu bar: File/Quit, Edit/Preferences, Help/About.
    menubar = wx.MenuBar()
    m_file = wx.Menu()
    m_quit = wx.MenuItem(m_file, wx.ID_EXIT, _("&Exit"))
    m_file.AppendItem(m_quit)
    m_edit = wx.Menu()
    m_preferences = wx.MenuItem(m_edit, wx.ID_PREFERENCES, _("&Preferences"))
    m_edit.AppendItem(m_preferences)
    m_help = wx.Menu()
    m_info = wx.MenuItem(m_help, wx.ID_ABOUT, _("&About"))
    m_help.AppendItem(m_info)
    menubar.Append(m_file, _("&File"))
    menubar.Append(m_edit, _("&Edit"))
    menubar.Append(m_help, _("&Help"))
    self.SetMenuBar(menubar)
    self.mainpanel = wx.Panel(self, -1)
    vbox = wx.BoxSizer(wx.VERTICAL)
    vbox.Add((-1, 2))
    # Top row: search text box plus "search by" selector.
    hbox1 = wx.BoxSizer(wx.HORIZONTAL)
    grid = wx.FlexGridSizer(2, 2, 5, 5)
    search_label = wx.StaticText(self.mainpanel, label=_("Search:"))
    self.search_box = wx.TextCtrl(self.mainpanel, style=wx.TE_PROCESS_ENTER)
    search_by_label = wx.StaticText(self.mainpanel, label=_("Search by:"))
    search_by_choices = [_("Artist + Title"), _("Artist"),
                         _("Artist (exactly)"), _("Title"),
                         _("Title (exact)")]
    # On Windows a read-only ComboBox is used instead of wx.Choice —
    # presumably for native look & feel; confirm before changing.
    if os.name == "nt":
        self.search_by_cb = wx.ComboBox(
            self.mainpanel,
            choices=search_by_choices,
            value=search_by_choices[0],
            style=wx.CB_READONLY | wx.CB_DROPDOWN,
        )
    else:
        self.search_by_cb = wx.Choice(self.mainpanel,
                                      choices=search_by_choices)
    grid.AddMany(
        [
            (search_label, 0, wx.ALIGN_CENTER_VERTICAL),
            (self.search_box, 1, wx.EXPAND),
            (search_by_label, 0, wx.ALIGN_CENTER_VERTICAL),
            (self.search_by_cb, 1, wx.EXPAND),
        ]
    )
    grid.AddGrowableCol(1, 1)
    hbox1.Add(grid, 1, wx.EXPAND, 0)
    search_btn = wx.Button(self.mainpanel, id=wx.ID_FIND, label=_("Find"))
    hbox1.Add(search_btn, 0, wx.EXPAND | wx.LEFT, 5)
    vbox.Add(hbox1, 0, wx.LEFT | wx.RIGHT | wx.BOTTOM | wx.EXPAND, 2)
    hline = wx.StaticLine(self.mainpanel, style=wx.LI_HORIZONTAL)
    vbox.Add(hline, 0, wx.ALL | wx.EXPAND, 5)
    # Middle: search results list.
    results_cap = wx.StaticText(self.mainpanel, label=_("Search results:"))
    vbox.Add(results_cap, 0, wx.ALL | wx.EXPAND, 2)
    self.result_list = wx.ListBox(self.mainpanel, style=wx.LB_SINGLE)
    self.result_list.SetMinSize((200, 200))
    vbox.Add(self.result_list, 1, wx.LEFT | wx.RIGHT | wx.EXPAND, 2)
    # Bottom row: download buttons, disabled until a result is selected.
    hbox2 = wx.BoxSizer(wx.HORIZONTAL)
    self.dl_flv = wx.Button(self.mainpanel, label=_("Download FLV Video"))
    self.dl_mp3 = wx.Button(self.mainpanel, label=_("Download MP3 Audio"))
    self.dl_flv.Disable()
    self.dl_mp3.Disable()
    hbox2.Add(self.dl_flv, 1, wx.RIGHT | wx.EXPAND, 5)
    hbox2.Add(self.dl_mp3, 1, wx.EXPAND, 0)
    vbox.Add(hbox2, 0, wx.EXPAND | wx.ALL, 2)
    self.mainpanel.SetSizer(vbox)
    vbox.Fit(self)
    self.SetMinSize(self.GetSize())
    # Restore the last window size when the config remembers one.
    try:
        self.SetSize(self.config["last_size"])
    except KeyError:
        pass
    self.SetIcon(wx.Icon(os.path.join(tools.progdir(), "wm_icon.png"),
                         wx.BITMAP_TYPE_PNG))
    # Events
    self.Bind(wx.EVT_MENU, self.on_info, id=m_info.GetId())
    self.Bind(wx.EVT_MENU, self.on_prefs, id=m_preferences.GetId())
    self.Bind(wx.EVT_MENU, self.on_close, id=m_quit.GetId())
    self.Bind(wx.EVT_CLOSE, self.on_close)
    self.Bind(wx.EVT_BUTTON, self.on_search, id=search_btn.GetId())
    self.Bind(wx.EVT_TEXT_ENTER, self.on_search, id=self.search_box.GetId())
    self.Bind(wx.EVT_LISTBOX, self.on_select, id=self.result_list.GetId())
    self.Bind(wx.EVT_BUTTON, self.on_dl_flv, id=self.dl_flv.GetId())
    self.Bind(wx.EVT_BUTTON, self.on_dl_mp3, id=self.dl_mp3.GetId())