def __init__(self, proxies=None):
    """
    Creates an instance of the ZAP api client.

    :Parameters:
       - `proxies`: dictionary mapping scheme ('http'/'https') to proxy
         URL.  Defaults to a local ZAP instance on port 8080.

    Note that all of the other classes in this directory are generated
    new ones will need to be manually added to this file

    NOTE: the original signature used a mutable dict as the default
    argument, which is shared between every instance created without an
    explicit `proxies`; replaced with the None-sentinel idiom (matching
    the newer client variant elsewhere in this file).
    """
    if proxies is None:
        proxies = {
            'http': 'http://127.0.0.1:8080',
            'https': 'http://127.0.0.1:8080',
        }
    self.__proxies = proxies
    self.acsrf = acsrf(self)
    self.ajaxSpider = ajaxSpider(self)
    self.ascan = ascan(self)
    self.authentication = authentication(self)
    self.autoupdate = autoupdate(self)
    self.brk = brk(self)
    self.context = context(self)
    self.core = core(self)
    self.forcedUser = forcedUser(self)
    self.httpsessions = httpSessions(self)
    self.importLogFiles = importLogFiles(self)
    self.params = params(self)
    self.pnh = pnh(self)
    self.pscan = pscan(self)
    self.script = script(self)
    self.search = search(self)
    self.selenium = selenium(self)
    self.sessionManagement = sessionManagement(self)
    self.spider = spider(self)
    self.users = users(self)
def list_users():
    """Render the user-list page.

    Builds one dict per record returned by ``users.users()`` (each record
    unpacks as ``(user_id, username, directory)``) and passes the
    collection to the ``list_users.html`` template.
    """
    listing = []
    for user_id, username, directory in users.users():
        listing.append({
            'user_id': user_id,
            'username': username,
            'directory': directory,
        })
    return render_template('list_users.html', title='Users', users=listing)
def __init__(self, log):
    """Wire up the server, DB pool, user registry and connection manager,
    sharing the given logger between them."""
    self.log = log
    # The epoll server must exist and be initialised before the
    # connection manager that drives it is constructed.
    srv = epoll_server()
    srv.init(log)
    self.server = srv
    self.dbpool = conn_pool("dbproxy", dbproxy, mongo_client_chk)
    self.users = users(self.log)
    self.conn = connections(self.server, log, self.dbpool, self.users)
def user_profile(username):
    """Return a one-line profile summary for *username*, or the string
    "User not found" when the user is unknown."""
    users_total = users()
    if username not in users_total:
        return "User not found"
    profile = users_total[username]
    # NOTE(review): the "Hobbies" key is capitalised unlike the other
    # keys — presumably that is how the data source spells it; confirm.
    return "Name: {0}, Age: {1}, Gender: {2}, Hobbies: {3}".format(
        profile["name"], profile["age"], profile["gender"],
        profile["Hobbies"])
def __init__(self):
    """Home widget: stacks the user-card view (visible at start) above
    the graph view (hidden at start) and cross-wires their signals."""
    super(home, self).__init__()
    self.graphs = graph()       # starts hidden
    self.userCards = users()    # starts visible
    # Vertical layout so both children resize dynamically.
    self.layout = QtGui.QVBoxLayout(self)
    self.layout.addWidget(self.userCards)
    self.layout.addWidget(self.graphs)
    # Wire one widget's actions to the other.
    self.userCards.callGraph.connect(self._graphCaller)  # open a new graph
    self.graphs.hideCall.connect(self._chooseUser)       # back to user choice
def init(self, c):
    """ Initializes each sub-plugin with the client """
    self.client = c
    self.channels = ["any"]
    self.plugins = []
    # Instantiate every registered plugin class, then hand it the client
    # (append-then-init preserved from the original order).
    for plugin_cls in self.plugin_classes:
        plugin = plugin_cls()
        self.plugins.append(plugin)
        plugin.init(c)
    self.users = users.users()
    self.users.init()
def main():
    """Show the main menu in a loop until the user chooses to quit.

    Option "1" runs ``login``, option "2" runs ``register``; both receive
    the shared user list.  Any other input (including "3") ends the loop,
    matching the original behaviour.

    Fixes over the original: the bare ``except`` — which silently
    swallowed *every* error, including real bugs inside ``login``/
    ``register`` — now catches only the expected input errors; and the
    accidental negative index (input "0" resolved to ``option[-1]`` and
    silently called ``register``) is rejected.
    """
    choice = -1
    userList = users.users()
    option = [login, register]
    while choice != "3":
        print(logo)
        choice = input()
        try:
            index = int(choice) - 1
            if index < 0:
                raise IndexError(choice)
            option[index](userList)
        except (ValueError, IndexError):
            # Not "1" or "2": treat as quit, like the original loop did.
            choice = '3'
    return None
def __init__(self, proxies=None, apikey=None):
    """
    Creates an instance of the ZAP api client.

    :Parameters:
       - `proxies`: dictionary mapping scheme to proxy URL; any falsy
         value falls back to a local ZAP instance on port 8080.
       - `apikey`: optional API key sent with requests.

    Note that all of the other classes in this directory are generated
    new ones will need to be manually added to this file
    """
    default_proxies = {
        'http': 'http://127.0.0.1:8080',
        'https': 'http://127.0.0.1:8080'
    }
    self.__proxies = proxies or default_proxies
    self.__apikey = apikey
    # One accessor object per generated API module.
    self.acsrf = acsrf(self)
    self.ajaxSpider = ajaxSpider(self)
    self.ascan = ascan(self)
    self.authentication = authentication(self)
    self.authorization = authorization(self)
    self.autoupdate = autoupdate(self)
    self.brk = brk(self)
    self.context = context(self)
    self.core = core(self)
    self.forcedUser = forcedUser(self)
    self.httpsessions = httpSessions(self)
    self.importLogFiles = importLogFiles(self)
    self.params = params(self)
    self.pnh = pnh(self)
    self.pscan = pscan(self)
    self.reveal = reveal(self)
    self.script = script(self)
    self.search = search(self)
    self.selenium = selenium(self)
    self.sessionManagement = sessionManagement(self)
    self.spider = spider(self)
    self.stats = stats(self)
    self.users = users(self)
    # not very nice, but prevents warnings when accessing the ZAP API via https
    requests.packages.urllib3.disable_warnings(InsecureRequestWarning)
def userslist():
    """User administration page.

    Level-2 users get the read-only listing; level-3 users get the admin
    view with user-level editing.  Anyone else is rejected with 403.
    """
    error = utils.errormessage(request.args.get('error'))
    # Only logged-in users above level 1 may see the list.
    if not (users.loggedin() and (users.userlevel() > 1)):
        abort(403)
    allusers = users.users()
    alluserlevels = users.userlevels()
    if users.userlevel() == 2:
        return render_template("users.html", users=allusers)
    if users.userlevel() == 3:
        return render_template("usersadmin.html",
                               users=allusers,
                               userlevels=alluserlevels,
                               error=error)
# Flask JSON API bootstrap: imports the collection modules, rebinds each
# module name to its data collection (e.g. ``posts = posts.posts()``),
# registers JSON 404/400 error handlers, and starts the GET
# /posts/<post_id> route.
# NOTE(review): this snippet is whitespace-collapsed and truncated partway
# through ``get_task`` (it ends at ``if len(post) == 0:``); left
# byte-for-byte intact — reformat once the complete source is available.
import posts import todos import photos import users import albums app = Flask(__name__) comments = comments.comments() posts = posts.posts() todos = todos.todos() photos = photos.photos() albums = albums.albums() users = users.users() @app.errorhandler(404) def not_found(error): return make_response(jsonify({'error': 'Not found'}), 404) @app.errorhandler(400) def bad_request(error): return make_response(jsonify({'error': 'Bad request'}), 400) #----------------------------------------------------------------------------------------------------------------------------- @app.route('/posts/<int:post_id>', methods=['GET']) def get_task(post_id): post = [post for post in posts if post['id'] == post_id] if len(post) == 0:
from flask import Flask, render_template, redirect
from users import users

app = Flask(__name__)

# Instantiate the user collection once at import time.  The original
# bound the instance to the name ``users`` itself, shadowing the imported
# class and making a second instantiation impossible in this module.
user_store = users()


@app.route('/')
def index():
    """Render the index page with the shared user collection."""
    return render_template('index.html', users=user_store)


if __name__ == '__main__':
    app.run(debug=True)
def setUp(self): """ Method to run before each test case """ self.new_user = users("denisnjue", "dn12345ue")
#!/usr/bin/env python3
"""Print each stored user with a formatted last-activity time."""
import users
from datetime import datetime

for record in users.users():
    # record[3] is fed to fromtimestamp, so it holds a unix timestamp.
    stamp = datetime.fromtimestamp(record[3]).strftime("%b %d %H:%M")
    print("{} {} {}".format(record[0], record[1], stamp))
def users(self, uid):
    """Fetch and return the user record for *uid* (coerced to ``str``)."""
    # Delegates to the module-level ``users`` callable, not this method.
    return users(str(uid))
# ccm_fast_export(releases, graphs): emits a git-fast-import stream for a
# Synergy/CCM release history.  It writes blobs and the initial commit for
# the first release (the one with no 'previous'), then walks the release
# graph breadth-first: for each release it builds the commit graph, finds
# cutting nodes, computes the ancestor matrix, topologically sorts the
# commits, prints normal/merge commits and a release merge commit plus an
# annotated tag, and finally resets refs/heads/master to the master tag's
# mark.  Output goes to stdout; a mirror of each chunk is logged.
# NOTE(review): Python 2 source (print statements, iteritems, has_key)
# that has been whitespace-collapsed onto the lines below; kept
# byte-for-byte because re-indenting from this form cannot be verified.
# A near-duplicate of this function appears later in this file (the only
# visible difference there is a generator expression for file_objects) —
# consider deduplicating.
def ccm_fast_export(releases, graphs): global acn_ancestors global users users = users() logger.basicConfig(filename='ccm_fast_export.log',level=logger.DEBUG) commit_lookup = {} # Get the initial release for k, v in releases.iteritems(): if k == 'delimiter': continue if k == 'ccm_types': continue if v['previous'] is None: release = k break logger.info("Starting at %s as initial release" % release) if 'created' not in releases[release]: initial_release_time = 0.0 # epoch for now since releases[release] has no 'created' key :( else: initial_release_time = time.mktime(releases[release]['created'].timetuple()) mark = 0 files = [] # Create the initial release # get all the file objects: file_objects = [ccm_cache.get_object(o) for o in releases[release]['objects']] project_obj = ccm_cache.get_object(releases[release]['fourpartname']) paths = project_obj.get_members() for o in file_objects: if o.get_type() != 'dir': object_mark, mark = create_blob(o, mark) for p in paths[o.get_object_name()]: files.append('M ' + releases['ccm_types']['permissions'][o.get_type()] + ' :'+str(object_mark) + ' ' + p) empty_dirs = releases[release]['empty_dirs'] logger.info("Empty dirs for release %s\n%s" %(release, empty_dirs)) mark = create_blob_for_empty_dir(get_mark(mark)) #file_list = create_file_list(objects, object_lookup, releases['ccm_types'], empty_dirs=empty_dirs, empty_dir_mark=mark) if empty_dirs: for d in empty_dirs: if mark: path = d + '/.gitignore' files.append('M 100644 :' + str(mark) + ' ' + path) mark = get_mark(mark) commit_info = ['reset refs/tags/' + release, 'commit refs/tags/' + release, 'mark :' + str(mark), 'author Nokia <*****@*****.**> ' + str(int(initial_release_time)) + " +0000", 'committer Nokia <*****@*****.**> ' + str(int(initial_release_time)) + " +0000", 'data 15', 'Initial commit', '\n'.join(files), ''] print '\n'.join(commit_info) logger.info("git-fast-import:\n%s" %('\n'.join(commit_info))) tag_msg = 'Release: %s' %release annotated_tag = ['tag %s' % 
release, 'from :%s' % str(mark), 'tagger Nokia <*****@*****.**> ' + str(int(initial_release_time)) + " +0000", 'data %s' % len(tag_msg), tag_msg] print '\n'.join(annotated_tag) commit_lookup[release] = mark # do the following releases (graphs) release_queue = deque(releases[release]['next']) while release_queue: release = release_queue.popleft() previous_release = releases[release]['previous'] logger.info("Next release: %s" % release) commit_graph = graphs[release]['commit'] commit_graph = fix_orphan_nodes(commit_graph, previous_release) commit_graph = ch.spaghettify_digraph(commit_graph, previous_release, release) #htg.commit_graph_to_image(commit_graph, releases[release], graphs[release]['task'], name=releases[release]['name']+'_after' ) # Find the cutting nodes logger.info("Finding the cutting nodes") undirected = graph() undirected.add_nodes(commit_graph.nodes()) [undirected.add_edge(edge) for edge in commit_graph.edges()] cutting_nodes = cut_nodes(undirected) del undirected # Create the reverse commit graph logger.info("Building the reverse commit graph") reverse_commit_graph = commit_graph.reverse() # Compute the accessibility matrix of the reverse commit graph logger.info("Compute the ancestors") ancestors = accessibility(reverse_commit_graph) del reverse_commit_graph logger.info("Ancestors of the release: %s" % str(ancestors[release])) # Clean up the ancestors matrix for k, v in ancestors.iteritems(): if k in v: v.remove(k) # Get the commits order commits = topological_sorting(commit_graph) # Fix the commits order list commits.remove(previous_release) commits.remove(release) last_cutting_node = None # Check if the release (Synergy project has changed name, if it has the # 'base' directory name needs to be renamed if releases.has_key('delimiter'): delim = releases['delimiter'] else: delim = '-' previous_name = previous_release.split(delim)[0] current_name = release.split(delim)[0] if current_name != previous_name: logger.info("Name changed: %s -> %s" 
%(previous_name, current_name)) from_mark = commit_lookup[previous_release] mark, commit = rename_toplevel_dir(previous_name, current_name, release, releases, mark, from_mark) print '\n'.join(commit) # adjust the commit lookup commit_lookup[previous_release] = mark for counter, commit in enumerate(commits): logger.info("Commit %i/%i" % (counter+1, len(commits))) acn_ancestors = [] if last_cutting_node is not None: acn_ancestors = ancestors[last_cutting_node] # Create the references lists. It lists the parents of the commit #reference = [commit_lookup[parent] for parent in ancestors[commit] if parent not in acn_ancestors] reference = [commit_lookup[parent] for parent in commit_graph.incidents(commit)] if len(reference) > 1: # Merge commit mark = create_merge_commit(commit, release, releases, mark, reference, graphs, set(ancestors[commit]) - set(acn_ancestors)) else: # Normal commit mark = create_commit(commit, release, releases, mark, reference, graphs) # Update the lookup table commit_lookup[commit] = mark # Update the last cutting edge if necessary if commit in cutting_nodes: last_cutting_node = commit if last_cutting_node is not None: acn_ancestors = ancestors[last_cutting_node] reference = [commit_lookup[parent] for parent in ancestors[release] if parent not in acn_ancestors] logger.info("Reference %s" %str([parent for parent in ancestors[release] if parent not in acn_ancestors])) if not reference: logger.info("Reference previous %s, mark: %d" % (releases[release]['previous'], commit_lookup[releases[release]['previous']])) reference = [commit_lookup[ releases[release]['previous'] ] ] mark, merge_commit = create_release_merge_commit(releases, release, get_mark(mark), reference, graphs, set(ancestors[release]) - set(acn_ancestors)) print '\n'.join(merge_commit) annotated_tag = create_annotated_tag(releases, release, mark) print '\n'.join(annotated_tag) commit_lookup[release] = mark release_queue.extend(releases[release]['next']) #release = 
releases[release]['next'] #release = None #reset to master master = get_master_tag() reset = ['reset refs/heads/master', 'from :' + str(commit_lookup[master])] logger.info("git-fast-import:\n%s" %('\n'.join(reset))) print '\n'.join(reset)
def list_users():
    """Render the user-list template from the ``users.users()`` rows."""
    fields = ('user_id', 'username', 'directory')
    # Each row unpacks as (user_id, username, directory).
    u = [dict(zip(fields, row)) for row in users.users()]
    return render_template('list_users.html', title='Users', users=u)
# Dash app bootstrap: imports the view modules V001..V010 (V007/V009/V011
# commented out), loads user 1's language preference, builds the frontend
# layout and backend controller, and begins constructing the dash.Dash app
# with the Material Icons stylesheet.
# NOTE(review): whitespace-collapsed and truncated mid-way through the
# ``dash.Dash(...)`` call; left byte-for-byte intact — reformat once the
# complete source is available.  ``user_id = 1`` is hard-coded here,
# presumably a placeholder for real authentication — confirm.
import V001 as view1 import V002 as view2 import V003 as view3 import V004 as view4 import V005 as view5 import V006 as view6 # import V007 as view7 import V008 as view8 # import V009 as view9 import V010 as view10 # import V011 as view11 user_id = 1 user = users.users(user_id=user_id) language = user.user_language_preference() layout = frontend.frontend(language=language) control = backend.backend(user=user, language=language) #materialize-v1.0.0/materialize/css/materialize.css' # external_stylesheets = ['https://fonts.googleapis.com/icon?family=Material+Icons', # 'starter-template/css/materialize.css', # 'starter-template/css/style.css'] external_stylesheets = [ 'https://fonts.googleapis.com/icon?family=Material+Icons' ] app = dash.Dash(__name__, external_stylesheets=external_stylesheets,
# Telegram note-keeping bot bootstrap: creates the shared ``users`` store
# and, in ``bob()``, builds the (Russian-language) help message and the
# inline keyboard with add/delete/navigate/save buttons.
# NOTE(review): whitespace-collapsed and truncated mid-way through the
# ``main_keyboard.add(...)`` call; left byte-for-byte intact (the Russian
# strings are runtime output and must not be altered) — reformat once the
# complete source is available.
import requests from tree import LoadTree from users import users import telebot from telebot import types import keys API_TOKEN = keys.API_TOKEN user = users() def bob(): hello_message=str('Привет,я - бот,сохраняю записи, заготвка для другого бота\n при '+ 'нажатии /start создается запись, в которой вы можете создавать другие записи(файлы)\n'+ '#По некоторым пунктам:\n *\'del запись\'-удаляет запись, в которой вы находитесь(не относится к root)\n'+ ' *\'корень\' - возрващает вас в root\n *кнопка с названием записи открывает её\n'+ ' *\'saved\' внизу показывает сохранена ли запись\n'+ ' *если вы сохраните файл и выйдете, то ваши записи остаются\n'+ '!!все записи, не подходящие под условия запроса от бота игнорируются и удаляются без объяснения,'+ ' тк объяснения нужно сделать,а у меня скоро егэ:)!!\n'+ ' P.S.: to_do_list: добавить: удаление всей записи,завершение сеанса, значки в описание и ...переделать все ') main_keyboard = types.InlineKeyboardMarkup() main_keyboard.add( types.InlineKeyboardButton(text='доб запись', callback_data='add_dir'), types.InlineKeyboardButton(text='доб текст', callback_data='add_text'), types.InlineKeyboardButton(text='del запись', callback_data='del_dir'), types.InlineKeyboardButton(text='корень', callback_data='start'), types.InlineKeyboardButton(text='назад', callback_data='back'), types.InlineKeyboardButton(text='сохр', callback_data='save'),
# Near-duplicate of the earlier ccm_fast_export in this file: emits a
# git-fast-import stream for a Synergy/CCM release history (initial
# release blobs + commit, then per-release commit graphs, merge commits,
# annotated tags, and a final reset of refs/heads/master).  The only
# visible difference from the other copy is that ``file_objects`` is a
# generator expression here rather than a list comprehension.
# NOTE(review): Python 2 source (print statements, iteritems, has_key),
# whitespace-collapsed; kept byte-for-byte because re-indenting from this
# form cannot be verified.  Consider deduplicating the two copies.
def ccm_fast_export(releases, graphs): global acn_ancestors global users users = users() logger.basicConfig(filename='ccm_fast_export.log',level=logger.DEBUG) commit_lookup = {} # Get the initial release for k, v in releases.iteritems(): if k == 'delimiter': continue if k == 'ccm_types': continue if v['previous'] is None: release = k break logger.info("Starting at %s as initial release" % release) if 'created' not in releases[release]: initial_release_time = 0.0 # epoch for now since releases[release] has no 'created' key :( else: initial_release_time = time.mktime(releases[release]['created'].timetuple()) mark = 0 files = [] # Create the initial release # get all the file objects: file_objects = (ccm_cache.get_object(o) for o in releases[release]['objects']) project_obj = ccm_cache.get_object(releases[release]['fourpartname']) paths = project_obj.get_members() for o in file_objects: if o.get_type() != 'dir': object_mark, mark = create_blob(o, mark) for p in paths[o.get_object_name()]: files.append('M ' + releases['ccm_types']['permissions'][o.get_type()] + ' :'+str(object_mark) + ' ' + p) empty_dirs = releases[release]['empty_dirs'] logger.info("Empty dirs for release %s\n%s" %(release, empty_dirs)) mark = create_blob_for_empty_dir(get_mark(mark)) #file_list = create_file_list(objects, object_lookup, releases['ccm_types'], empty_dirs=empty_dirs, empty_dir_mark=mark) if empty_dirs: for d in empty_dirs: if mark: path = d + '/.gitignore' files.append('M 100644 :' + str(mark) + ' ' + path) mark = get_mark(mark) commit_info = ['reset refs/tags/' + release, 'commit refs/tags/' + release, 'mark :' + str(mark), 'author Nokia <*****@*****.**> ' + str(int(initial_release_time)) + " +0000", 'committer Nokia <*****@*****.**> ' + str(int(initial_release_time)) + " +0000", 'data 15', 'Initial commit', '\n'.join(files), ''] print '\n'.join(commit_info) logger.info("git-fast-import:\n%s" %('\n'.join(commit_info))) tag_msg = 'Release: %s' %release annotated_tag = ['tag %s' % 
release, 'from :%s' % str(mark), 'tagger Nokia <*****@*****.**> ' + str(int(initial_release_time)) + " +0000", 'data %s' % len(tag_msg), tag_msg] print '\n'.join(annotated_tag) commit_lookup[release] = mark # do the following releases (graphs) release_queue = deque(releases[release]['next']) while release_queue: release = release_queue.popleft() previous_release = releases[release]['previous'] logger.info("Next release: %s" % release) commit_graph = graphs[release]['commit'] commit_graph = fix_orphan_nodes(commit_graph, previous_release) commit_graph = ch.spaghettify_digraph(commit_graph, previous_release, release) #htg.commit_graph_to_image(commit_graph, releases[release], graphs[release]['task'], name=releases[release]['name']+'_after' ) # Find the cutting nodes logger.info("Finding the cutting nodes") undirected = graph() undirected.add_nodes(commit_graph.nodes()) [undirected.add_edge(edge) for edge in commit_graph.edges()] cutting_nodes = cut_nodes(undirected) del undirected # Create the reverse commit graph logger.info("Building the reverse commit graph") reverse_commit_graph = commit_graph.reverse() # Compute the accessibility matrix of the reverse commit graph logger.info("Compute the ancestors") ancestors = accessibility(reverse_commit_graph) del reverse_commit_graph logger.info("Ancestors of the release: %s" % str(ancestors[release])) # Clean up the ancestors matrix for k, v in ancestors.iteritems(): if k in v: v.remove(k) # Get the commits order commits = topological_sorting(commit_graph) # Fix the commits order list commits.remove(previous_release) commits.remove(release) last_cutting_node = None # Check if the release (Synergy project has changed name, if it has the # 'base' directory name needs to be renamed if releases.has_key('delimiter'): delim = releases['delimiter'] else: delim = '-' previous_name = previous_release.split(delim)[0] current_name = release.split(delim)[0] if current_name != previous_name: logger.info("Name changed: %s -> %s" 
%(previous_name, current_name)) from_mark = commit_lookup[previous_release] mark, commit = rename_toplevel_dir(previous_name, current_name, release, releases, mark, from_mark) print '\n'.join(commit) # adjust the commit lookup commit_lookup[previous_release] = mark for counter, commit in enumerate(commits): logger.info("Commit %i/%i" % (counter+1, len(commits))) acn_ancestors = [] if last_cutting_node is not None: acn_ancestors = ancestors[last_cutting_node] # Create the references lists. It lists the parents of the commit #reference = [commit_lookup[parent] for parent in ancestors[commit] if parent not in acn_ancestors] reference = [commit_lookup[parent] for parent in commit_graph.incidents(commit)] if len(reference) > 1: # Merge commit mark = create_merge_commit(commit, release, releases, mark, reference, graphs, set(ancestors[commit]) - set(acn_ancestors)) else: # Normal commit mark = create_commit(commit, release, releases, mark, reference, graphs) # Update the lookup table commit_lookup[commit] = mark # Update the last cutting edge if necessary if commit in cutting_nodes: last_cutting_node = commit if last_cutting_node is not None: acn_ancestors = ancestors[last_cutting_node] reference = [commit_lookup[parent] for parent in ancestors[release] if parent not in acn_ancestors] logger.info("Reference %s" %str([parent for parent in ancestors[release] if parent not in acn_ancestors])) if not reference: logger.info("Reference previous %s, mark: %d" % (releases[release]['previous'], commit_lookup[releases[release]['previous']])) reference = [commit_lookup[ releases[release]['previous'] ] ] mark, merge_commit = create_release_merge_commit(releases, release, get_mark(mark), reference, graphs, set(ancestors[release]) - set(acn_ancestors)) print '\n'.join(merge_commit) annotated_tag = create_annotated_tag(releases, release, mark) print '\n'.join(annotated_tag) commit_lookup[release] = mark release_queue.extend(releases[release]['next']) #release = 
releases[release]['next'] #release = None #reset to master master = get_master_tag() reset = ['reset refs/heads/master', 'from :' + str(commit_lookup[master])] logger.info("git-fast-import:\n%s" %('\n'.join(reset))) print '\n'.join(reset)
class backend:
    """Builds the dashboard views (V001..V010) and maps a user's chart
    preferences onto rendered Dash components.

    Replaces two ~55-branch ``elif`` chains (``parser``) and a 10-branch
    chain (``get_preference_graph``) with ``getattr`` dispatch on the
    numbered ``graph_NN``/``option_NN``/``instanceN`` naming scheme the
    original code already relied on; behaviour for indices 1..55 is
    unchanged, and out-of-range indices still yield ``None``.
    """

    # Class-level defaults; overwritten per instance by __init__/load_views.
    user = users.users()
    instance1 = view1.V001()
    instance2 = view2.V002()
    instance3 = view3.V003()
    instance4 = view4.V004()
    instance5 = view5.V005()
    instance6 = view6.V006()
    # instance7 = view7.V007()
    instance8 = view8.V008()
    # instance9 = view9.V009()
    instance10 = view10.V010()
    # instance11 = view11.V011()

    # View indices with no implementation yet (V007/V009/V011 commented out).
    _IN_DEVELOPMENT = (7, 9, 11)

    def __init__(self, user, language):
        self.user = user
        self.load_views(language)

    def load_views(self, language="pt"):
        """(Re)create every view instance for *language* and generate its
        synthetic dataset (sizes preserved from the original)."""
        self.instance1 = view1.V001(type_result="dash", language=language)
        self.instance1.generate_dataset(number_students=20, number_assigns=10)
        self.instance2 = view2.V002(type_result="dash", language=language)
        self.instance2.generate_dataset(number_students=20)
        self.instance3 = view3.V003(type_result="dash", language=language)
        self.instance3.generate_dataset(number_students=20)
        self.instance4 = view4.V004(type_result="dash", language=language)
        self.instance4.generate_dataset(number_students=20)
        self.instance5 = view5.V005(type_result="dash", language=language)
        self.instance5.generate_dataset(number_students=60)
        self.instance6 = view6.V006(type_result="dash", language=language)
        self.instance6.generate_dataset(number_students=60)
        # view7.V007(type_result="dash", language=language)
        # self.instance7.generate_dataset()
        self.instance8 = view8.V008(type_result="dash", language=language)
        self.instance8.generate_dataset(number_students=35, number_weeks=7)
        # self.instance9 = view9.V009(type_result="dash", language=language)
        # self.instance9.generate_dataset()
        self.instance10 = view10.V010(type_result="dash", language=language)
        self.instance10.generate_dataset(number_students=35, number_video=10)
        # self.instance11 = view11.V011(type_result="dash", language=language)
        # self.instance11.generate_dataset()

    def get_preference_graph(self, index):
        """Return the ``html.Div`` for view *index*, assembled from the
        user's preferred chart ids; views still in development get a
        placeholder heading instead."""
        lst_id_charts = self.user.user_graph_preference(
            "v" + str(index))  #get id charts
        if index in self._IN_DEVELOPMENT:
            return html.H1(className="header center orange-text",
                           children=["In developing..."])
        instance = getattr(self, "instance" + str(index))
        view = self.build_view(instance, lst_id_charts, index)
        return html.Div(id="v" + str(index), children=view)

    def build_view(self, instance, lst_id_charts, v):
        """Build one container Div per chart id: its option widgets (ids
        ``v<v>@o<n>``, numbered across the whole view) followed by the
        graph itself, with Hr/Br separators between charts."""
        view = []
        number_opt = 0
        last = len(lst_id_charts) - 1
        for i, chart_id in enumerate(lst_id_charts):
            lst_chart = []
            for option_id in instance.get_option_index(chart_id):
                number_opt += 1
                lst_chart.append(
                    self.parser(instance, option_id, "option",
                                "v" + str(v) + "@o" + str(number_opt)))
            lst_chart.append(self.parser(instance, chart_id, "graph"))
            if i != last:
                lst_chart.append(html.Hr())
                lst_chart.append(html.Br())
                lst_chart.append(html.Br())
            view.append(
                html.Div(id="v" + str(v) + "@c" + str(i + 1),
                         children=lst_chart))
        return view

    def parser(self, instance, index, type, id_ref="", values=[]):
        """Dispatch to ``instance.graph_NN()`` or ``instance.option_NN(id_ref)``
        for *index* in 1..55; any other index or *type* returns ``None``,
        exactly as the original elif chains did.

        NOTE(review): the mutable default ``values=[]`` is never used in
        this method; it is kept only to preserve the public signature.
        """
        if not 1 <= index <= 55:
            return None
        if type == "graph":
            return getattr(instance, "graph_{:02d}".format(index))()
        if type == "option":
            return getattr(instance, "option_{:02d}".format(index))(id_ref)
        return None
# Startup data loading: restores users / categories / expenses collections
# from their pickle files when present, otherwise constructs fresh objects,
# then builds the lookup dicts the app works with.
# NOTE(review): whitespace-collapsed and truncated at the final ``else:``
# (the expenses fallback is cut off); left byte-for-byte intact — reformat
# once the complete source is available.  SECURITY: ``pickle.load`` on a
# file executes arbitrary code if the file is attacker-controlled — safe
# only if these .pickle files are strictly app-private.
from categories import categories from categories import category from expenses import expenses from expenses import expense from payments import payments from payments import payment import getpass import pickle as pick # Load the data into objects from the pickle serialization files for users. if os.path.isfile("users.pickle"): with open('users.pickle', 'rb') as handle: obj_users = pick.load(handle) else: obj_users = users() dict_Users = obj_users.create_userDict() # Load the data into objects from the pickle serialization files for categories. if os.path.isfile("categories.pickle"): with open('categories.pickle', 'rb') as handle: obj_categories = pick.load(handle) else: obj_categories = categories() dict_categories = obj_categories.create_catDict() # Load the data into objects from the pickle serialization files for expenses. if os.path.isfile("expenses.pickle"): with open('expenses.pickle', 'rb') as handle: obj_expenses = pick.load(handle) else:
"""Video Share app for sharing youtube videos""" import os from functools import wraps import uuid from flask import Flask, render_template, flash, redirect, url_for, session, request from wtforms import Form, StringField, BooleanField, TextAreaField, SelectField, PasswordField, validators from movies import movies from users import users from votes import votes from data import Movie, User, Vote app = Flask(__name__) all_movies = movies() all_users = users() all_votes = votes() new_movies = Movie() new_users = User() new_votes = Vote() #=========================================== #Initialising data to be used in the apllication #adding stored movies from the all_movies array for movie in all_movies: new_movies.set_movie(movie) #adding stored users from all_users array for user in all_users: new_users.register_user(user)