def newCategory():
    if request.method == 'POST':
        # Get name from the posted form
        name = request.form['name'].strip()
        # Verify that a name was entered
        if name:
            db = DBConnect()
            # Check if this category already exists
            if not db.categoryNameUsed(name):
                # Add the new category
                db.addCategory(name, 1)
                # Redirect to catalog page
                return redirect(url_for('catalog'))
            # If the category was not added, redirect the
            # user to the error page
            return redirect(
                url_for('error', error='This name was already used'))
        return redirect(url_for('error', error='You need to enter a name'))
    if request.method == 'GET':
        user = session.get('username')
        if user is None:
            return redirect(url_for('showLogin'))
        return render_template('newCategory.html')
def catalog():
    db = DBConnect()
    categories = db.getAllCategories()
    recentItems = db.getAllRecentItems()
    return render_template('categories.html',
                           categories=categories,
                           recentItems=recentItems)
def showCategory(category_name):
    db = DBConnect()
    categories = db.getAllCategories()
    items = db.getItemsByCategory(category_name)
    count = db.getItemCountByCategory(category_name)
    return render_template('showCategory.html',
                           categories=categories,
                           items=items,
                           categoryName=category_name,
                           numberOfItems=count)
def create_dbconnect(self):
    try:
        conn, cursor = DBConnect.open_connection("db\\" + self.import_db_file)
        print("SUCCESS: open database file")
    except Exception as e:
        print("ERROR:", e)
    else:
        # TODO: create db table
        graph = DBConnect.create_pathgraph(cursor)
        DBConnect.load_data(graph)
def editItem(category_name, item_name):
    db = DBConnect()
    item = db.getItemByName(item_name)
    if request.method == 'POST':
        name = request.form['name']
        description = request.form['description']
        categoryName = request.form['category']
        if name and description and categoryName:
            category = db.getCategoryByName(categoryName)
            db.editItem(item, name, description, category.id)
            return redirect(
                url_for('showItem',
                        category_name=category.name,
                        item_name=name))
    if request.method == 'GET':
        # Authorization check before serving the edit page
        userEmail = session.get('email')
        userID = db.getUserIDByEmail(userEmail)
        if userID != item.user.id:
            return redirect(
                url_for('error',
                        error='You are not authorized to edit this item'))
        categories = db.getAllCategories()
        return render_template('editItem.html',
                               selectedItem=item,
                               categories=categories)
def showItem(category_name, item_name):
    db = DBConnect()
    selectedItem = db.getItemByName(item_name)
    userEmail = session.get('email')
    userID = db.getUserIDByEmail(userEmail)
    if userID != selectedItem.user.id:
        return render_template('publicShowItem.html',
                               selectedItem=selectedItem,
                               categoryName=category_name)
    return render_template('showItem.html',
                           selectedItem=selectedItem,
                           categoryName=category_name)
def db_dl_prodinfo(dbinfo, orders_scenes):
    """
    Queries the database to get the associated product options

    This query is meant to go with downloads by product

    :param dbinfo: Database connection information
    :type dbinfo: dict
    :param orders_scenes: Order id's that have been downloaded from
     based on web logs and scene names
    :type orders_scenes: tuple
    :return: Dictionary of count values
    """
    # list() keeps this working on Python 3, where zip returns an iterator
    ids = list(zip(*orders_scenes))[0]
    ids = remove_duplicates(ids)

    sql = ('SELECT o.orderid, o.product_opts '
           'FROM ordering_order o '
           'WHERE o.orderid = ANY (%s)')

    with DBConnect(**dbinfo) as db:
        db.select(sql, (ids, ))
        results = {k: val for k, val in db.fetcharr}

    return results
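# A minimal usage sketch for db_dl_prodinfo above. The connection keys and
# the (orderid, scene_name) pairs are hypothetical placeholders, and
# DBConnect is assumed to accept psycopg2-style connection kwargs.
dbinfo = {'host': 'localhost', 'port': 5432, 'user': 'espa',
          'password': 'secret', 'dbname': 'espa'}  # hypothetical values
orders_scenes = (('user@example.com-101', 'LC80290302016148LGN00'),
                 ('user@example.com-101', 'LE70290302016140EDC00'))
opts_by_order = db_dl_prodinfo(dbinfo, orders_scenes)
for orderid, product_opts in opts_by_order.items():
    print(orderid, product_opts)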
def db_uniquestats(source, begin_date, end_date, sensors, dbinfo):
    """
    Queries the database to get the total number of unique users

    dates are given as ISO 8601 'YYYY-MM-DD'

    :param source: EE or ESPA
    :type source: str
    :param begin_date: Date to start the count on
    :type begin_date: str
    :param end_date: Date to stop the count on
    :type end_date: str
    :param sensors: which sensor types to process (['tm4', 'etm7', ...])
    :type sensors: tuple
    :param dbinfo: Database connection information
    :type dbinfo: dict
    :return: Count of unique users
    """
    sql = '''select count(distinct(email)) from ordering_order
             left join lateral jsonb_object_keys(product_opts) sensors on True
             where order_date::date >= %s
             and order_date::date <= %s
             and order_source = %s
             and sensors in %s
             and product_opts->sensors ? 'inputs';'''

    with DBConnect(**dbinfo) as db:
        db.select(sql, (begin_date, end_date, source, sensors))
        return db[0][0]
def query_sensor_count(dbinfo, start, end, sensors=None):
    """Select aggregate number of scenes sorted by sensor."""
    sql = '''
        select count(s.name) n_scenes,
               left(s.name, 4) sensor,
               extract(month from o.order_date) mm,
               extract(year from o.order_date) yy
        from ordering_scene s
        join ordering_order o on o.id=s.order_id
        where o.order_date::date >= '{0}'
          and o.order_date::date <= '{1}'
          and s.sensor_type = 'landsat'
        group by sensor, yy, mm'''

    with DBConnect(**dbinfo) as db:
        dat = sqlio.read_sql_query(sql.format(start, end), db.conn)

    d2 = dat.pivot_table(index='mm', values='n_scenes',
                         columns='sensor').fillna(0)

    def p_month_name(mmm):
        """Format integers as month names."""
        return datetime.date(2017, mmm, 1).strftime('%b')

    d2.index = d2.index.astype(int).map(p_month_name)
    if 'LO08' in d2.columns:
        d2['LC08'] += d2['LO08']
    return d2[sensors]
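# A hedged example call for query_sensor_count: the dates and sensor column
# labels are assumptions for illustration. The result is a pandas DataFrame
# (months as rows, sensors as columns), so it can be written straight to CSV.
dbinfo = {'host': 'localhost', 'port': 5432, 'user': 'espa',
          'password': 'secret', 'dbname': 'espa'}  # hypothetical values
monthly = query_sensor_count(dbinfo, '2017-01-01', '2017-12-31',
                             sensors=['LT05', 'LE07', 'LC08'])
monthly.to_csv('sensor_counts.csv')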
def catalogJSON():
    user = session.get('username')
    if user is None:
        return redirect(url_for('showLogin'))
    db = DBConnect()
    categories = db.getAllCategories()
    # Building the json object to be returned
    total = {'Category': []}
    for c in categories:
        items = db.getItemsByCategory(c.name)
        total['Category'].append({
            'id': c.id,
            'name': c.name,
            'items': [i.serialize for i in items]
        })
    return jsonify(total)
def db_prodinfo(dbinfo, begin_date, end_date, sensors):
    """
    Queries the database to build the ordered product counts

    dates are given as ISO 8601 'YYYY-MM-DD'

    :param dbinfo: Database connection information
    :type dbinfo: dict
    :param begin_date: Date to start the counts on
    :type begin_date: str
    :param end_date: Date to end the counts on
    :type end_date: str
    :param sensors: which sensors to process (['tm4', 'etm7', ...])
    :type sensors: tuple
    :return: Dictionary of count values
    """
    from functools import reduce  # reduce is not a builtin on Python 3

    sql = ('SELECT product_opts '
           'FROM ordering_order '
           'left join lateral jsonb_object_keys(product_opts) sensors on True '
           'WHERE order_date::date >= %s '
           'AND order_date::date <= %s '
           'AND sensors in %s '
           "and product_opts->sensors ? 'inputs' "
           'group by product_opts, id')

    init = {'total': 0}

    with DBConnect(**dbinfo) as db:
        db.select(sql, (begin_date, end_date, sensors))
        results = reduce(counts_prodopts,
                         [process_db_prodopts(r, sensors) for r in db.fetcharr],
                         init)

    results['title'] = 'What was Ordered'
    return results
def db_top10stats(begin_date, end_date, sensors, dbinfo):
    """
    Queries the database for the top 10 users by number of scenes ordered

    dates are given as ISO 8601 'YYYY-MM-DD'

    :param begin_date: Date to start the count on
    :type begin_date: str
    :param end_date: Date to stop the count on
    :type end_date: str
    :param sensors: which sensor types to process (['tm4', 'etm7', ...])
    :type sensors: tuple
    :param dbinfo: Database connection information
    :type dbinfo: dict
    :return: List of (email, scene count) rows, largest first
    """
    sql = '''select u.email,
                    coalesce(sum(jsonb_array_length(product_opts->sensors->'inputs')), 0) scenes
             from ordering_order o
             left join lateral jsonb_object_keys(product_opts) sensors on True
             join auth_user u on o.user_id = u.id
             where o.order_date::date >= %s
               and o.order_date::date <= %s
               and sensors in %s
               and product_opts->sensors ? 'inputs'
             group by u.email
             order by scenes desc
             limit 10'''

    with DBConnect(**dbinfo) as db:
        db.select(sql, (begin_date, end_date, sensors))
        return db[:]
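# Sketch of consuming db_top10stats: it returns up to ten (email, scenes)
# rows already sorted by scene count. The dates, sensors, and connection
# values are hypothetical.
dbinfo = {'host': 'localhost', 'port': 5432, 'user': 'espa',
          'password': 'secret', 'dbname': 'espa'}  # hypothetical values
for rank, (email, scenes) in enumerate(
        db_top10stats('2017-01-01', '2017-01-31', ('tm4', 'etm7'), dbinfo), 1):
    print('%2d. %s ordered %d scenes' % (rank, email, scenes))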
def newItem(category_name):
    if request.method == 'POST':
        # Strip off the extra spaces the user may have entered
        name = request.form['name'].strip()
        description = request.form['description']
        # Ensure we have the needed item info
        if name and description:
            db = DBConnect()
            isUsed = db.itemNameUsed(name)
            # Check if the item name has already been used somewhere else
            if not isUsed['used']:
                category = db.getCategoryByName(category_name)
                userID = db.getUserIDByEmail(session['email'])
                db.addItem(name, description, category.id, userID)
                return redirect(
                    url_for('showItem',
                            category_name=category_name,
                            item_name=name))
            return redirect(
                url_for('error',
                        error='This item name has already been used'))
        return redirect(
            url_for('error',
                    error='You need to enter both a name and description'))
    if request.method == 'GET':
        user = session.get('username')
        if user is None:
            return redirect(url_for('showLogin'))
        return render_template('newItem.html', categoryName=category_name)
def get_tables(self):
    '''returns all tables required in the from clause for this query.
    '''
    tables = self.get_queried_tables()
    # add the tables required to link the above tables together
    db = DBConnect.getInstance()
    exps = db.get_linking_expressions(tables)
    for exp in exps:
        tables += exp.get_tables()
    return list(set(tables))
def __init__(self, parent, properties=None, show_controls=True,
             size=(600, 600), loadData=True, **kwargs):
    wx.Frame.__init__(self, parent, -1, size=size,
                      title='Dimensionality Reduction Plot', **kwargs)
    self.SetName('Plot main')

    if properties is not None:
        global p
        p = properties
        if not p.is_initialized():
            logging.critical('Classifier requires a properties file. Exiting.')
            raise Exception('Classifier requires a properties file. Exiting.')

    global db
    db = DBConnect.getInstance()
    global classifier
    classifier = parent

    if loadData:
        # Define a progress dialog
        dlg = wx.ProgressDialog('Fetching cell data...', '0% Complete', 100,
                                classifier,
                                wx.PD_ELAPSED_TIME | wx.PD_ESTIMATED_TIME |
                                wx.PD_REMAINING_TIME | wx.PD_CAN_ABORT)

        def cb(frac):
            cont, skip = dlg.Update(int(frac * 100.),
                                    '%d%% Complete' % (frac * 100.))
            if not cont:  # cancel was pressed
                dlg.Destroy()
                raise StopCalculating()

        # Load the data for each object
        try:
            self.data, self.data_dic = self.load_obj_measurements(cb)
        except StopCalculating:
            self.PostMessage('User canceled updating training set.')
            return

        dlg.Destroy()
    else:
        self.data, self.data_dic = None, None

    self.features_dic = self.load_feature_names()
    self.class_masks = None
    self.class_names = None
    self.object_opacity = None

    figpanel = PlotNotebook(self)
    self.figure_scores = figpanel.add('Scores')
    self.figure_loadings = figpanel.add('Loadings')
    self.update_figures()

    sizer = wx.BoxSizer(wx.VERTICAL)
    sizer.Add(figpanel, 1, wx.EXPAND)
    configpanel = PlotControl(self, self.figure_scores, self.figure_loadings)
    sizer.Add(configpanel, 0, wx.EXPAND | wx.ALL, 5)
    self.SetSizer(sizer)
    self.Centre()
def query_scene_count(dbinfo, start, end, who=None):
    """Query count of scenes ordered per path/row.

    Args:
        dbinfo (dict): database connection info
        start (str): beginning of period range (e.g. '2018-04-01')
        end (str): end of period range (e.g. '2018-04-30')
        who (str): email address to filter on, or 'ALL' for everyone

    Returns:
        tuple: (np.array of path/row/alpha rows, min scene count,
                max scene count)
    """
    sql = '''
        select count(*) n_scenes,
               case when split_part(s.name, '_', 2) = ''
                    then right(left(s.name, 6), 3)
                    else left(split_part(s.name, '_', 3), 3)
               end as path,
               case when split_part(s.name, '_', 2) = ''
                    then right(left(s.name, 9), 3)
                    else right(split_part(s.name, '_', 3), 3)
               end as row
        from ordering_scene s
        left join ordering_order o on o.id=s.order_id
        where o.order_date::date >= '{0}'
          and o.order_date::date <= '{1}'
          and s.sensor_type = 'landsat'
          {2}
        group by path, row;
    '''
    # '==' rather than 'is': identity comparison against a string literal
    # is not reliable
    email_str = '' if who == 'ALL' else ("and o.email = '%s'" % who)

    with DBConnect(**dbinfo) as db:
        dat = sqlio.read_sql_query(sql.format(start, end, email_str), db.conn)

    dat['path'] = dat['path'].astype(int)
    dat['row'] = dat['row'].astype(int)
    dat['alpha'] = dat['n_scenes'].apply(
        lambda v: get_alpha(v, MAXALPHA, MINALPHA,
                            dat['n_scenes'].min(), dat['n_scenes'].max()))
    dat = dat.sort_values(by='alpha')
    return (dat[['path', 'row', 'alpha']].values,
            dat['n_scenes'].min(),
            dat['n_scenes'].max())
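# A sketch of unpacking query_scene_count's return value: an (N, 3) array of
# (path, row, alpha) plus the min/max scene counts for scaling a map legend.
# who='ALL' skips the per-user email filter; the connection values and dates
# are hypothetical.
dbinfo = {'host': 'localhost', 'port': 5432, 'user': 'espa',
          'password': 'secret', 'dbname': 'espa'}  # hypothetical values
cells, n_min, n_max = query_scene_count(dbinfo, '2018-04-01', '2018-04-30',
                                        who='ALL')
for path, row, alpha in cells:
    print('path %03d row %03d alpha %.2f' % (path, row, alpha))
print('scene counts range from %d to %d' % (n_min, n_max))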
def db_orderstats(source, begin_date, end_date, sensors, dbinfo):
    """
    Queries the database to get the total number of orders
    separated by USGS and non-USGS emails

    dates are given as ISO 8601 'YYYY-MM-DD'

    :param source: EE or ESPA
    :type source: str
    :param begin_date: Date to start the count on
    :type begin_date: str
    :param end_date: Date to stop the count on
    :type end_date: str
    :param sensors: which sensor types to process (['tm4', 'etm7', ...])
    :type sensors: tuple
    :param dbinfo: Database connection information
    :type dbinfo: dict
    :return: Dictionary of the counts
    """
    sql = ('''select COUNT(distinct orderid) from ordering_order
              left join lateral jsonb_object_keys(product_opts) sensors on True
              where order_date::date >= %s
              and order_date::date <= %s
              and orderid like '%%@usgs.gov-%%'
              and order_source = %s
              and sensors in %s
              and product_opts->sensors ? 'inputs';''',
           '''select COUNT(distinct orderid) from ordering_order
              left join lateral jsonb_object_keys(product_opts) sensors on True
              where order_date::date >= %s
              and order_date::date <= %s
              and orderid not like '%%@usgs.gov-%%'
              and order_source = %s
              and sensors in %s
              and product_opts->sensors ? 'inputs';''')

    counts = {'orders_month': 0,
              'orders_usgs': 0,
              'orders_non': 0}

    with DBConnect(**dbinfo) as db:
        for q in sql:
            db.select(q, (begin_date, end_date, source, sensors))
            if 'not like' in q:
                counts['orders_non'] += int(db[0][0])
            else:
                counts['orders_usgs'] += int(db[0][0])

    counts['orders_month'] = counts['orders_usgs'] + counts['orders_non']
    return counts
def get_email_addr(dbinfo, who):
    """
    Retrieve email address(es) from the database for a specified role
    """
    key = 'email.{0}'.format(who)
    sql = 'select value from ordering_configuration where key = %s'

    with DBConnect(**dbinfo) as db:
        db.select(sql, key)
        out = db[0][0].split(',')

    return out
def extractDataFromGroupsMysql(self, dependentDataValues, independentDataValues):
    from properties import Properties
    from dbconnect import (
        DBConnect,
        UniqueImageClause,
        UniqueObjectClause,
        GetWhereClauseForImages,
        GetWhereClauseForObjects,
        image_key_columns,
        object_key_columns,
    )
    import sqltools as sql
    import numpy as np

    p = Properties.getInstance()
    p.LoadFile("C:\\Users\\Dalitso\\Desktop\\workspace2\\abhakar\\Properties_README.txt")
    db = DBConnect.getInstance()

    def buildquery(self, theGroup, var):
        # Select the dependent or independent variable, filtered by the
        # group's column/value pairs.
        pairs = theGroup.pairsDict
        variable = self.dependentVariable if var == "dep" else self.independentVariable
        q = "SELECT `" + variable + "` FROM " + self.table + " WHERE "
        q2 = [col + " LIKE `" + pairs[col] + "` AND " for col in pairs.keys()]
        result = q + "".join(q2)[:-4]  # drop the trailing 'AND '
        print(result)
        return result

    dataDict = {}
    dependentDataValues = np.array(dependentDataValues)
    independentDataValues = np.array(independentDataValues)
    for theGroup in self.groupDefinitions:
        theGroup.checkMatchCount()
        # Build a fresh dict per group so entries are not shared
        tmp = {
            "dependentData": db.execute(buildquery(self, theGroup, "dep")),
            "independentData": db.execute(buildquery(self, theGroup, "ind")),
        }
        dataDict[theGroup.description] = tmp
    return dataDict
def get_config_value(dbinfo, key):
    """
    Retrieve a specified configuration value

    :param dbinfo: DB connection information
    :param key: table key to get the value for
    :return: value
    """
    sql = ('SELECT value from ordering_configuration '
           'WHERE key = %s')

    with DBConnect(**dbinfo) as db:
        db.select(sql, key)
        ret = db[0][0]

    return ret
def current_pass(db_info):
    """
    Retrieves the current password from the database

    :param db_info: database connection information
    :type db_info: dict
    :return: current password value
    """
    sql_str = "select value from ordering_configuration where key = 'landsatds.password'"

    with DBConnect(**db_info) as db:
        db.select(sql_str)
        curr = db[0][0]

    return curr
def __init__(self, env):
    self._base_url = base_url_from_env(env)
    self._first_name = ''
    self._last_name = ''
    self._job_title = ''
    self._email = ''
    self._password = ''
    self._space_guid = ''
    self._organization_guid = ''
    self._role = ''
    self._person_guid = ''
    self._employee_guid = ''
    self._user_guid = ''
    self._invite_code_guid = ''
    self._headers = {}
    self._dbconn = DBConnect(env)
    self._is_admin = False
def update_db(passwrd, db_info):
    """
    Update the database with the new password

    :param passwrd: new password
    :type passwrd: string
    :param db_info: database connection information
    :type db_info: dict
    :raises CredentialException: if the database update fails
    """
    sql_str = "update ordering_configuration set value = %s where key = 'landsatds.password'"

    try:
        with DBConnect(**db_info) as db:
            db.execute(sql_str, passwrd)
            db.commit()
    except Exception:
        raise CredentialException('Error updating the database with the new password')
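# A sketch of rotating the stored credential with current_pass and update_db
# above; the connection values and the replacement password are hypothetical
# placeholders.
db_info = {'host': 'localhost', 'port': 5432, 'user': 'espa',
           'password': 'secret', 'dbname': 'espa'}  # hypothetical values
old_pass = current_pass(db_info)
new_pass = 'N3w-Landsat-Pass'  # hypothetical replacement
if new_pass != old_pass:
    update_db(new_pass, db_info)
    # read the value back to confirm the update took effect
    assert current_pass(db_info) == new_pass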
def db_scenestats(source, begin_date, end_date, dbinfo):
    """
    Queries the database for the number of scenes ordered
    separated by USGS and non-USGS emails

    dates are given as ISO 8601 'YYYY-MM-DD'

    :param source: EE or ESPA
    :type source: str
    :param begin_date: Date to start the count on
    :type begin_date: str
    :param end_date: Date to stop the count on
    :type end_date: str
    :param dbinfo: Database connection information
    :type dbinfo: dict
    :return: Dictionary of the counts
    """
    sql = ('''select COUNT(*)
              from ordering_scene
              inner join ordering_order
              on ordering_scene.order_id = ordering_order.id
              where ordering_order.order_date::date >= %s
              and ordering_order.order_date::date <= %s
              and ordering_order.orderid like '%%@usgs.gov-%%'
              and ordering_order.order_source = %s;''',
           '''select COUNT(*)
              from ordering_scene
              inner join ordering_order
              on ordering_scene.order_id = ordering_order.id
              where ordering_order.order_date::date >= %s
              and ordering_order.order_date::date <= %s
              and ordering_order.orderid not like '%%@usgs.gov-%%'
              and ordering_order.order_source = %s;''')

    counts = {'scenes_month': 0,
              'scenes_usgs': 0,
              'scenes_non': 0}

    with DBConnect(**dbinfo) as db:
        for q in sql:
            db.select(q, (begin_date, end_date, source))
            if 'not like' in q:
                counts['scenes_non'] += int(db[0][0])
            else:
                counts['scenes_usgs'] += int(db[0][0])

    counts['scenes_month'] = counts['scenes_usgs'] + counts['scenes_non']
    return counts
def get_where_clause(self):
    '''Build the where clause from conditions given by the user and
    conditions that link all the tables together.
    '''
    db = DBConnect.getInstance()
    conditions = []
    conditions += ['(%s)' % (str(f)) for f in self.filters]
    queried_tables = self.get_queried_tables()
    if len(queried_tables) > 1:
        link_exps = db.get_linking_expressions(queried_tables)
        if link_exps:
            conditions += [str(exp) for exp in link_exps]
    if self.old_filters:
        conditions += ['%s.%s = subquery_%d.%s' % (p.image_table, col, i, col)
                       for i in range(len(self.old_filters))
                       for col in image_key_columns()]
    if self.wheres:
        conditions += [str(where) for where in self.wheres]
    return ' AND '.join(conditions)
def db_dl_prodinfo(dbinfo, ids):
    """
    Queries the database to get the associated product options

    :param dbinfo: Database connection information
    :type dbinfo: dict
    :param ids: Order id's that have been downloaded from based on web logs
    :type ids: tuple
    :return: List of (orderid, product_options) rows
    """
    ids = remove_duplicates(ids)

    sql = ('SELECT o.orderid, o.product_options '
           'FROM ordering_order o '
           'WHERE o.orderid = ANY (%s)')

    with DBConnect(**dbinfo) as db:
        db.select(sql, (ids, ))
        results = [x for x in db]

    return results
def db_prodinfo(dbinfo, begin_date, end_date):
    """
    Queries the database to build the ordered product counts

    dates are given as ISO 8601 'YYYY-MM-DD'

    :param dbinfo: Database connection information
    :type dbinfo: dict
    :param begin_date: Date to start the counts on
    :type begin_date: str
    :param end_date: Date to end the counts on
    :type end_date: str
    :return: Dictionary of count values
    """
    sql = ('''SELECT COUNT(s.name) "total",
              SUM(CASE WHEN o.product_options::json->>'include_cfmask' = 'true'
                  THEN 1 ELSE 0 END) "include_cfmask",
              SUM(CASE WHEN o.product_options::json->>'include_customized_source_data' = 'true'
                  THEN 1 ELSE 0 END) "include_customized_source_data",
              SUM(CASE WHEN o.product_options::json->>'include_sr_evi' = 'true'
                  THEN 1 ELSE 0 END) "include_sr_evi",
              SUM(CASE WHEN o.product_options::json->>'include_source_metadata' = 'true'
                  THEN 1 ELSE 0 END) "include_source_metadata",
              SUM(CASE WHEN o.product_options::json->>'include_sr_msavi' = 'true'
                  THEN 1 ELSE 0 END) "include_sr_msavi",
              SUM(CASE WHEN o.product_options::json->>'include_sr_nbr' = 'true'
                  THEN 1 ELSE 0 END) "include_sr_nbr",
              SUM(CASE WHEN o.product_options::json->>'include_sr_nbr2' = 'true'
                  THEN 1 ELSE 0 END) "include_sr_nbr2",
              SUM(CASE WHEN o.product_options::json->>'include_sr_ndmi' = 'true'
                  THEN 1 ELSE 0 END) "include_sr_ndmi",
              SUM(CASE WHEN o.product_options::json->>'include_sr_ndvi' = 'true'
                  THEN 1 ELSE 0 END) "include_sr_ndvi",
              SUM(CASE WHEN o.product_options::json->>'include_sr_savi' = 'true'
                  THEN 1 ELSE 0 END) "include_sr_savi",
              SUM(CASE WHEN o.product_options::json->>'include_source_data' = 'true'
                  THEN 1 ELSE 0 END) "include_source_data",
              SUM(CASE WHEN o.product_options::json->>'include_sr' = 'true'
                  THEN 1 ELSE 0 END) "include_sr",
              SUM(CASE WHEN o.product_options::json->>'include_sr_thermal' = 'true'
                  THEN 1 ELSE 0 END) "include_sr_thermal",
              SUM(CASE WHEN o.product_options::json->>'include_sr_toa' = 'true'
                  THEN 1 ELSE 0 END) "include_sr_toa"
              FROM ordering_order o
              JOIN ordering_scene s ON s.order_id = o.id
              WHERE LENGTH(o.product_options) > 0
              AND o.order_date::date >= %s
              AND o.order_date::date <= %s;''')

    with DBConnect(cursor_factory=psycopg2.extras.DictCursor, **dbinfo) as db:
        db.select(sql, (begin_date, end_date))
        results = dict(db[0])

    results['title'] = 'What was Ordered'
    return results
def db_uniquestats(source, begin_date, end_date, dbinfo):
    """
    Queries the database to get the total number of unique users

    dates are given as ISO 8601 'YYYY-MM-DD'

    :param source: EE or ESPA
    :type source: str
    :param begin_date: Date to start the count on
    :type begin_date: str
    :param end_date: Date to stop the count on
    :type end_date: str
    :param dbinfo: Database connection information
    :type dbinfo: dict
    :return: Count of unique users
    """
    sql = '''select count(distinct(split_part(orderid, '-', 1)))
             from ordering_order
             where order_date::date >= %s
             and order_date::date <= %s
             and order_source = %s;'''

    with DBConnect(**dbinfo) as db:
        db.select(sql, (begin_date, end_date, source))
        return db[0][0]
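# Example of tallying db_uniquestats for both order sources named in the
# docstring; the date window and connection values are hypothetical.
dbinfo = {'host': 'localhost', 'port': 5432, 'user': 'espa',
          'password': 'secret', 'dbname': 'espa'}  # hypothetical values
unique_users = {src: db_uniquestats(src, '2018-04-01', '2018-04-30', dbinfo)
                for src in ('EE', 'ESPA')}
print(unique_users)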
def deleteItem(category_name, item_name):
    db = DBConnect()
    item = db.getItemByName(item_name)
    if request.method == 'POST':
        db.deleteItem(item)
        return redirect(url_for('showCategory', category_name=category_name))
    if request.method == 'GET':
        # Authorization check before serving the delete page
        userEmail = session.get('email')
        userID = db.getUserIDByEmail(userEmail)
        if userID != item.user.id:
            return redirect(
                url_for('error',
                        error='You are not authorized to delete this item'))
        return render_template('deleteItem.html',
                               categoryName=category_name,
                               itemName=item_name)
def gconnect():
    # Verify that the token the client sends the server matches the one
    # that was sent
    if request.args.get('state') != session['state']:
        # If they do not match then respond with an error
        response = make_response(json.dumps('Invalid state token'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    # If the state tokens match then we take our code
    code = request.data

    # Try to use the one-time code and exchange it for a
    # credentials object
    try:
        # Create oauth flow object and add client secret key info to
        # that object
        oauth_flow = flow_from_clientsecrets('secret/client_secrets.json',
                                             scope='')
        # Specify that this is the one-time code flow this server sends off
        oauth_flow.redirect_uri = 'postmessage'
        # Init exchange
        credentials = oauth_flow.step2_exchange(code)
    # Handle the case where an error occurs during the exchange
    except FlowExchangeError:
        response = make_response(
            json.dumps('Failed to upgrade the authorization code'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response

    # Check to see if there is a valid access token inside of the
    # returned credentials
    access_token = credentials.access_token
    url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?access_token=%s'
           % access_token)
    h = httplib2.Http()
    result = json.loads(h.request(url, 'GET')[1])
    if result.get('error') is not None:
        response = make_response(json.dumps(result['error']), 500)
        response.headers['Content-Type'] = 'application/json'
        return response

    # Compare id in the credentials object against the id returned
    # by the google api server
    gplus_id = credentials.id_token['sub']
    if result['user_id'] != gplus_id:
        response = make_response(
            json.dumps('Token user ID does not match given user ID'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response

    # Compare client IDs
    if result['issued_to'] != CLIENT_ID:
        response = make_response(
            json.dumps('Token client ID does not match the apps ID'), 401)
        print('Token client ID does not match the apps ID')
        response.headers['Content-Type'] = 'application/json'
        return response

    # Check if the user is already logged into the system
    stored_credentials = session.get('credentials')
    stored_gplus_id = session.get('gplus_id')
    if stored_credentials is not None and stored_gplus_id == gplus_id:
        response = make_response(
            json.dumps('Current user is already logged in'), 200)
        response.headers['Content-Type'] = 'application/json'
        return response

    # Store credentials and google plus id in this session
    session['credentials'] = credentials.access_token
    session['gplus_id'] = gplus_id

    # Get more information about the user from the google plus api
    userinfo_url = 'https://www.googleapis.com/oauth2/v1/userinfo'
    params = {'access_token': credentials.access_token, 'alt': 'json'}
    answer = requests.get(userinfo_url, params=params)
    data = json.loads(answer.text)

    # Store user info in login session
    session['username'] = data['email']
    session['email'] = data['email']
    session['picture'] = data['picture']

    db = DBConnect()
    userID = db.getUserIDByEmail(session['email'])
    if userID is None:
        db.createUser(session['username'], session['email'],
                      session['picture'])
        userID = db.getUserIDByEmail(session['email'])
    session['user_id'] = userID

    output = ''
    output += '<h1>Welcome, '
    output += session['username']
    return output
def HTS_GroupDataExtractMySql(dataSelector, table):
    from HTS_dataDict import HTS_dataDict
    import numpy as np
    from properties import Properties
    from dbconnect import (DBConnect, UniqueImageClause, UniqueObjectClause,
                           GetWhereClauseForImages, GetWhereClauseForObjects,
                           image_key_columns, object_key_columns)
    import sqltools as sql

    p = Properties.getInstance()
    db = DBConnect.getInstance()
    dependentDataValues = False
    independentDataValues = False

    returnDict = HTS_dataDict(dataSelector)

    dataSelector.clearAllIndici()
    print('HTS_GroupDataExtract: using sheet %s\n' % table)

    query = 'SELECT `' + dataSelector.independentVariable + '` FROM `' + table + '`'
    print(query)
    dataSelector.findValidIndiciFromDataColumn(dataSelector.independentVariable,
                                               db.execute(query))

    query = 'SELECT `' + dataSelector.dependentVariable + '` FROM `' + table + '`'
    print(query)
    dataSelector.findValidIndiciFromDataColumn(dataSelector.dependentVariable,
                                               db.execute(query))

    print(dependentDataValues, independentDataValues, "variables")
    if not dependentDataValues:
        print('HTS_GroupDataExtract: dependent variable not found in %s' % table)
    if not independentDataValues:
        print('HTS_GroupDataExtract: independent variable not found in %s' % table)

    dataSelector.table = table
    returnDict.dict[table] = dataSelector.extractDataFromGroupsMysql(
        dependentDataValues, independentDataValues)

    return returnDict
import properties
import logging
import matplotlib.cm
import numpy as np
from itertools import groupby
import os
import sys
import re
import wx
import cpa.helpmenu
import csv

from dbconnect import DBConnect
from cpatool import CPATool

p = properties.Properties.getInstance()
# Hack the properties module so it doesn't require the object table.
properties.optional_vars += ['object_table']
db = DBConnect.getInstance()

required_fields = ['plate_shape', 'well_id']

fixed_width = (200, -1)


class PlateViewer(wx.Frame, CPATool):
    def __init__(self, parent, size=(800, -1), **kwargs):
        wx.Frame.__init__(self, parent, -1, size=size, title='Plate Viewer',
                          **kwargs)
        CPATool.__init__(self)
        self.SetName(self.tool_name)
        self.SetBackgroundColour("white")  # Fixing the color

        # Check for required properties fields.
        fail = False
        for field in required_fields:
import urllib3

from employee import Employee
from erp_organization import Organization
from workspace import Workspace
from dbconnect import DBConnect

urllib3.disable_warnings()

while True:
    env = input("Enter the environment (test/dev/demo/prod): ").lower()
    if env not in ['test', 'dev', 'demo', 'prod']:
        continue
    break

new_space = Workspace(env)
new_space_admin = Employee(env)
db = DBConnect(env)

while True:
    space_name = input("Enter the space name (it must be unique): ")
    if space_name.lower() in db.space_codes_on_server():
        print("A space with this name already exists. Enter a different name.")
        continue
    break

while True:
    email = input("Enter a valid email (the login must be unique across all spaces): ")
    if new_space_admin.login_exists(email):
        print("This login is already taken.")
        continue
    break
import urllib3
import sys

from authorization import Authorization
from employee import Employee
from erp_organization import Organization
from workspace import Workspace
from dbconnect import DBConnect

urllib3.disable_warnings()

env = sys.argv[1]
space_name = sys.argv[2]
email = sys.argv[3]

new_space = Workspace(env)
new_space_admin = Employee(env)
db = DBConnect(env)

space_guid = new_space.create_workspace(space_name=space_name,
                                        space_code=space_name)
new_space_admin.create_admin_for_new_space(space_guid, email)

auth = Authorization(env)
headers = auth.create_headers(email)

new_organization_guid = Organization(env, headers).add_organization('Owner')
new_space.set_space_owner(new_organization_guid)
new_space_admin.add_admin_to_employees(new_organization_guid, headers)

print(space_guid, space_name, email)
from __future__ import with_statement
from dbconnect import DBConnect
from properties import Properties
from singleton import Singleton
from heapq import heappush, heappop
from weakref import WeakValueDictionary
import imagetools
import logging
import numpy
import threading
import wx
import os

db = DBConnect.getInstance()
p = Properties.getInstance()


def load_lock():
    return TileCollection.getInstance().load_lock


class List(list):
    pass


class TileCollection(Singleton):
    '''
    Main access point for loading tiles through the TileLoader.
    '''
    def __init__(self):
        self.tileData = WeakValueDictionary()
        f.write('\n')
        f.close()

    def make_unique_plot_name(self, prefix):
        '''This function must be called to generate a unique name for each
        plot. eg: plot.SetName(wx.GetApp().make_unique_plot_name('Histogram'))
        '''
        # Take the highest existing suffix for this prefix and add one, so
        # the returned name is actually unique (and the empty case is safe).
        existing = [int(plot.Name[len(prefix):]) for plot in self.plots
                    if plot.Name.startswith(prefix)]
        plot_num = max(existing) + 1 if existing else 1
        return '%s %d' % (prefix, plot_num)


if __name__ == "__main__":
    # Initialize the app early because the fancy exception handler
    # depends on it in order to show a dialog.
    app = CPAnalyst(redirect=False)

    if sys.platform == 'darwin':
        import bioformats

    # Install our own pretty exception handler unless one has already
    # been installed (e.g., a debugger)
    if sys.excepthook == sys.__excepthook__:
        from errors import show_exception_as_dialog
        sys.excepthook = show_exception_as_dialog

    # Black magic: Bus errors occur on certain Macs if we wait until
    # later to connect, so we'll do it here.
    DBConnect.getInstance().connect()

    app.MainLoop()
    os._exit(0)