def save_to_db(self):
    """Insert this user (when self._id is None) or update the existing row.

    Returns True on success, False on failure.  Fixes vs. original:
    values are bound as query parameters (no SQL injection), the
    connection is closed on every path, and the UPDATE writes the real
    username/password instead of the literal '******' placeholders.
    """
    conn = None
    try:
        conn = connect()
        cursor = conn.cursor()
        if self._id is None:
            cursor.execute(
                "INSERT INTO users(username, password) VALUES (%s, %s) RETURNING id",
                (self.username, self.hashed_password))
            self._id = cursor.fetchone()[0]
        else:
            # NOTE(review): the original set both columns to '******',
            # which looks like a redaction artifact rather than intent.
            cursor.execute(
                "UPDATE users SET username = %s, password = %s WHERE id = %s",
                (self.username, self.hashed_password, self._id))
        return True
    except psycopg2.errors.UniqueViolation:
        print(f'User {self.username} already exists')
        return False
    except Exception:
        print('saving wasn\'t done')
        return False
    finally:
        if conn is not None:
            conn.close()
def save_to_db(self):
    """Insert a new message (when self._id is None) or update the row.

    Returns True on success, False on failure.  Uses bound parameters
    instead of f-string interpolation, and closes the connection on
    every path.
    """
    conn = None
    try:
        conn = connect()
        cursor = conn.cursor()
        if self._id is None:
            cursor.execute(
                "INSERT INTO messages(from_id, to_id, msg) VALUES (%s, %s, %s) "
                "RETURNING id, creation_date",
                (self.from_id, self.to_id, self.msg))
            self._id, self.creation_date = cursor.fetchone()
        else:
            # BUG FIX: the original interpolated self.id, but the
            # attribute is self._id (AttributeError at runtime).
            cursor.execute(
                "UPDATE messages SET from_id=%s, to_id=%s, creation_date=%s, msg=%s "
                "WHERE id = %s",
                (self.from_id, self.to_id, self.creation_date, self.msg, self._id))
        return True
    except Exception:
        print("saving wasn't done")
        return False
    finally:
        if conn is not None:
            conn.close()
def execute_query(query, return_result=True, connection_method=None):
    """Execute *query* and optionally return the fetched rows.

    BUG FIXES vs. original:
    - the default argument called connect1() at import time, sharing one
      stale connection across every call; it is now created per call,
    - results were iterated from a *new* connect() connection instead of
      the cursor that ran the query, and yet another new connection was
      the one being closed.
    """
    conn = connection_method if connection_method is not None else connect1()
    cursor = conn.cursor()
    cursor.execute(query)
    result = []
    if return_result:
        for item in cursor:
            result.append(item)
    conn.close()
    return result
def connect(self):
    """Toggle the connection: connect with the entered address/port, or
    disconnect if the button already reads "Disconnect"."""
    address = self.addressEnter.text()
    port = self.portEnter.text()
    if self.connectButton.text() == "Connect":
        # BUG FIX: the original called con.connect() twice, opening two
        # separate connections just to read status and session.
        self.status, self.session = con.connect(address, port)[:2]
        self.setStatus()
        self.connectButton.setText("Disconnect")
    else:
        self.disconnect()
def __init__(self, serverip, port, gamestatus):
    """Open the connection and run setup.

    errno 115 ("Operation now in progress") from a non-blocking connect
    is not a failure; any other socket error aborts the process.
    """
    try:
        connection.connect(serverip, port)
    except socket.error as e:
        if e.errno != 115:
            sys.exit()
        # connect is merely still in progress — proceed with setup
        self.__setup(gamestatus)
    else:
        self.__setup(gamestatus)
def create_client_profile(data, user_type):
    """Insert profile rows into the SQL table matching *user_type*.

    Redirection vers la table client / admin / manager selon le type
    d'utilisateur.  The three copy-pasted branches of the original are
    replaced by one table-driven insert.
    """
    connect()
    # (table name, column list, column count, debug label) per user type
    tables = {
        "client": ("client",
                   "(surname, name, global_name, address, email, password, phone, "
                   "b_account_num, beginning_contract, end_contract, marital_status, "
                   "children_nb, user_type)", 13, "client"),
        "admin": ("admin",
                  "(surname, name, address, email, password, phone, pro_id, user_type)",
                  8, "admin"),
        "manager": ("manager",
                    "(surname, name, address, email, password, phone, pro_id, user_type)",
                    8, "agent"),
    }
    entry = tables.get(user_type)
    if entry is None:
        print("Type d'utilisiteur inconnu")
    else:
        table_name, table_format, n_cols, label = entry
        print(label)
        values_format = "(" + ", ".join(["%s"] * n_cols) + ")"
        insert_stmt = "INSERT INTO " + table_name + table_format + " VALUES " + values_format
        cursor = cnx.cursor()
        cursor.executemany(insert_stmt, data)
        # NOTE(review): no cnx.commit() before close — inserts may be lost
        # unless the connection is in autocommit mode; confirm with caller.
    cnx.close()
def Checkdb(dbname):
    """Set up the database when it does not exist yet; otherwise offer to
    wipe and repopulate its tables.

    Returns 1 when the program may run, 0 otherwise.  The unreachable
    print after the returns was removed and the `timeDetla` typo fixed.
    """
    prgrm = 1
    if checkdb(dbname) is True:  # Check the existence of the database
        createdb(dbname)                 # Create the database
        session = connect(dbname)        # Connect to the database
        createtables(dbname)             # Create the tables
        populate(dbname, rawdata)        # Populate the database
        finish_time = datetime.now()     # End of the set up
        time_delta = finish_time - startTime
        print('\n----------------------------------------------------')
        print("Setup is finished. Your database is now available.")
        print("The process was completed in : " + str(time_delta.total_seconds()) + "s.")
        print('----------------------------------------------------\n')
        return prgrm
    print("Your database already exists.\n")
    print('Do you want to delete the tables to run the program anyway ?')
    print('1 = Delete the tables and recreate others.')
    print('Other integer: do not run the program\n')
    try:
        choice = int(input('Your choice : '))
    except ValueError:
        print("The input is not right...\n")
        return 0
    if choice == 1:
        engine = db.create_engine(f'mysql+pymysql://{username}:{password}@{host}/{dbname}')
        connection = engine.connect()
        session = connect(dbname)            # Connect to the database
        query = db.delete(Books)
        results = connection.execute(query)
        populate(dbname, rawdata)            # Repopulate the database
        return prgrm
    print('The program will not run')
    return 0
def donation_trend_per_month(query):
    """Plot a stacked bar chart of donation quantity per posting day,
    broken down by donor class.

    BUG FIX: reindex() returns a new frame; the original discarded it,
    so the bars were never day-sorted.
    """
    data = pd.read_sql(query, engine.connect())
    posting_day = []
    donor_code = []
    quant = []
    for _, record in data.iterrows():
        # posting_date arrives either as "m/d/..." or "y-m-d hh:mm"
        if "/" in record['posting_date']:
            posting_day.append(int(record['posting_date'].split("/")[1]))
        elif "-" in record['posting_date']:
            day_part = record['posting_date'].split("-")[2].split(" ")
            posting_day.append(int(day_part[0]))
        donor_code.append(int(record['unc_donor_class']))
        quant.append(int(record['quantity']))
    df = pd.DataFrame.from_dict({'posting_day': posting_day,
                                 'unc_donor_class': donor_code,
                                 'quantity': quant})
    pivot_df = df.pivot(index='posting_day', columns='unc_donor_class', values='quantity')
    pivot_df = pivot_df.reindex(sorted(pivot_df.index))
    pivot_df.plot.bar(stacked=True, figsize=(15, 10)).legend(bbox_to_anchor=(1.0, 1.2))
def copy_database(target_db, admin_user=None, admin_password=None, force=False):
    """ Copy the current database to a new database name
    (see http://www.mongodb.org/display/DOCS/Clone+Database) """
    with connection.protect():
        # save the current connection so it can be restored afterwards
        db_connection = connection.connection()
        db_connection_ = connection.connection_information()
        source_db = db_connection_["db"]
        if source_db == target_db:
            logger.debug("Ignored request to copy '%s' into itself." % target_db)
            return
        # open a connection to the admin collection
        admin_connection = connection.connect(db="admin", user=admin_user,
                                              password=admin_password)
        if target_db in admin_connection.connection.database_names():
            if force:
                # BUG FIX: the original logged source_db as the database
                # that "already exists"; it is the *target* that exists.
                logger.debug("'%s' already exists and will be merged with content of '%s'."
                             % (target_db, source_db))
            else:
                raise errors.DBOperationError(
                    "Unable to copy database '%s' to '%s': target already exists."
                    % (source_db, target_db))
        # copy the database, always restoring the saved connection
        try:
            admin_connection.connection.copy_database(source_db, target_db)
        finally:
            connection._connection = db_connection
            connection._connection_information = db_connection_
        logger.debug("Copy of '%s' into '%s' successful." % (source_db, target_db))
def load_all_messages(**kwargs):
    """Load Messages rows, optionally filtered by column=value kwargs.

    SECURITY FIX: values are bound as query parameters (the original
    interpolated them into the SQL).  Column names still come from
    kwargs keys, so callers must not forward untrusted key names.
    Also fixes the conn-referenced-before-assignment crash when
    connect() itself fails.
    """
    sql = 'SELECT * FROM messages'
    clauses = []
    params = []
    for key, value in kwargs.items():
        clauses.append(f' {key}=%s')
        params.append(value)
    if clauses:
        sql += f" WHERE {' AND '.join(clauses)}"
    conn = None
    try:
        conn = connect()
        cursor = conn.cursor()
        cursor.execute(sql, params)
        msg_list = []
        for row in cursor.fetchall():
            message = Messages()
            (message._id, message.from_id, message.to_id,
             message.creation_date, message.msg) = row
            msg_list.append(message)
        return msg_list
    except Exception:
        print('loading failed')
    finally:
        if conn is not None:
            conn.close()
def remove_object(bucket, object):
    """Delete *object* from the S3 bucket/path described by *bucket*.

    Returns a human-readable success/failure string.  The bare except
    is narrowed to Exception so SystemExit/KeyboardInterrupt propagate.
    """
    try:
        conn = connection.connect()
        bucket_and_path = str(bucket.name.lower())
        file_name = str(object.name)
        # the bucket name may carry a "bucket/sub/path" prefix
        parts = bucket_and_path.split('/', 1)
        bucket_name = parts[0]
        path = parts[1] if len(parts) == 2 else ''
        target_bucket = conn.get_bucket(bucket_name)
        from boto.s3.key import Key
        key = Key(target_bucket)
        key.key = os.path.join(path, file_name)
        key.delete()
        return "Successfully delete the file!"
    except Exception:
        return "Object Deletion Failed."
def upload_object(bucket, localDirectory, filename):
    """Upload a local file to the S3 bucket/path described by *bucket*.

    Takes a bucketname/path as the first parameter and a local file name
    as the second, and uploads it to S3.  Fixes: Python 2 print statement
    converted, bare except narrowed.
    """
    try:
        conn = connection.connect()
        bucket_and_path = str(bucket.name)
        local_dir = str(localDirectory)
        local_file_name = str(filename)
        # separate the bucket name from an optional sub-path
        parts = bucket_and_path.split('/', 1)
        bucket_name = parts[0]
        path = parts[1] if len(parts) == 2 else ''
        full_key_name = os.path.join(path, local_file_name)
        target_bucket = conn.get_bucket(bucket_name)
        key = target_bucket.new_key(full_key_name)
        full_local_file_name = os.path.join(local_dir, local_file_name)
        print(full_local_file_name)
        key.set_contents_from_filename(full_local_file_name)
        return "Upload Successful"
    except Exception:
        return "Upload Failed"
def move_object(bucket_from, bucket_to, file):
    """Copy *file* from bucket_from to bucket_to via an in-memory buffer.

    Returns a success/failure message string.  Fixes: bare except
    narrowed, dead locals and commented-out code removed.
    """
    conn = connection.connect()
    object_contents = StringIO.StringIO()
    try:
        from boto.s3.key import Key
        source_key = Key(bucket_from)
        source_key.key = file.name
        source_key.get_contents_to_file(object_contents)
        dst_uri = boto.storage_uri(bucket_to.name + '/' + file.name, 'gs')
        object_contents.seek(0)
        dst_uri.new_key().set_contents_from_file(object_contents)
        object_contents.close()
    except Exception:
        return 'Sorry, but fail to move file: "%s".' % file.name
    else:
        return 'Successfully move "%s"!' % file.name
def move_object(bucket_from, bucket_to, file):
    """Copy *file* from bucket_from into bucket_to via an in-memory buffer.

    Returns a success/failure message string.  Fixes: Python 2 print
    statements converted, bare except narrowed, dead prompts removed.
    """
    print('FUNCTION: MOVE OBJECT')
    print('')
    src_uri = boto.storage_uri(bucket_from.name + '/' + file.name, GOOGLE_STORAGE)
    # file-like object holding the object contents during the copy
    object_contents = StringIO.StringIO()
    try:
        src_uri.get_key().get_file(object_contents)
        conn = connection.connect()
        dest_bucket = conn.get_bucket(bucket_to.name)
        key = dest_bucket.new_key(file.name)
        object_contents.seek(0)
        key.set_contents_from_file(object_contents)
        object_contents.close()
    except Exception:
        return 'Sorry, but fail to move file: "%s".' % file.name
    else:
        return 'Successfully move "%s"!' % file.name
def ro(analysis_id, variable_ids):
    """Run pairwise chi-square correlation analysis over the given
    variables and store the results.

    The bare except around chi2_contingency is narrowed to Exception so
    it no longer swallows KeyboardInterrupt/SystemExit.
    """
    cnx, now = connect(), currtime()
    result, ro_id, survey_id, org_id = select_from_analysis(analysis_id)
    update_analysis_running(analysis_id)
    delete_results(analysis_id)
    variables = get_varinfo_from_varids(variable_ids, analysis_id)
    variable_ids = variable_ids.split(',')
    options = get_options(variable_ids)
    data = get_variable_dataf(variable_ids, ro_id, survey_id, org_id, analysis_id)
    check_empty_var(data, variables, cnx)
    orders = list(range(len(variable_ids)))
    for subset in combinations(orders, 2):
        (first, second) = subset
        print(subset)
        subids = [variable_ids[i] for i in (first, second)]
        subdata = [data[i] for i in (first, second)]
        subopts = [options[i] for i in (first, second)]
        counts = find_counts(subdata, subopts)
        percentages = find_percentages(counts)
        try:
            chi2, p, dof, ex = chi2_contingency(counts, correction=False)
        except Exception:
            # record the failure in the analysis table before exiting
            cursor_err = cnx.cursor()
            cursor_err.execute(sperror("Algorithm has failed", analysis_id))
            cnx.commit()
            cursor_err.close()
            exit(1)
        p = round(p, 4)
        save_results_as_corr(p, percentages, survey_id, subids, subopts, analysis_id)
    update_analysis_done(analysis_id)
    return
def get_variable_dataf(variable_ids, ro_id, survey_id, org_id, analysis_id):
    """Fetch each variable's data points via the GetVariableDataF stored
    procedure and return them as lists of ints (NULL data points -> 1).

    The bare except is narrowed to Exception; the error is still logged
    to the analysis table before exiting.
    """
    cnx, now = connect(), currtime()
    data = []
    for variable_id in variable_ids:
        cursor = cnx.cursor(dictionary=True, buffered=True)
        try:
            cursor.callproc('GetVariableDataF',
                            [variable_id, org_id, survey_id, ro_id, 0, ''])
        except Exception:
            cursor_err = cnx.cursor()
            cursor_err.execute(sperror("Variable data can't be retrieved", analysis_id))
            cnx.commit()
            cursor_err.close()
            cnx.close()
            exit(1)
        for result_set in cursor.stored_results():
            fetched = result_set.fetchall()
            # NULL first columns are replaced by the '1' sentinel
            fetched = [('1',) if row[0] is None else row for row in fetched]
            data.append(fetched)
        cursor.close()
    cnx.close()
    cleaned = []
    for variable in data:
        ints = []
        for data_point in variable:
            ints.append(int(data_point[0]) if data_point is not None else 1)
        cleaned.append(ints)
    return cleaned
def test(self):
    """Assert that *table* has the same (column, type) pairs in the prod
    and test schemas."""
    mydb = connection.connect()
    mycursor = mydb.cursor()
    schema_prod = devconfig.Dev("dev").connect_db()
    schema_test = devconfig.Dev("test").connect_db()
    del sys.path[:]
    sys.path.append(os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                 'files', 'ddl', table + '.sql'))

    def describe(schema):
        # normalise each "desc" row into "NAME TYPE" uppercase strings
        normalised = []
        for _ in sys.path:
            mycursor.execute("desc " + schema[3] + "." + table)
            for row in mycursor.fetchall():
                cols = list(row)
                normalised.append((str(cols[0]) + " " + str(cols[1]))
                                  .upper().replace("B'", "").replace("'", ""))
        return normalised

    prod_cols = describe(schema_prod)
    test_cols = describe(schema_test)
    self.assertEqual(
        collections.Counter(prod_cols) == collections.Counter(test_cols),
        True,
        msg=table + " " + str(set(test_cols) - set(prod_cols)))
def save_results_as_corr(p, percentages, survey_id, variable_ids, options, analysis_id):
    """Store the chi-square result symmetrically: one row per option pair
    for (var1, var2) and the mirrored row for (var2, var1).

    Fixes vs. original: values are bound as parameters instead of
    str.format, one cursor is reused (the originals leaked a pair of
    cursors per iteration), and the connection is closed even on error.
    """
    cnx, now = connect(), currtime()
    insert = ("INSERT INTO correlation_analysis_results SET "
              "analysis_id = %s, var1 = %s, var2 = %s, "
              "var1_option = %s, var2_option = %s, "
              "p_value = %s, percentage = %s, is_chisquare = 1")
    cursor = cnx.cursor()
    try:
        for i in range(len(options[0])):
            for j in range(len(options[1])):
                pct = round(percentages[i][j], 2)
                # forward row ...
                cursor.execute(insert, (analysis_id, variable_ids[0], variable_ids[1],
                                        i + 1, j + 1, p, pct))
                # ... and the mirrored (var2, var1) row
                cursor.execute(insert, (analysis_id, variable_ids[1], variable_ids[0],
                                        j + 1, i + 1, p, pct))
                cnx.commit()
    except Exception:
        exit(1)
    finally:
        cursor.close()
        cnx.close()
    return
def run_main():
    """Entry point: parse arguments, connect to a remote host or load a
    local dataset, then run the trainer.

    BUG FIX: in the local path p stays None, so the original crashed
    with AttributeError inside the exception handlers.
    """
    args = arguments.get_arguments()
    pp.pprint(args._to_dicts())
    p, conn = None, None
    env = None
    random.seed(args.seed)
    torch.manual_seed(args.seed)
    choice, input_args = get_choice(args.input)
    if choice == 'remote':
        print("connecting to: " + input_args.host)
        p, conn = connection.connect('ws://' + input_args.host)
    else:
        config, dataset = load_dataset(args)
        env = initialise(config, dataset, args)
    try:
        run_trainer(args, conn, env=env)
    except (KeyboardInterrupt, SystemExit):
        if p is not None:
            p.terminate()
    except Exception:
        traceback.print_exc()
        if p is not None:
            p.terminate()
def remove_bucket(bucket):
    """Delete the given bucket, erasing any keys it still contains.

    Returns a human-readable status string.  Fixes: Python 2 print
    statement converted, bare except narrowed, commented-out prompt
    code removed.
    """
    conn = connection.connect()
    doomed_name = str(bucket.name.lower())
    try:
        # check the bucket actually exists before trying to empty it
        if conn.lookup(doomed_name) is None:
            return "No such bucket!"
        print("Caution: If the bucket is Full, all its contents will be ERASED! ")
        full_bucket = conn.get_bucket(doomed_name, validate=False)
        # it's full of keys — delete them all before the bucket itself
        for key in full_bucket.list():
            key.delete()
        conn.delete_bucket(doomed_name)
        return "Bucket deletion successful!"
    except Exception:
        return "Failed to delete bucket."
def RegisterNewWorker():
    """Interactively register this machine as a worker and store its id
    in the local worker-info file.

    SECURITY FIX: the worker name (user input) is bound with `?`
    placeholders instead of being concatenated into the SQL text.
    """
    thisDir = os.path.dirname(os.path.abspath(__file__))
    infoPath = os.path.join(thisDir, glob.workerInfoFileName)
    if os.path.exists(infoPath):
        print('There is already a file of a registered worker at ' + infoPath)
        return
    conn, cursor = connection.connect()
    worker_name = input('Please insert a name for this worker:\n')
    gpu_model = input('Please insert the GPU model:\n')
    gpu_memory_gb = int(input('Please insert the GPU memory, in GB:\n'))
    cursor.execute('select * from REGISTERED_WORKERS where WorkerName=?',
                   (worker_name,))
    if len(cursor.fetchall()) > 0:
        print('There is already a registered worker with name ' + worker_name)
        return
    cursor.execute(
        'insert into REGISTERED_WORKERS (WorkerName, GPUName, GPUMemory) values (?, ?, ?)',
        (worker_name, gpu_model, gpu_memory_gb))
    conn.commit()
    cursor.execute('select WorkerId from REGISTERED_WORKERS where WorkerName=?',
                   (worker_name,))
    workerId = cursor.fetchone()[0]
    with open(infoPath, 'w') as fid:
        fid.write(str(workerId))
def upsert_wp_table(pmPostId, pmValue, dateKey):
    """Set every wp_posts row's post_status to "publish".

    BUG FIX: (updatePublish) is just a parenthesised string, not a
    tuple — the driver needs a sequence, so a trailing comma is added.
    The bare except is narrowed to Exception.
    """
    mysqlConnect = connection.connect()
    conn = mysqlConnect.connect_mysql()
    cur = conn.cursor()
    updatePublish = "publish"
    updatePrivate = "private"
    sleep(2)
    now = datetime.datetime.now()
    with cur as cursor:
        try:
            # -----------------------------------
            # make every post public
            # -----------------------------------
            postsSql = "UPDATE wp_posts SET post_status = %s"
            cursor.execute(postsSql, (updatePublish,))
            # connection is not autocommit, so commit explicitly
            conn.commit()
        except Exception:
            dV.exception_error_log()
        finally:
            conn.close()
def connection(request):
    """Pytest fixture: clean database holding a test_type table that
    exercises every supported column type."""
    conn = con.connect()
    request.addfinalizer(con.disconnect)
    cursor = conn.cursor()
    con.drop_tables()
    cursor.execute(
        'create table test_type ( id serial primary key, '
        '"varchar" varchar(15), "varchar2" varchar(20), '
        '"varchar_not_null" varchar(20) not null default \'abc\', '
        '"integer" integer, "boolean" boolean, "time" time, "date" date, '
        '"timestamp" timestamp, "json" json, "jsonb" jsonb, "text" text, '
        '"double" double precision, "real" real)')
    conn.commit()
    mro.load_database(conn)
    return conn
def logout():
    """Log the current user out and redirect to the index page.

    BUG FIX: the original closed the connection only when logout
    succeeded, leaking it (and returning None) on failure; the
    connection is now closed unconditionally.
    """
    conn = connect()
    cursor = conn.cursor()
    logging_out = User.logout(cursor)
    conn.close()
    if logging_out:
        return redirect('/')
def test_validations(self):
    """Compare the live table description against the column definitions
    parsed from its DDL file."""
    mydb = connection.connect()
    mycursor = mydb.cursor()
    del sys.path[:]
    sys.path.append(os.path.join(os.path.dirname(os.path.dirname(__file__)),
                                 'files', 'ddl', self.tables + '.sql'))
    for file_path in sys.path:
        # normalise the live columns into "NAME TYPE" uppercase strings
        mycursor.execute("desc " + self.tables)
        described = []
        for row in mycursor.fetchall():
            cols = list(row)
            described.append(
                (str(cols[0]) + " " + str(cols[1]).replace("b'", "'"))
                .upper().replace("'", ""))
        # extract the declared columns from the DDL file text
        with open(file_path, 'r') as handle:
            ddl_text = "".join(handle.readlines())
        ddl_text = re.sub(' +', ' ', ddl_text)
        pieces = (ddl_text.replace("\n", "").split(self.tables)[1]
                  .replace("NOT NULL", "").replace("PRIMARY KEY", "")
                  .strip("(").strip(" (").replace(");", "")
                  .split("FOREIGN KEY")[0].split(","))
        declared = [piece.rstrip().lstrip() for piece in pieces]
        if "" in declared:
            declared.remove("")
        self.assertEqual(
            collections.Counter(described) == collections.Counter(declared),
            True,
            msg=self.tables + " " + str(set(declared) - set(described)))
def test_insertion(self):
    """Check that the prod table and its test_ twin hold the same rows,
    keyed on each table's first column.

    BUG FIX: the original concatenated "'limit 1" without a space,
    producing invalid SQL ( ...table_name='test_x'limit 1 ).
    """
    mydb = connection.connect()
    mycursor = mydb.cursor()
    schema_prod = devconfig.Dev("dev").connect_db()
    schema_test = devconfig.Dev("test").connect_db()
    mycursor.execute(
        "select column_name from information_schema.columns where table_name='"
        + self.tables + "' limit 1")
    data = str(mycursor.fetchone()).split(",")[0].replace("(", "").replace("'", "")
    mycursor.execute(
        "select column_name from information_schema.columns where table_name='test_"
        + self.tables + "' limit 1")
    data1 = str(mycursor.fetchone()).split(",")[0].replace("(", "").replace("'", "")
    # rows present in test_* but missing from prod
    mycursor.execute("select * from " + schema_test[3] + ".test_" + self.tables
                     + " where " + data1 + " not in (select " + data + " from "
                     + schema_prod[3] + "." + self.tables + ")")
    comp = mycursor.fetchall()
    # rows present in prod but missing from test_*
    mycursor.execute("select * from " + schema_prod[3] + "." + self.tables
                     + " where " + data + " not in (select " + data1 + " from "
                     + schema_test[3] + ".test_" + self.tables + ")")
    comp1 = mycursor.fetchall()
    self.assertEqual(comp == [], True, msg=self.tables + " " + str(comp))
    self.assertEqual(comp1 == [], True, msg=self.tables + " " + str(comp1))
def messages():
    """Messages view: show inbox/sent and handle sending a new message;
    redirect to the index when not logged in."""
    message = None
    conn = connect()
    cursor = conn.cursor()
    check_for_login = User.login_check(cursor)
    # guard clause: anonymous visitors go straight back to the index
    if not check_for_login:
        conn.close()
        return redirect('/')
    inbox = Message.inbox(cursor, check_for_login[0])
    sent = Message.sent(cursor, check_for_login[0])
    users = User.get_all_users(cursor)
    if request.method == 'POST':
        new_message = Message(check_for_login[0],
                              request.form['to_id'],
                              request.form['message'])
        if new_message.send_message(cursor):
            message = "Message sent!"
    conn.close()
    return render_template('messages.html', check_for_login=check_for_login,
                           users=users, message=message, inbox=inbox, sent=sent)
def connection(request):
    """Pytest fixture: clean database with table1/table2/table3 plus
    generated test data."""
    conn = con.connect()
    request.addfinalizer(con.disconnect)
    cursor = conn.cursor()
    con.drop_tables()
    ddl_statements = (
        "create table table1 ( id serial unique, name varchar(20) not null, "
        "value varchar(20), primary key (id, name) );",
        "create table table2 ( id serial, name varchar(20) not null, "
        "table1_id integer, primary key (id), "
        "foreign key (table1_id) references table1(id) );",
        "create table table3 ( value varchar(20) not null );",
    )
    for statement in ddl_statements:
        cursor.execute(statement)
    conn.commit()
    mro.load_database(conn)
    create_test_data(conn)
    return conn
def main():
    """Exchange event loop: read messages and dispatch on their type,
    keeping the stock book and the bank's open orders up to date.

    Fixes vs. original: the dead `some = 9` placeholder and the unused
    `OrderID = -1` assignment are removed.
    """
    exchange = connect()
    bank = Bank()
    helloResponse = sendHello(exchange)
    stocksState = AllStocks(helloResponse)
    bank.updateStateFromResponse(helloResponse)
    orderCounter = 1
    while True:
        read = read_from_exchange(exchange)
        msg_type = read['type']
        if msg_type == 'open':
            stocksState.openStocks(read)
        elif msg_type == 'book':
            stocksState.updateStocks(read)
        elif msg_type == 'trade':
            # someone else traded — nothing to do yet
            pass
        elif msg_type == 'close':
            stocksState.closeStocks(read)
        elif msg_type == 'ack':
            orderID = read['order_id']
            if orderID in bank.getOrders():
                order = bank.getOrders()[orderID]
                print("Success " + order.stock + " " + order.orderType + " "
                      + str(order.price) + " " + str(order.quantity))
                order.orderStatus = "success"
        elif msg_type == 'error' or msg_type == 'reject':
            print("Got an Error: " + read['error'], file=sys.stderr)
            if msg_type == 'reject':
                orderID = read['order_id']
                if orderID in bank.getOrders():
                    order = bank.getOrders()[orderID]
                    print("here is why: " + order.stock)
                    order.orderStatus = "cancel"
                    sendCancel(exchange, orderID)
        elif msg_type == 'out':
            orderID = read['order_id']
            if orderID in bank.getOrders():
                order = bank.getOrders()[orderID]
                # a cancelled sell returns its reserved quantity to the bank
                if order.orderType == "SELL" and order.orderStatus == "cancel":
                    bank.deltaQuantity(order.stock, order.quantity)
                bank.deleteOrder(read['order_id'])
        elif msg_type == 'fill':
            orderID = read['order_id']
            if orderID in bank.getOrders():
                order = bank.getOrders()[orderID]
                print("FILLED " + order.stock + " " + order.orderType + " "
                      + str(order.price) + " " + str(order.quantity))
                order.orderStatus = "fill"
                bank.deltaState(read['dir'], read['price'], read['size'], read['symbol'])
        orderCounter = handleTrade(orderCounter, exchange, stocksState, bank)
def get_grades(user_name, pass_word):
    """Scrape the grades page and return ({course: grade}, weighted GPA).

    The connection helper returns the HTML for the grades page; this
    parses course names/averages and applies the 5-point/6-point
    weighted GPA policy.  The bare except around float() is narrowed to
    ValueError (empty grade cells default to 0).
    """
    page = connect(user_name, pass_word)
    grades = bs.BeautifulSoup(page.content, 'html5lib')
    weighted_classes = ['Digital Forensics', 'Computer Science III']
    not_graded_classes = ['PSAT Team', 'Off-Period']
    not_graded_num = 0
    all_courses = []
    all_grades = []
    courses = {}
    # scrape all course names from the grades page
    for course in grades.find_all(attrs={'id': 'courseName'}):
        all_courses.append(course.text)
    # scrape the matching averages; empty cells default to 0
    for grade in grades.find_all(attrs={'id': 'average'}):
        try:
            all_grades.append(round(float(grade.text)))
        except ValueError:
            all_grades.append(0)
    # map course -> grade, skipping zero-grade duplicates of repeats
    for idx in range(len(all_courses)):
        if all_courses.count(all_courses[idx]) > 1 and all_grades[idx] == 0:
            continue
        courses[all_courses[idx]] = all_grades[idx]
    weighted_total = 0
    total_minus = 0
    # classify each course and accumulate the GPA deductions
    for course in courses:
        if course in not_graded_classes:
            not_graded_num += 1
            continue
        if course in weighted_classes or 'AP' in course:
            weighted_total += 1
            total_minus += get_minus(6, courses[course])
        else:
            total_minus += get_minus(5, courses[course])
    # raw GPA from class mix, minus deductions, averaged over graded classes
    onLevel = 5 * (len(courses) - weighted_total - not_graded_num)
    gradeBoosted = 6 * weighted_total
    gpaSum = onLevel + gradeBoosted - total_minus
    gpa = round(gpaSum / (len(courses) - not_graded_num), 3)
    return courses, gpa
def update_author_by_id(first_name, last_name, id):
    """Update an author's first and last name by primary key.

    SECURITY FIX: values are bound as parameters instead of being
    f-string-interpolated into the SQL.
    """
    sql = "UPDATE author SET first_name = %s, last_name = %s WHERE id = %s;"
    connection = connect()
    cursor = connection.cursor()
    cursor.execute(sql, (first_name, last_name, id))
    # NOTE(review): no commit before close, same as the original —
    # confirm the connection is autocommit.
    connection.close()
def get_book_by_id(id):
    """Return the book row with the given id, or None if absent.

    SECURITY FIX: id is bound as a parameter, not interpolated.
    """
    conn = connect()
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM book where id=%s;", (id,))
    data = cursor.fetchone()
    conn.close()
    return data
def get_authors():
    """Return every row of the author table."""
    db_conn = connect()
    db_cursor = db_conn.cursor()
    db_cursor.execute("SELECT * FROM author;")
    rows = db_cursor.fetchall()
    db_conn.close()
    return rows
def connect(self, timeout=None):
    """Open and cache the SSH connection for this host; return it."""
    options = {
        'user_at_host': self.name,
        'host_key': self._host_key,
        'keep_alive': self.keep_alive,
    }
    # only forward a timeout when the caller supplied a truthy one
    if timeout:
        options['timeout'] = timeout
    self.ssh = connection.connect(**options)
    return self.ssh
def opt_connect(**kw):
    """ Connect to api if we havent passed a `conn` argument in. """
    if 'conn' not in kw:
        return connect(**kw)
    return kw['conn']
def run(self):
    """Connect to the configured host (or the default destination) and
    busy-wait until the shared closeConnection flag is set.

    Fixes: Python 2 print statement converted; the dead triple-quoted
    protocol-sketch block (a no-op string expression) removed.
    """
    global closeConnection, App
    if self.host is None or self.port is None:
        dest = None
    else:
        dest = [self.host, self.port]
    conn.connect(dest)
    # busy-wait until another thread asks us to shut down
    # NOTE(review): this spins a CPU core; a short sleep or a
    # threading.Event would be kinder — confirm before changing.
    while not closeConnection:
        continue
    print("DISCONNECTING...")
    conn.closeConnection()
def query_num(num):
    """Look up train status rows by train number.

    SECURITY/ROBUSTNESS FIX: num is bound as a query parameter instead
    of being %d-formatted into the SQL text.
    """
    import MySQLdb, connection
    db = connection.connect("rig")
    cur = db.cursor(MySQLdb.cursors.DictCursor)
    sql = ("SELECT `trainname` AS `Train Name`,`text` As `Description`,"
           "`sstatus` AS `Status`,`lstatus` AS `Status Description` "
           "FROM `train` WHERE `trainnum`=%s")
    cur.execute(sql, (num,))
    return cur.fetchall()
def config_mongodb(host, port):
    """Config mongodb instance; return the connection or None on failure.

    BUG FIX: the chained "% host % port" raised TypeError on every
    successful connect; both values are now formatted in one tuple.
    """
    db = None
    try:
        db = connect(host, port)
        _LOGGER.info("config mongodb ok: %s, %s" % (host, port))
    except Exception as ex:
        _LOGGER.error("config mongodbfailed: %s %s %s" % (ex, host, port))
    return db
def update(identifiant, password, name, id_request, db_host, db_port):
    """Log in, reload the galaxy data for *name*, persist it and bump
    the stored cycle number."""
    syslog.openlog()
    current_cycle = parsing.getCycleNumber()
    syslog.syslog("Update datas for %s (%s)" % (name, identifiant))
    session_cookies = connection.connect(identifiant, password)
    connection.loadGalaxy(session_cookies, "Aon")
    fetched = parsing.getAllDatas(session_cookies, name, id_request, db_host, db_port)
    database.insertAllDatas(fetched, name, current_cycle, id_request, db_host, db_port)
    database.updateCycleNumber(name, current_cycle, db_host, db_port)
def connection(request):
    """Pytest fixture: two sample tables with a handful of rows."""
    conn = con.connect()
    request.addfinalizer(con.disconnect)
    cursor = conn.cursor()
    con.drop_tables()
    cursor.execute("create table table1 (id serial primary key, column1 integer, "
                   "column2 varchar(20), column3 integer)")
    cursor.execute("create table table2 (id serial primary key, column1 varchar(20), "
                   "column2 integer, column3 varchar(20))")
    sample_rows = (
        ("table1", (1, 'Hellow World!', 2)),
        ("table1", (2, 'Hellow World2!', 3)),
        ("table2", ('Hellow World3!', 4, 'Hellow World4!')),
    )
    for table, values in sample_rows:
        cursor.execute("insert into " + table
                       + " (column1, column2, column3) values (%s,%s,%s)", values)
    conn.commit()
    return conn
def connect(self, timeout=None, create_key=None, context='connect'):
    """Open the SSH connection; a 'reconnect' context drops the
    keepalive/host-key/create-key options (see note below)."""
    if context == 'reconnect':
        # Workaround whose root cause is not fully understood on the
        # technical side: reconnecting with keep_alive, host_key and
        # _create_key set fails with "[Errno 98] Address already in use"
        # although no ssh TCP connections are open.  Connecting without
        # those options proceeds.
        args = dict(user_at_host=self.name, _create_key=False, host_key=None)
    else:
        args = dict(user_at_host=self.name,
                    host_key=self._host_key,
                    keep_alive=self.keep_alive,
                    _create_key=create_key)
    if timeout:
        args['timeout'] = timeout
    self.ssh = connection.connect(**args)
    return self.ssh
def connection(request):
    """Pytest fixture: three tables (defaults, timestamps, jsonb) with
    sample rows in table1/table2."""
    conn = con.connect()
    request.addfinalizer(con.disconnect)
    cursor = conn.cursor()
    con.drop_tables()
    for ddl in (
        "create table table1 (id serial primary key, created_date date not null "
        "default current_date, column1 integer default 1, column2 varchar(20), "
        "column3 integer)",
        "create table table2 (column1 varchar(20), column2 integer, column3 varchar(20))",
        "create table table3 (created_datetime timestamp not null default "
        "current_timestamp, created_time time not null default current_time, "
        "column1 varchar(20) default 'ABC DEF', column2 integer, "
        "column3 varchar(20), column4 jsonb)",
    ):
        cursor.execute(ddl)
    cursor.execute("insert into table1 (column1, column2, column3) values (%s,%s,%s)",
                   (1, 'Hello World!', 2))
    cursor.execute("insert into table1 (column1, column2, column3) values (%s,%s,%s)",
                   (2, 'Hello World2!', 3))
    cursor.execute("insert into table2 values (%s,%s,%s)",
                   ('Hello World3!', 4, 'Hello World4!'))
    conn.commit()
    return conn
def verify_outlets(self):
    """Count this object's outlets by probing connections against a
    temporary test object; store and return the count."""
    # dynamic import to avoid a cyclic-reference problem
    from connection import Connection, connect
    from object import Object
    # build a throwaway outlet object just above this one
    probe = Object(self.x, self.y - 30, "outlet")
    count = -1
    while True:
        count += 1
        # the first outlet index that refuses to connect marks the end
        if not connect(self, 0, probe, count):
            break
    probe.delete()
    self.outlets = count
    return count
def query_name(name):
    """Look up train status rows whose description contains all words of
    *name* in order (substring match).

    SECURITY FIX: the LIKE pattern is built separately and bound as a
    parameter instead of splicing user input into the SQL text (which
    also produced an unterminated string for empty input).
    """
    import MySQLdb, connection
    db = connection.connect("rig")
    cur = db.cursor(MySQLdb.cursors.DictCursor)
    # "%word1%word2%...%" — same match semantics as the original builder
    pattern = "%" + "%".join(name.split()) + "%"
    sql = ("SELECT `trainname` AS `Train Name`,`text` As `Description`,"
           "`sstatus` AS `Status`,`lstatus` AS `Status Description` "
           "FROM `train` WHERE (`text` LIKE %s)")
    cur.execute(sql, (pattern,))
    return cur.fetchall()
def cmd_maki(main_window, argv):
    """
    /maki [connect|shutdown]

    Dispatch the /maki command: connect to or disconnect from maki, or ask
    maki to shut down.  Any other sub-command prints the usage line.
    """
    def usage():
        print_notification("Usage: /maki [connect|disconnect|shutdown]")

    # Exactly one sub-command is expected after "/maki".
    if len(argv) != 2:
        usage()
        return

    cmd = argv[1]

    if cmd == "connect":
        print_notification("Connection to maki...")
        if connection.connect():
            # Fix: typo "etablished" -> "established" in the status message.
            print_notification("Connection to maki established.")
        else:
            print_notification("Connection to maki failed.")
        main_window.update_divider()

    elif cmd == "disconnect":
        if no_connection():
            return
        connection.disconnect()
        print_notification("Disconnected from maki.")
        main_window.update_divider()

    elif cmd == "shutdown":
        if no_connection():
            return
        connection.sushi.shutdown(config.get("chatting", "quit_message"))
        main_window.update_divider()

    else:
        usage()
        return
def whiteline(plant_clear):
    # Drive the robot along a white line while watching for plants.
    #
    # plant_clear: when True, the currently detected plant location is being
    # skipped -- keep line-following for Loop_count iterations, then return.
    #
    # NOTE(review): depends on module-level helpers/globals (connection,
    # movements, sensors, opencv_fun, plantFound, find_plant, P_W_L, P_W_R,
    # sleep) and the Firebird serial protocol; comments below only describe
    # what this code visibly does -- confirm protocol details against those
    # modules.
    THRESHOLD= 80          # sensor reading at/below this counts as "on the line"
    FWD_FAST = "50"        # motor speed codes sent over serial (strings, not ints)
    FWD_SLOW = "70"
    FWD_VSLOW = "90"
    FWD_FAST1 = "70"
    FWD_SLOW1 = "90"
    FWD_VSLOW1 = "110"
    STOP = "80"
    Loop_count = 2         # iterations to keep driving when skipping a plant
    i=0
    sleep(0.3)
    ser = connection.connect() #connect to FB 6
    movements.set_motor_mode(ser,1) #set motor mode to 1
    sensors.on_white_line(ser) #on whiteline sensors
    sleep(0.2)
    colour_tracker = opencv_fun.ColourTracker(0)
    loop_value = True
    value = 0              # last issued drive command, to avoid resending duplicates
    while loop_value:
        #----for skiping detected plant location if any ---#
        i=i+1
        if plant_clear and i==Loop_count:
            loop_value=False
        else:
            loop_value = True
        #sleep(0.1)
        #---------------------------------------------------#
        Whiteline = sensors.sense_all_white_line(ser)
        (plant,pos) = plantFound(colour_tracker) #get plant status
        #print Whiteline
        # Line-follow only when no plant needs handling (plant == 2) or when
        # deliberately skipping one (plant_clear).
        if plant == 2 or plant_clear:
            if((Whiteline[3]<=THRESHOLD) or (Whiteline[4]<=THRESHOLD)) :
                # Center sensors 3/4 on the line -> drive straight.
                if value == 1: #to avoid repeating the same commend
                    print "Same"
                else:
                    Left_Velocity = FWD_FAST
                    Right_Velocity = FWD_FAST
                    movements.move_bot(ser,Left_Velocity,Right_Velocity)
                    print "Go Stright[3,4]"
                    value=1
            elif((Whiteline[5]< THRESHOLD) or (Whiteline[6] < THRESHOLD)):
                # Line drifted to sensors 5/6 -> steer right.
                if value == 2: #to avoid repeating the same commend
                    print "Same"
                else:
                    Left_Velocity = FWD_FAST
                    Right_Velocity = FWD_VSLOW
                    movements.move_bot(ser,Left_Velocity,Right_Velocity)
                    print "Turn Right"
                    value = 2
            elif(( Whiteline[2] < THRESHOLD) or (Whiteline[1] < THRESHOLD)):
                # Line drifted to sensors 1/2 -> steer left.
                if value == 3: #to avoid repeating the same commend
                    print "Same"
                else:
                    Left_Velocity = FWD_VSLOW
                    Right_Velocity = FWD_FAST
                    movements.move_bot(ser,Left_Velocity,Right_Velocity)
                    print "Turn LEFT"
                    value = 3
            else:
                # Line lost on every sensor -> stop.
                if value == 4: #to avoid repeating the same commend
                    print "Same"
                else:
                    Left_Velocity = STOP
                    Right_Velocity = STOP
                    movements.stop_here(ser)
                    value = 4
        # Re-sample sensors and plant state after the drive command.
        Whiteline = sensors.sense_all_white_line(ser)
        (plant,pos) = plantFound(colour_tracker) #get plant status and possition
        # plant == 1: a plant is in view and must be lined up before handling.
        if plant == 1 and not plant_clear:
            movements.stop_here(ser)
            Left_Velocity = FWD_VSLOW
            Right_Velocity = FWD_VSLOW
            #sleep(1)
            (plant,pos) = plantFound(colour_tracker)
            print "Plant red d0t %s" %plant
            # Nudge forward/backward until `pos` falls between the P_W_L /
            # P_W_R window bounds (globals -- TODO confirm their units).
            if pos < P_W_L:
                print "plant < P_W_L: Move <-"
                movements.move_bot(ser,Left_Velocity,Right_Velocity)
                #sleep(0.5)
                movements.stop_here(ser)
            elif pos >P_W_R:
                print "plant < P_W_L: Move ->"
                movements.move_bot_back(ser,Left_Velocity,Right_Velocity)
                #sleep(0.5)
                movements.stop_here(ser)
            movements.clear_encode_counter(ser)
            #(plant,pos) = plantFound(colour_tracker)
            #sleep(1)
            if (plant != 0):
                plant = find_plant(ser,colour_tracker,plant,pos)
            value =5
            # The triple-quoted block below is dead (commented-out) code kept
            # verbatim from the original.
            '''#sleep(0.3) (plant,pos) = plantFound(colour_tracker) #get plant status if((Whiteline[3]<=THRESHOLD) or (Whiteline[4]<=THRESHOLD)) : Left_Velocity = FWD_FAST1 Right_Velocity = FWD_FAST1 movements.move_bot(ser,Left_Velocity,Right_Velocity) print "Go Stright[3,4]" elif((Whiteline[5]< THRESHOLD) or (Whiteline[6] < THRESHOLD)): Left_Velocity = FWD_FAST1 Right_Velocity = FWD_VSLOW1 movements.move_bot(ser,Left_Velocity,Right_Velocity) print "Turn Right" elif(( Whiteline[2] < THRESHOLD) or (Whiteline[1] < THRESHOLD)): Left_Velocity = FWD_VSLOW1 Right_Velocity = FWD_FAST1 movements.move_bot(ser,Left_Velocity,Right_Velocity) print "Turn LEFT" else: Left_Velocity = STOP Right_Velocity = STOP movements.stop_here(ser) print "Plant red det %s" %plant movements.stop_here(ser) #sleep(0.3) (plant,pos) = plantFound(colour_tracker)'''
        #Whiteline = sensors.sense_all_white_line(ser)
        #(plant,pos) = plantFound(colour_tracker) #get plant status
        # plant == 0: nothing (left) to handle -> park and leave the loop.
        if plant == 0 and not plant_clear:
            print "++++plant+++"
            movements.stop_here(ser)
            movements.set_motor_mode(ser,2)
            movements.set_position_one_b(ser)
            value =6
            #plant == 1
            #find_plant(ser,colour_tracker)
            return
        print "-----------------------------"
def __init__(self, config):
    """Initialise the client: declare state fields, then open the connection."""
    # Pre-declare all attributes so they exist even if connect() fails.
    self.conn, self.images, self.history = None, None, None
    # Replace the placeholder with a live connection built from `config`.
    self.conn = connection.connect(config)
__author__ = 'Milan'

import odbchelper
import connection
import sys

# Debug helpers, kept for reference:
# print(odbchelper.buildConnectionString.__doc__)
# print(sys.path)

# Bind the weekday names to 0..6 in a single multiple assignment.
Monday, Tuesday, Wednesday, Thursday, Friday, Saturday, Sunday = range(7)

print("Danas je " + str(Tuesday))

connection.connect()
prompt = " [Y/n] " elif default == "no": prompt = " [y/N] " else: raise ValueError("invalid default answer: '%s'" % default) while True: sys.stdout.write(question + prompt) choice = raw_input().lower() if default is not None and choice == '': return valid[default] elif choice in valid: return valid[choice] else: sys.stdout.write("Please respond with 'yes' or 'no' (or 'y' or 'n').\n") # Usage example if __name__ == '__main__': args = parse_args() from connection import setup, connect connect(*setup(args.hosts, args.keyspace)) from hierachy import * if query_yes_no('Are you sure to sync ?', default='no'): create(args.keyspace) print "Done...." else: print "No is also a good thing. Bye!"
import json
import MySQLdb,connection
db= connection.connect("rig")
cursor=db.cursor(MySQLdb.cursors.DictCursor)
import re,time
import string
import datetime

# Hoisted out of removePandS(): rebuilding this ~700-entry set on every call
# was pure overhead.  frozenset gives the same O(1) membership test.
STOP_WORDS = frozenset(["weather","a","a's","belongs","able","about","above","according","accordingly","across","actually","after","afterwards","again","against","ain't","all","allow","allows","almost","alone","along","already","also","although","always","am","among","amongst","an","and","another","any","anybody","anyhow","anyone","anything","anyway","anyways","anywhere","apart","appear","appreciate","appropriate","are","aren't","around","as","aside","ask","asking","associated","at","available","away","awfully","be","became","because","become","becomes","becoming","been","before","beforehand","behind","being","believe","below","beside","besides","best","better","between","beyond","both","brief","but","by","c'mon","c's","came","can","can't","cannot","cant","cause","causes","certain","certainly","changes","clearly","co","com","come","comes","concerning","consequently","consider","considering","contain","containing","contains","corresponding","could","couldn't","course","currently","definitely","described","despite","did","didn't","different","do","does","doesn't","doing","don't","done","down","downwards","during","each","edu","eg","eight","either","else","elsewhere","enough","entirely","especially","et","etc","even","ever","every","everybody","everyone","everything","everywhere","ex","exactly","example","except","far","few","fifth","first","five","followed","following","follows","for","former","formerly","forth","four","from","further","furthermore","get","gets","getting","given","gives","go","goes","going","gone","got","gotten","greetings","had","hadn't","happens","hardly","has","hasn't","have","haven't","having","he","he's","hello","help","hence","her","here","here's","hereafter","hereby","herein","hereupon","hers","herself","hi","him","himself","his","hither","hopefully","how","howbeit","however","i'd","i'll","i'm","i've","ie","if","ignored","immediate","in","inasmuch","indeed","indicate","indicated","indicates","inner","insofar","instead","into","inward","is","isn't","it","it'd","it'll","it's","its","itself","just","keep","keeps","kept","know","known","knows","last","lately","later","latter","latterly","least","less","lest","let","let's","like","liked","likely","little","look","looking","looks","ltd","mainly","many","may","maybe","me","mean","meanwhile","merely","might","more","moreover","most","mostly","much","must","my","myself","name","namely","nd","near","nearly","necessary","need","needs","neither","never","nevertheless","new","next","nine","no","nobody","non","none","noone","nor","normally","not","nothing","novel","now","nowhere","obviously","of","off","often","oh","ok","okay","old","on","once","one","ones","only","onto","or","other","others","otherwise","ought","our","ours","ourselves","out","outside","over","overall","own","particular","particularly","per","perhaps","placed","please","plus","possible","presumably","probably","provides","que","quite","qv","rather","rd","re","really","reasonably","regarding","regardless","regards","relatively","respectively","right","said","same","saw","say","saying","says","second","secondly","see","seeing","seem","seemed","seeming","seems","seen","self","selves","sensible","sent","serious","seriously","seven","several","shall","she","should","shouldn't","since","six","so","some","somebody","somehow","someone","something","sometime","sometimes","somewhat","somewhere","soon","sorry","specified","specify","specifying","still","sub","such","sup","sure","t's","take","taken","tell","tends","th","than","thank","thanks","thanx","that","that's","thats","the","their","theirs","them","themselves","then","thence","there","there's","thereafter","thereby","therefore","therein","theres","thereupon","these","they","they'd","they'll","they're","they've","think","third","this","thorough","thoroughly","those","though","three","through","throughout","thru","thus","to","together","too","took","toward","towards","tried","tries","truly","try","trying","twice","two","un","under","unfortunately","unless","unlikely","until","unto","up","upon","us","use","used","useful","uses","using","usually","value","various","very","via","viz","vs","want","wants","was","wasn't","way","we","we'd","we'll","we're","we've","welcome","well","went","were","weren't","what","what's","whatever","when","whence","whenever","where","where's","whereafter","whereas","whereby","wherein","whereupon","wherever","whether","which","while","whither","who","who's","whoever","whole","whom","whose","why","will","willing","wish","with","within","without","won't","would","wouldn't","yes","yet","you","you'd","you'll","you're","you've","your","yours","yourself","yourselves"])

# Compiled once at import: matches any single punctuation character.
_PUNCT_RE = re.compile('[%s]' % re.escape(string.punctuation))


def removePandS(content):
    """Tokenize `content`: drop very short (<=2 chars) and very long
    (>=15 chars) words, strip punctuation, remove English stop words,
    and return the surviving words as a list.

    Fix: the first two re.sub() results were previously discarded
    (re.sub returns a new string; it does not modify in place), so the
    word-length filters never actually applied.
    """
    content = re.sub(r'\b\w{,2}\b', ' ', content)   # drop words of <= 2 chars
    content = re.sub(r'\b\w{15,}\b', ' ', content)  # drop words of >= 15 chars
    content = _PUNCT_RE.sub(" ", content)
    kept = " ".join(word for word in content.split() if word not in STOP_WORDS)
    return kept.split()


def hourly(city, time):
    """Return the weatherhourly row for `city` at hour `time`.

    Fix: the query is now parameterized instead of %-interpolated, which
    was SQL-injectable.  (The `time` parameter name shadows the imported
    time module inside this function; kept for interface compatibility.)
    """
    # Zero-pad single-digit hours, e.g. 5 -> "05", matching the stored format.
    hh = str(time).zfill(2)
    sql = "SELECT * FROM weatherhourly WHERE city=%s and time=%s"
    cursor.execute(sql, (city, hh))
    return cursor.fetchone()


def general(city):
    # NOTE(review): this function appears truncated in the source at hand;
    # only the LIKE-pattern setup is visible.
    city = "%" + city + "%"
import connection

# Basic configuration information
server = "irc.rizon.net"    # IRC network to join
port = 6667                 # standard plaintext IRC port
channel = "#etc"            # channel the bot sits in
botnick = "tacobot"         # nickname the bot registers under

# Create the irc bot instance (connect() takes the port last).
ircbot = connection.connect(server, channel, botnick, port)
#for Testing purpose import connection ser = connection.connect() print "-- Acceptes only two argument commend --" try: while True: output =[] print "NEX " input_var = raw_input("Enter Command : ") input_var = input_var.split() inpt= "NEX" + ('\\x' + input_var[0]).decode('string_escape') +('\\x' + input_var[1]) .decode('string_escape') print inpt output = ser.write(inpt) print output except: connection.disconnect()
# import Cloud as cloud
import SoftLayer
from pprint import pprint as pp
import os
import controller
import connection
import time

clear = lambda: os.system('cls')

if __name__ == '__main__':
    clear()
    try:
        db = connection.connect()
        db.start_connection()
        # Poll forever: refresh the data and run one controller pass per minute.
        while True:
            db.refresh()
            control = controller.controller(db)
            # Fix: bare sleep(60) raised NameError -- only the `time` module
            # is imported, so the call must be time.sleep(60).
            time.sleep(60)
    except KeyboardInterrupt:
        # clear()
        db.close_connection()
        print("The application is Exiting")
        raise SystemExit
#!/usr/bin/python
# Fix: shebang was "#!usr/bin/python" (missing leading slash), so the
# script could not be executed directly.
__madeby__ = "Sarkis Derderian"

# Import library for connecting to QuickBooks.
# NOTE: you need admin access to the company file.
import connection

# Raw string: the Windows path is full of backslashes; r"" guarantees none
# of them are ever read as escape sequences (same byte value as before).
path = r"C:\Documents and Settings\All Users\Documents\Intuit\QuickBooks\Company Files\demo.qbw"
name = "demo"

# Connect to QuickBooks.
s, t = connection.connect(name, path)

# Send query and receive response.
qbxml_query = """
<?qbxml version="4.0"?>
<QBXML>
 <QBXMLMsgsRq onError="continueOnError">
  <AccountQueryRq>
   <IncludeRetElement>Name</IncludeRetElement>
   <IncludeRetElement>Balance</IncludeRetElement>
  </AccountQueryRq>
 </QBXMLMsgsRq>
</QBXML>
"""
qbxml_response = s.ProcessRequest(t, qbxml_query)

# Disconnect connection.
connection.disconnect(s)

# Parse the response by Element Tree.
import azure.mgmt.resource import connection from azure.mgmt.common import SubscriptionCloudCredentials deployment_name = 'TestDeployPython' group_name = 'arm-deployment' auth_token = connection.connect() subscription_id = connection.subscription() creds = SubscriptionCloudCredentials(subscription_id, auth_token) resource_client = azure.mgmt.resource.ResourceManagementClient(creds) template_link = azure.mgmt.resource.resourcemanagement.ParametersLink( uri='https://raw.githubusercontent.com/Xplendit/Stage/master/PythonDeployment/armTemplate1.json' ) parameters_link = azure.mgmt.resource.resourcemanagement.TemplateLink( uri='https://raw.githubusercontent.com/Xplendit/Stage/master/PythonDeployment/Parameters.json' ) properties = azure.mgmt.resource.resourcemanagement.DeploymentProperties( mode="incremental", template_link=template_link, parameters_link=parameters_link ) deploy_parameter = azure.mgmt.resource.Deployment() deploy_parameter.properties=properties result = resource_client.deployments.create_or_update( resource_group_name=group_name,
__author__ = 'marin'

import connection as conn
import parser
import observers
import models

# Build the parser and attach its observers in registration order:
# POST the counts as JSON, persist via the data model, then print.
par = parser.Parser()
for observer in (
    observers.Poster(parser.JSONParser(), 'http://127.0.0.1:5000/save_count'),
    observers.Storer(models.DataModel),
    observers.Printer(),
):
    par.add_observer(observer)

# Open the connection and pump incoming data through the parser
# (the meaning of the 60 argument is defined by conn.receive).
socket = conn.connect()
conn.receive(socket, par, 60)
def connect(self):
    """Establish the SSH session for this host and return it.

    The session is also cached on self.ssh for later reuse.
    """
    session = connection.connect(
        user_at_host=self.name,
        host_key=self._host_key,
        keep_alive=self.keep_alive,
    )
    self.ssh = session
    return session
'''Program to follow the black line''' from time import sleep import connection import sensors import movements THRESHOLD=100 #threshold level for black line sleep(0.3) ser = connection.connect() #connect to Firebird VI via serial port movements.set_motor_mode(ser,1) #Set motor to mode 1 i.e. constant speed mode sensors.on_white_line(ser) #on whiteline sensors sleep(2) while True: sleep(0.1) Whiteline = sensors.sense_all_white_line(ser) #get whiteline data print Whiteline #robot in black line if(Whiteline[3]>=THRESHOLD and Whiteline[4]>=THRESHOLD) : Left_Velocity = "50"; Right_Velocity = "50"; movements.move_bot(ser,Left_Velocity,Right_Velocity); print "Go Stright" elif(Whiteline[3]>=THRESHOLD): Left_Velocity = "50"; Right_Velocity = "5f"; movements.move_bot(ser,Left_Velocity,Right_Velocity); print "Turn Right"