def get_connection_to_dbs(source_config):
    """
    Get the connections to the source and destination databases.
    :param source_config: config used to connect to the source DB
    """
    from_db_conn = get_connection(source_config['host'], source_config['dbname'],
                                  source_config['user'], source_config['pass'])
    config = get_config()
    to_db_conn = get_connection(config['host'], config['dbname'],
                                config['user'], config['pass'])
    return from_db_conn, to_db_conn
def _download_shards(self, p2pfile):
    try:
        for shard in p2pfile.shard_ids:
            if shard in self.shards:
                continue
            self._log("Downloading shard: %s" % shard)
            sh = cPickle.loads(utils.get_connection(SERVER_ADDRESS).serve_shard(shard))
            sh.save(SHARD_DIR)
            self.shards.append(sh.shard_id)
        p2pfile.peer_ids[self.peer_id] = LOCAL_ADDRESS
        updated_file = cPickle.loads(
            utils.get_connection(SERVER_ADDRESS).update_p2pfile(cPickle.dumps(p2pfile), self.peer_id))
        if updated_file not in self.p2pfiles:
            self.p2pfiles.append(updated_file)
    except Exception as err:
        # Log instead of silently swallowing every error.
        self._log(err)
def gather_student_history(testers):
    cnx = utils.get_connection("advisor", "passadvise", "localhost", "ADVISING")
    cursor = cnx.cursor(buffered=True)
    hists = []
    sql = "select GRADE,CLASS_NAME,SEMESTER_TAKEN from class_list_tester where STUDENT_ID = %s"
    # Renamed from min/max to avoid shadowing the builtins.
    start_id = 1
    end_id = 5001
    if testers:
        start_id = 5001
        end_id = 5501
    for x in range(start_id, end_id):
        student_hist = []
        key = (str(x), )
        cursor.execute(sql, key)
        results = cursor.fetchall()
        for result in results:
            class_pair = [result[1], result[0], result[2]]
            student_hist.append(class_pair)
        hists.append(student_hist)
    cursor.close()
    cnx.close()
    return hists
def package_student_data(**kwargs):
    cnx = utils.get_connection(user="******", password="******",
                               host="localhost", database="SFSU_STUDENT_HISTORY")
    students = pull_student_data(cnx, **kwargs)
    packaged_data = pull_course_data(cnx, students)
    cnx.close()
    return packaged_data
def get_page_data():
    """ Get the page data from the cache """
    conn = get_connection()
    data = conn.get('PAGE_DATA')
    if data:
        return pickle.loads(data)
    return {}
def write_table(db_details, table_name, values, cols, batch_size=1000, logs=True):
    recs = []
    connection = get_connection(db_details["target"])
    cursor = connection.cursor()
    query = build_insert_query(table_name, cols)
    count = 0
    for count, rec in enumerate(values, start=1):
        recs.append(rec)
        if count % batch_size == 0:
            cursor.executemany(query, recs)
            connection.commit()
            if logs:
                print(f"{count} rows inserted")
            recs = []
    # Flush the final, partially filled batch. (The original incremented count
    # after the log line, so the last message overstated the row count by one.)
    if recs:
        cursor.executemany(query, recs)
        connection.commit()
    if logs:
        print(f"{count} rows inserted")
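# build_insert_query is referenced above but not shown; a minimal sketch of
# what it might look like, assuming mysql-connector-style %s placeholders
# (hypothetical helper, not the original implementation):
def build_insert_query(table_name, cols):
    # e.g. ("orders", ["id", "amount"]) ->
    # "INSERT INTO orders (id, amount) VALUES (%s, %s)"
    placeholders = ", ".join(["%s"] * len(cols))
    return f"INSERT INTO {table_name} ({', '.join(cols)}) VALUES ({placeholders})"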
def setUp(self):
    self.test_disk = os.path.join(test_dir, "test.vmdk")
    self.block_size = 1024
    self.test_disk_blocks = 256000  # 250MB
    # open a local disk
    self.disk = get_connection(block_size=self.block_size)
def login():
    result = ""
    if (request.form and 'user' in request.form and 'password' in request.form):
        username = request.form['user']
        password = getMD5(request.form['password'])
        connection = get_connection()
        cursor = connection.cursor()
        sql = 'SELECT userid FROM users WHERE ' + \
              'userid = %s AND ' + \
              'password = %s'
        cursor.execute(sql, (username, password))
        userid = cursor.fetchone()
        connection.close()
        if userid:
            token = generateToken(userid["userid"])
            result = '"{}"'.format(token)
        else:
            result = '""'
    else:
        return make_response(jsonify({'error': 'Bad Request'}), 400)
    return make_response(result, 200)
def process_results(results):
    """ process the results and gather data """
    conn = get_connection()
    new_results = []
    for d in results:
        mirror = d.get('mirror')
        if mirror in IGNORE_MIRRORS:
            # skip mirrors we want to ignore.
            continue
        status = d.get('status')
        location = get_location_for_mirror(mirror)
        d['location'] = location_name(location)
        if status != 'Unavailable':
            resp_time = d.get('response_time')
            age = get_total_seconds(d.get('time_diff'))
            conn.rpush(cache_key('RESPTIME', mirror), resp_time)
            conn.rpush(cache_key('AGE', mirror), age)
            resp_list = conn.lrange(cache_key('RESPTIME', mirror), -60, -1)
            age_list = conn.lrange(cache_key('AGE', mirror), -60, -1)
            d['num_packages'] = find_number_of_packages(mirror)
            d['resp_list'] = ",".join(resp_list)
            d['age_list'] = ",".join(age_list)
        new_results.append(d)
    return new_results
def setup_connection(self, args=None):
    try:
        self.connection = get_connection(args)
    except GenestackVersionException as e:
        sys.stderr.write(str(e))
        sys.stderr.write('\n')
        exit(13)
def get_json_data():
    """ Get the json data from the cache """
    conn = get_connection()
    data = conn.get('JSON_DATA')
    if not data:
        return {}
    return data
def analyze_data():
    urls = {}
    entries = []
    # datafile = '/tmp/' + 'data_' + time.strftime("%Y-%m-%d") + ".tsv"
    datafile = 'data.tsv'
    url_pattern = re.compile(
        r'http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+')
    with open(datafile) as csvfile:
        csvreader = csv.reader(csvfile, delimiter='\t', quotechar='|')
        for row in csvreader:
            if url_pattern.search(row[4]):
                # print('\t'.join(url_pattern.findall(row[4])))
                for url in url_pattern.findall(row[4]):
                    if url in urls:
                        urls[url]['rank'] += 1
                    else:
                        urls[url] = {'rank': 1, 'details': get_url_details(url)}
    # print all the urls
    conn = utils.get_connection()
    curr = conn.cursor()
    for url in urls:
        details = urls[url]['details']
        if details:
            entries.append((time.strftime("%Y-%m-%d"), details['base'], details['type'],
                            details['title'], details['description'], urls[url]['rank']))
            print(details['base'] + "\t" + details['type'] + "\t" +
                  details['title'] + "\t" + details['description'])
    # bulk insert
    curr.executemany(
        "insert into entries(date, url, type, title, description, rank) values(?, ?, ?, ?, ?, ?)",
        entries)
    conn.commit()
def iterative_impact_prereq(students):
    # need to double check courses are labeled correctly.
    course_cat = import_tools.get_class_prereqs()
    # TODO Fix these two outputs to be more readable: make the header go first,
    # then sort results, then print.
    result_set = [[
        "course a", "op", "course_b", "t", "p", "pos_n", "pos_min_max",
        "pos_mean", "pos_vari", "neg_n", "neg_min_max", "neg_mean", "neg_vari"
    ]]
    for crs in course_cat:  # course cat has crs -> prereq, prereq, prereq
        test_courses = course_cat[crs]
        results = []
        for test in test_courses:  # prereqs
            compare_data = grade_compare_positive_query(crs, test, ">",
                                                        cnx=utils.get_connection())
            try:
                result = calc_pval_tval(compare_data[0], compare_data[1],
                                        filter=students, starter=[crs], test=test)
            except ValueError:
                result = [test, crs, "NO RESULTS", 999]
            results.append(result)
        # print("********")
        # print(results)
        results.sort(key=lambda x: x[0] + str(x[3]))
        result_set.extend(results)
    return result_set
def populate_tester_db():
    insert_db = []
    cnx = utils.get_connection("advisor", "passadvise", "localhost", "ADVISING")
    cursor = cnx.cursor(buffered=True)
    for x in range(1, 145):
        sql = ("select STUDENT_FIRST_NAME, STUDENT_LAST_NAME, STUDENT_EMAIL "
               "from student_list_tester where STUDENT_ID = %s")
        key = (str(x), )
        cursor.execute(sql, key)
        results = cursor.fetchone()
        first_name = results[0]
        last_name = results[1]
        email = results[2]
        entry = [first_name, last_name, email, x]
        insert_db.append(entry)
    for db_entry in insert_db:
        sql = ("insert into checkpoints (STUDENT_ID, STUDENT_FIRST_NAME, "
               "STUDENT_LAST_NAME, STUDENT_EMAIL) values (%s, %s, %s, %s)")
        key = (db_entry[3], db_entry[0], db_entry[1], db_entry[2])
        cursor.execute(sql, key)
    cnx.commit()
    cursor.close()
    cnx.close()
def get_asset_metadata(id):
    conn = get_connection(CONN_NAME, PATH)
    cur = conn.cursor()
    # Cast to int so a non-numeric id cannot be concatenated into the SQL,
    # and drop the needless commit after a SELECT.
    cur.execute("SELECT symb, name FROM exprm.assets WHERE asset_id = " + str(int(id)))
    row = cur.fetchone()
    conn.close()
    if row is None:
        return None, None
    return row[1], row[0]
def get_device_status(self):
    try:
        return self._device_status
    except AttributeError:
        # Lazily fetch and cache the status the first time it is requested.
        self._device_status = utils.get_connection(**self.options).scheduler.get_device_status(
            self.board["device_type"] + "_01"
        )
        return self._device_status
def get_posts(post_id):
    connection = utils.get_connection()
    cursor = connection.cursor()
    # NOTE: building SQL by string concatenation is vulnerable to SQL
    # injection; see the parameterized version of this function below.
    SQL_STRING = 'SELECT * FROM posts WHERE post_id = ' + str(post_id)
    cursor.execute(SQL_STRING)
    post = cursor.fetchall()
    connection.close()
    return jsonify(post)
def _reg_with_server(self):
    self._log("Register with server")
    client_id, server_ciphered_key = cPickle.loads(
        utils.get_connection(SERVER_ADDRESS).register(client_key.publickey().exportKey()))
    self.peer_id = client_id
    self.server_ciphered_key = server_ciphered_key
    with open(SERVER_PUBLIC_KEY_FILE, 'r') as fp:
        self.server_key = RSA.importKey(fp.read())
def get_posts(post_id):
    connection = utils.get_connection()
    cursor = connection.cursor()
    # Create our prepared statement
    SQL_STRING = 'SELECT * FROM posts WHERE post_id = %s'
    cursor.execute(SQL_STRING, (post_id, ))
    post = cursor.fetchall()
    connection.close()
    return jsonify(post)
def main_generate_student_data(name_path, core_path, elective_path):
    cnx = utils.get_connection("advisor", "passadvise", "localhost", "ADVISING")
    name_data = utils.list_from_file(name_path, "\n", ",", False)
    elective_data = utils.list_from_file(elective_path, "\n", ",", False)
    core_data = utils.list_from_file(core_path, "\n", ",", False)
    students = generate_student(name_data, elective_data[:], core_data[:])
    persist_students(students, cnx)
    cnx.close()
def _process_file_info(self, p2pfile, peer_id):
    try:
        newer_shards = list(set(p2pfile.shard_ids) - set(self.shards))
        peer_con = utils.get_connection(self.current_peers[peer_id])
        for shard_id in newer_shards:
            shard = peer_con.serve_shard(shard_id)
            shard = cPickle.loads(shard)
            if hashlib.md5(shard.shard).hexdigest() == shard.checksum:
                self._log("Downloaded shard: %s, checksum: %s" % (shard.shard_id, shard.checksum))
                shard.save(SHARD_DIR)
                self.shards.append(shard.shard_id)
        p2pfile.peer_ids[self.peer_id] = LOCAL_ADDRESS
        updated_file = cPickle.loads(
            utils.get_connection(SERVER_ADDRESS).update_p2pfile(cPickle.dumps(p2pfile), self.peer_id))
        if p2pfile not in self.p2pfiles:
            self.p2pfiles.append(p2pfile)
        else:
            self.p2pfiles[self.p2pfiles.index(p2pfile)] = p2pfile
    except Exception as err:
        # Log instead of silently swallowing every error.
        self._log(err)
def process_db(db):
    _, SM1 = get_connection(db)
    s1 = SM1()
    db2 = 'r_{}'.format(db)
    _, SM2 = get_connection(db2)
    s2 = SM2()
    run(('python', 'create_db.py', db2, db2))
    logger.info('Database created')
    tweets_db = [t[0] for t in s1.query(Tweet.id).all()]
    tweets_id_chunks = list(divide_chunks(tweets_db, 100))
    logger.info('Got the tweets from the database')
    for i, til in enumerate(tweets_id_chunks):
        logger.info('Working on tweets_id_chunk {}'.format(i))
        get_statuses(s2, til)
    notify('Database {}'.format(db), 'It is done')
def get_asset_quotes(id):
    conn = get_connection(CONN_NAME, PATH)
    cur = conn.cursor()
    # Cast to int so a non-numeric id cannot be concatenated into the SQL,
    # and drop the needless commit after a SELECT.
    cur.execute("SELECT dayref, adj_close, vol FROM exprm.hist_quotes "
                "WHERE asset_id = " + str(int(id)))
    res = {}
    for row in cur:
        res[row[0]] = {'AdjClose': row[1], 'Vol': row[2]}
    conn.close()
    return res
def _get_file_from_server(self, filename):
    self._log("New p2pfile on server: %s" % filename)
    p2pfile = cPickle.loads(utils.get_connection(SERVER_ADDRESS).get_p2pfile(filename))
    for peer_id in p2pfile.peer_ids.keys():
        if peer_id not in self.current_peers and peer_id != self.peer_id:
            self._log("Found new peer: %s" % peer_id)
            if p2pfile.peer_ids[peer_id] == SERVER_ADDRESS:
                self._download_shards(p2pfile)
            else:
                self._auth_peer(peer_id, p2pfile.peer_ids[peer_id])
def _peer_contact_thread(self):
    # contact each peer after PEER_CONTACT_INTERVAL
    while True:
        try:
            for peer in self.current_peers.keys():
                self._log("Ping peer: %s" % str(self.current_peers[peer]))
                peer_con = utils.get_connection(self.current_peers[peer])
                peer_con.update_file_information(cPickle.dumps(self.p2pfiles), self.peer_id)
        except Exception as err:
            self._log(err)
        time.sleep(PEER_CONTACT_INTERVAL)
def update_student_cs_gpa(gpa_dict):  # TODO part of init
    cnx = utils.get_connection()
    cursor = cnx.cursor(buffered=True)
    sql = "update student_data set final_cs_gpa = %s where student_id = %s"
    for gpa in gpa_dict:
        key = (gpa_dict[gpa], gpa)
        cursor.execute(sql, key)
    cnx.commit()
    cursor.close()
    cnx.close()
def update_student_serious(change_list):
    cnx = utils.get_connection()
    cursor = cnx.cursor(buffered=True)
    sql = "update student_data set serious_student = %s where student_id = %s"
    for change in change_list:
        key = (True, change)
        cursor.execute(sql, key)
    cnx.commit()
    cursor.close()
    cnx.close()
def update_course_loads(course_ids):  # TODO part of init
    cnx = utils.get_connection()
    cursor = cnx.cursor(buffered=True)
    sql = "update course_data set ge_load = %s, tech_load = %s where ref_id = %s"
    for sets in course_ids:
        set_ = course_ids[sets]
        key = (set_[0], set_[1], sets)
        cursor.execute(sql, key)
    # Commit before tearing down the cursor and connection (the original
    # closed the cursor first).
    cnx.commit()
    cursor.close()
    cnx.close()
def _auth_peer(self, peer_id, address):
    self._log("Sending auth request to peer: %s at %s" % (peer_id, str(address)))
    try:
        peer_con = utils.get_connection(address)
        peer_cipher_key = peer_con.handshake(self.peer_id,
                                             cPickle.dumps(self.server_ciphered_key),
                                             LOCAL_ADDRESS)
        if peer_cipher_key:
            peer_cipher_key = cPickle.loads(peer_cipher_key)
            if self.server_key.verify(peer_id, peer_cipher_key):
                self._log("Authenticated peer: %s" % peer_id)
                self.current_peers[peer_id] = address
    except Exception as err:
        self._log("Cannot authenticate peer: %s" % err)
def get_grouping_types(groupings):
    cnx = utils.get_connection()
    cursor = cnx.cursor(buffered=True)
    combos = []
    # Map each grouping name to the column it selects from student_data;
    # this replaces a long if/elif chain with identical bodies.
    columns = {
        "admin_descript": "admin_descript",
        "resident_status": "resident_status",
        "ethnicity": "ethnicity",
        "type_descript": "type_descript",
        "type_descript_summary": "type_descript_summary",
        "sex": "sex",
        "prep_assess": "prep_assess",
        "prep_assess_summary": "prep_assess_summary",
        "serious": "serious_student",
        "dropout_sem": "dropout_semester",
        "first_sem": "first_semester",
        "entry_major": "entry_major",
        "final_major": "final_major",
        "entry_standing": "entry_standing",
        "status": "status",
        "global_status": "global_status",
    }
    for grouping in groupings:
        if grouping not in columns:
            print("invalid type")
            print(grouping)
            raise TypeError
        sql = "select distinct({}) from student_data".format(columns[grouping])
        cursor.execute(sql)
        results = cursor.fetchall()
        temp = []
        for result in results:
            temp.append(result[0])
        combos.append(temp)
    cursor.close()
    cnx.close()
    return list(product(*combos))
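# Illustration of the cross product get_grouping_types returns, assuming
# (hypothetically) that sex has distinct values 'M'/'F' and serious_student
# has 0/1 in student_data:
#
#   get_grouping_types(["sex", "serious"])
#   -> [('M', 0), ('M', 1), ('F', 0), ('F', 1)]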
def set_shell_user(self, args):
    """
    Set the connection for shell mode.

    :param args: script arguments
    :type args: argparse.Namespace
    """
    # set user for shell
    self.connection = get_connection(args)
    email = self.connection.whoami()
    self.prompt = '%s> ' % email
    self.intro = self.INTRO if self.INTRO else "Hello, %s!" % email
def load_table(db_details, data, column_names, table_name):
    TARGET_DB = db_details['TARGET_DB']
    connection = get_connection(db_type=TARGET_DB['DB_TYPE'],
                                db_host=TARGET_DB['DB_HOST'],
                                db_name=TARGET_DB['DB_NAME'],
                                db_user=TARGET_DB['DB_USER'],
                                db_pass=TARGET_DB['DB_PASS'])
    cursor = connection.cursor()
    query = build_insert_query(table_name, column_names)
    insert_data(connection, cursor, query, data)
    connection.close()
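# insert_data is referenced above but not shown; a plausible minimal sketch
# (hypothetical, not the original implementation):
def insert_data(connection, cursor, query, data):
    # Bulk-insert all rows in one batch, then commit.
    cursor.executemany(query, data)
    connection.commit()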
def search():
    q = request.args.get('q', '')
    if q == '':
        return redirect(url_for('home'))
    db = utils.get_connection()
    hits = db.command("text", "subtitles", search=q)['results']
    results = [{
        "score": hit['score'],
        "subtitle": hit['obj'],
        "timestamp": max(0, int(hit['obj']['from']) - 10),  # rewind about 10 seconds
        "episode": db.files.find_one({'_id': hit['obj']['file']}),
    } for hit in hits]
    return render_template('search_results.html', results=results, search_term=q)
def get_chart(city_id, age_id):
    conn = get_connection(CONN_NAME, PATH)
    cur = conn.cursor()
    # Note the space before AND: the original concatenation produced
    # "... AGE = 5AND REGION_ ...", which is invalid SQL. The int() casts
    # also keep non-numeric input out of the concatenated query.
    cur.execute("SELECT * FROM EXPM_MCCRATE WHERE AGE = " + str(int(age_id)) +
                " AND REGION_ = " + str(int(city_id)) + " ORDER BY 4 asc")
    mccrate = []
    mccname = []
    for row in cur:
        mccrate.append(row[3])
        mccname.append(row[0])
    conn.close()
    return mccrate, mccname
def update_type_status(labels):  # TODO part of init
    sql = "update student_data set type_descript = %s where student_id = %s"
    cnx = utils.get_connection()
    cursor = cnx.cursor(buffered=True)
    for label in labels:
        students = labels[label]
        for student in students:
            key = (label, student.id_num)
            cursor.execute(sql, key)
    cnx.commit()
    cursor.close()
    cnx.close()
def update_student_prep(change_list):  # TODO make part of init
    cnx = utils.get_connection()
    cursor = cnx.cursor(buffered=True)
    sql = "update student_data set prep_assess = %s where student_id = %s"
    for change in change_list:
        print(change.prep_assess)
        key = (change.prep_assess, change.id_num)
        cursor.execute(sql, key)
    cnx.commit()
    cursor.close()
    cnx.close()
def profile():
    loggedInUser = getLoggedInUser(request)
    if loggedInUser == "":
        # Pass the status code to make_response, not to jsonify; the original
        # jsonify({}, 401) still returned HTTP 200.
        return make_response(jsonify({}), 401)
    sql = "SELECT * FROM users WHERE userid = %s"
    connection = get_connection()
    cursor = connection.cursor()
    cursor.execute(sql, (loggedInUser, ))
    user = cursor.fetchone()
    connection.close()
    return make_response(jsonify(user), 200)
async def download_episode(session: ClientSession, name: str, link: str,
                           path: str, total_bar: tqdm, pool: asyncio.Queue):
    """Download the episode to local storage"""
    file_target = f'{path}/{name}.mp4'
    try:
        async with get_connection(pool):  # Limit ourself to max concurrent downloads
            if os.path.isfile(file_target):
                raise FileExistsError(f'{name} already exists in the folder')
            req_method = session.post if Servers.MP4UPLOAD in link else session.get
            async with req_method(link) as resp:
                if resp.status != 200:
                    raise RuntimeError(
                        f'Got a bad response from the server for {name}: {resp.status}')
                # Default to 0 if the content-length header is missing.
                file_size = int(resp.headers.get('content-length', 0))
                with tqdm(desc=name, total=file_size, unit='B', unit_scale=True,
                          unit_divisor=1024, leave=False, ncols=BAR_WIDTH) as progress_bar:
                    # Write to file_target so the path matches the existence
                    # check above (the original wrote to f'{path}/{name}').
                    async with aiofiles.open(file_target, mode='wb') as file:
                        async for chunk, _ in ChunkTupleAsyncStreamIterator(resp.content):
                            await file.write(chunk)
                            progress_bar.update(len(chunk))
                    # Mark success and wait a bit before removing the bar
                    progress_bar.set_postfix_str(green('✔️'))
                    await asyncio.sleep(5)
    except Exception as e:
        tqdm.write(red(f'Failed to download {name} : {e} ❌'))
        if DEBUG:
            tqdm.write(red(''.join(
                traceback.format_exception(None, e, e.__traceback__))))
        try:
            os.remove(file_target)
        except FileNotFoundError:
            pass
        return False
    finally:
        total_bar.update(1)
    return True
def build_grade_comp(compare_type):
    comp_dict = {}
    course_cat = import_tools.get_course_names_by_type()
    core_courses = course_cat['core']
    # Copy the list: the original aliased course_cat['core'], so the extends
    # below mutated core_courses as well.
    compare_courses = list(course_cat['core'])
    compare_courses.extend(course_cat['elective'])
    compare_courses.extend(
        ["MATH228", "MATH199", "MATH109", "MATH400", "MATH338", "MATH245"])
    # compare_courses.extend(course_cat['ge'])
    ran_courses = []
    for x in range(0, len(core_courses)):
        print("build " + str(x + 1) + " of " + str(len(core_courses)))
        course_b = core_courses[x]
        for course_a in compare_courses:
            ran_courses.append([course_a, course_b])
            if course_a == course_b or course_a + "_" + course_b in comp_dict:
                continue
            if compare_type == 'equiv':
                compare_data = grade_compare_equiv(course_a, course_b,
                                                   cnx=utils.get_connection())
                comp_dict[course_a + "_" + course_b] = [
                    [compare_data[0], compare_data[1], course_a, course_b],
                    [compare_data[2], compare_data[3], course_b, course_a]]
            else:
                # testing effect of taking course a before b and not.
                compare_data = grade_compare_positive_query(
                    course_a, course_b, cnx=utils.get_connection())
                comp_dict[course_a + "_" + course_b] = [
                    [compare_data[0], compare_data[1], course_a, course_b]]
    return comp_dict
def home():
    loggedInUser = getLoggedInUser(request)
    if loggedInUser == "":
        # Pass the status code to make_response, not to jsonify; the original
        # jsonify({}, 401) still returned HTTP 200.
        return make_response(jsonify({}), 401)
    sql = "SELECT * FROM messages WHERE userid = %s " + \
          "ORDER BY sent DESC"
    connection = get_connection()
    cursor = connection.cursor()
    cursor.execute(sql, (loggedInUser, ))
    messages = cursor.fetchall()
    connection.close()
    return make_response(jsonify(messages), 200)
def read_table(db_details, table_name, rows=None):
    limit = f" LIMIT {rows}" if rows else ""
    connection = get_connection(db_details["source"])
    cursor = connection.cursor()
    query = f"SELECT * FROM {table_name}{limit}"
    cursor.execute(query)
    data = cursor.fetchall()
    column_names = cursor.column_names
    connection.close()
    return data, column_names
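# A sketch of how read_table and write_table (above) might be combined to
# copy a table, assuming db_details carries both "source" and "target"
# connection settings; the table name is hypothetical:
data, column_names = read_table(db_details, "orders", rows=10000)
write_table(db_details, "orders", data, column_names, batch_size=500)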
def get_cities_names():  # return dict {name: [id]}
    conn = get_connection(CONN_NAME, PATH)
    cur = conn.cursor()
    assets_list = {}  # initialized up front so the return works even on error
    try:
        cur.execute("SELECT REGION_, (case when REGION_ = 1 then 'Moscow' "
                    "when REGION_ = 2 then 'SPB' else 'Perm' end) city "
                    "FROM expm_balances GROUP BY REGION_")
        for row in cur:
            assets_list[row[1]] = [row[0]]
    except Exception:
        logging.error(traceback.format_exc())
    conn.close()
    return assets_list
def get_ages(city_id):  # return list of ages (the original comment, copied
                        # from get_cities_names, claimed a dict)
    conn = get_connection(CONN_NAME, PATH)
    cur = conn.cursor()
    ages_list = []  # initialized up front so the return works even on error
    try:
        # Note the space before GROUP BY: the original concatenation produced
        # "... region_ = 1GROUP BY AGE", which is invalid SQL.
        cur.execute("SELECT age FROM expm_balances where AGE < 80 and AGE > 10 "
                    "and region_ = " + str(int(city_id)) + " GROUP BY AGE")
        for row in cur:
            # The original appended [0] to an undefined assets_list.
            ages_list.append(row[0])
    except Exception:
        logging.error(traceback.format_exc())
    conn.close()
    return ages_list
def get_assets_names():  # return dict {symb: [id, name]}
    conn = get_connection(CONN_NAME, PATH)
    cur = conn.cursor()
    assets_list = {}  # initialized up front so the return works even on error
    try:
        cur.execute("SELECT * FROM exprm.assets")
        for row in cur:
            assets_list[row[1]] = [row[0], row[2]]
    except Exception:
        logging.error(traceback.format_exc())
    conn.close()
    return assets_list
def upload_file(name):
    try:
        s = utils.get_connection(LOCAL_ADDRESS)
        file_list = cPickle.loads(s.get_files())
        if filter(lambda x: x.filename == name, file_list):
            print "File already exists on server"
            return
        with open(name, 'rb') as handle:
            def read_chunk():
                return handle.read(SHARD_SIZE)
            for chunk in iter(read_chunk, ''):
                data = xmlrpclib.Binary(chunk)
                s.upload_file(name, data)
            s.save_state()
    except Exception as ext:
        print "File upload error"
        print ext
def preloop(self):
    """
    Entry point. Check whether we should run a script and exit,
    or start an interactive shell.
    """
    parser = self.get_shell_parser()
    args, others = parser.parse_known_args()

    command = self.COMMANDS.get(args.command)
    if command:
        command = command()
    elif args.command:
        print "*** Unknown command: %s" % args.command
        print get_help(parser)
        exit(0)
    elif others:
        print "*** Unknown arguments: %s" % ' '.join(others)
        print get_help(parser)
        exit(0)

    if args.help:
        if not command:
            print get_help(parser)
        elif command.OFFLINE:
            print get_help(command.get_command_parser())
        else:
            print get_help(command.get_command_parser(make_connection_parser()))
        exit(0)

    if command:
        if not command.OFFLINE:
            connection = get_connection(args)
        else:
            connection = None
        self.process_command(command, others, connection)
        exit(0)

    # do shell
    try:
        readline.read_history_file(self.get_history_file_path())
    except (IOError, NameError):
        pass
    self.set_shell_user(args)
def _ping_server_thread(self):
    # send signal to server every HEARTBEAT_INTERVAL
    while True:
        try:
            p2pfiles = utils.get_connection(SERVER_ADDRESS).heartbeat(
                self.peer_id, cPickle.dumps(self.p2pfiles))
            self._log("pinging server")
            p2pfiles = cPickle.loads(p2pfiles)
            new_files = filter(lambda x: x not in self.p2pfiles, p2pfiles)
            for x in new_files:
                t = threading.Thread(target=self._get_file_from_server,
                                     args=(x.filename,))
                # setDaemon must be called before start(); the original
                # called it afterwards, which raises RuntimeError.
                t.setDaemon(True)
                t.start()
                t.join(0.1)
            for p2pfile in p2pfiles:
                local_file = self.p2pfiles[self.p2pfiles.index(p2pfile)]
                local_file.peer_ids.update(p2pfile.peer_ids)
        except Exception as err:
            self._log(err)
        time.sleep(HEARTBEAT_INTERVAL)
def gen_cost_matrix(costos, grid_size):
    # Read costs
    costo_calles = costos['costo_calles']
    costo_eco_pequenas = costos['costo_unidades_economicas_pequenas']
    costo_eco_medianas = costos['costo_unidades_economicas_medianas']
    costo_transporte = costos['costo_transporte']
    # Read interests
    antes = costos['interes_antes']
    despues = costos['interes_despues']
    DROP = """DROP TABLE IF EXISTS features.costs_{grid_size}""".format(grid_size=grid_size)
    QUERY = """CREATE TABLE features.costs_{grid_size} AS (
        WITH costos_construccion AS (
            SELECT cell_id,
                   year,
                   calle_distancia_km * {costo_calles} +
                   unidades_economicas_pequenas_distancia_km * {costo_eco_pequenas} +
                   unidades_economicas_medianas_distancia_km * {costo_eco_medianas} +
                   carretera_distancia_km * {costo_transporte} AS costo_construccion,
                   CASE WHEN zona_urbana_distancia_min = 0 THEN 0 ELSE 1 END AS dummy_urbano
            FROM features.features_{grid_size}
        )
        SELECT cell_id,
               year,
               0 as CTN,
               costo_construccion * dummy_urbano AS CFP,
               costo_construccion * dummy_urbano * {antes} AS CTP,
               costo_construccion * dummy_urbano * {despues} AS CFN
        FROM costos_construccion)""".format(grid_size=grid_size,
                                            costo_calles=costo_calles,
                                            costo_eco_pequenas=costo_eco_pequenas,
                                            costo_eco_medianas=costo_eco_medianas,
                                            costo_transporte=costo_transporte,
                                            antes=antes,
                                            despues=despues)
    db_conn = utils.get_connection().raw_connection()
    cur = db_conn.cursor()
    cur.execute(DROP)
    db_conn.commit()
    cur.execute(QUERY)
    db_conn.commit()
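# Hypothetical invocation, assuming a costos dict with the keys read above
# and a 250m grid (the values are illustrative only):
costos = {
    'costo_calles': 1.0,
    'costo_unidades_economicas_pequenas': 0.5,
    'costo_unidades_economicas_medianas': 0.8,
    'costo_transporte': 1.2,
    'interes_antes': 0.3,
    'interes_despues': 0.7,
}
gen_cost_matrix(costos, '250m')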
def get_location_for_mirror(mirror):
    """ get the location for the mirror """
    conn = get_connection()
    loc_key = cache_key('IPLOC', mirror)
    value = conn.get(loc_key)
    if value:
        return pickle.loads(value)
    # if we have a mirror name like mirror.domain.suffix/blah it won't work
    try:
        hostname = urlparse("http://{0}".format(mirror)).netloc
    except Exception as exc:
        # if error, just default to mirror that works most of the time
        print("Error getting location for {0} \n {1}".format(mirror, exc))
        hostname = mirror
    ip = socket.gethostbyname(hostname)
    location = ping_ip2loc(ip)
    if location:
        conn.setex(loc_key, 86400, pickle.dumps(location))  # 1 day cache
        return location
    # if we get here, no good, return None
    return None
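# cache_key is used here and in process_results above but not shown; a minimal
# sketch, assuming it only namespaces Redis keys by prefix and mirror
# (hypothetical, not the original implementation):
def cache_key(prefix, mirror):
    # e.g. ('RESPTIME', 'pypi.org') -> 'RESPTIME_pypi.org'
    return "{0}_{1}".format(prefix, mirror)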
def add_entry():
    db = get_connection()
    db.insert({'text': request.form['text'], 'date': make_datetime()})
    flash('New Entry was successfully posted')
    return redirect(url_for('.show_entries'))
def show_entries():
    db = get_connection()
    entries = db.notes.find().sort('_id', -1).limit(20)
    return render_template('show_entries.html', entries=entries)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function

import os
import os.path
import tempfile

# from pyExcelerator import Workbook
from pymongo import Connection

from sendmail import send_mail
import mail_template as mt
import utils

conn, db = utils.get_connection()

spec = {'$or': [dict(min_alert={'$lt': 0}),
                dict(max_alert={'$lt': 0}),
                ]}
fields = ['code', 'department', 'project', 'store_good', 'store_bad',
          'name', 'min_store', 'max_store', 'out_good', 'out_bad', ]

codes = dict()
for sp in db.spareparts.find(spec, fields=fields):
    dp = sp['department']
    if dp not in codes:
        codes[dp] = dict()
    pj = sp['project']
def store_json_data(data):
    """ Store the data in the cache for later use."""
    conn = get_connection()
    conn.set('JSON_DATA', data)
def get_connection(cls, conn=None):
    if not conn:
        conn = get_connection()
    return conn
def list_files():
    s = utils.get_connection(LOCAL_ADDRESS)
    files = cPickle.loads(s.get_files())
    for f in files:
        print f