def applyLeave(self, request, mail):
    """Create or update a leave request and e-mail the approver.

    Form fields: leaveId (optional -- update the row when present),
    requestedto (approver's e-mail), ondate, reason.  The requesting
    employee's id comes from the 'employeId' header.  Returns the JSON
    status of the insert/update query.
    """
    leaveId = request.form.get('leaveId')
    employeId = request.headers.get('employeId')
    requestedto = request.form.get('requestedto')
    ondate = request.form.get('ondate')
    reason = request.form.get('reason')
    state = 'pending'

    # NOTE(review): every query here is built with str.format and is open
    # to SQL injection; switch to parameterized queries if the DB wrapper
    # supports placeholders.  Until then, double up single quotes in ALL
    # user-supplied values (the original escaped only `reason`).
    def esc(value):
        # Escape a value for embedding inside a single-quoted SQL literal.
        return value.replace("'", "''") if value else value

    reason = esc(reason)
    reqToEmpDetailsQuery = "select employeId from employees where email = '{}'".format(
        esc(requestedto))
    reqToEmpDetails = DB().execute(reqToEmpDetailsQuery, QueryType.fetchOne)
    reqToEmpId = reqToEmpDetails['employeId']
    reqByEmpDetailsQuery = "select email, first_name from employees where employeId = '{}'".format(
        esc(employeId))
    reqByEmpDetails = DB().execute(reqByEmpDetailsQuery, QueryType.fetchOne)
    reqByEmpName = reqByEmpDetails['first_name']
    reqByEmpEmail = reqByEmpDetails['email']
    if leaveId:
        # Existing request: overwrite its fields and reset to pending.
        query = "UPDATE leaves SET requestedby = '{}', requestedto = '{}', ondate = '{}', reason = '{}', state = '{}' WHERE leaveId = '{}'".format(
            esc(employeId), reqToEmpId, esc(ondate), reason, state, esc(leaveId))
    else:
        query = "INSERT INTO leaves (requestedby, requestedto, ondate, reason, state) values ('{}', '{}', '{}', '{}', '{}')".format(
            esc(employeId), reqToEmpId, esc(ondate), reason, state)
    # Notify the approver by mail before persisting (original ordering kept).
    msg = Message('Request for leave', sender='*****@*****.**',
                  recipients=[requestedto])
    msg.body = reqByEmpName + ' with email ' + reqByEmpEmail + \
        ' is requested for leave on ' + ondate + ' with the reason ' + reason
    mail.send(msg)
    return DB().execute_json(query, QueryType.insert)
def approveLeave(self, request, mail):
    """Set a leave request's state and notify the requester by e-mail.

    Form fields: leaveId, ondate, approvedtoId (the requesting
    employee's id), state (stored verbatim, e.g. approved/rejected).
    The approver's id comes from the 'employeId' header.  Returns a
    JSON status payload.
    """
    employeId = request.headers.get('employeId')
    leaveId = request.form.get('leaveId')
    ondate = request.form.get('ondate')
    approvedtoId = request.form.get('approvedtoId')
    state = request.form.get('state')

    # NOTE(review): string-built SQL -- injectable; prefer parameterized
    # queries.  Meanwhile, double single quotes in interpolated values.
    def esc(value):
        return value.replace("'", "''") if value else value

    approverEmailQuery = "select email from employees where employeId = '{}'".format(
        esc(employeId))
    approverDetails = DB().execute(approverEmailQuery, QueryType.fetchOne)
    approverEmail = approverDetails['email']
    approvedtoEmailQuery = "select email from employees where employeId = '{}'".format(
        esc(approvedtoId))
    approvedtoDetails = DB().execute(approvedtoEmailQuery, QueryType.fetchOne)
    approvedtoEmail = approvedtoDetails['email']
    query = "UPDATE leaves SET state = '{}' WHERE leaveId = '{}'".format(
        esc(state), esc(leaveId))
    status = DB().execute(query, QueryType.insert)
    # The DB wrapper signals success with an empty string; anything else
    # is a dict carrying an error 'message'.
    if status == '':
        msg = Message('Response for your leave request',
                      sender='*****@*****.**', recipients=[approvedtoEmail])
        msg.body = 'Your request for leave ondate ' + str(
            ondate) + ' is ' + state + ' by ' + approverEmail
        mail.send(msg)
        return jsonify({'status': 'ok', 'message': '', 'data': ''})
    return jsonify({'status': 'fail', 'message': status['message']})
def __init__(self, spec):
    """Build source and destination DB handles from *spec* and wrap them,
    together with the migration list, in a Spec."""
    src = DB(spec['source_url'], spec['source_user'],
             spec['source_password'], spec['source_db'])
    dst = DB(spec['destination_url'], spec['destination_user'],
             spec['destination_password'], spec['destination_db'])
    self.spec = Spec(src, dst, spec['migrations'])
def getLeavetypes(self, request):
    """Return leave policies as JSON, filtered by leavepolicyId when given.

    Query arg: leavepolicyId (optional).
    """
    leavepolicyId = request.args.get('leavepolicyId')
    if leavepolicyId:
        # NOTE(review): string-built SQL; quotes are escaped to blunt
        # injection, but a parameterized query is the proper fix.
        query = "select * from leavepolicies where leavepolicyId = '{}'".format(
            leavepolicyId.replace("'", "''"))
    else:
        query = "select * from leavepolicies"
    return DB().execute_json(query, QueryType.fetchAll)
def add_from_api():
    """Add a record from a JSON POST body and return the full collection."""
    # The original had a bare `request.is_json` expression, which is a
    # no-op; the intent was presumably to validate the content type --
    # TODO confirm and return a 400 on non-JSON input.
    content = request.get_json()
    data = {
        'title': content['title'],
        'date': content['date']
    }
    DB().add_data(data)
    return jsonify(DB().get_all())
def getHolidays(self, request):
    """Return holidays as JSON, filtered by holidayId when given.

    Query arg: holidayId (optional).
    """
    holidayId = request.args.get('holidayId')
    if holidayId:
        # NOTE(review): string-built SQL; quotes are escaped to blunt
        # injection, but a parameterized query is the proper fix.
        query = "select * from holidays where holidayId = '{}'".format(
            holidayId.replace("'", "''"))
    else:
        query = "select * from holidays"
    return DB().execute_json(query, QueryType.fetchAll)
def add_from_form():
    """Add a record from posted form fields and return the full collection."""
    title = request.form.get('title')
    date = request.form.get('date')
    data = {
        'title': title,
        'date': date
    }
    # Removed a leftover debug print of the submitted payload.
    DB().add_data(data)
    return jsonify(DB().get_all())
def get_user_comments(user, sortby='created', orderby='asc', start=0, count=20):
    """Return a page of *user*'s comments, each with its attached images.

    sortby/orderby are validated against whitelists (falling back to
    'created'/'desc'); start/count are coerced to int so the %d
    interpolation below cannot be abused.  The username itself is bound
    as a proper ? parameter.  Returns {'user': ..., 'comments': [...]}.
    """
    if sortby not in ['id', 'postid', 'created', 'subreddit', 'ups']:
        sortby = 'created'
    if orderby not in ['asc', 'desc']:
        orderby = 'desc'
    query = '''
        select id, postid, text, subreddit, created, permalink, ups, downs
        from comments
        where comments.userid in (select id from users where username = ?)
        order by %s %s limit %d offset %d
    ''' % (sortby, orderby, int(count), int(start))
    db = DB()
    cur = db.conn.cursor()
    try:
        results = cur.execute(query, [user]).fetchall()
        comments = []
        for (commentid, postid, text, subreddit, created, permalink,
             ups, downs) in results:
            # N+1 lookup: one image query per comment -- acceptable for
            # the small page sizes used here.
            image_query = '''
                select path, width, height, size, thumb, type
                from images
                where images.post = ?
            '''
            image_results = cur.execute(image_query, [postid]).fetchall()
            images = [{
                'path': path,
                'width': width,
                'height': height,
                'size': size,
                'thumb': thumb,
                'type': imagetype
            } for (path, width, height, size, thumb, imagetype) in image_results]
            comments.append({
                'id': commentid,
                'postid': postid,
                'text': text,
                'subreddit': subreddit,
                'created': created,
                'permalink': permalink,
                'ups': ups,
                'downs': downs,
                'images': images
            })
    finally:
        # Close the cursor even when a query raises (original leaked it).
        cur.close()
    return {'user': user, 'comments': comments}
def __init__(self, include_old_employees=False, include_candidates=False,
             include_pre_employees=False, include_student=False,
             include_client=False, only='all'):
    """Load organisation data from the DB, restricted by *only*.

    only: 'all' or a single data set name ('departments', 'positions',
    'employees', 'countries', 'levels', 'locations'); several branches
    also fire for 'employees' because employee loading depends on them.
    The include_* flags widen which people are loaded as employees.
    """
    self.DB = DB()
    self.logger = Logger().logger
    # self.con, self.cur = self.DB.connect()
    if only in ['all', 'departments']:
        self.departments = self._get_departments()
        self.companies = self._get_companies()
    if only in ['all', 'positions', 'employees']:
        self.titles = self._get_titles()
    if only in ['all', 'employees']:
        # NOTE(review): departments/companies are fetched a second time
        # when only == 'all' -- looks like redundant work; confirm before
        # removing.
        self.departments = self._get_departments()
        self.companies = self._get_companies()
        self.employees = self._get_employees(include_old_employees=include_old_employees,
                                             include_candidates=include_candidates,
                                             include_pre_employees=include_pre_employees,
                                             include_student=include_student,
                                             include_client=include_client)
        # Index the loaded employees two ways for later lookups.
        self.employees_by_status = self.people(
            group_by='employment_status')
        self.employees_by_username = self.people()
    if only in ['all', 'countries']:
        self.countries = self._get_countries()
    if only in ['all', 'levels', 'employees']:
        self.levels = self._get_levels()
    if only in ['all', 'locations', 'employees']:
        self.locations, self.locations_long = self._get_locations()
def kaydet(self):
    """Persist the text-field contents via DB.ekleme and show the outcome
    in the result label."""
    entered_text = self.win.txtAd.text()
    outcome = "Al rite" if DB().ekleme(entered_text) else "Nope"
    self.win.lblSonuc.setText(outcome)
def SelectFeriado(id):
    """Fetch the Feriados row with the given id and deserialize it.

    Raises ValueError for a non-numeric id and IndexError when no row
    matches.  Returns a populated Feriados instance.
    """
    # int() guards the string-concatenated SQL against injection: the id
    # must be numeric before it is spliced into the statement.
    select_cursor = DB().run(
        "Select * from Feriados where idFeriado = " + str(int(id)) + ";")
    d = select_cursor.fetchall()
    feriado = Feriados()
    feriado.DeserializarFeriado(d[0])
    return feriado
def __init__(self):
    """Initialize an empty user record: identity fields unset, a fresh
    DB handle, and no tickets."""
    self.id = self.login = self.password = self.name = None
    self.db = DB()
    self.tickets = []
def ilceDoldur(self):
    """Repopulate the district combo box for the currently selected
    province, starting with a placeholder entry."""
    districts = DB().ilceListele(self.win.cmbIL.currentIndex())
    self.win.cmbILCE.clear()
    self.win.cmbILCE.addItem("Seçiniz")
    for _code, district_name in districts:
        self.win.cmbILCE.addItem(district_name)
def admin_report():
    """Render the admin page with the report object and registered users."""
    # Distinct local name: the original shadowed this view function with
    # its own result variable.
    report = Admin_Report()
    db = DB()
    registered_users = db.get_registered_users()
    return render_template("admin.html", admin_report=report,
                           registered_users=registered_users)
def table():
    """Render the table page for the logged-in user after refreshing the
    generated English names."""
    # NOTE(review): hard-coded DB credentials; move to configuration.
    db = DB('localhost', 'root', '123456', 'test', 3306)
    person_row = db.selectAllFromPeopleByusername(current_user.username)
    # Regenerate the English-name data for this user's Chinese name.
    name_builder = EnglishName(person_row[0])
    name_builder.getAllName()
    return render_template('table.html', username=current_user.username)
def edit():
    """Render the profile-edit page for the logged-in user.

    Loads the user's profile row plus their stored bibtex entries and
    passes everything to the 'edit.html' template.
    """
    # NOTE(review): hard-coded DB credentials; move to configuration.
    db = DB('localhost', 'root', '123456', 'test', 3306)
    results = db.selectAllFromPeopleByusername(current_user.username)
    # Positional columns of the people row -- presumably fixed by the
    # table schema; verify against selectAllFromPeopleByusername.
    # Index 3 is skipped (unknown column from here).
    realname = results[0]
    EnglishRealname = results[1]
    email = results[2]
    domainName = results[4]
    researchInterest = results[5]
    myself = results[6]
    Research_institutions = results[7]
    department = results[8]
    position = results[9]
    ProfilePicturePath = results[10]
    # bibtex = results[11]
    # print(bibtex)
    data = db.SelectAllBibtexByUsername(current_user.username)
    # Aggregate .bib file maintained per user.
    BibtexfilePath = './bib/' + current_user.username + '/total.bib'
    if len(data) == 0:
        # No stored entries: render with empty bibtex text/key lists.
        keylist = []
        text = []
        return render_template('edit.html', realname=realname,
                               EnglishRealname=EnglishRealname,
                               ProfilePicturePath=ProfilePicturePath,
                               Research_institutions=Research_institutions,
                               department=department, position=position,
                               researchInterest=researchInterest,
                               myself=myself, domainName=domainName,
                               email=email, text=text, keylist=keylist)
    else:
        # path=[]
        # name=[]
        # Parse the aggregate bib file; titles/years are unused here.
        bp = BibtexParser(BibtexfilePath)
        text, keys, titles, years = bp.gettextbykey()
        return render_template('edit.html', realname=realname,
                               EnglishRealname=EnglishRealname,
                               ProfilePicturePath=ProfilePicturePath,
                               Research_institutions=Research_institutions,
                               department=department, position=position,
                               researchInterest=researchInterest,
                               myself=myself, email=email, keylist=keys,
                               text=text, domainName=domainName)
def manager(self):
    """The function does not receive or return parameters.
    The thread wait for server asks to use the data base."""
    # Python 2 code (print statement below).
    self.db = DB('users and lessons', "yuval", "19hothot")
    self.connect("'users and lessons'")
    self.build_data_base()
    # Dispatch loop: each ask is a tuple of (command, args...).
    # NOTE(review): this loop never removes handled asks and never
    # sleeps -- presumably self.asks is drained by another thread;
    # confirm, otherwise asks are re-processed forever.
    while True:
        for ask in self.asks:
            if ask[0] == "register me":
                print 'got to db functions'
                self.add_user(ask[1])
            elif ask[0] == "check user name duplicates":
                self.check_username_duplicates(ask[1], ask[2])
            elif ask[0] == "give me his details":
                self.select_details()
def __init__(self, interval=30):
    """Load OHLCV bars for ticker sh000300 at the given bar *interval*
    (minutes -- TODO confirm units) from SQL Server into a pandas Panel.

    NOTE(review): pd.Panel was removed in pandas 1.0, and the machine-
    specific sys.path entry / credentials below should be configuration.
    """
    import sys
    # Hard-coded project path so the local DB module resolves.
    sys.path.append(
        r"C:\Users\Kelvin\CloudStation\MSC COMPUTER SCIENCE\Dissertation\CODE\Dissertation\Dissertation"
    )
    #sys.path.append(r"C:\Users\Kelvi\CloudStation\MSC COMPUTER SCIENCE\Dissertation\CODE\Dissertation\Dissertation")
    import pandas as pd
    from collections import OrderedDict
    import pytz
    from DB import DB
    db = DB(driver='{SQL Server}', server='ENVY15-NOTEBOOK\MSSQL2017',
            database='DBHKUDissertation', username='******',
            password='******')
    #db = DB(driver = '{SQL Server}', server = 'LAPTOP-194NACED\SQL2017', database = 'DBHKUDissertation', username = '******', password = '******')
    # interval is interpolated into the IN (...) clause as a number.
    sql = """ select [TradingDatetime]
    , [Open]
    , [High]
    , [Low]
    , [Close]
    , [Volume]
    FROM [DBHKUDissertation].[dbo].[TableStock]
    where Ticker in ('sh000300')
    and Interval in (""" + str(interval) + """) """
    data = OrderedDict()
    ticker = 'SH00300'
    df = db.read_sql(sql_string=sql)
    # Rename to the lowercase OHLCV convention the backtester expects.
    df.columns = ['open', 'high', 'low', 'close', 'volume']
    df.index.names = ['datetime']
    print(df.head())
    data[ticker] = df
    data[ticker] = data[ticker][['open', 'high', 'low', 'close', 'volume']]
    print(data[ticker].head())
    #data[ticker]['datetime'] = pd.to_datetime(data[ticker]['datetime'], unit='s')
    print(data[ticker].index)
    print(type(data[ticker].index))
    # Timestamps are stored in Shanghai local time; convert to UTC.
    data[ticker].index = data[ticker].index.tz_localize(
        pytz.timezone("Asia/Shanghai")).tz_convert(pytz.timezone("UTC"))
    print(data[ticker].index)
    print(type(data[ticker].index))
    self.panel = pd.Panel(data)
    self.panel.minor_axis = ['open', 'high', 'low', 'close', 'volume']
    #self.panel.marjo_axis = self.panel.major_axis.tz_localize(pytz.timezone("Asia/Shanghai"))
    print(self.panel)
def __init__(self, **kwargs): """Read arguments (and change settings) and initialize modules.""" # Default Data Inputs self.image = None self.plant_db = DB() # Default Parameter Inputs self.params = Parameters() # Load keyword argument inputs self._data_inputs(kwargs) self._parameter_inputs(kwargs) self.args = kwargs # Set remaining arguments to defaults self._set_defaults() # Changes based on inputs if self.args['calibration_img'] is not None: # self.coordinates = True self.args['coordinates'] = True if self.args['GUI']: self.args['save'] = False self.args['text_output'] = False if self.args['app']: self.args['verbose'] = False self.args['from_env_var'] = True self.plant_db.app = True # Remaining initialization self.p2c = None self.capture = Capture().capture self.final_marked_image = None self.plant_db.tmp_dir = None
def main():
    """ETL: load every local CSV into its own table, then join the 2014
    commodity series into one output CSV.

    Python 2 code ('print' statements, reader.next(), 'rb' csv mode).
    """
    data = []
    db = DB(DB_NAME)
    create_tables(db)
    con = db.connection()
    cur = db.cursor()
    for file in glob.glob("*.csv"):
        # First pass: read the header row to size the insert statement.
        f = open(file, 'rb')
        reader = csv.reader(f)
        headers = reader.next()
        with open(file) as csvfile:
            reader = csv.DictReader(csvfile)
            # Table name = filename with non-letters stripped.
            table_name = re.sub(r'[^a-zA-Z]', '', file.replace('.csv', ''))
            print table_name
            # Build "insert into T VALUES (%s, %s, ...)" with one
            # placeholder per header column.
            q = 'insert into ' + table_name + ' VALUES (%s, '
            for i in range(1, len(headers)):
                q = q + '%s, '
            q = q[:-2] + ')'
            # Build the literal source text "row['ColA'], row['ColB'], ..."
            # that eval() below turns into the value tuple.
            # NOTE(review): eval on strings derived from CSV headers is
            # fragile and unsafe for untrusted files -- a plain
            # tuple(row[h] for h in cleaned_headers) would do the same.
            val = ''
            for i in range(0, len(headers)):
                header_name = re.sub(r'[^a-zA-Z]', '', headers[i].replace(' ', ''))
                val = val + 'row[\'' + header_name + '\'], '
            val = val[:-2]
            for row in reader:
                cur.execute(q, (eval(val)))
            con.commit()
    # Join all 2014 series on matching dates, anchored on coffee/copper.
    joinData = 'select coffee.date as date, coffee.value as coffeeValue,\
 copper.mid as copperValue, CottonETF.close as CottonETFValue, \
 GoldIndex.close as GlodValue, NASDAQVIX.settle as NASDAQVIXsettle,\
 NYMEXAMEXNaturalGasIndex.close as GasClose, NYMEXCrudeOIl.settle as CrudeOIlSettle,\
 SPVIX.close as SPVIXClose, SoybeanETF.close as BeanClose, WheatETF.close as WheatClose\
 from coffee, copper,CottonETF, GoldIndex, GoldSilverIndex, NASDAQVIX,\
 NYMEXAMEXNaturalGasIndex, NYMEXCrudeOIl, SPVIX, SoybeanETF,\
 WheatETF where coffee.date>\'2014-01-01 00:00:00\' and\
 coffee.date<\'2014-12-31 00:00:00\' and coffee.date=copper.date\
 and CottonETF.date=copper.date\
 and GoldIndex.date=copper.date\
 and GoldSilverIndex.date=copper.date\
 and NASDAQVIX.date=copper.date\
 and NYMEXAMEXNaturalGasIndex.date=copper.date\
 and NYMEXCrudeOIl.date=copper.date\
 and SPVIX.date=copper.date\
 and SoybeanETF.date=copper.date\
 and WheatETF.date=copper.date order by coffee.date asc'
    header = ['date', 'coffeevalue', 'coppervalue', 'cottonetfvalue', 'glodvalue', 'nasdaqvixsettle',\
 'gasclose', 'crudeoilsettle', 'spvixclose', 'beanclose', 'wheatclose']
    t = db.query(joinData)
    # Write the header row via DictWriter, then the joined rows verbatim.
    with open("D:/Big-Data-Project/outputData.csv", "wb") as f:
        writer = csv.DictWriter(f, fieldnames=header)
        writer.writeheader()
        writer = csv.writer(f)
        writer.writerows(t)
def decomposition(R, L):
    """Decompose relation *R* into BCNF using the functional-dependency
    list *L*; returns a DB holding the resulting BCNF schemes."""
    result = DB()
    pending = Stack()
    pending.push(R)
    while not pending.isEmpty():
        scheme = pending.pop()
        # Scan the FD list for the first dependency that violates BCNF
        # on this scheme.
        L.reset()
        offender = None
        while not L.end():
            fd = L.getNext()
            if BCNF_Violation(fd, scheme, False):
                offender = fd
                break
        if offender is None:
            # Scheme is in BCNF: keep it.
            result.insert(scheme)
        else:
            # Split on the violating FD: one scheme from lhs+rhs, the
            # other from the remainder of the attributes.
            pending.push(list(set(offender.lhs()) | set(offender.rhs())))
            removed = set(offender.rhs()) - set(offender.lhs())
            pending.push(list(set(scheme) - removed))
    return result
def analyseSocialUsers(config):
    """Analyse the social users.

    Args:
        config (dict): Config file params
    """
    db = DB(config["DB_path"], top=config["topUsers"])
    # Social users and their check-ins since the configured start date.
    social_ids = db.getSocialUsers()
    checkins = db.getcheckinsSocialUsers(social_ids, config["start_date"])
    checkins.to_csv(config["loc_social_checkins"], index=False)
    # Friendship pairs for the social users.
    friends = db.getFriends(tuple(social_ids))
    # Everyone involved: social users plus all of their friends.
    participant_ids = tuple(
        set(friends["first_user_id"].unique())
        | set(friends["second_user_id"].unique()))
    venues = db.getVenueIds(participant_ids)
    db.close()
    # Similarity between social users and their friends, written to disk.
    getClosenessFriends(venues, friends, config['loc_social_closeness'])
    return None
def __init__(self):
    """Boot the daemon: logging, config, hardware/service modules, the
    websocket server, and signal handlers for graceful shutdown."""
    self.__version__ = '3.1.3'
    logger_format = '%(asctime)s %(message)s'
    logging.basicConfig(format=logger_format, level=logging.INFO,
                        datefmt="%H:%M:%S", filename='./logfile.log',
                        filemode='w')
    # Mirror log records to the local syslog daemon.
    logging.getLogger().addHandler(
        SysLogHandler(facility=SysLogHandler.LOG_DAEMON, address='/dev/log'))
    # Set by sig_handler to request shutdown.
    self._SIGKILL = False
    self.logging = logging
    self.config = DB('./config.json')
    # Service/hardware modules; most take a back-reference to this app.
    self.network = Network(self)
    self.display = DisplayContent()
    self.detector = Detector()
    self.ultrasound = Ultrasound(self)
    self.temp = Temp()
    # WebSocket control server on all interfaces, port 8069.
    self.server = SimpleWebSocketServer('', 8069, WebSocketServer, self)
    self.cook = Cook(self)
    self.energy = Energy(self)
    self.history = History(self)
    self.audio = Audio(self)
    self.users = Users(self)
    self.automations = Automations(self)
    # Graceful shutdown on SIGTERM/SIGINT.
    signal.signal(signal.SIGTERM, self.sig_handler)
    signal.signal(signal.SIGINT, self.sig_handler)
    self.log("Boot: v" + self.__version__)
def playerStandings():
    """Return the players and their win records, sorted by wins.

    Pulls from the player_wins/player_losses database views so each
    player appears exactly once; first entry is the current leader (or
    one of the tied leaders).

    Returns:
      A list of (id, name, wins, matches) tuples ordered from most wins
      to least, where matches = wins + losses.
    """
    handle = DB().execute('''
        SELECT player_wins.pid, player_wins.pname, player_wins.wins,
            (player_wins.wins + player_losses.losses) AS matches
        FROM player_wins INNER JOIN player_losses
            ON player_wins.pid = player_losses.pid
        ORDER BY player_wins.wins DESC''')
    standings = [(r[0], r[1], r[2], r[3])
                 for r in handle["cursor"].fetchall()]
    handle['conn'].close()
    return standings
def do(self):
    """Fetch all active blogs and crawl each supported one.

    Currently only TECH_CRUNCH blogs are crawled; others are skipped.
    """
    #https://docs.microsoft.com/en-us/visualstudio/python/vs-tutorial-01-02
    print('Inside do() method')
    startTime = datetime.now()
    db = DB()
    # TODO: Below DB connection string needs to be picked up from Heroku Env. variable
    db.create_global_connection(
        1, 1,
        'postgres://*****:*****@baasu.db.elephantsql.com:5432/wyewnhia'
    )
    timeElapsed = datetime.now() - startTime
    print('>>> DB_ConnectionPool \t\tTime elapsed (hh:mm:ss.ms) {}'.format(
        timeElapsed))
    blogs = db.findAll("blogs", {"is_active": True})
    #blogs=db.findAll("articles", {"author":"Jon Russell", "publish_date":"2017-12-13"})
    if blogs is not None:
        # First element of the result is the column-name list; remaining
        # elements are the data rows (hence the pop).
        columnList = blogs.pop(0)
        urlIndex = columnList.index("url")
        nameIndex = columnList.index("name")
        for blog in blogs:
            if blog[nameIndex].strip() == "TECH_CRUNCH":
                agent = TechcrunchAgent()
                endsiteUrl = blog[urlIndex]
                self.crawl(db, agent, endsiteUrl)
def index():
    """Return the caller's visit id, registering a new visit when no
    visitId cookie is present."""
    visit_id = request.cookies.get("visitId")
    if not visit_id:
        visit_id = DB(DB_URL).register_visit()
    return {"visitId": str(visit_id)}
def savebibtex():
    """Sync the user's stored bibtex with the key list posted in 'mylist':
    rewrite total.bib keeping only the listed entries, then delete the
    removed keys from the database.  Returns '1' when the user has no
    stored entries, '0' otherwise.
    """
    mylist = request.args.get('mylist')
    reallist = json.loads(mylist)
    # NOTE(review): hard-coded DB credentials; move to configuration.
    db = DB('localhost', 'root', '123456', 'test', 3306)
    # Drop the bibtex entries the user removed from the list.
    # Rewrite total.bib, copying only the entries whose keys survive.
    BibtexfilePath = './bib/' + current_user.username + '/total.bib'
    # BibtexfilePathnew = './bib/' + current_user.username + '/totalnew.bib'
    with open(BibtexfilePath, "r") as f:
        lines = f.readlines()
    with open(BibtexfilePath, "w",) as f_w:
        for i in range(0, len(reallist)):
            # For each kept key: scan for its "@...{key" header line,
            # then copy lines until the entry's closing '}' line.
            flag = 0
            for line in lines:
                if reallist[i] in line and '@' in line:
                    flag = 1
                temp = line.strip()
                temp = temp.replace('\n', '')
                if flag == 1:
                    f_w.write(line)
                if flag == 1 and temp == '}':
                    break
    datas = db.SelectAllBibtexByUsername(current_user.username)
    if len(datas) == 0:
        return jsonify('1')
    else:
        # DB keys not in the posted list were deleted client-side --
        # remove them from the database too.  (Row layout: index 1 is
        # the bibtex key -- presumably; verify against the table schema.)
        currentlist = []
        for data in datas:
            currentlist.append(data[1])
        differences = list(set(currentlist).difference(set(reallist)))
        for difference in differences:
            db.DeleteBibtexByUsernameAndBibtex(current_user.username,
                                               difference)
        return jsonify('0')
def check_user_login(self, username, password):
    """Validate a username/password pair against the users table.

    Returns a (status, nickname, username) tuple: ("1", nickname,
    username) on success, ("0", "", username) on failure.
    """
    # NOTE(review): the SQL contains a scrubbed literal ('******') rather
    # than interpolating *username* -- this looks like redacted source;
    # confirm the real query filters by username (and use a parameterized
    # query to avoid injection when restoring it).
    sql = "select * from users where username = '******'"
    db_conn = DB()
    results = db_conn.get_one(sql)
    # NOTE(review): plaintext password comparison -- passwords appear to
    # be stored unhashed; flag for hashing (e.g. bcrypt/argon2).
    if not results or results['user_password'] != password:
        return "0", "", username
    return "1", results["user_nickname"], username
def __initDB(self):
    """Create the `order` bookkeeping table if it does not already exist.

    NOTE(review): the DDL is executed through db.insert() -- presumably
    the wrapper just runs arbitrary statements; confirm.
    """
    sql = '''
    CREATE TABLE IF NOT EXISTS `order` (
      `id` int(11) NOT NULL AUTO_INCREMENT,
      `appKey` varchar(50) NOT NULL DEFAULT '',
      `mid` int(11) NOT NULL DEFAULT '0',
      `iid` varchar(50) NOT NULL DEFAULT '',
      `order_ids` varchar(500) NOT NULL DEFAULT '',
      `type` int(11) NOT NULL DEFAULT '0',
      `price` decimal(10,2) NOT NULL DEFAULT '0.00',
      `price_mean` decimal(10,2) NOT NULL DEFAULT '0.00',
      `total` int(11) NOT NULL DEFAULT 0,
      `total_success` int(11) NOT NULL DEFAULT 0,
      `total_cancel` int(11) NOT NULL DEFAULT 0,
      `is_buy` int(1) NOT NULL DEFAULT '-1',
      `is_open` int(11) NOT NULL DEFAULT '-1',
      `status` int(11) NOT NULL DEFAULT 0,
      `srv_first_time` datetime NOT NULL COMMENT '服务器返回时间',
      `srv_end_time` datetime NOT NULL COMMENT '服务器返回时间',
      `local_start_time` datetime NOT NULL COMMENT '发出交易指令时间',
      `local_start_usec` int(11) NOT NULL DEFAULT '0',
      `local_first_time` datetime NOT NULL COMMENT '发出交易指令时间',
      `local_first_usec` int(11) NOT NULL DEFAULT '0',
      `local_end_time` datetime NOT NULL COMMENT '发出交易指令时间',
      `local_end_usec` int(11) NOT NULL DEFAULT '0',
      `mtime` timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP ON UPDATE CURRENT_TIMESTAMP,
      PRIMARY KEY (`id`)
    ) ENGINE=InnoDB CHARSET=utf8;'''
    db = DB()
    db.insert(sql)
def __init__(self, u_info):
    """Build a cubic, x/y-downsampled 3-D volume of segment IDs and write
    the original (downsampled) extents to Boundingbox.json."""
    ## User info
    self.u_info = u_info
    ## Load DB
    db = DB(self.u_info)
    ## Create 3D geometry
    # x/y are downsampled by 2**scale_factor_xy; z keeps one voxel per
    # tile layer.
    scale_factor_xy = 2
    xmax = db.canvas_size_y / (2**scale_factor_xy)
    ymax = db.canvas_size_x / (2**scale_factor_xy)
    zmax = db.num_tiles_z
    # Cube edge = largest extent, rounded up, so the volume is cubic.
    cube_size = max([xmax, ymax, zmax])
    cube_size = np.ceil(cube_size)
    cube_size = cube_size.astype(np.int32)
    self.small_ids = np.zeros([cube_size, cube_size, cube_size],
                              dtype=self.u_info.ids_dtype)
    for iz in range(db.num_tiles_z):
        # Full-resolution ID panel for this z layer, then stride-sampled
        # down in both x and y.
        full_map = m.ObtainFullSizeIdsPanel(self.u_info, db, iz)
        small_map = full_map[::(2**scale_factor_xy), ::(
            2**scale_factor_xy)]
        self.small_ids[0:small_map.shape[0], 0:small_map.shape[1],
                       iz] = small_map
    # Persist the (pre-padding) extents for the annotator front end.
    boundingbox_dict = {'x': xmax, 'y': ymax, 'z': zmax}
    with open(
            os.path.join(self.u_info.data_annotator_path,
                         'Boundingbox.json'), 'w') as f:
        json.dump(boundingbox_dict, f, indent=2, ensure_ascii=False)
    return None