def get_project_tasks(project_id):
    """Return a JSON payload with a project's task totals.

    Fetches (total, completed, percentage) from the API, persists the
    counts to the database, and returns them serialized as JSON.
    """
    total, completed, percentage = api.get_project_percentage(project_id)
    connection = db.DB()
    connection.update_project_percentage(project_id, total, completed)
    connection.db_close()
    return json.dumps({
        'total': total,
        'completed': completed,
        'percentage': percentage,
    })
def loadPathsAndLibs(project, dumpspath):
    """Build the per-project path map and dynamically load its fixes.py module.

    :param project: project name, used as a directory/db-file prefix.
    :param dumpspath: root directory that holds the dumps and fixdir tree.
    :returns: (fixes_module, paths_dict) on success, (None, None) on import failure.
    """
    paths = {}
    paths['project path'] = os.path.join(dumpspath, 'fixdir', project)
    paths['titles'] = os.path.join(paths['project path'], 'titles')
    paths['new dir'] = os.path.join(paths['project path'], 'new')
    paths['olds dir'] = os.path.join(paths['project path'], 'olds')
    paths['fixes.py'] = os.path.join(paths['project path'], 'fixes.py')
    paths['dbfullpath'] = os.path.join(dumpspath, project + LPMCFILESTRING + '.db')
    paths['errors file'] = os.path.join(paths['project path'], 'errors.txt')
    paths['upload errors file'] = os.path.join(paths['project path'], 'uploaderrors.txt')
    paths['log uploaded file'] = os.path.join(paths['project path'], 'uploaded.log')
    paths['list'] = os.path.join(paths['project path'], 'listforfixes')
    try:
        with db.DB(paths['dbfullpath']) as mydb:
            paths['siteurl'] = mydb.getSiteURL()
        # import the project's fixes.py as a module object
        spec = importlib.util.spec_from_file_location("fixes", paths['fixes.py'])
        fixes = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(fixes)
        return fixes, paths
    except ImportError:
        # BUG FIX: previously returned (None, None, None) — a 3-tuple —
        # while the success path returns 2 values; callers such as
        # fixFromGenerator unpack exactly two, so keep the arity consistent.
        return None, None
class UserBase(object):
    """Base class that defines attributes and general methods for all user roles."""

    # Shared handle to the user store; one connection for the whole process.
    _file = db.DB("db/users.json")

    def __init__(self, email, first_name, surname, role):
        self._email = email
        self._first_name = first_name
        self._surname = surname
        self._role = role

    @staticmethod
    def get_db():
        """Return the shared user-database handle."""
        return UserBase._file

    def whois(self):
        """Print this user's details.

        Depending on the specific role, subclasses may override this to
        include their additional fields.
        """
        details = (self._role, self._email, self._first_name, self._surname)
        print("Role: {}\nEmail: {}\nFirst Name: {}\nSurname: {}".format(*details))

    def get_email(self):
        """Return the user's email, used as an id."""
        return self._email
def getForMenu(self, excludedcats=None):
    """Return active top-level categories annotated with their subcategories.

    :param excludedcats: optional list of category ID strings to exclude.
    :returns: list of parent-category rows, each with a 'subcatlist' key;
        parents with no active children are omitted.
    """
    # BUG FIX: mutable default argument [] replaced with None sentinel.
    if excludedcats is None:
        excludedcats = []
    mdb = db.DB()
    exccatlist = ''
    if excludedcats:
        exccatlist = ' and ID not in (%s)' % (','.join(excludedcats))
    arr = mdb.query('SELECT * from category where status = %s %s' % (STATUS_ACTIVE, exccatlist))
    # BUG FIX: the original iterated `for key, parent in ret:` over a list of
    # row dicts (unpacking a dict raises) and deleted entries from the list
    # while iterating it. Build the result in a single safe pass instead.
    parents = [a for a in arr if a['parentID'] == '']
    ret = []
    for parent in parents:
        subcatlist = [a for a in arr if a['parentID'] == parent['ID']]
        if subcatlist:
            parent['subcatlist'] = subcatlist
            ret.append(parent)
    return ret
def fixthem(self, fromlist, namespace=None, stopcounter=-1):
    """Run the loaded fixes over page titles, writing fixed titles to the titles file.

    :param fromlist: if truthy, iterate titles via self.getPageTitles instead of the DB.
    :param namespace: namespace filter passed to the title generator.
    :param stopcounter: stop after this many processed titles (-1 = no limit).
    """
    print('in fixthem')
    with db.DB(self.paths['dbfullpath']) as mydb:
        print('in fixthem db')
        self.titlecounter = 0
        self.errorcounter = 0
        with open(self.paths['titles'], 'wt', encoding='utf_8') as ftitles:
            print('in fixthem titles opened.')
            ftitles.write("summary=" + self.fixes.summary + '\n')
            # choose the title source: explicit list vs. database iterator
            if fromlist:
                thegenerator = self.getPageTitles
            else:
                thegenerator = mydb.iterTitles
            for pagetitle in thegenerator(namespace):
                print('pagetitle:', pagetitle)
                if self.titlecounter == stopcounter:
                    print(self.errorcounter, 'errorcounter')
                    print('stopcounter found, exiting fixes.')
                    return
                oldwikitext, thets = mydb.getLemmaContent(pagetitle)
                ok = self.tryFixing(pagetitle, oldwikitext)
                if ok == True:
                    ftitles.write(
                        str(self.titlecounter) + ":" + pagetitle + '\n')
        # BUG FIX: this line referenced the unqualified name `paths`
        # (NameError at runtime); the attribute is self.paths.
        print('closed:', self.paths['titles'])
    print('exited db')
def getHistogram(self):
    """Render a bar chart of (word, frequency) rows from the histogram query
    and save it as <PATH><id>.png."""
    query = self.getHistogramQuery()
    fullPathImage = self.PATH + self.id + '.png'
    # BUG FIX: `con` must exist before the try, otherwise the `finally`
    # block raises NameError when db.DB(...) itself fails to connect.
    con = None
    try:
        # connect to the database
        con = db.DB("localhost", "root", "", self.database, 3306)
        result = con.executeQuery(query)
        word = [item[0] for item in result]
        frequency = [item[1] for item in result]
        indices = np.arange(len(result))
        plt.bar(indices, frequency, color='r')
        plt.xticks(indices, word, rotation='vertical')
        plt.tight_layout()
        plt.savefig(fullPathImage)
        plt.close()
        print('en el grafico')
    except Exception:
        # re-raise unchanged, preserving the original traceback
        raise
    finally:
        # close the connection only if it was actually created
        if con:
            con.close()
def InitiateLocalFiles(bz2fullname, dbfullname, txtfullname, deleteoldfiles=False):
    '''Create the empty .db and .txt files.'''
    if os.path.exists(dbfullname) or os.path.exists(txtfullname):
        if not deleteoldfiles:
            myprint(inspect.stack()[0][3], 'At least one of the 2 files exists and deleteold=False')
            return False
        # caller allowed deletion: clear whichever of the two files exists,
        # db first, then txt, bailing out on the first failure
        for stale in (dbfullname, txtfullname):
            if os.path.exists(stale):
                try:
                    os.remove(stale)
                except Exception:
                    myprint('could not remove ' + stale, inspect.stack()[0][3])
                    return False
    # dump dir is clear — initiate the two files
    try:
        with db.DB(dbfullname) as tmpdb:
            tmpdb.createAnEmptyDB()
        open(txtfullname, 'a').close()
        return True
    except Exception:
        myprint('could not create empty db or empty txt file', inspect.stack()[0][3])
        return False
def getRange(self, start, num):
    """Return `num` releases ordered by postdate desc, starting at offset
    `start`; `start == False` means no LIMIT clause."""
    mdb = db.DB()
    if start == False:
        limit = ''
    else:
        limit = ' LIMIT %s,%s' % (str(start), str(num))
    # BUG FIX: the SQL was a single-quoted string, so the literal ' > '
    # inside concat(...) terminated it (syntax error). Use a double-quoted
    # string so the inner single quotes survive intact.
    return mdb.query(
        "SELECT releases.*, concat(cp.title, ' > ', c.title) as category_name "
        "from releases "
        "left outer join category c on c.ID = releases.categoryID "
        "left outer join category cp on cp.ID = c.parentID "
        "order by postdate desc" + limit)
def processReleasesStage5(self, groupID):
    """Stage 5: write an NZB file for each release that lacks one and mark
    its collections as ready for deletion.

    :param groupID: restrict to this group ID when truthy; falsy processes all groups.
    :returns: number of NZB files created this run.
    """
    mdb = db.DB()
    nzbs = nzb.NZB()
    c = consoletools.Consoletools()
    n = self.n
    nzbcount = 0
    # optional per-group filter appended to the WHERE clause
    where = ' AND groupID = %s' % groupID if groupID else ''
    # create nzb.
    print 'Stage 5 -> Create the NZB, mark collections as ready for deletion.'
    stage5 = time.time()
    start_nzbcount = nzbcount
    resrel = mdb.queryDirect("SELECT ID, guid, name, categoryID FROM releases WHERE nzbstatus = 0 "+where+" LIMIT "+self.stage5limit)
    if resrel:
        for rowrel in resrel:
            # write the NZB to disk; only on success flag the release done
            # (nzbstatus = 1) and mark its collections (filecheck = 5)
            if nzb.writeNZBforReleaseId(rowrel['ID'], rowrel['guid'], rowrel['name'], rowrel['categoryID'], nzb.getNZBPath(rowrel['guid'], page.site().nzbpath, True, page.site().nzbsplitlevel)):
                mdb.queryDirect("UPDATE releases SET nzbstatus = 1 WHERE ID = %s", (rowrel['ID'],))
                mdb.queryDirect("UPDATE collections SET filecheck = 5 WHERE releaseID = %s", (rowrel['ID'],))
                nzbcount += 1
            # per-row progress output
            c.overWrite('Creating NZBs:'+c.percentString(nzbcount,len(resrel)))
    timing = c.convertTime(int(time.time() - stage5))
    print n+'%d NZBs created in %s.%s' % (nzbcount, timing, n)
    return nzbcount
def get_messages_for_id(id, filename):
    """Returns the messages shared with a person of a particular id as a
    list of Message objects.

    :id: The id of the person with whom the messages are shared.
    :filename: The filename of the database.
    """
    cached = load_messages_for_id(id)
    if cached:
        return cached
    print(
        "This is the first time you've searched for this user. Loading the database..."
    )
    database = db.DB(username='', password='', hostname='',
                     filename=filename, dbtype='sqlite')
    chat_user = database.tables.chat_message_join.all()
    all_messages = database.tables.message.all()
    # collect the ids of every message belonging to this chat partner
    message_ids = {
        chat_data['message_id']
        for _, chat_data in chat_user.iterrows()
        if chat_data['chat_id'] == id
    }
    dirty_messages = all_messages.loc[all_messages['ROWID'].isin(message_ids)]
    return clean_messages(dirty_messages)
def processReleasesStage4(self, groupID): mdb = db.DB() c = consoletools.Consoletools() n = self.n retcount = 0 where = ' AND groupID = %s' % groupID if groupID else '' print 'Stage 4 -> Create releases.' stage4 = time.time() rescol = mdb.queryDirect("SELECT * FROM collections WHERE filecheck = 3 AND filesize > 0 " + where + " LIMIT 1000") if rescol: for rowcol in rescol: cleanArr = '#@$%^'+chr(214)+chr(169)+chr(167) cleanSearchName = rowcol['name'].translate(string.maketrans('', ''), cleanArr) cleanRelName = rowcol['subject'].translate(string.maketrans('', ''), cleanArr) relguid = hashlib.md5(str(uuid.uuid1())).hexdigest() if mdb.queryInsert("INSERT INTO releases (name, searchname, totalpart, groupID, adddate, guid, rageID, postdate, fromname, size, passwordstatus, haspreview, categoryID, nfostatus) \ VALUES (%s, %s, %d, %d, now(), %s, -1, %s, %s, %s, %d, -1, 7010, -1)", (cleanrelName, cleanSearchName, rowcol['totalFiles'], rowcol['groupID'], relguid, rowcol['date'], rowcol['fromname'], \ rowcol['filesize'], 0)): relid = mdb.getInsertID() # udpate collections table to say we inserted the release mdb.queryDirect("UPDATE collections SET filecheck = 4, releaseID = %d WHERE ID = %d", (relid, rowcol['ID'])) retcount += 1 print 'Added release %s.' % cleanRelName else: print 'Error inserting release: %s' % cleanRelName timing = c.convertTime(int(time.time() - stage4)) print '%d releases added in %s.%s' % (retcount, timing, n) return retcount
def __init__(self, anno, mese):
    """Initialize for a given year (anno) and month (mese): open the DB,
    compute the number of days in the month, and set its first day."""
    self.anno = anno
    self.mese = mese
    self.db = db.DB()
    # monthrange returns (weekday of first day, number of days in month)
    self.ngiorni = calendar.monthrange(anno, mese)[1]
    # first day of the month, used as the range start
    self.dal = datetime.datetime(anno, mese, 1)
def main():
    """Demo of the DB API: create Person/Company tables, register row
    callbacks, insert a few rows, and read the people back by id."""
    database = db.DB('main')
    database.createTable('Person', {
        'name': '---',
        'age': 0,
        'company': '---',
        'active': False
    })
    database.createTable('Company', {'NAME': '***', 'COUNTRY': '***'})
    # row callbacks fire whenever a row is added to the table
    database.getTableByName('Person').RowCb.append(person)
    database.getTableByName('Company').RowCb.append(company)
    people = []
    for kwargs in (
        dict(name='Jose Carlos', age=50, company='CISCO'),
        dict(name='Matias', age=50, company='CISCO'),
        dict(name='Joselu', age=50, company='NOKIA'),
    ):
        row = database.addRowToTable('Person', **kwargs)
        print(row)
        people.append(row)
    cisco = database.addRowToTable('Company', NAME='CISCO', COUNTRY='USA')
    print(cisco)
    # fetch the same people back by id and show them
    fetched = [database.getRowFromTable('Person', p.id) for p in people]
    for row in fetched:
        print(row)
def process_news(res):
    """Match WeChat news items to known message keys and record their media ids.

    For each news item the key is the title text up to (and including) the
    first '号'; matched (title, media_id) pairs are appended to success.csv
    and pushed to the DB, unmatched ones to failed.csv.

    :param res: iterable of news records with 'media_id' and 'content' fields.
    :returns: result of DB.updateFewImage for the matched images.
    """
    outStr = ""
    failed_str = ""
    image = dict()
    for r in res:
        media_id = toStr(r['media_id'])
        title = toStr(r['content']['news_item'][0]['title'])
        # key = everything before the first '号', with the marker re-appended
        key = title.split('号')[0] + '号'
        key = key.strip().strip('【')
        if db.msg.has_key(key):
            outStr += "\n\"%s\",\"%s\""%(title,media_id)
            image[key] = media_id
        else:
            failed_str += "\n\"%s\",\"%s\""%(title,media_id)
    mDB = db.DB()
    ret = mDB.updateFewImage(image)
    mDB.close()
    with open(path_base+"success.csv",'a+') as f:
        f.write(outStr)
        f.close()  # redundant inside `with` (it closes on exit), kept as-is
    if failed_str != "":
        with open(path_base+"failed.csv",'a+') as f:
            f.write(failed_str)
            f.close()
    return ret
def updateSellerFruit():
    """Update the quantity or price of a seller's fruit, or delete the fruit.

    Expects posted JSON with sellerEmail, whichType ('quantity' | 'price' |
    anything else = delete), enteredThing, and fruitName.

    :returns: JSON response describing the action performed.
    """
    post_data = request.get_json()
    which_type = post_data['whichType']
    # SECURITY: these statements interpolate request values straight into
    # SQL via str.format (injection risk). query_insert should be given a
    # parameterized query; flagged rather than silently changed because the
    # DB wrapper's placeholder support is not visible from this file.
    if which_type == "quantity":
        sql = ("UPDATE seller_fruits set Quantity = {enteredThing} "
               "where sellerEmail='{sellerEmail}' and FruitName='{fruitName}'")
        result = "quantity updated"
    elif which_type == "price":
        sql = ("UPDATE seller_fruits set Price = {enteredThing} "
               "where sellerEmail='{sellerEmail}' and FruitName='{fruitName}'")
        result = "price updated"
    else:
        sql = ("DELETE from seller_fruits "
               "where sellerEmail='{sellerEmail}' and FruitName='{fruitName}'")
        result = "fruit deleted"
    # single execute/close path instead of three duplicated branches
    d = db.DB()
    d.query_insert(sql.format(**post_data))
    d.close_connection()
    return jsonify({'result': result})
def getBrowseCount(self, cat, maxage=-1, excludedcats=list(), grp=''):
    """Count browsable releases matching the category/age/group filters.

    :param cat: list of category IDs; empty or [-1] disables the filter.
    :param maxage: only releases newer than this many days when > 0.
    :param excludedcats: category ID strings to exclude.
    :param grp: restrict to this group name when non-empty.
    :returns: number of matching releases.
    """
    mdb = db.DB()
    catsrch = ''
    if len(cat) > 0 and cat[0] != -1:
        catsrch = ' and ('
        for category in cat:
            if category != -1:
                categ = category.Category()
                if categ.isParent(category):
                    # parent category: match any of its children
                    children = categ.getChildren(category)
                    chlist = '-99'
                    for child in children:
                        chlist += ', '+child['ID']
                    if chlist != '-99':
                        catsrch += ' releases.categoryID in ('+chlist+') or '
                else:
                    # BUG FIX: was `catsrc +=` — a misspelling of catsrch
                    # that raised NameError on the scalar-category path.
                    catsrch += ' releases.categoryID = %d or ' % (category)
        # close the OR group; the 1=1 absorbs the trailing 'or'
        # BUG FIX: same `catsrc` misspelling here.
        catsrch += '1=1 )'
    if maxage > 0:
        maxage = ' and postdate > now() - interval %d day ' % maxage
    else:
        maxage = ''
    grpsql = ''
    if grp != '':
        grpsql = ' and groups.name = %s ' % mdb.escapeString(grp)
    exccatlist = ''
    if len(excludedcats) > 0:
        exccatlist = ' and categoryID not in ('+','.join(excludedcats)+')'
    res = mdb.queryOneRow("select count(releases.ID) as num from releases left outer join groups on groups.ID = releases.groupID where releases.passwordstatus <= (select value from site where setting='showpasswordedrelease') %s %s %s %s", (catsrch, maxage, exccatlist, grpsql))
    return res['num']
def Login():
    """Authenticate a user and return JWT access/refresh tokens.

    Expects posted JSON with email, password and usertype. Responds with the
    tokens on success or a 'result' message describing the failure.
    """
    post_data = request.get_json()
    valid, msg = validationServer.validateSignUP(
        post_data, ['email', 'password', 'usertype'])
    if not valid:
        return jsonify({'result': msg})
    d = db.DB()
    # SECURITY: request values are interpolated into SQL via str.format
    # (injection risk) — get_rows should take a parameterized query;
    # flagged since the DB wrapper's placeholder support isn't visible here.
    passFromDb = d.get_rows(
        "SELECT password from users where email='{email}' and usertype='{usertype}' "
        .format(**post_data))
    # BUG FIX: the connection was only closed on the successful-password
    # path, leaking it on every failure; close it as soon as we're done.
    d.close_connection()
    if not passFromDb:
        return jsonify({'result': "Invalid Email / Password / UserType"})
    stored_hash = ''.join(passFromDb[0])
    if not passwordEncrypt.check_encrypted_password(
            post_data.get('password'), stored_hash):
        return jsonify({'result': "Invalid Password"})
    access_token = create_access_token(identity=post_data['email'])
    refresh_token = create_refresh_token(identity=post_data['email'])
    return jsonify({
        'message': '{0}'.format(post_data['email']),
        'access_token': access_token,
        'refresh_token': refresh_token,
        'usertype': post_data['usertype']
    })
def splitBunchedCollections(self): mdb = db.DB() # namecleaner = namecleaning res = mdb.queryDirect("SELECT b.ID as bID, b.name as bname, c.* FROM binaries b LEFT JOIN collections c ON b.collectionID = c.ID where c.filecheck = 10") if res: if len(res) > 0: print 'Extracting bunched up collections.' bunchedcnt = 0 cIDS = list() for row in res: cIDS.append(row['ID']) newMD5 = hashlib.md5(namecleaning.collectionsCleaner(row['bname'], 'split')+row['fromname']+row['groupID']+row['totalFiles']).hexdigest() cres = mdb.queryOneRow("SELECT ID FROM collections WHERE collectionhash = %s", (newMD5,)) if not cres: bunchedcnt += 1 csql = "INSERT INTO collections (name, subject, fromname, date, xref, groupID, totalFiles, collectionhash, filecheck, dateadded) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, 11, now())" collectionID = mdb.queryInsert(csql, (namecleaning.releaseCleaner(row['bname']), row['bname'], row['fromname'], row['date'], row['xref'], row['groupID'], row['totalFiles'], newMD5)) else: collectionID = cres['ID'] # update the collection table with the last seen date for the collection mdb.queryDirect("UPDATE collections set dateadded = now() where ID = %s", (collectionID,)) # update the parts/binaries with new info mdb.query("UPDATE binaries SET collectionID = %s where ID = %s", (collectionID, row['bID'],)) mdb.query("UPDATE parts SET binaryID = %s where binaryID = %s", (row['bID'], row['bID'],)) # remove the old collections for cID in list(set(cIDS)): mdb.query("DELETE FROM collections WHERE ID = %s", (cID,)) # update the collections to say we are done mdb.query("UPDATE collections SET filecheck = 0 WHERE filecheck = 11") print 'Extracted %d bunched collections.' % bunchedcnt
def getMap(self):
    """Plot the map-query rows (UTM easting/northing plus optional extra
    columns) on both a MapBox and a Folium map."""
    query = self.getMapQuery()
    # BUG FIX: `con` must exist before the try, otherwise the `finally`
    # block raises NameError when db.DB(...) itself fails to connect.
    con = None
    try:
        # connect to the database
        con = db.DB("localhost", "root", "", self.database, 3306)
        # get result from db
        result = con.executeQuery(query)
        lonAndLat = []
        infoMap = []
        for row in result:
            # first two columns are UTM zone-30 coordinates
            lonAndLat.append(self.utmToLatLng(30, row[0], row[1]))
            if len(row) > 2:
                # remaining columns are labelled by self.extrainfo (comma separated)
                mapElement = dict()
                for i in range(2, len(row)):
                    mapElement[self.extrainfo.split(',')[i - 2]] = row[i]
                infoMap.append(mapElement)
        mb = mapbox.MapBox(lonAndLat, self.id)
        mb.createMapBoxMap()
        f = folium.Folium(lonAndLat, self.id, infoMap)
        f.createFoliumMap()
    except Exception as e:
        print(e)
        # re-raise unchanged, preserving the original traceback
        raise
    finally:
        # close the connection only if it was actually created
        if con:
            con.close()
def getBrowseRange(self, cat, start, num, orderby, maxage=-1, excludedcats=list(), grp=''):
    """Return a page of browsable releases with category/group/nfo info joined in.

    :param cat: list of category IDs; empty or [-1] disables the filter.
    :param start: page offset; False means no LIMIT clause.
    :param num: page size.
    :param orderby: key understood by self.getBrowseOrder.
    :param maxage: only releases newer than this many days when > 0.
    :param excludedcats: category ID strings to exclude.
    :param grp: restrict to this group name when non-empty.
    """
    mdb = db.DB()
    # BUG FIX: `limit` was never defined (NameError) and was passed as the
    # first bind parameter; build it like getRange and append it after the
    # ORDER BY clause where it belongs.
    if start == False:
        limit = ''
    else:
        limit = ' LIMIT %s,%s' % (str(start), str(num))
    catsrch = ''
    if len(cat) > 0 and cat[0] != -1:
        catsrch = ' and ('
        for category in cat:
            if category != -1:
                categ = category.Category()
                if categ.isParent(category):
                    # parent category: match any of its children
                    children = categ.getChildren(category)
                    chlist = '-99'
                    for child in children:
                        chlist += ', '+child['ID']
                    if chlist != '-99':
                        catsrch += ' releases.categoryID in ('+chlist+') or '
                else:
                    # BUG FIX: was `catsrc +=` (misspelling, NameError).
                    catsrch += ' releases.categoryID = %d or ' % (category)
        # close the OR group; the 1=1 absorbs the trailing 'or'
        catsrch += '1=1 )'
    # BUG FIX: the string was assigned to `maxage` but the query used the
    # undefined name `maxagesql`; use one consistent name.
    if maxage > 0:
        maxagesql = ' and postdate > now() - interval %d day ' % maxage
    else:
        maxagesql = ''
    grpsql = ''
    if grp != '':
        grpsql = ' and groups.name = %s ' % mdb.escapeString(grp)
    exccatlist = ''
    if len(excludedcats) > 0:
        exccatlist = ' and categoryID not in ('+','.join(excludedcats)+')'
    order = self.getBrowseOrder(orderby)
    return mdb.query(
        "SELECT releases.*, concat(cp.title, ' > ', c.title) as category_name, "
        "concat(cp.ID, ',', c.ID) as category_ids, groups.name as group_name, "
        "rn.ID as nfoID, re.releaseID as reID "
        "from releases "
        "left outer join groups on groups.ID = releases.groupID "
        "left outer join releasevideo re on re.releaseID = releases.ID "
        "left outer join releasenfo rn on rn.releaseID = releases.ID and rn.nfo is not null "
        "left outer join category c on c.ID = releases.categoryID "
        "left outer join category cp on cp.ID = c.parentID "
        "where releases.passwordstatus <= (select value from site where setting='showpasswordedrelease') "
        "%s %s %s %s order by %s %s" + limit,
        (catsrch, maxagesql, exccatlist, grpsql, order[0], order[1]))
def __init__(self, config):
    """Set up the detector: DB session, video feed, per-camera state, stats,
    and optional GPS.

    :param config: dict with keys use_gps, detector_sensibility,
        min_decision_frames, save_dir, and the optional keys
        cam1_rotate_deg, cam2_rotate_deg, switch_cameras_on_start.
    """
    self.config = config
    self.use_gps = config["use_gps"]
    self.detector_sensibility = config["detector_sensibility"]
    self.min_decision_frames = config["min_decision_frames"]
    # persistence layer: one DB session reused for the detector's lifetime
    self.DB = db.DB("tst", config["save_dir"])
    self.DB.connect()
    self.session = self.DB.get_session()
    # running counters and per-camera detection state
    self.person_id = 0
    self.frame_id = 0
    self.detecting_id_cam1 = 0
    self.detecting_id_cam2 = 0
    self.identity_dict = {}
    self.identity_list = []
    self.verify_dict = {}
    self.vidsource = videofeed.VideoFeed(config)
    # logical camera indices; may be swapped below on request
    self.cam1 = 0
    self.cam2 = 1
    self.cam1_rotate_deg = config.get('cam1_rotate_deg', 0)
    self.cam2_rotate_deg = config.get('cam2_rotate_deg', 0)
    # per-camera stats seeded with each feed's frame rate
    self.stats = DetectorStats(
        cam_stats=[CameraStats(fps=self.vidsource.fps[0]),
                   CameraStats(fps=self.vidsource.fps[1])])
    if config.get('switch_cameras_on_start', False):
        self._switch_cams()
    self.GPS = None
    if self.use_gps:
        self.GPS = gps.GPS()
        self.GPS.start()
def get_block(blockhash):
    """Load a block by hash from the DB and wrap it as a CachedBlock.

    Assumption: blocks loaded from the db are not manipulated, so they can
    be cached including their hash.
    """
    raw = db.DB(utils.get_db_path()).get(blockhash)
    return CachedBlock.create_cached(Block.deserialize(raw))
def getNameByID(self, ID):
    """Return '<parent title> <category title>' for a category ID.

    The parent row's ID is derived as the first digit of `ID` followed
    by '000' (categories are grouped in thousands).
    """
    mdb = db.DB()
    parent_row = mdb.queryOneRow(
        'SELECT title FROM category WHERE ID = %s000' % (ID[0],))
    cat_row = mdb.queryOneRow('SELECT title FROM category WHERE ID = %s', (ID,))
    return '%s %s' % (parent_row['title'], cat_row['title'])
def cmd_edit_goods(content, user):
    """Edit fields of a stored goods message from a chat command.

    `content` looks like '修改<key>号 field1 value1 field2 value2 ...' with
    parts separated by punctuation or spaces. A field named 'l<i>' updates
    price slot i; any other field must already exist in the answer dict.

    :returns: a status string (success, or which key/field was not found).
    """
    # split on the common CJK/ASCII separators
    info = re.split(r'[+|,|,|.|。| |*]+', content)
    key = info[0].split('修改')[1].split('号')[0] + '号'
    # number of (field, value) pairs following the key
    lens = (len(info) - 1) / 2
    m_msg = db.MSG(key, dict(), [0] * 10)
    if db.msg.has_key(key):
        m_msg.copyFrom(db.msg[key])
    else:
        return "找不到关键字:%s" % key
    for i in range(lens):
        index = i * 2 + 1
        k = info[index]
        v = info[index + 1]
        if 'l' in k:
            # 'l<i>' addresses the i-th price slot (rebinds the loop var i,
            # which is reset by range() on the next iteration)
            i = int(k.split('l')[1])
            m_msg.price[i] = v
        elif m_msg.answer.has_key(k):
            m_msg.answer[k] = v
            print k + ": " + v
        else:
            return "找不到元素:%s" % k
    mDB = db.DB()
    result = mDB.updateMsg(m_msg)
    mDB.close()
    return "已经修改:%s" % key
def fixFromGenerator(project, dumpspath, thegenerator, stopcounter=-1):
    """Apply the project's fixes module to every page title from `thegenerator`.

    Writes fixed titles to the project's titles file, logs garbage output as
    errors, and stops early after `stopcounter` titles (-1 = no limit).
    """
    mdlfixes, paths = loadPathsAndLibs(project, dumpspath)
    with db.DB(paths['dbfullpath']) as mydb:
        titlecounter = 0
        errorcounter = 0
        with open(paths['titles'], 'wt', encoding='utf_8') as ftitles:
            ftitles.write("summary=" + mdlfixes.summary + '\n')
            for pagetitle in thegenerator:
                if titlecounter == stopcounter:
                    print(errorcounter, 'errorcounter')
                    print('stopcounter found, exiting fixes.')
                    return
                # BUG FIX: was `mydb.getLemmaContent(title)` — `title` is
                # undefined here; the loop variable is `pagetitle`.
                oldwikitext, thets = mydb.getLemmaContent(pagetitle)
                # BUG FIX: was `fixes.fixthis(...)` — the loaded module is
                # bound to `mdlfixes` above, not `fixes`.
                newtext, garbage = mdlfixes.fixthis(pagetitle, oldwikitext)
                if garbage != '':
                    errorcounter += 1
                    appendError(pagetitle + ':' + garbage[:50].replace('\n', '⁋') + '\n')
                elif newtext != oldwikitext:
                    titlecounter += 1
                    appendFixesData(paths, titlecounter, pagetitle, oldwikitext, newtext, ftitles)
            print('generated finished')
        print('closed:', paths['titles'])
    print('exited db')
def main():
    """Flask view: show the Dominion randomizer form and, on submit, pick 10
    random cards from the selected sets (excluding blacklisted card names)."""
    #WTForms validators
    def set_checker(form, field):
        # request.values always carries the submit button and blacklist
        # field, so more than 2 entries means at least one set is checked
        if len(request.values) > 2:
            return True
        raise ValidationError('Please select at least one set.')

    def blacklist_checker(form, field):
        # reject a blacklist that ends in a separator (would break parsing)
        if re.match(".*[,;]$", request.values['blacklist']):
            raise ValidationError(
                'Please don\'t have trailing punctuation in the blacklist box.'
            )

    class RandomizerForm(Form):
        base = BooleanField('Base')
        intrigue = BooleanField('Intrigue')
        seaside = BooleanField('Seaside')
        alchemy = BooleanField('Alchemy')
        prosperity = BooleanField('Prosperity')
        cornucopia = BooleanField('Cornucopia')
        hinterlands = BooleanField('Hinterlands')
        darkages = BooleanField('Dark Ages')
        guilds = BooleanField('Guilds')
        adventures = BooleanField('Adventures')
        blacklist = TextAreaField('Blacklisted cards', [blacklist_checker])
        randomize_button = SubmitField('Get cards!', [set_checker])

    form = RandomizerForm(request.values)
    if ('randomize_button' in request.values) and form.validate():
        conn = db.DB()
        sets = []
        # translate the submitted field names into quoted CardSet values
        for set in _.keys(request.values):
            if set == 'darkages':
                sets.append("\'Dark Ages\'")
            elif set != 'randomize_button' and set != 'blacklist':
                sets.append("\'" + set + "\'")
        where_string = ', '.join(sets)
        # materialize 10 random non-blacklisted cards into a view
        query = """CREATE OR REPLACE VIEW picked_cards as SELECT * FROM cards WHERE CardSet IN ({}) AND ({}) ORDER BY RAND() LIMIT 10""".format(
            where_string, not_statement.blacklist(request.values['blacklist']))
        conn.execute(query)
        query = "SELECT * FROM picked_cards ORDER BY CardSet, Cost"
        content = conn.execute(query).fetchall()
        img_links = utils.cardImgLinker(content)
        return render_template('main.html', form=form, links=img_links)
    return render_template('main.html', form=form)
def dbinit():
    """Read config.json, open a Postgres-backed DB, and wrap it in a BillDB."""
    with open('config.json') as conf_file:
        settings = json.load(conf_file)
        database = db.DB(settings["postgres"])
    return billdb.BillDB(database)
def test_link(self, event):
    """Test the database connection using the values currently in the form,
    updating the status icon and label with the outcome."""
    db_action = db.DB()
    message = self.message_name.GetValue()
    # hand the connection settings from the form fields to the DB helper
    db_action.afferent_message(
        self.ip.GetValue(),
        self.user_name.GetValue(),
        self.password.GetValue(),
        self.DB_type.GetValue(),
        self.DB_name.GetValue(),
        self.port.GetValue(),
    )
    if message == "":
        self.state.SetLabel("数据名不可为空")
        return 0
    result = db_action.connect(message)
    if result[0] == 0:
        # connection failed: show the failure icon and the error text
        icon = wx.Image("fail.bmp", wx.BITMAP_TYPE_BMP, ).ConvertToBitmap()
        self.button.SetBitmap(icon)
        self.state.SetLabel(str(result[1]))
    else:
        icon = wx.Image("success.bmp", wx.BITMAP_TYPE_BMP, ).ConvertToBitmap()
        self.button.SetBitmap(icon)
def __init__(self, dbenv=None):
    """Open a DB handle in the given environment; pickling uses the highest
    available protocol, falling back to 1 when the constant is falsy."""
    self.db = db.DB(dbenv)
    self._closed = True
    self.protocol = HIGHEST_PROTOCOL if HIGHEST_PROTOCOL else 1
def update(self, form):
    """Validate the posted site settings, persist them with a single CASE
    update, and return the site object (or an ERR_* code on failure)."""
    mdb = db.DB()
    site = self.row2Object(form)
    # nzbpath must end with a trailing slash
    if site['nzbpath'][-1] != '/':
        site['nzbpath'] = site['nzbpath']+'/'
    # validate site settings
    if site['mediainfopath'] != '' and not os.path.isfile(site['mediainfopath']):
        return self.ERR_BADMEDIAINFOPATH
    # BUG FIX: this check tested site['mediainfopath'] instead of
    # site['ffmpegpath'] (copy-paste error).
    if site['ffmpegpath'] != '' and not os.path.isfile(site['ffmpegpath']):
        return self.ERR_BADFFMPEGPATH
    if site['unrarpath'] != '' and not os.path.isfile(site['unrarpath']):
        return self.ERR_BADUNRARPATH
    if site['nzbpath'] != '' and not os.path.isdir(site['nzbpath']):
        return self.ERR_BADNZBPATH
    if site['checkpasswordedrar'] == 1 and not os.path.isfile(site['unrarpath']):
        return self.ERR_BADTMPUNRARPATH
    if site['tmpunrarpath'] != '' and not os.path.isfile(site['tmpunrarpath']):
        return self.ERR_BADTMPUNRARPATH
    # BUG FIX: `sql = sqlKeys = dict()` bound BOTH names to one shared dict,
    # and dicts have no append() (AttributeError); use two separate lists.
    sql = []
    sqlKeys = []
    for settingK, settingV in form.iteritems():
        sql.append('WHEN %s THEN %s' % (mdb.escapeString(settingK), mdb.escapeString(settingV)))
        sqlKeys.append(mdb.escapeString(settingK))
    mdb.query('UPDATE site SET value = CASE setting %s END WHERE setting IN (%s)', (' '.join(sql), ', '.join(sqlKeys)))
    return site