def get_data_use_sql(model, start_index, end_index, filter_params):
    u'''Special-cased, optimized export for particular models.'''
    from django.db import connection as cnn
    from base.middleware.threadlocals import get_current_request
    from mysite.personnel.models.model_deptadmin import DeptAdmin
    request = get_current_request()
    dict_export = get_eport_dict()
    sql_key = model._meta.app_label + "." + model.__name__
    if dict_export.has_key(sql_key):
        fields = dict_export[sql_key]["db_fields"]
        use_perms = dict_export[sql_key]["use_perms"]
        need_verbose_field = dict_export[sql_key]["verbose_fields"]
        cur = cnn.cursor()
        sql_add = ""
        #print dict_export[sql_key]["sql"] % (filter_params, start_index, end_index)
        dept_admin_ids = DeptAdmin.objects.filter(
            user=request.user).values_list("dept_id", flat=True)
        try:
            if use_perms == "True":  # export restricted by authorized departments
                if not request.user.is_superuser:
                    if dept_admin_ids:  # the logged-in user is scoped to authorized departments
                        sql_add = ("and u.defaultdeptid in (select dept_id "
                                   "from deptadmin where user_id=%s)" % request.user.pk)
                    sql = dict_export[sql_key]["sql"] % (
                        sql_add, filter_params, start_index, end_index)
                    cur.execute(sql)
                else:
                    cur.execute(dict_export[sql_key]["admin_sql"] %
                                (filter_params, start_index, end_index))
            else:
                cur.execute(dict_export[sql_key]["sql"] %
                            (filter_params, start_index, end_index))
            records = cur.fetchall() or []
            cnn._commit()
        except:
            import traceback
            traceback.print_exc()
            records = []
        ret = []
        fields_len = len(fields)
        for elem in records:
            row = {}
            for i in range(fields_len):
                e = elem[i]
                if need_verbose_field.has_key("%s" % i):
                    tmp_dict = dict(model._meta.get_field(
                        need_verbose_field["%s" % i]).get_choices())
                    if tmp_dict.has_key(e):
                        row[fields[i]] = u"%s" % tmp_dict[e]
                    else:
                        row[fields[i]] = e
                else:
                    row[fields[i]] = e
            ret.append(row)
        return ret
    else:
        return []
def _store_lexemes(lexeme_nodes):
    log.start('Storing lexemes', nSteps=6)
    cursor = connection.cursor()
    log.log('Clearing tables')
    cursor.execute('DELETE FROM lexicon_lexemereading')
    cursor.execute('DELETE FROM lexicon_lexemesense')
    cursor.execute('DELETE FROM lexicon_lexemesurface')
    cursor.execute('DELETE FROM lexicon_lexeme')
    cursor.execute('COMMIT')

    next_lexeme_id = 1
    lexeme_surface_stack = []
    lexeme_sense_stack = []
    lexeme_reading_stack = []
    log.log('Building insert stacks')
    for lexeme_node in lexeme_nodes:
        _populate_stacks(lexeme_node, next_lexeme_id, lexeme_surface_stack,
                         lexeme_sense_stack, lexeme_reading_stack)
        next_lexeme_id += 1

    max_rows = settings.N_ROWS_PER_INSERT
    log.log('Storing to lexicon_lexeme')
    for lexeme_rows in groups_of_n_iter(max_rows, xrange(1, next_lexeme_id)):
        cursor.executemany('INSERT INTO lexicon_lexeme (id) VALUES (%s)',
                           lexeme_rows)

    log.log('Storing to lexicon_lexemesurface')
    for lexeme_surface_rows in groups_of_n_iter(max_rows, lexeme_surface_stack):
        cursor.executemany(
            """
            INSERT INTO lexicon_lexemesurface (lexeme_id, surface, has_kanji, in_lexicon)
            VALUES (%s, %s, %s, %s)
            """, lexeme_surface_rows)

    log.log('Storing to lexicon_lexemereading')
    for lexeme_reading_rows in groups_of_n_iter(max_rows, lexeme_reading_stack):
        cursor.executemany(
            """
            INSERT INTO lexicon_lexemereading (lexeme_id, reading)
            VALUES (%s, %s)
            """, lexeme_reading_rows)

    log.log('Storing to lexicon_lexemesense')
    for lexeme_sense_rows in groups_of_n_iter(max_rows, lexeme_sense_stack):
        cursor.executemany(
            """
            INSERT INTO lexicon_lexemesense (lexeme_id, gloss, is_first_sense)
            VALUES (%s, %s, %s)
            """, lexeme_sense_rows)

    connection._commit()
    log.finish()
    return
def commit():
    """
    Does the commit itself and resets the dirty flag.
    """
    connection._commit()
    set_clean()
    _execute_post_commit_callbacks()
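# A minimal sketch (not from the original sources) of how a post-commit
# callback registry like the one commit() above relies on is typically wired
# up. The names `_post_commit_callbacks` and `on_commit` are assumptions for
# illustration; modern Django exposes the same idea as
# django.db.transaction.on_commit().
_post_commit_callbacks = []

def on_commit(callback):
    # Queue a callable to run once the current transaction commits.
    _post_commit_callbacks.append(callback)

def _execute_post_commit_callbacks():
    # Run and clear the queued callbacks; commit() above calls this after
    # connection._commit() succeeds, so callbacks only fire on durable data.
    global _post_commit_callbacks
    callbacks, _post_commit_callbacks = _post_commit_callbacks, []
    for callback in callbacks:
        callback()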
def handle(self, *args, **options):
    sql1 = u'''
        TRUNCATE TABLE base_aditivo, base_aditivoitemcontrato,
        base_anexoataregistropreco, base_anexocontrato, base_anexocredenciamento,
        base_anexopregao, base_logdownloadarquivo, base_historicopregao,
        base_itemataregistropreco, base_pedidoataregistropreco,
        base_transferenciaitemarp, base_itempesquisamercadologica,
        base_itemquantidadesecretaria, base_itemlote, base_ordemcompra,
        base_itemsolicitacaolicitacao, base_itemataregistropreco,
        base_movimentosolicitacao, base_ataregistropreco,
        base_solicitacaolicitacaotmp, base_documentosolicitacao,
        base_pesquisamercadologica, base_solicitacaolicitacao_interessados,
        base_credenciamento, base_pedidocredenciamento, base_itemcredenciamento,
        base_solicitacaolicitacao, base_pedidocontrato, base_itemcontrato,
        base_contrato, base_participantepregao, base_visitantepregao,
        base_pregao, base_lanceitemrodadapregao, base_participanteitempregao,
        base_propostaitempregao, base_rodadapregao, base_resultadoitempregao,
        base_certidaocrc, base_cnaesecundario, base_comissaolicitacao,
        base_membrocomissaolicitacao, base_dotacaoorcamentaria, base_feriado,
        base_fornecedorcrc, base_interessadoedital, base_modeloata,
        base_modelodocumento, base_motivosuspensaopregao, base_pessoafisica,
        base_processo, base_setor, base_secretaria, base_sociocrc, auth_user,
        base_itempregao, base_configuracao, auth_user_groups,
        auth_user_user_permissions, django_admin_log, easyaudit_crudevent,
        easyaudit_loginevent;
    '''
    from django.db import connection
    cur = connection.cursor()
    cur.execute(sql1)
    connection._commit()
    secretaria = Secretaria.objects.get_or_create(
        nome=u'Secretaria de Planejamento', sigla=u'SEMPLA')[0]
    setor_licitacao = Setor.objects.get_or_create(
        nome=u'Setor de Licitação', sigla=u'SECLIC', secretaria=secretaria)[0]
    root = User.objects.get_or_create(
        username=u'admin', is_active=True, is_superuser=True, is_staff=True,
        password=u'pbkdf2_sha256$20000$THrN7vMCbCch$hvQF8rxuA0EZ6A0Z/q2+izYd4u226ic/XaHXHQ/rJhg=',
        date_joined=u'2016-06-06T15:52:27.985')[0]
    pessoa = PessoaFisica()
    pessoa.nome = u'Administrador'
    pessoa.cpf = u'12345678900'
    pessoa.sexo = PessoaFisica.SEXO_MASCULINO
    pessoa.setor = setor_licitacao
    pessoa.user = root
    pessoa.save()
def populate_ni_loc(location):
    """
    Add NewsItemLocations for all NewsItems that overlap with the new Location.
    """
    ni_count = NewsItem.objects.count()
    cursor = connection.cursor()
    # In case the location is not new...
    NewsItemLocation.objects.filter(location=location).delete()
    old_niloc_count = NewsItemLocation.objects.count()
    i = 0
    batch_size = 400
    while i < ni_count:
        # We don't use intersecting_collection() because we should have cleaned up
        # all our geometries by now and it's sloooow ... there could be millions
        # of db_newsitem rows.
        cursor.execute("""
            INSERT INTO db_newsitemlocation (news_item_id, location_id)
            SELECT ni.id, loc.id FROM db_newsitem ni, db_location loc
            WHERE st_intersects(ni.location, loc.location)
              AND ni.id >= %s AND ni.id < %s
              AND loc.id = %s
            """, (i, i + batch_size, location.id))
        connection._commit()
        i += batch_size
    new_count = NewsItemLocation.objects.count()
    logger.info("New: %d NewsItemLocations" % (new_count - old_niloc_count))
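# A generic version of the id-range batching used above, as a minimal sketch;
# the helper name `batched_id_ranges` is an assumption, not part of the
# original code. Batching by primary-key ranges keeps each INSERT..SELECT
# short-lived. Note that, like the original, this treats the row count as an
# upper bound on the ids, so sparse id ranges simply produce empty batches.
def batched_id_ranges(max_id, batch_size):
    # Yield (lower, upper) half-open id ranges covering [0, max_id).
    lower = 0
    while lower < max_id:
        yield lower, lower + batch_size
        lower += batch_size

# Example: the loop in populate_ni_loc() could be written as
#     for lo, hi in batched_id_ranges(ni_count, 400):
#         cursor.execute(sql, (lo, hi, location.id))
#         connection._commit()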
def postPhoto(request):
    response = HttpResponse(mimetype='text/plain')
    device = checkDevice(request, response)
    if device is None:
        return response
    try:
        pin = request.REQUEST["PIN"]
        pin = pin.split(".")[0].split("-")
        dt = pin[0]
        if len(pin) == 2:  # success picture
            pin = pin[1]
        else:
            pin = None
        fname = getUploadFileName("%s/%s/%s" % (device.SN, dt[:4], dt[4:8]),
                                  pin, dt[8:] + ".jpg")
        try:
            os.makedirs(os.path.split(fname)[0])
        except:
            pass  # errorLog(request)
        f = file(fname, "wb")
        d = request.raw_post_data
        if "CMD=uploadphoto" in d:
            d = d.split("CMD=uploadphoto")[1][1:]
        if "CMD=realupload" in d:
            d = d.split("CMD=realupload")[1][1:]
        f.write(d)
        f.close()
        #if request.REQUEST.has_key('PhotoStamp'):
        if (request.REQUEST.has_key('Stamp') and request.REQUEST.has_key('table')
                and request.REQUEST['table'] == 'ATTPHOTO'):  # parameters changed by super, 2010-07-22
            devlog(SN=device, Cnt=1, OP=u"PICTURE", Object=pin,
                   OpTime=datetime.datetime.now()).save()
            #device.PhotoStamp = request.REQUEST['PhotoStamp']
            device.PhotoStamp = request.REQUEST['Stamp']
            saveDeviceStamp(conn.cursor(), device, 'photostamp', device.PhotoStamp)
            conn._commit()
            cache.set("iclock_" + device.SN, device)
    except Exception, e:
        errorLog(request)
def remove_break_log():
    try:
        # break_card_list = cursor.execute(bak_log_sql).fetchall()
        cursor = conn.cursor()
        pos_log_sql = get_sql("ic_pos_utils", sqlid="filter_pos_log_sql",
                              app="pos", params={}, id_part={})
        pos_log_list = customSql(pos_log_sql, False).fetchall()
        for p_obj in pos_log_list:
            sys_card_no = p_obj[0]
            card_serial_num = p_obj[1]
            # delete_sql = delete_break_log % ({"sys_card_no": sys_card_no, "card_serial_num": card_serial_num})
            params = {
                "sys_card_no": sys_card_no,
                "card_serial_num": card_serial_num,
            }
            delete_sql = get_sql("ic_pos_utils", sqlid="delete_break_log",
                                 app="pos", params=params, id_part={})
            cursor.execute(delete_sql)
            conn._commit()
    except:
        conn.close()
        import traceback
        traceback.print_exc()
def install_postgresql():
    """
    Install support for distance calculation for postgresql.
    """
    from django.db import connection
    cursor = connection.cursor()
    cursor.execute("""
    --
    -- Add function for performing distance calculation (Haversine Formula)
    --
    CREATE OR REPLACE LANGUAGE plpgsql;
    CREATE OR REPLACE FUNCTION distance_in_km(lat1 float, lng1 float,
                                              lat2 float, lng2 float)
    RETURNS float AS $$
    DECLARE
        R  FLOAT := 6371;
        f1 FLOAT := radians(lat1);
        f2 FLOAT := radians(lat2);
        df FLOAT := radians(lat2 - lat1);
        dd FLOAT := radians(lng2 - lng1);
        a  FLOAT := sin(df/2) * sin(df/2) +
                    cos(f1) * cos(f2) * sin(dd/2) * sin(dd/2);
        c  FLOAT := 2 * atan2(sqrt(a), sqrt(1 - a));
    BEGIN
        RETURN R * c;
    END;
    $$ LANGUAGE plpgsql;""")
    connection._commit()
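# A minimal usage sketch for the function installed above (the wrapper name
# `distance_km` and the coordinate values are invented for illustration):
# call distance_in_km from SQL through a Django cursor. Paris -> London is
# roughly 344 km, which makes a handy sanity check.
def distance_km(lat1, lng1, lat2, lng2):
    from django.db import connection
    cursor = connection.cursor()
    cursor.execute("SELECT distance_in_km(%s, %s, %s, %s)",
                   (lat1, lng1, lat2, lng2))
    return cursor.fetchone()[0]

# distance_km(48.8566, 2.3522, 51.5074, -0.1278)  # -> ~343.5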
def drop_tables(self):
    print 'Dropping tables...'
    psql = settings.DATABASES['default'][
        'ENGINE'] == 'django.db.backends.postgresql_psycopg2'
    sqlite = settings.DATABASES['default'][
        'ENGINE'] == 'django.db.backends.sqlite3'
    c = connection.cursor()
    if psql:
        c.execute('''
            SELECT 'DROP TABLE ' || n.nspname || '.' || c.relname || ' CASCADE;'
            FROM pg_catalog.pg_class AS c
            LEFT JOIN pg_catalog.pg_namespace AS n ON n.oid = c.relnamespace
            WHERE relkind = 'r'
              AND n.nspname NOT IN ('pg_catalog', 'pg_toast')
              AND pg_catalog.pg_table_is_visible(c.oid)
        ''')
        rows = c.fetchall()
        for r in rows:
            c.execute(r[0])
        connection._commit()
    elif sqlite:
        c.execute("SELECT 'DROP TABLE ' || name || ';' FROM sqlite_master "
                  "WHERE type = 'table'")
        rows = c.fetchall()
        for r in rows:
            c.execute(r[0])
        connection.close()
def del_old_cmd():
    cursor = connection.cursor()
    cursor.execute(
        "delete from %s where CmdCommitTime<'%s'"
        % (devcmds._meta.db_table,
           (datetime.datetime.now()
            - datetime.timedelta(days=60)).strftime("%Y-%m-%d %H:%M:%S")))
    connection._commit()
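# The same deletion written with a parameterized query, as a hedged
# alternative sketch (the function name is invented for illustration):
# letting the driver bind the timestamp avoids quoting bugs that string
# interpolation can introduce. Table names cannot be bound as parameters,
# so the db_table substitution stays; note the doubled %%s survives the
# outer %-formatting as the driver's %s placeholder.
def del_old_cmd_parameterized():
    cursor = connection.cursor()
    cutoff = datetime.datetime.now() - datetime.timedelta(days=60)
    cursor.execute(
        "delete from %s where CmdCommitTime < %%s" % devcmds._meta.db_table,
        (cutoff,))
    connection._commit()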
def customSql(sql, action=True):
    # from django.db import connection
    cursor = connection.cursor()
    cursor.execute(sql)
    if action:
        connection._commit()
    return cursor
def commit_unless_managed():
    """
    Commits changes if the system is not in managed transaction mode.
    """
    if not is_managed():
        connection._commit()
    else:
        set_dirty()
def customSql(sql, action=True):
    from django.db import connection
    cursor = connection.cursor()
    cursor.execute(sql)
    if action:
        connection._commit()
    return cursor
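# A usage sketch for the customSql helpers above; the statements reuse the
# `iclock` table seen elsewhere in this file purely for illustration. The
# helper both executes and commits, then hands back the live cursor so the
# caller can fetch.
cursor = customSql("update iclock set user_count=0")                  # write + commit
rows = customSql("select sn from iclock", action=False).fetchall()    # read only, no commit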
def increment_skip(self, page_id):
    # Use this to increment the 'times_skipped' column atomically.
    # I.e., it's better to use this than to call save() on Page objects,
    # because that introduces the possibility of clashes.
    from django.db import connection
    cursor = connection.cursor()
    cursor.execute(
        "UPDATE %s SET times_skipped = times_skipped + 1 WHERE id = %%s"
        % Page._meta.db_table, (page_id,))
    connection._commit()
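# A hedged alternative sketch using Django's F() expressions, which express
# the same atomic in-database increment through the ORM instead of raw SQL.
# This is a different technique from the raw UPDATE above, not the original
# code; the function name is invented for illustration.
from django.db.models import F

def increment_skip_orm(page_id):
    # UPDATE ... SET times_skipped = times_skipped + 1 is generated by the ORM,
    # so concurrent calls cannot clobber each other's increments.
    Page.objects.filter(id=page_id).update(times_skipped=F('times_skipped') + 1)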
def short(*args, **kwargs):
    try:
        cursor = connection.cursor()
        cursor.execute(*args, **kwargs)
        connection._commit()
        return True
    except Exception, info:
        print info
        return False
def custom_sql(sql, action=True):
    '''
    Execute a user-supplied SQL statement.
    '''
    cursor = connection.cursor()
    cursor.execute(sql)
    if action:
        connection._commit()
    return cursor
def trySql(cursor, sql):
    try:
        cursor.execute(sql)
        conn._commit()
    except:
        conn.close()
        cursor = conn.cursor()
        cursor.execute(sql)
        conn._commit()
def customSql(sql, action=True):
    try:
        cursor = connection.cursor()
        cursor.execute(sql)
        if action:
            connection._commit()
        return cursor
    except:
        return None
def commit_unless_managed():
    """
    Commits changes if the system is not in managed transaction mode.
    """
    if not is_managed():
        connection._commit()
        clean_savepoints()
        _execute_post_commit_callbacks()
    else:
        set_dirty()
def batchSql(sqls):
    for s in sqls:
        try:
            customSql(s)
            connection._commit()
        except:
            try:
                connection.close()
                customSql(s)
            except Exception, e:
                pass
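# Usage sketch for batchSql; the statements reuse table names seen elsewhere
# in this file and are invented for illustration. Each statement is retried
# once on a fresh connection, and failures past the retry are swallowed, so
# callers get best-effort rather than all-or-nothing semantics.
batchSql([
    "delete from devcmds where CmdCommitTime < '2010-01-01 00:00:00'",
    "update iclock set user_count=0",
])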
def disp_emp(request, delemp):
    from django.db import connection as conn
    cursor = conn.cursor()
    task = (delemp and u'deluser'
            or request.path.find('name_emp') >= 0 and u'username'
            or u'userinfo')
    titles = {'deluser': _('Delete employee from device'),
              'username': _("Download employee's name to device"),
              'userinfo': _('Dispatch employee to device')}
    title = titles[task]
    if request.method != 'POST':
        return render_to_response('disp_emp.html', {'title': title, 'task': task})
    cc = u''
    SNs = request.user.is_superuser and settings.ALL_TAG or getUserIclocks(request.user)
    process = (task == 'deluser' and DelUserDev
               or task == 'userinfo' and AppendUserDev
               or NameUserDev)
    errorLines = []
    i = 0
    okc = 0
    f = request.FILES['fileUpload']
    lines = ''
    for chunk in f.chunks():
        lines += chunk
    lines = lines.decode('GBK').split('\n')
    for line in lines:
        i += 1
        # strip up to two trailing line-end characters left by CRLF input
        if line and line[-1] in ('\r', '\n'):
            line = line[:-1]
        if line and line[-1] in ('\r', '\n'):
            line = line[:-1]
        try:
            if line:
                if line.find('\t') >= 0:
                    data = (line + '\t').split('\t')
                elif line.find(',') >= 0:
                    data = (line + ',').split(',')
                else:
                    data = (line + ' ').split(' ', 1)
                error = process(data, SNs, cursor)
                if error:
                    errorLines.append(u'Line %d(%s):%s' % (i, line, error))
                okc += 1
        except Exception, e:
            errorLines.append(u'Line %d(%s):%s' % (i, line, str(e)))
    if okc:
        conn._commit()
    if len(errorLines) > 0:
        if okc > 0:
            cc += _("%s employee's data is ready to transfer, but following record(s) is missing:") % okc + '</p><pre>'
        else:
            cc += _('There are wrong: ') + '</p><pre>'
        cc += u'\n'.join(errorLines)
        cc += u'</pre>'
def action(self):
    from django.db import connection
    from mysite.iaccess import sqls
    if self.object.device_type == DEVICE_ACCESS_CONTROL_PANEL:
        try:
            if not self.object.enabled:
                raise Exception(_(u'The device has been disabled!'))
            dev_id = self.object.id
            user_count = self.object.upload_user_info_template("user")
            if user_count is None:
                user_count = -1
            if self.object.accdevice.IsOnlyRFMachine == 0:
                fp_count = self.object.upload_user_info_template("templatev10")
                #face_count = self.object.upload_user_info_template("FACE")
                if fp_count is None:
                    fp_count = -1
                # if face_count is None:
                #     face_count = -1
                if user_count >= 0 and fp_count < 0:
                    raise Exception(_(u"Fetched user basic info, but fetching fingerprint data failed!"))
                elif user_count < 0 and fp_count >= 0:
                    raise Exception(_(u"Fetched fingerprint data, but fetching user basic info failed!"))
                elif user_count < 0 and fp_count < 0:
                    raise Exception(_(u"Fetching both user basic info and fingerprint data failed!"))
                # elif face_count < 0:
                #     raise Exception(_(u"Fetching face template data failed!"))
                else:
                    cursor = connection.cursor()
                    # sql = 'update iclock set user_count=%s,fp_count=%s,face_count=%s where id=%s' % (user_count, fp_count, face_count, dev_id)
                    #sql = 'update iclock set user_count=%s,fp_count=%s where id=%s' % (user_count, fp_count, dev_id)
                    sql = sqls.UploadUserInfoz_update(user_count, dev_id, fp_count)
                    cursor.execute(sql)
                    connection._commit()
                    connection.close()
            else:
                if user_count < 0:
                    raise Exception(_(u"Fetching user info failed!"))
                else:
                    cursor = connection.cursor()
                    #sql = 'update iclock set user_count=%s where id=%s' % (user_count, dev_id)
                    sql = sqls.UploadUserInfoz_update(user_count, dev_id, None)
                    cursor.execute(sql)
                    connection._commit()
                    connection.close()
        except:  # None cannot be compared
            print_exc()
            raise Exception(_(u"Fetching user info failed!"))
def populate_ni_loc(location):
    ni_count = NewsItem.objects.count()
    cursor = connection.cursor()
    i = 0
    while i < ni_count:
        print i
        cursor.execute("""
            INSERT INTO db_newsitemlocation (news_item_id, location_id)
            SELECT ni.id, loc.id FROM db_newsitem ni, db_location loc
            WHERE intersects(loc.location, ni.location)
              AND ni.id >= %s AND ni.id < %s
              AND loc.id = %s
            """, (i, i + 200, location.id))
        connection._commit()
        i += 200
def populate_streets(*args, **kwargs):
    """
    Populates the streets table from the blocks table
    """
    print 'Populating the streets table'
    cursor = connection.cursor()
    cursor.execute("TRUNCATE streets")
    cursor.execute("""
        INSERT INTO streets (street, pretty_name, street_slug, suffix, city, state)
        SELECT DISTINCT street, street_pretty_name, street_slug, suffix, left_city, left_state
        FROM blocks
        UNION
        SELECT DISTINCT street, street_pretty_name, street_slug, suffix, right_city, right_state
        FROM blocks
    """)
    connection._commit()
def customSqlEx(sql, params=[], action=True):
    try:
        cursor = connection.cursor()
        if settings.DATABASE_ENGINE == 'ibm_db_django':
            if not params:
                params = ()
        if params:
            cursor.execute(sql, params)
        else:
            cursor.execute(sql)
        if action:
            connection._commit()
        return cursor
    except:
        return None
def sql_page(request):
    # Render the "execute SQL statement" page.
    sql = str(request.POST.get("sql", "")).strip()
    get_content = u"""
    <br /><br /><br />
    <form id="form1" action="" method="POST">
    <div style="margin-left:100px;">
        <input type="text" name="sql" value="%s" size="120" />
        <input type="submit" name="submit" value="Execute SQL" />
    </div>
    </form>
    <hr /><br />
    """ % (sql)
    if sql:
        from django.db import connection as conn
        cursor = conn.cursor()
        try:
            count = 50
            if (sql.startswith("select") or sql.startswith("SELECT")
                    or sql.startswith("count=")):
                # SELECT statement
                if sql.startswith("count="):
                    pos = sql.index(" ")
                    count = int(sql[6:pos])
                    sql = sql[pos + 1:]
                cursor.execute(sql)
                conn._commit()
                i, rs = 0, ""
                while i < count:
                    rs_one = cursor.fetchone()
                    if rs_one is None:
                        break
                    rs += u"<tr><td style='color:red;'>%d</td>" % (i + 1)
                    for row in rs_one:
                        try:
                            rs += u"<td>%s</td>" % (row and row or " ")
                        except:
                            rs += u"<td>[E]</td>"
                    rs += u"</tr>"
                    i += 1
                rs = u'<table border="1">%s</table>' % (rs)
                return HttpResponse(u"%s<h2>Executed %s successfully</h2><br />%s"
                                    % (get_content, str(sql), rs))
            else:
                cursor.execute(sql)
                conn._commit()
                return HttpResponse(u"%sExecuted %s successfully"
                                    % (get_content, str(sql)))
        except Exception, args:
            return HttpResponse(u"%s<h2>Executing %s failed</h2><br /><br />%s"
                                % (get_content, str(sql), str(Exception) + str(args)))
def managed(flag=True):
    """
    Puts the transaction manager into a manual state: managed transactions have
    to be committed explicitly by the user. If you switch off transaction
    management and there is a pending commit/rollback, the data will be
    committed.
    """
    thread_ident = thread.get_ident()
    top = state.get(thread_ident, None)
    if top:
        top[-1] = flag
        if not flag and is_dirty():
            connection._commit()
            set_clean()
    else:
        raise TransactionManagementError("This code isn't under transaction management")
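# A usage sketch for this pre-1.6 Django manual transaction state; the Page
# model is reused from elsewhere in this file purely for illustration.
# Entering managed mode defers commits until commit() is called; leaving it
# with pending changes flushes them, as the docstring above describes.
managed(True)                          # start managing transactions by hand
Page.objects.create(slug='example')    # marks the transaction dirty
commit()                               # explicit connection._commit() + set_clean()
managed(False)                         # back to auto-commit; pending work is flushed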
def delete_grades(simulate, verbose=False):
    total = Plugin_fitsin.objects.filter(task__success=True).exclude(prevrelgrade='').count()
    print "Simulation:", simulate
    print "Computing how many grades to delete..."
    print "%d grades (and comments) to delete." % total
    if not simulate and total:
        r = raw_input('Continue? (y/n) ')
        if r not in ('y', 'Y'):
            print "Aborted."
            sys.exit(2)
        from django.db import connection
        cur = connection.cursor()
        cur.execute("UPDATE youpi_plugin_fitsin SET prevrelgrade = NULL, prevrelcomment = ''")
        connection._commit()
        print "Done"
def import_jsonlines(instream):
    """
    Imports jsonlines into database using available models
    """
    items = {"CompraItem": [], "CompraLineaItem": [], "ProveedorItem": []}
    for line in instream:
        r = json.loads(line)
        if r[0] in items:
            items[r[0]].append(r[1])
        else:
            logger.warning("Unknown object type in jsonline: %s", line.rstrip())
    import_compras(items["CompraItem"])
    import_compra_lineas(items["CompraLineaItem"])
    import_proveedores(items["ProveedorItem"])
    connection._commit()
    connection.close()
def populate_ni_loc(location):
    """
    Add NewsItemLocations for all NewsItems that overlap with the new Location.
    """
    ni_count = NewsItem.objects.count()
    cursor = connection.cursor()
    i = 0
    while i < ni_count:
        cursor.execute("""
            INSERT INTO db_newsitemlocation (news_item_id, location_id)
            SELECT ni.id, loc.id FROM db_newsitem ni, db_location loc
            WHERE intersecting_collection(ni.location, loc.location)
              AND ni.id >= %s AND ni.id < %s
              AND loc.id = %s
            """, (i, i + 200, location.id))
        connection._commit()
        i += 200
def create_with_auto_version(self, slug, headline, content, change_message,
                             change_user, change_ip, minor_edit):
    """
    Creates and returns a Page object with the given attributes.
    Automatically sets version to the next available version number for the
    given slug, in a way that avoids race conditions.
    """
    from django.db import connection
    db_table = self.model._meta.db_table
    cursor = connection.cursor()
    cursor.execute("""
        INSERT INTO %s
            (slug, headline, content, version, change_date, change_message,
             change_user, change_ip, minor_edit)
        VALUES (%%s, %%s, %%s,
            (SELECT COALESCE(MAX(version), 0) + 1 FROM %s WHERE slug=%%s),
            NOW(), %%s, %%s, %%s, %%s)""" % (db_table, db_table),
        (slug, headline, content, slug, change_message, change_user,
         change_ip, minor_edit))
    new_id = connection.ops.last_insert_id(cursor, db_table, 'id')
    connection._commit()
    return self.get(id=new_id)
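# Usage sketch (argument values invented for illustration; Page is assumed to
# be the model this manager is attached to). The subquery computes
# MAX(version)+1 inside the INSERT itself, so two concurrent writers cannot
# both read the same max version the way a read-then-save through the ORM
# could.
page = Page.objects.create_with_auto_version(
    slug='front-page',
    headline='Front page',
    content='Hello, world.',
    change_message='initial version',
    change_user='admin',
    change_ip='127.0.0.1',
    minor_edit=False,
)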
def checkAndSave(device):
    # Refresh the device's LastActivity; compare against a cached value to
    # avoid writing to the database on every request.
    n = datetime.datetime.now()
    old = 0
    removeLastReboot(device.IPAddress)
    laKey = "iclock_la_" + device.SN  # LastActivity last written to the DB, for the next comparison
    if laKey in cache:
        old = cache.get(laKey)
    device.LastActivity = n
    cache.set("iclock_" + device.SN, device)
    if not old or (n - old).seconds > 3:
        # only sufficiently spaced LastActivity updates reach the database
        try:
            cursor = conn.cursor()
            sql = "update iclock set lastactivity='%s' where sn='%s'" % (str(n)[:19], device.SN)
            cursor.execute(sql)
            conn._commit()
            device.save()
        except:
            device.save()
        cache.set(laKey, n)
def checkAndSave(device):
    n = datetime.datetime.now()
    old = 0
    removeLastReboot(device.IPAddress)
    laKey = 'iclock_la_' + device.SN
    if laKey in cache:
        old = cache.get(laKey)
    device.LastActivity = n
    cache.set('iclock_' + device.SN, device)
    if not old or (n - old).seconds > 3:
        try:
            cursor = conn.cursor()
            sql = "update iclock set lastactivity='%s' where sn='%s'" % (str(n)[:19], device.SN)
            cursor.execute(sql)
            conn._commit()
            device.save()
        except:
            device.save()
    else:
        cache.set(laKey, n)
def populate_streets(*args, **kwargs):
    """
    Populates the streets table from the blocks table
    """
    print 'Populating the streets table'
    cursor = connection.cursor()
    cursor.execute("TRUNCATE streets")
    cursor.execute("""
        INSERT INTO streets (street, pretty_name, street_slug, suffix, city, state)
        SELECT DISTINCT street, street_pretty_name, street_slug, suffix, left_city, left_state
        FROM blocks
        UNION
        SELECT DISTINCT street, street_pretty_name, street_slug, suffix, right_city, right_state
        FROM blocks
    """)
    connection._commit()
    print "Deleting extraneous cities..."
    metro = get_metro()
    cities = [l.name.upper() for l in
              Location.objects.filter(location_type__slug=metro['city_location_type'])
                              .exclude(location_type__name__startswith='Unknown')]
    Street.objects.exclude(city__in=cities).delete()
def process_pwd():
    try:
        cursor = connection.cursor()
        sql = "select badgenumber, Password from userinfo"
        cursor.execute(sql)
        qets = cursor.fetchall()
        for q in qets:
            try:
                password = q[1].strip()
            except:
                password = ''
            # the password literal was masked ('******') in the source; '%s'
            # restores the evident intent of storing the encrypted value
            sql = u"update userinfo set Password='%s' where badgenumber=%s" % (
                encryption(password), q[0])
            cursor.execute(sql)
        connection._commit()
        sql = "select id, comm_pwd from iclock"
        cursor.execute(sql)
        qets = cursor.fetchall()
        for q in qets:
            try:
                comm_pwd = q[1].strip()
            except:
                comm_pwd = ''
            sql = u"update iclock set comm_pwd='%s' where id=%s" % (
                encryption(comm_pwd), q[0])
            cursor.execute(sql)
        connection._commit()
        sql = "select id, force_pwd, supper_pwd from acc_door"
        cursor.execute(sql)
        qets = cursor.fetchall()
        for q in qets:
            try:
                force_pwd = q[1].strip()
            except:
                force_pwd = ''
            try:
                supper_pwd = q[2].strip()
            except:
                supper_pwd = ''
            sql = u"update acc_door set force_pwd='%s', supper_pwd='%s' where id=%s" % (
                encryption(force_pwd), encryption(supper_pwd), q[0])
            cursor.execute(sql)
        connection._commit()
        connection.close()
    except:
        import traceback
        traceback.print_exc()
        connection.close()
def test_reindexation(self):
    # Adding an addon.
    addon = amo.tests.addon_factory()
    self.refresh()

    # The search should return the addon.
    wanted = [addon]
    self.check_results(wanted)

    # Current indices with aliases.
    old_indices = self.get_indices_aliases()

    # This is to start a reindexation in the background.
    class ReindexThread(threading.Thread):
        def __init__(self):
            self.stdout = StringIO.StringIO()
            super(ReindexThread, self).__init__()

        def run(self):
            management.call_command('reindex', stdout=self.stdout)

    t = ReindexThread()
    t.start()

    # Wait for the reindex in the thread to flag the database.
    # The database transaction isn't shared with the thread, so force the
    # commit.
    while t.is_alive() and not is_reindexing_amo():
        connection._commit()
        connection.clean_savepoints()

    # We should still be able to search in the foreground while the reindex
    # is being done in the background. We should also be able to index new
    # documents, and they should not be lost.
    old_addons_count = len(wanted)
    while t.is_alive() and len(wanted) < old_addons_count + 3:
        wanted.append(amo.tests.addon_factory())
        connection._commit()
        connection.clean_savepoints()
        amo.search.get_es().refresh()
        self.check_results(wanted)

    if len(wanted) == old_addons_count:
        raise AssertionError('Could not index objects in foreground while '
                             'reindexing in the background.')

    t.join()  # Wait for the thread to finish.
    t.stdout.seek(0)
    stdout = t.stdout.read()
    assert 'Reindexation done' in stdout, stdout

    # The reindexation is done, let's double check we have all our docs.
    connection._commit()
    connection.clean_savepoints()
    amo.search.get_es().refresh()
    self.check_results(wanted)

    # New indices have been created, and aliases now point to them.
    new_indices = self.get_indices_aliases()
    eq_(len(old_indices), len(new_indices), (old_indices, new_indices))
    assert new_indices != old_indices, stdout
def _test_reindexation(self):
    # Current indices with aliases.
    old_indices = self.get_indices_aliases()

    # This is to start a reindexation in the background.
    class ReindexThread(threading.Thread):
        def __init__(self):
            self.stdout = StringIO.StringIO()
            super(ReindexThread, self).__init__()

        def run(self):
            # We need to wait at least a second, to make sure the alias
            # name is going to be different, since we already create an
            # alias in setUpClass.
            time.sleep(1)
            management.call_command('reindex', stdout=self.stdout)

    t = ReindexThread()
    t.start()

    # Wait for the reindex in the thread to flag the database.
    # The database transaction isn't shared with the thread, so force the
    # commit.
    while t.is_alive() and not is_reindexing_amo():
        connection._commit()
        connection.clean_savepoints()

    # We should still be able to search in the foreground while the reindex
    # is being done in the background. We should also be able to index new
    # documents, and they should not be lost.
    old_addons_count = len(self.expected)
    while t.is_alive() and len(self.expected) < old_addons_count + 3:
        self.expected.append(addon_factory())
        connection._commit()
        connection.clean_savepoints()
        self.refresh()
        self.check_results(self.expected)

    if len(self.expected) == old_addons_count:
        raise AssertionError('Could not index objects in foreground while '
                             'reindexing in the background.')

    t.join()  # Wait for the thread to finish.
    t.stdout.seek(0)
    stdout = t.stdout.read()
    assert 'Reindexation done' in stdout, stdout

    # The reindexation is done, let's double check we have all our docs.
    connection._commit()
    connection.clean_savepoints()
    self.refresh()
    self.check_results(self.expected)

    # New indices have been created, and aliases now point to them.
    new_indices = self.get_indices_aliases()
    assert len(new_indices)
    assert old_indices != new_indices, (stdout, old_indices, new_indices)
    self.check_settings(new_indices)
def test_reindexation(self):
    # Adding a web app.
    webapp2 = self._create_app('neat app 2')
    self.refresh()

    # This search should return both apps.
    r = self.check_results({'sort': 'popularity'},
                           [webapp2.pk, self.webapp.pk])

    # Adding 5 more apps.
    webapps = [self._create_app('moarneatapp %d' % i) for i in range(5)]
    self.refresh()

    # XXX is there a cleaner way? All I want is to have those webapps in the
    # DB so the reindex command sees them.
    connection._commit()
    connection.clean_savepoints()

    # Right now the DB should be composed of two indexes and two aliases;
    # let's check we have two aliases.
    aliases = call_es('_aliases').json()
    old_aliases = [(index, aliases['aliases'].keys()[0])
                   for index, aliases in aliases.items()
                   if len(aliases['aliases']) > 0 and index.startswith('test')]
    old_aliases.sort()

    # Now do a reindexation in a background process.
    args = [sys.executable, 'manage.py', 'reindex', '--prefix=test_',
            '--settings=%s' % self.settings]
    indexer = subprocess.Popen(args, stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE, cwd=settings.ROOT)
    try:
        # We should be able to continue searching in the foreground and
        # always get our documents. We should also be able to index new
        # documents, and they should not be lost.
        count = 1
        wanted = [app.pk for app in webapps] + [webapp2.pk, self.webapp.pk]

        # Let's add more apps, and also do some searches.
        while indexer.poll() is None and count < 8:
            r = self.client.get(urlparams(self.url, sort='popularity'),
                                follow=True)
            eq_(r.status_code, 200, str(r.content))
            got = self.get_results(r)
            got.sort()
            self.assertEqual(len(got), len(wanted), (got, wanted))
            wanted.append(self._create_app('moar %d' % count).pk)
            self.refresh()
            connection._commit()
            connection.clean_savepoints()
            count += 1
            time.sleep(.1)

        if count < 3:
            raise AssertionError("Could not index enough objects for the "
                                 "test to be meaningful.")
    except Exception:
        indexer.terminate()
        raise

    stdout, stderr = indexer.communicate()
    self.assertTrue('Reindexation done' in stdout, stdout + '\n' + stderr)
    amo.search.get_es().refresh()

    # The reindexation is done; let's double check we have all our docs.
    self.check_results({'sort': 'popularity'}, wanted)

    # Let's check the aliases as well; we should have 2.
    aliases = call_es('_aliases').json()
    new_aliases = [(index, aliases['aliases'].keys()[0])
                   for index, aliases in aliases.items()
                   if len(aliases['aliases']) > 0 and index.startswith('test')]
    new_aliases.sort()
    self.assertEqual(len(new_aliases), 2)
    # And they should be new aliases.
    self.assertNotEqual(new_aliases, old_aliases)
def evolvedb(app, interactive=True, do_save=False, do_notify=True,
             managed_upgrade_only=False):
    from django.db import connection
    cursor = connection.cursor()
    style = color.no_style()
    ops, introspection = get_operations_and_introspection_classes(style)
    app_name = app.__name__.split('.')[-2]
    last_schema_fingerprint = None
    seen_schema_fingerprints = set()
    fingerprints, evolutions = get_fingerprints_evolutions_from_app(app, style, do_notify)
    if fingerprints and evolutions:
        if do_notify:
            print 'deseb: %s.schema_evolution module found (%i fingerprints, %i evolutions)' % (
                app_name, len(fingerprints), len(evolutions))
    while True:
        commands = []
        commands_color = []
        schema_fingerprint = introspection.get_schema_fingerprint(cursor, app)
        schema_recognized, all_upgrade_paths, available_upgrades, best_upgrade = \
            get_managed_evolution_options(app, schema_fingerprint, style, do_notify)
        if fingerprints and evolutions:
            if schema_recognized:
                if do_notify or interactive:
                    print "deseb: fingerprint for '%s' is '%s' (recognized)" % (
                        app_name, schema_fingerprint)
            else:
                if do_notify or interactive:
                    print "deseb: fingerprint for '%s' is '%s' (unrecognized)" % (
                        app_name, schema_fingerprint)
        managed_upgrade = (schema_recognized and available_upgrades
                           and best_upgrade and best_upgrade[3] > 0)
        if managed_upgrade:
            if do_notify or interactive:
                print "\t and a managed schema upgrade to '%s' is available:" % best_upgrade[1], best_upgrade[3]
            commands_color = commands = best_upgrade[2]
        elif not managed_upgrade_only:
            commands = get_introspected_evolution_options(app, style)
            commands_color = get_introspected_evolution_options(app, color.color_style())
            if interactive:
                if commands:
                    print '%s: the following schema upgrade is available:' % app_name
                # else:
                #     print '%s: schema is up to date' % app_name
        if commands:
            if interactive or DEBUG:
                for cmd in commands_color:
                    print cmd
        else:
            break
        if interactive:
            confirm = raw_input("do you want to run the preceding commands?\n"
                                "type 'yes' to continue, or 'no' to cancel: ")
        else:
            confirm = 'yes'
        if confirm == 'yes':
            connection._commit()  # clean previous commands' run state
            for cmd in commands:
                if cmd[:3] != '-- ':
                    cursor.execute(cmd)
            connection._commit()  # commit changes
            if interactive:
                print 'schema upgrade executed'
            new_schema_fingerprint = introspection.get_schema_fingerprint(cursor, app)
            if schema_fingerprint == new_schema_fingerprint:
                print "schema fingerprint was unchanged - this really shouldn't happen"
            else:
                if (commands and not managed_upgrade
                        and (schema_fingerprint, new_schema_fingerprint) not in all_upgrade_paths):
                    if interactive and do_save:
                        confirm = raw_input("do you want to save these commands in %s.schema_evolution?\n"
                                            "type 'yes' to continue, or 'no' to cancel: " % app_name)
                    else:
                        confirm = 'yes'
                    if do_save and confirm == 'yes':
                        save_managed_evolution(app, commands, schema_fingerprint,
                                               new_schema_fingerprint)
            if not managed_upgrade:
                break
        else:
            if interactive:
                print 'schema not saved'
            break
        seen_schema_fingerprints.add(schema_fingerprint)
        schema_fingerprint = new_schema_fingerprint
        if managed_upgrade:
            if schema_fingerprint == best_upgrade[1]:
                if do_notify:
                    print '\tfingerprint verification successful'
            else:
                if do_notify:
                    print "\tfingerprint verification failed (is '%s'; was expecting '%s')" % (
                        schema_fingerprint, best_upgrade[1])
                break
        print
        if schema_fingerprint in seen_schema_fingerprints:
            break
            try:
                com = FirstQComment.objects.filter(comment=custom_comment)[0]
            except:
                com = FirstQComment.objects.all()[0]
            for fit in fitsins:
                m = FirstQEval(user=user, fitsin=fit)
                m.grade = grade
                m.comment = com
                m.custom_comment = custom_comment
                m.save()
            writes += len(fitsins)
            if not verbose:
                sys.stdout.write(term.BOL + "Found: %5d, Not Found: %5d, DB Writes: %5d, Line %5d"
                                 % (found, notfound, writes, pos))
            pos += 1
        connection._commit()
    except:
        sys.stdout.write(term.SHOW_CURSOR)
        raise
    f.close()
    sys.stdout.write(term.SHOW_CURSOR + '\n')
    if verbose:
        print "Found: %5d, Not Found: %5d, DB Writes: %5d, Line %5d" % (
            found, notfound, writes, pos - 1)
    print "Time elapsed: %.2f sec" % (time.time() - start)

def main():
    parser = OptionParser(description='Tool for grading all Qualityfits-in processings')
    parser.add_option('-c', '--copy',
def commit():
    """
    Does the commit itself and resets the dirty flag.
    """
    connection._commit()
    set_clean()