def adapt_macaddr(maddr):
    from psycopg2.extensions import adapt, AsIs
    return AsIs("{0}::macaddr".format(adapt(str(maddr))))
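# A minimal usage sketch for the adapter above. MacAddr is a hypothetical
# stand-in for whatever MAC-address type the surrounding project uses; once
# registered, instances can be passed directly as query parameters.
from psycopg2.extensions import register_adapter

class MacAddr(str):
    """Hypothetical MAC address wrapper."""

register_adapter(MacAddr, adapt_macaddr)
# cur.execute("INSERT INTO devices (mac) VALUES (%s)", (MacAddr("08:00:2b:01:02:03"),))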
def addapt_numpy_float32(numpy_float32):
    return AsIs(numpy_float32)
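# A minimal registration sketch, assuming numpy is importable: register_adapter
# hooks the helper above into psycopg2 so numpy scalars can be passed straight
# to cursor.execute(). AsIs works here because str() of a numpy scalar is a
# plain numeric literal. The float64/int64 helpers below follow the same pattern.
import numpy
from psycopg2.extensions import register_adapter

register_adapter(numpy.float32, addapt_numpy_float32)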
            'ELSE last_value + %s '
            'END '
            'FROM "%s"' % (self.flavor.param, name),
            (increment,))
    else:
        cursor.execute('SELECT CASE WHEN NOT is_called THEN last_value '
            'ELSE last_value + increment_by '
            'END '
            'FROM "%s"' % name)
    return cursor.fetchone()[0]

register_type(UNICODE)
if PYDATE:
    register_type(PYDATE)
if PYDATETIME:
    register_type(PYDATETIME)
if PYTIME:
    register_type(PYTIME)
if PYINTERVAL:
    register_type(PYINTERVAL)
register_adapter(float, lambda value: AsIs(repr(value)))
register_adapter(Decimal, lambda value: AsIs(str(value)))

def convert_json(value):
    return json.loads(value, object_hook=JSONDecoder())

register_default_json(loads=convert_json)
register_default_jsonb(loads=convert_json)
cursor.execute(""" VACUUM ANALYZE target_instances; """) connection.commit() #some sort of magic connection.set_isolation_level(0) cursor.execute(""" VACUUM ANALYZE strat_phrases; """) connection.commit() #some sort of magic connection.set_isolation_level(0) cursor.execute( """ VACUUM ANALYZE %(my_app)s_sentences_%(my_product)s; """, { "my_app": AsIs(config['app_name']), "my_product": AsIs(config['product'].lower()) }) connection.commit() #============================================================================== # FIND STRATIGRAPHIC PHRASES NEAREST TO ORPHAN TARGET INSTANCES #============================================================================== #how many sentences back from orphan to look for stratigraphic phrases strat_distance = 3 #initialize the dump variable strat_target_distant = [] #list of docids with orphaned targets
def _adapt_datetime(dt):
    return AsIs("%s" % adapt(DateTime.iso8601(dt)))
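# A minimal registration sketch, assuming DateTime is the surrounding project's
# datetime helper (exposing iso8601()) and that the adapter is meant for
# Python's datetime.datetime values.
import datetime
from psycopg2.extensions import register_adapter

register_adapter(datetime.datetime, _adapt_datetime)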
def with_query(self, **kwargs):
    results = kwargs.get('results', [None])[0]
    page = kwargs.get('page', [None])[0]
    sort = kwargs.get('sortField', ['id'])
    sort = 'q.' + sort[0]
    order = kwargs.get('sortOrder', ['ASC'])[0]
    if order == 'ascend':
        order = 'ASC'
    elif order == 'descend':
        order = 'DESC'
    if results and page:
        questions_range = (int(results) * (int(page) - 1),
                           int(results) * int(page))
    else:
        questions_range = None
    course_condition = self._course_query(kwargs.get('courses[]', None))
    quizzes_query = self._question_query_quizzes(kwargs.get('quizzes[]', None))
    author_condition = self._question_query_author(kwargs.get('authors[]', None))
    tags_query = self._question_query_tags(kwargs.get('tags[]', None))
    having_query = self._having_clause(quizzes_query, tags_query, kwargs)
    from django.db import connection
    with connection.cursor() as cursor:
        cursor.execute(
            """
            WITH q(id, last_modify_date, title, author_id, responses) AS (
                SELECT pq.id, pq.last_modify_date, pq.title, pq.author_id,
                       COUNT(pr.id) AS responses
                FROM polls_question pq
                LEFT JOIN polls_response pr ON pr.question_id = pq.id
                WHERE true {} {}
                GROUP BY pq.id
            )
            SELECT q.id
            FROM q
            LEFT JOIN polls_quizquestion pqq ON pqq.question_id = q.id
            LEFT JOIN polls_question_tags pqt ON pqt.question_id = q.id
            WHERE true {} {}
            GROUP BY q.id, %s
            {}
            ORDER BY %s %s
            ;""".format(course_condition, author_condition, quizzes_query,
                        tags_query, having_query),
            [AsIs(sort), AsIs(sort), AsIs(order)])
        result_list = []
        length = 0
        for index, row in enumerate(cursor.fetchall()):
            if questions_range:
                if questions_range[0] < index + 1 <= questions_range[1]:
                    question = self.model.objects.get(pk=row[0])
                    result_list.append(question)
            else:
                question = self.model.objects.get(id=row[0])
                result_list.append(question)
            length += 1
        return result_list, length
def adapt_money(m):
    return AsIs('(%s,%s)::currency_amount'
                % (adapt(m.amount), adapt(m.currency)))
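# A minimal registration sketch. Money is a hypothetical value class with the
# .amount and .currency attributes adapt_money() reads; the currency_amount
# composite type is assumed to exist in the target database.
from psycopg2.extensions import register_adapter

class Money:
    def __init__(self, amount, currency):
        self.amount = amount
        self.currency = currency

register_adapter(Money, adapt_money)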
def itemID(self, itemID):
    self.__itemID = AsIs(self.__convert(itemID, 'itemID'))

def table(self, table):
    self.__table = AsIs(self.__convert(table, 'table'))

def timestamp(self, timestamp):
    self.__timestamp = AsIs(self.__convert(timestamp, 'timestamp'))

def userID(self, userID):
    self.__userID = AsIs(self.__convert(userID, 'userID'))
def delete(cls, id):
    """Delete the record whose cls.table_key column equals id from cls.table."""
    cls.test_connection()
    cls._cur.execute("DELETE FROM %s WHERE %s = %s;",
                     (AsIs(cls.table), AsIs(cls.table_key), id))
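# A hedged alternative sketch: since psycopg2 2.7 the psycopg2.sql module
# composes identifiers safely, so table and column names don't need AsIs.
# Written against the same hypothetical cls attributes as delete() above.
from psycopg2 import sql

def delete_safe(cls, id):
    cls.test_connection()
    cls._cur.execute(
        sql.SQL("DELETE FROM {} WHERE {} = %s;").format(
            sql.Identifier(cls.table),
            sql.Identifier(cls.table_key)),
        (id,))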
def query_property(self, select_list, query_list, orderby,
                   paging_size=20, page_num=1):
    """
    Run a query built from a list of GET params.
    NOTE: only for property
    :param select_list: str array
    :param query_list: str array
    :param orderby: str, default is "id"
    :param paging_size: int, default is 20
    :param page_num: int, default is 1
    :return:
    """
    def streetnameHandler(streetname):
        """
        For SQL LIKE, replace spaces with % and remove the suffix,
        e.g.: 1615 Greencastle Ave -> 1615%Greencastle%
        """
        res = '%' + "%".join(streetname.split()[:-1]) + '%'
        return res

    table = "properties"  # NOTE: DO NOT TOUCH IT!!!
    offset = (page_num - 1) * paging_size

    # orderby handler
    orderby = "id" if not orderby else orderby
    if query_list.get('descend') == 'true':  # will ignore other values
        orderby += " DESC"

    # OR handler
    if query_list.get('or') == 'true':
        or_condition = []
        # remove 'or', streetname, city, postalcode from query_list
        for field in ["streetname", "city", "postalcode"]:
            if query_list.get(field):
                tmp_val = query_list.pop(field)
                tmp_val = '%' + "%".join(tmp_val.split()) + '%'
                query_str = "upper(%s) like upper('%s')"
                or_condition.append(query_str % (field, tmp_val))
        # generate AND (streetname like '%x%' OR city like '%x%' OR postalcode=x)
        or_condition = "(" + " OR ".join(or_condition) + ")"
    else:
        or_condition = ""

    # streetname
    if query_list.get('streetname'):
        query_list['streetname'] = streetnameHandler(query_list['streetname'])

    # generate SQL WHERE
    condition = self.__gencondition(query_list)
    if or_condition:
        if condition:
            condition += " AND " + or_condition
        else:
            condition = or_condition
    # when the query list is empty (ignoring 'descend'), return everything
    condition = "id>0" if not condition else condition

    cur = self.__connect()
    query = ("SELECT %s FROM %s WHERE %s "
             "ORDER BY %s LIMIT %s OFFSET %s;")
    try:
        cur.execute(query, (
            AsIs(','.join(select_list)),
            AsIs(table),
            AsIs(condition),
            AsIs(orderby),
            AsIs(str(paging_size)),
            AsIs(str(offset)),
        ))
        total_count = self.__gettotal(table, condition)
        return self.__getresult(cur, paging=True, total=total_count)
    except Exception:
        return {
            "query": query,
            "success": "False",
            "count": 0,
            "results": None,
            "colnames": None,
        }
from psycopg2.extensions import AsIs
import psycopg2
import obd
import sys
import os

sys.path.insert(0, os.path.realpath(os.path.dirname(__file__)))

from datetime import datetime
from smartOBD.dbconnect import dbconn, cur
import smartOBD
from smartOBD import asynco
import time

dbtable = asynco.userGet()
speed = 25

while True:  # infinite loop
    speed += 1
    data = [datetime.now(), speed, 25, 25, 25, 25, 25]
    cur.execute("delete from %s;", [AsIs(dbtable)])
    cur.execute("insert into %s VALUES (%s, %s, %s, %s, %s, %s, %s);",
                (AsIs(dbtable), data[0], data[1], data[2], data[3],
                 data[5], data[4], data[6]))
    dbconn.commit()
    time.sleep(0.5)
def drop_table_model_company_country(cr):
    tablename = 'company_country_config_settings'
    if tools.table_exists(cr, tablename):
        _logger.info("Dropping table %s", tablename)
        cr.execute("DROP TABLE IF EXISTS %s;", (AsIs(tablename),))
def get_all_nyc_growth_scores(params):
    conn = connection
    cur = conn.cursor()
    cur.execute(
        """
        select z.rank, z.count, z.percentile, z.per_proficient, z.dbn,
            z.school_name, z.subject, z.grade, %(calculation)s as growth,
            z.total_tested, z.total_mean,
            z.num_level_1, z.per_level_1, z.num_level_2, z.per_level_2,
            z.num_level_3, z.per_level_3, z.num_level_4, z.per_level_4
        FROM (
            select (count - rank)/count::numeric * 100 as percentile, *
            FROM (
                select rank() OVER (PARTITION BY nys.year, nys.subject
                        ORDER BY nys.per_proficient desc),
                    count(*) OVER (PARTITION BY nys.year, nys.subject),
                    nys.*, b.sum
                FROM (
                    select distinct this.id, this.year, this.dbn,
                        this.school_name, this.subject, this.grade,
                        this.total_tested, this.total_mean,
                        this.num_level_1, this.per_level_1,
                        this.num_level_2, this.per_level_2,
                        this.num_level_3, this.per_level_3,
                        this.num_level_4, this.per_level_4,
                        this.num_proficient,
                        this.per_proficient - that.per_proficient
                            as per_proficient
                    FROM city_data.nys_test_results_all_nyc_schools as this
                    JOIN (
                        select * from city_data.nys_test_results_all_nyc_schools
                        where year = %(academic_year)s - 1
                    ) as that
                        on that.dbn = this.dbn
                        and that.subject = this.subject
                        and that.grade = this.grade
                    where this.year = %(academic_year)s
                ) as nys
                JOIN (
                    select dbn, sum(grade::numeric)
                    from city_data.nys_test_results_all_nyc_schools
                    where (grade != 'All Grades' and grade is not null)
                        and year = %(academic_year)s
                    group by dbn
                ) as b on b.dbn = nys.dbn
                where b.sum >= %(three_eight_cohort)s
                    and nys.year = %(academic_year)s
                    and nys.grade = %(grade_level)s
                    and substring(nys.dbn, 1, 2) = '84'
            ) as a
            order by a.subject, a.rank
        ) as z
        """, {
            'academic_year': params['academic_year'],
            'grade_level': params['grade_level'],
            'three_eight_cohort': params['three_eight_cohort'],
            'calculation': AsIs(params['calculation'])
        })
    rows = dictfetchall(cur)
    cur.close()
    return rows
def get_variable_data_from_intersection_table(self, var):
    query = """SELECT %s FROM intersection_table_birth;"""
    return self.c.db.get_regular_cursor_query(query, (AsIs(var),))
def remove_from_time_series(time_series_table, file_name):
    curs = conn.cursor()
    query = "DELETE FROM %(table)s WHERE location = %(filename)s;"
    data = {"table": AsIs(time_series_table), "filename": file_name}
    curs.execute(query, data)
    conn.commit()
def bootstrap_4_migration(env):
    """Convert customized website views to Bootstrap 4 and multi-website.

    This process is a bit complex, so here's the big picture:

    In v11, there's no upstream support for multi-websites, but databases
    can actually have several websites, and some records (pages, menus,
    views) get multi-website features behind the scenes, so we have to
    treat them as semi-supported. Usually it follows this logic:

    - Website-specific records only appear in their website.
    - Website-agnostic records appear in all websites.
    - View changes affect all websites.

    In v12, there's full multi-website support, usually following this logic:

    - Website-specific records override website-agnostic ones.
    - View changes always create a website-specific copy (Copy-On-Write, COW).

    This migration tries to make v11 changes look like they were done in v12:

    - Unmodified views (noupdate=0) are not touched.
    - Website-agnostic modified (noupdate=1) views are copied and marked as
      noupdate=0. Those copied views are made website-specific and migrated
      to Bootstrap 4.

    This must be done in the "pre" stage, because the "mid" stage could
    update or even delete missing views now that they are marked as
    noupdate=0, but that's exactly what you want, because when you create a
    new website with the UI in v12, that website will be "virgin",
    including only the raw views that come directly from the modules,
    without any modifications made for other websites.
    """
    # Preserve old dummy images that have changed
    image_replacements = _preserve_v11_dummy_images(env)
    # Create a column to remember where new views come from
    oldview_id_col = openupgrade.get_legacy_name("bs4_migrated_from")
    table_name = env["ir.ui.view"]._table
    openupgrade.logged_query(
        env.cr,
        "ALTER TABLE %s ADD COLUMN %s INTEGER",
        (AsIs(table_name), AsIs(oldview_id_col)),
    )
    # Create another column to store migration metadata
    old_metadata_col = openupgrade.get_legacy_name("bs4_migration_metadata")
    openupgrade.logged_query(
        env.cr,
        "ALTER TABLE %s ADD COLUMN %s TEXT",
        (AsIs(table_name), AsIs(old_metadata_col)),
    )
    # Find report views, which should never be converted here
    report_names = env["ir.actions.report"].search([
        ("report_name", "!=", False),
        ("report_type", "=like", "qweb-%"),
    ]).mapped("report_name")
    # Find updatable views, to be excluded; a standard addon update is enough
    updatable_ids = env["ir.model.data"].search([
        ("model", "=", "ir.ui.view"),
        ("noupdate", "=", False),
    ]).mapped("res_id")
    # In Odoo v11, a page can be related to multiple websites, and the
    # related view doesn't need to be related to a website (although it
    # could be); in v12, a page is related to a view, which is related
    # (or not) to a single website.
    # Usually it would be impossible to link a page to multiple websites
    # using the UI in v11, so that case isn't supported.
    env.cr.execute(
        """SELECT r.website_id, p.view_id
        FROM website_website_page_rel AS r
        INNER JOIN website_page AS p ON r.website_page_id = p.id
        INNER JOIN ir_ui_view AS v ON p.view_id = v.id
        WHERE v.website_id IS NULL
        ORDER BY r.website_id DESC""",
    )
    for page_website_id, view_id in env.cr.fetchall():
        # If a page happens to be in different websites, only the first one
        # will be stored in the view; the others will be lost, which isn't a
        # problem since v11 didn't support multi-websites
        openupgrade.logged_query(
            env.cr,
            "UPDATE ir_ui_view SET website_id = %s WHERE id = %s",
            (page_website_id, view_id),
        )
    # Find views to convert
    all_views = env['ir.ui.view'].with_context(active_test=False).search([
        ("id", "not in", updatable_ids),
        ("key", "not in", report_names),
        ("type", "=", "qweb"),
    ])
    # Find available websites
    env.cr.execute("SELECT id FROM website")
    website_ids = list(chain.from_iterable(env.cr.fetchall()))
    # Convert in place the views that already have a website
    env.cr.execute(
        "SELECT id FROM %s WHERE id IN %s AND website_id IS NOT NULL",
        (AsIs(table_name), tuple(all_views.ids)),
    )
    views_with_website = env["ir.ui.view"].browse(
        list(chain.from_iterable(env.cr.fetchall())),
        prefetch=all_views._prefetch,
    )
    for view in views_with_website:
        new_arch = convert_string_bootstrap_3to4(view.arch_db)
        new_arch = _apply_v11_dummy_image_replacement(
            image_replacements, new_arch,
        )
        view.arch_db = new_arch
    # Convert a website-specific copy of the view for the rest
    views_without_website = all_views - views_with_website
    for website_id, oldview in product(website_ids, views_without_website):
        # Skip if the view is a page and already has a website-specific copy
        env.cr.execute(
            """SELECT v.id
            FROM ir_ui_view AS v
            INNER JOIN website_page AS p ON p.view_id = v.id
            WHERE v.website_id = %s AND p.url IN (
                SELECT url FROM website_page WHERE view_id = %s
            )""",
            (website_id, oldview.id),
        )
        if env.cr.fetchall():
            continue
        # Skip if a website-specific copy already exists
        env.cr.execute(
            "SELECT id FROM %s WHERE key = %s AND website_id = %s",
            (AsIs(table_name), oldview.key, website_id),
        )
        if env.cr.fetchall():
            continue
        # Otherwise, create the copy and convert it
        new_arch = convert_string_bootstrap_3to4(oldview.arch_db)
        new_arch = _apply_v11_dummy_image_replacement(
            image_replacements, new_arch,
        )
        newview = oldview.copy({
            "arch_db": new_arch,
            "key": oldview.key,  # Avoid automatic deduplication
        })
        # Website-specific copy of the related website.page record
        openupgrade.logged_query(
            env.cr,
            """INSERT INTO website_page (
                create_date, create_uid, write_date, write_uid,
                date_publish, is_published, url, website_indexed, view_id)
            SELECT create_date, create_uid, write_date, write_uid,
                date_publish, is_published, url, website_indexed, %(new)s
            FROM website_page WHERE view_id = %(old)s""",
            {"new": newview.id, "old": oldview.id},
        )
        # Obtain related website.menu details
        env.cr.execute(
            """SELECT wm.create_date, wm.create_uid, wm.write_date,
                wm.write_uid, wm.name, wm.url, wm.new_window,
                wm.sequence, wm.parent_id
            FROM website_menu AS wm
            INNER JOIN website_page AS wp ON wm.page_id = wp.id
            INNER JOIN ir_ui_view AS v ON wp.view_id = v.id
            WHERE view_id = %s AND wm.website_id IS NULL""",
            (oldview.id,))
        menus = []
        # Use only JSON-serializable types
        for menu in env.cr.dictfetchall():
            for key in menu:
                if not isinstance(menu[key],
                                  (str, int, float, bool, type(None))):
                    menu[key] = str(menu[key])
            menus.append(menu)
        # Store the needed info in the migration columns
        openupgrade.logged_query(
            env.cr,
            "UPDATE %s SET %s = %s, %s = %s, website_id = %s WHERE id = %s",
            (
                AsIs(table_name),
                AsIs(oldview_id_col),
                oldview.id,
                AsIs(old_metadata_col),
                json.dumps({"menus": menus}),
                website_id,
                newview.id,
            ))
    # Inherit translated SEO metadata from original views
    openupgrade.copy_fields_multilang(
        env.cr,
        "ir.ui.view",
        env["ir.ui.view"]._table,
        ["website_meta_title", "website_meta_description",
         "website_meta_keywords"],
        oldview_id_col,
    )
    # Inherit arch translations from parent views
    openupgrade.copy_fields_multilang(
        env.cr,
        "ir.ui.view",
        env["ir.ui.view"]._table,
        ["arch_db"],
        oldview_id_col,
        translations_only=True,
    )
    # Set website-agnostic views as updatable
    model_data = env["ir.model.data"].search([
        ("model", "=", "ir.ui.view"),
        ("res_id", "in", views_without_website.ids),
    ])
    model_data.write({"noupdate": False})
def remove_from_daily_six_anomaly(table_name, date_string, phenophase):
    curs = conn.cursor()
    query = ("DELETE FROM %(table)s "
             "WHERE rast_date = to_date(%(rast_date)s, 'YYYYMMDD') "
             "AND phenophase = %(phenophase)s;")
    data = {"table": AsIs(table_name),
            "rast_date": date_string,
            "phenophase": phenophase}
    curs.execute(query, data)
    conn.commit()
def adapt_money_basket(b):
    return AsIs(
        "_wrap_amounts('%s'::jsonb)" % json.dumps(
            {k: str(v) for k, v in b.amounts.items() if v}
        ).replace("'", "''")
    )
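# A minimal registration sketch. MoneyBasket is a hypothetical container whose
# .amounts maps currency codes to amounts, matching what adapt_money_basket()
# serializes; _wrap_amounts is assumed to be a function defined in the database.
from psycopg2.extensions import register_adapter

class MoneyBasket:
    def __init__(self, amounts):
        self.amounts = amounts

register_adapter(MoneyBasket, adapt_money_basket)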
def get_raster_info(table_name, date):
    query = ("SELECT ST_AsGDALRaster(ST_Union(rast), 'Gtiff') "
             "FROM %s WHERE rast_date = %s;")
    data = (AsIs(table_name), date.strftime("%Y-%m-%d"))
    return get_raster_info_from_query(query, data)
def adapt_point(point):
    return AsIs("'(%s, %s)'::point" % (adapt(point.x), adapt(point.y)))
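# A minimal usage sketch, close to the example in the psycopg2 documentation:
# Point is a hypothetical type with x and y attributes; once registered,
# instances can be passed directly as query parameters.
from collections import namedtuple
from psycopg2.extensions import register_adapter

Point = namedtuple("Point", ["x", "y"])
register_adapter(Point, adapt_point)
# cur.execute("INSERT INTO atable (apoint) VALUES (%s)", (Point(1.23, 4.56),))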
def get_raster_array(table_name, column_name, value):
    query = ("SELECT ST_AsGDALRaster(ST_Union(rast), 'Gtiff') "
             "FROM %s WHERE %s = %s;")
    data = (AsIs(table_name), AsIs(column_name), value)
    return get_raster_from_query(query, data)
def addapt_numpy_float64(numpy_float64):
    return AsIs(numpy_float64)
def remove_from_table_by_filename(raster_path, table_name):
    curs = conn.cursor()
    query = "DELETE FROM %(table)s WHERE filename = %(filename)s;"
    data = {"table": AsIs(table_name),
            "filename": os.path.basename(raster_path)}
    curs.execute(query, data)
    conn.commit()
def dict_insert_string(table, _dict):
    # Builds an INSERT statement by string formatting; the Python tuple repr
    # happens to look like a SQL values list, but nothing is escaped here.
    columns = _dict.keys()
    values = [_dict[column] for column in columns]
    return 'insert into %s (%s) values %s' % (
        table, AsIs(','.join(columns)), tuple(values))
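# A hedged alternative sketch using psycopg2.sql (available since 2.7):
# compose identifiers safely and let the driver escape the values, instead of
# relying on the Python tuple repr as in dict_insert_string() above.
from psycopg2 import sql

def dict_insert(cur, table, _dict):
    columns = list(_dict.keys())
    query = sql.SQL("INSERT INTO {} ({}) VALUES ({})").format(
        sql.Identifier(table),
        sql.SQL(", ").join(map(sql.Identifier, columns)),
        sql.SQL(", ").join(sql.Placeholder() * len(columns)),
    )
    cur.execute(query, [_dict[c] for c in columns])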
def sync_menu_views_pages_websites(env):
    # The main menu and its children must be website-agnostic
    main_menu = env.ref('website.main_menu')
    child_menus = env["website.menu"].search([
        ("id", "child_of", main_menu.ids),
        ("website_id", "!=", False),
    ])
    child_menus.write({"website_id": False})
    # Duplicate the main menu for all websites
    for website in env["website"].search([]):
        website.copy_menu_hierarchy(main_menu)
    # Find views that were made website-specific in the pre stage
    old_metadata_col = openupgrade.get_legacy_name("bs4_migration_metadata")
    env.cr.execute("SELECT %s, id FROM %s WHERE %s IS NOT NULL", (
        AsIs(old_metadata_col),
        AsIs(env["ir.ui.view"]._table),
        AsIs(old_metadata_col),
    ))
    for data, specific_view_id in env.cr.fetchall():
        data = json.loads(data)
        specific_view = env["ir.ui.view"].browse(specific_view_id)
        for page in specific_view.page_ids:
            menus = env["website.menu"].search([
                ("id", "child_of", page.website_id.menu_id.ids),
                ("url", "=", page.url),
            ])
            # If menus exist, it means the agnostic view wasn't removed and
            # they already contain all the needed information, except that
            # they are linked to the website-agnostic page. Let's fix that.
            if menus:
                menus.write({"page_id": page.id})
            # If the menus disappeared, it's probably because the
            # website-agnostic view was removed during the normal module
            # update and the cascading FK removed the pages and menus. In
            # that case, recreate them.
            else:
                for menu in data["menus"]:
                    if menu["url"] != page.url:
                        continue
                    # Find the new website-specific parent menu
                    agnostic_parent = \
                        env["website.menu"].browse(menu["parent_id"])
                    specific_parent = env["website.menu"].search([
                        ("id", "child_of", page.website_id.menu_id.ids),
                        ("name", "=", agnostic_parent.name),
                        ("url", "=", agnostic_parent.url),
                        "|",
                        ("parent_id", "=", page.website_id.menu_id.id),
                        "&",
                        ("parent_id.name", "=",
                         agnostic_parent.parent_id.name),
                        ("parent_id.url", "=", agnostic_parent.parent_id.url),
                    ]) or page.website_id.menu_id
                    # Recreate the menu
                    menus.create(dict(
                        menu,
                        page_id=page.id,
                        parent_id=specific_parent.id,
                        website_id=page.website_id.id,
                    ))
def get_current_sensor_data(self, sid):
    self.curr.execute(
        "SELECT value FROM %(data_table_str)s "
        "ORDER BY event_time DESC LIMIT 1;",
        {'data_table_str': AsIs(sid)})
    return self.curr.fetchone()
def addapt_numpy_int64(numpy_int64):
    return AsIs(numpy_int64)