def generateOrders(*args):
    count = None
    if len(args) == 0:
        count = inputCount()
    else:
        count = args[0]
    clientsCount = session.query(Client).count()
    startTime = time.time()
    if len(args) == 0:
        print('\nRows generating...')
    if int(count) / 5 > clientsCount:
        generateClients(int(int(count) / 5) - clientsCount)
    # Cast to int before interpolating so the generated SQL stays well-formed.
    session.execute(
        f"INSERT INTO \"Order\" (taxes_sum, transaction_date, client_id) "
        f"SELECT generateint(500)::numeric, "
        f"generatedate(), "
        f"getrandomrow('Client')::int "
        f"FROM generate_series(1, {int(count)})")
    session.commit()
    endTime = time.time()
    if len(args) == 0:
        print('Rows generated! Elapsed time: ' + str(endTime - startTime)[:9] + 's')
        input('\nPress any key to continue...')

def generateOrderRelation(*args):
    count = None
    if len(args) == 0:
        count = inputCount()
    else:
        count = args[0]
    if session.query(Product).count() < 10:
        raise Exception(
            'Too few products in the database. Please scrape or parse some data')
    ordersCount = session.query(Order).count()
    startTime = time.time()
    if len(args) == 0:
        print('\nRows generating...')
    if int(count) / 10 > ordersCount:
        generateOrders(int(int(count) / 10) - ordersCount)
    # Cast to int before interpolating so the generated SQL stays well-formed.
    session.execute(
        f"INSERT INTO \"Link_Product-Order\" (product_id, order_id) "
        f"SELECT getrandomrow('Product')::int, "
        f"getrandomrow('Order')::int "
        f"FROM generate_series(1, {int(count)})")
    session.commit()
    endTime = time.time()
    if len(args) == 0:
        print('Rows generated! Elapsed time: ' + str(endTime - startTime)[:9] + 's')
        input('\nPress any key to continue...')

def update_payment_link_by_id(id, **kwargs):
    try:
        update = payment_links_table.update().values(**kwargs).where(
            payment_links_table.c.id == int(id))
        session.execute(update)
    except Exception as e:
        print(e)
    return redirect(url_for('.create_payment_link'))

def commit_history(self):
    # First delete any old stored history.
    # Note that if the max-lines config variable is shortened, some history
    # will hang around; I don't see this as a huge problem.
    session.execute('DELETE FROM history LIMIT %d' % len(self.history))
    # Now insert everything we have stored so far.
    session.executemany('INSERT INTO history VALUES (?,?,?)', self.history)
    conn.commit()

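# SQLite accepts DELETE ... LIMIT only when compiled with
# SQLITE_ENABLE_UPDATE_DELETE_LIMIT, so the delete above may fail on stock
# builds. A minimal, portable sketch of the same trim, assuming the table
# keeps its implicit rowid and the `added` timestamp column used in
# __init__; the method name is hypothetical.
def commit_history_portable(self):
    session.execute(
        'DELETE FROM history WHERE rowid IN '
        '(SELECT rowid FROM history ORDER BY added ASC LIMIT ?)',
        [len(self.history)])
    session.executemany('INSERT INTO history VALUES (?,?,?)', self.history)
    conn.commit()
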
def _handle(self, item):
    values = item.body['values']
    model = get_model(item.attrs['model'])
    try:
        # NOTE: interpolating values directly into SQL like this is unsafe
        # unless `values` is fully trusted; see the parameterized sketch below.
        session.execute('insert into %s values %s' % (
            model.__tablename__, ','.join(values)))
        session.commit()
    except Exception as e:
        print(f'Error in bulk insert {item.id}: {e}')

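# A minimal sketch of a parameterized bulk insert, assuming SQLAlchemy's
# text() construct and that each incoming row arrives as a dict keyed by
# column name (a hypothetical shape; the original passes raw value strings).
from sqlalchemy import text

def _handle_parameterized(self, item):
    rows = item.body['values']  # assumed: list of dicts, e.g. [{'id': 1, 'name': 'x'}]
    model = get_model(item.attrs['model'])
    if not rows:
        return
    columns = list(rows[0].keys())
    # Table and column names come from trusted model metadata; the values
    # themselves are bound as parameters.
    stmt = text('INSERT INTO %s (%s) VALUES (%s)' % (
        model.__tablename__,
        ', '.join(columns),
        ', '.join(':%s' % c for c in columns)))
    try:
        session.execute(stmt, rows)  # executemany-style: one dict per row
        session.commit()
    except Exception as e:
        session.rollback()
        print(f'Error in bulk insert {item.id}: {e}')
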
def forget_all(self, keyvalue):
    phrase = self.get_phrase_by_text(keyvalue)
    try:
        session.execute('DELETE FROM responses WHERE phrase_id=?', [phrase['id']])
        session.execute('DELETE FROM phrase WHERE id=?', [phrase['id']])
        conn.commit()
    except (AttributeError, KeyError):
        raise PhraseNotFound('That phrase does not exist')
    return True

def main():
    session.execute('truncate table {}'.format(NogizakaMemberTable.__table__))
    ret = namelist()
    for elem in ret:
        print(elem)
        member = NogizakaMemberTable()
        member.name = elem["name"]
        member.namekana = elem["name_kana"]
        member.birthday = elem["birthday"]
        session.add(member)
    session.commit()

def forget_response(self, keyvalue, response):
    phrase = self.get_phrase_by_text(keyvalue)
    try:
        response = self.get_response_by_text(response, phrase['id'])
        try:
            response_id = response['id']
            session.execute('DELETE FROM responses WHERE id=?', [response_id])
            conn.commit()
        except (AttributeError, KeyError):
            raise ResponseNotFound('That response does not exist')
    except (AttributeError, KeyError):
        raise PhraseNotFound('That phrase does not exist')

def parse2():
    data_dict = parseCsv(
        '../data/csv/DatafinitiElectronicsProductsPricingData.csv', [
            'name', 'brand', 'categories', 'dateAdded', 'manufacturer',
            'primaryCategories', 'prices.amountMin'
        ], 'name')

    # Adding Products to database:
    products_count = 0
    categories_count = 0
    links_count = 0
    for row in data_dict:
        new_product = Product(row['name'], row['brand'], row['manufacturer'],
                              row['dateAdded'],
                              int(float(row['prices.amountMin'])))
        session.add(new_product)
        session.commit()
        products_count += 1
        session.refresh(new_product)

        # Adding categories to database:
        category_names = row['categories'].split(',')
        for category_name in category_names:
            checked_categories = session.query(Category).filter(
                Category.name == category_name).all()
            if len(checked_categories) == 0:
                new_category = Category(category_name, row['primaryCategories'])
                session.add(new_category)
                session.commit()
                categories_count += 1
                session.refresh(new_category)
                # Adding link:
                ins = links_products_categories.insert().values(
                    product_id=new_product.id, category_id=new_category.id)
                session.execute(ins)
            else:
                ins = links_products_categories.insert().values(
                    product_id=new_product.id,
                    category_id=checked_categories[0].id)
                session.execute(ins)
            links_count += 1
            session.commit()
    print(f'Added {products_count} products, {categories_count} categories and {links_count} links.')
    input('\nPress any key to continue...')

def getProductsByCategory():
    category = input('Enter category for scraping: ')
    data = scrape(f'https://www.amazon.com/s?k={category}')
    if not data['products']:
        raise Exception('Null')
    products_count = 0
    categories_count = 0
    links_count = 0
    for raw in data['products']:
        print(f"Get product from {raw['url']}")
        if raw['price'] is not None:
            price = int(float(re.findall(r"\d+\.\d+", raw['price'])[0]))
        else:
            price = random.randint(200, 5000)
        new_product = Product(raw['title'], '', '',
                              datetime.today().strftime('%Y-%m-%d'), price)
        session.add(new_product)
        session.commit()
        products_count += 1
        session.refresh(new_product)

        # Adding categories to database:
        checked_categories = session.query(Category).filter(
            Category.name == category).all()
        if len(checked_categories) == 0:
            new_category = Category(category, category)
            session.add(new_category)
            session.commit()
            categories_count += 1
            session.refresh(new_category)
            # Adding link:
            ins = links_products_categories.insert().values(
                product_id=new_product.id, category_id=new_category.id)
            session.execute(ins)
        else:
            ins = links_products_categories.insert().values(
                product_id=new_product.id,
                category_id=checked_categories[0].id)
            session.execute(ins)
        links_count += 1
        session.commit()
    print(f'\nAdded {products_count} products, {categories_count} categories and {links_count} links.')
    input('\nPress any key to continue...')

def setUp(self):
    global engine
    global session
    global insp
    url = os.getenv("PIFPAF_POSTGRESQL_URL")
    if not url:
        self.skipTest("No database URL set")
    engine = sqlalchemy.create_engine(url)
    session = Session(engine)
    insp = inspect(engine)
    session.execute(
        "create table books(id int primary key, name varchar(20), "
        "author varchar(20), read boolean);")
    session.commit()
    self.add_data()

def get_command_id(command_id):
    """
    Returns as json the command details that have been processed
    ---
    tags: [commands]
    responses:
      200:
        description: Commands returned OK
      400:
        description: Commands not found
    """
    commands = session.query(Command)
    commands = str(commands)
    # NOTE: appending the id by string formatting is fragile; see the
    # parameterized sketch below.
    commands += " where commands.id={0}".format(command_id)
    result = session.execute(commands)
    json_data = []
    for r in result:
        json_data.append({
            'id': r[0],
            'command_string': r[1],
            'length': r[2],
            'duration': r[3],
            'output': r[4].decode()
        })
    if not json_data:
        return "Commands not found"
    return jsonify(json_data)

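# A minimal sketch of the same lookup with a bound parameter instead of
# string formatting, assuming SQLAlchemy's text() construct is importable
# here; the function name is hypothetical.
from sqlalchemy import text

def get_command_id_parameterized(command_id):
    sql = str(session.query(Command)) + " where commands.id=:id"
    result = session.execute(text(sql), {'id': command_id})
    return jsonify([{
        'id': r[0],
        'command_string': r[1],
        'length': r[2],
        'duration': r[3],
        'output': r[4].decode(),
    } for r in result])
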
def create_payment_link():
    insert = sqlalchemy.insert(payment_links_table).values(created=False)
    result = session.execute(insert)
    return redirect(url_for(
        '.title_and_description',
        id=result.lastrowid,
    ))

def getCount(self):
    try:
        return session.execute(
            select([func.count()]).select_from(self.instance)).scalar()
    except Exception as err:
        print("Get count error! ", err)
        raise err

def save_residues(tit: pypka.Titration, pid: int) -> pypka.Titration:
    # Save residue-level details
    (all_sites, _, _, _) = tit.getSiteInteractions()
    for site in all_sites:
        residue_details = {
            "pid": pid,
            "residue_number": site.getResNumber(),
            "residue_type": site.res_name,
            "chain": site.molecule.chain,
        }
        res_insert = insert(Residue).values(residue_details)
        res_insert = res_insert.on_conflict_do_update(
            index_elements=["pid", "residue_number", "residue_type", "chain"],
            set_={"pid": pid},
        ).returning(Residue.resid)
        result = session.execute(res_insert)
        resid = result.fetchall()[0][0]
        site.resid = resid
    session.commit()
    return tit

def get_payment_link_by_id(id):
    select = sqlalchemy.select([payment_links_table]).where(
        payment_links_table.c.id == int(id))
    result = session.execute(select).fetchone()
    if not result:
        abort(404)
    # Unpack the row we already fetched instead of re-running the query.
    id, created, title, slug, description, ammount, metadata = result
    return {
        'id': id,
        'created': created,
        'title': title,
        'slug': slug,
        'description': description,
        'ammount': ammount,
        'metadata': metadata,
    }

def payment_link(slug):
    try:
        link = session.query(payment_links_table).filter_by(slug=slug).one()
    except sqlalchemy.orm.exc.NoResultFound:
        abort(404)
    if request.method == 'POST':
        insert = sqlalchemy.insert(transactions_table).values(
            ammount=link.ammount,
            metadata=link.metadata,
        )
        session.execute(insert)
        return redirect(url_for('.confirmation', slug=slug))
    return render_template(
        "pay.html",
        link=link,
    )

def listar(nome_tabela):
    result = session.execute(nome_tabela.__table__.select())
    con = 1
    res = {}
    for row in result:
        res[con] = dict(row)
        con += 1
    return res

def get(self, poll_id):
    poll = session.execute(
        'SELECT options FROM polls WHERE id = :id',
        {'id': poll_id}).fetchone()
    if not poll:
        return 404, 404
    response = []
    for index, option in enumerate(poll[0].split(",")):
        votes = session.execute(
            'SELECT count(*), count(DISTINCT ip) FROM votes '
            'WHERE poll_id = :poll_id AND option=:option',
            {'poll_id': poll_id, 'option': index}).fetchone()
        response.append({
            'name': option,
            'votes': votes[0] if votes else 0,
            'unique_votes': votes[1] if votes else 0
        })
    return response, 201

def add_data(self):
    session.execute(
        "insert into books values(1, 'Sapiens', 'Harari', false);")
    session.execute(
        "insert into books values(2, 'Meluha', 'Amish', true);")
    session.execute(
        "insert into books values(3, 'Harry Potter', 'Rowling', true);")
    session.commit()

def remember(self, keyvalue, response, added_by):
    phrase = self.get_phrase_by_text(keyvalue)
    try:
        phrase_id = phrase['id']
        # We have this phrase; check if we already have this response.
        # Bind the lookup to a new name so the response text is not
        # clobbered before the insert below.
        existing = self.get_response_by_text(response, phrase_id)
        try:
            response_id = existing['id']
            return response_id
        except (AttributeError, KeyError):
            pass
    except (AttributeError, KeyError):
        phrase_id = str(uuid4())
        session.execute(
            "INSERT INTO phrase (id,added,phrase,added_by) VALUES(?,?,?,?)",
            [phrase_id, datetime.now().isoformat(), keyvalue, added_by.name])
    # Insert the response.
    response_id = str(uuid4())
    session.execute(
        "INSERT INTO responses(id,phrase_id,response,added,added_by) VALUES(?,?,?,?,?)",
        [response_id, phrase_id, response, datetime.now().isoformat(), added_by.name])
    return response_id

def generateClients(*args):
    count = None
    if len(args) == 0:
        count = inputCount()
    else:
        count = args[0]
    startTime = time.time()
    if len(args) == 0:
        print('\nRows generating...')
    # Cast to int before interpolating so the generated SQL stays well-formed.
    session.execute(
        f"INSERT INTO \"Client\" (name, birthday_date, email) "
        f"SELECT generatestring(10), "
        f"generatedate(), "
        f"concat(generatestring(8), '@', generatestring(5), '.com') "
        f"from generate_series(1, {int(count)})")
    session.commit()
    endTime = time.time()
    if len(args) == 0:
        print('Rows generated! Elapsed time: ' + str(endTime - startTime)[:9] + 's')
        input('\nPress any key to continue...')

def get_csv_data(rich_transactions):
    column_names = _column_names(rich_transactions)
    subquery = _get_subquery(rich_transactions)
    results = session.execute(subquery.select())

    def datemaker(i):
        return (datetime.utcnow() - timedelta(seconds=(i * 2345))).strftime(
            '%d %b %Y at %-I:%M%p')

    rows = [['Ammount', 'Date'] + column_names] + [
        [result.ammount, datemaker(index)] +
        [dict(result).get(column_name) for column_name in column_names]
        for index, result in enumerate(results.fetchall())
    ]
    return "\n".join(
        ",".join(str(column) for column in row) for row in rows)

def get_transactions_and_column_names(rich_transactions):
    column_names = _column_names(rich_transactions)
    subquery = _get_subquery(rich_transactions)
    results = session.execute(subquery.select())

    def datemaker(i):
        return (datetime.utcnow() - timedelta(seconds=(i * 2345))).strftime(
            '%d %b %Y at %-I:%M%p')

    transactions = reversed([
        [
            {'text': '£{:,.2f}'.format(result.ammount)},
            {'text': datemaker(index)},
        ] + [
            {
                'html': dict(result).get(column_name),
                'classes': '' if dict(result).get(column_name) else 'none',
            }
            for column_name in column_names
        ]
        for index, result in enumerate(results.fetchall())
    ])
    column_names = [
        {'text': 'Ammount'},
        {'text': 'Date'},
    ] + [{'text': column_name} for column_name in column_names]
    return transactions, column_names

def populate():
    for ammount, metadata in ([
        (100, {'post': 'Bangkok', 'fee type': 'Fee 19', 'country': 'Thailand'}),
        (100, {'post': 'Bangkok', 'fee type': 'Fee 19', 'country': 'Thailand'}),
        (100, {'post': 'Phuket', 'fee type': 'Fee 19', 'country': 'Thailand'}),
        (55, {'post': 'Phuket', 'fee type': 'Fee 4', 'country': 'Thailand'}),
        (30, {'post': 'Phuket', 'fee type': 'Fee 6', 'country': 'Thailand'}),
        (100, {'post': 'Paris', 'fee type': 'Fee 19', 'country': 'France'}),
        (100, {'post': 'Marseille', 'fee type': 'Fee 19', 'country': 'France'}),
        (100, {'post': 'Ibiza', 'fee type': 'Fee 19', 'country': 'Spain'}),
        (100, {'post': 'Ibiza', 'fee type': 'Fee 19', 'country': 'Spain'}),
        (100, {'post': 'Ibiza', 'fee type': 'Fee 19', 'country': 'Spain'}),
        (55, {'post': 'Ibiza', 'fee type': 'Fee 4', 'country': 'Spain'}),
        (100, {'post': 'Barcelona', 'fee type': 'Fee 19', 'country': 'Spain'}),
    ] * 3) + [
        (100, {'post': 'Washington', 'fee type': 'Fee 19', 'country': 'USA'}),
        (55, {'post': 'Boston', 'fee type': 'Fee 4', 'country': 'USA'}),
        (100, {'post': 'Montreal', 'fee type': 'Fee 19', 'country': 'Canada'}),
    ]:
        insert = sqlalchemy.insert(transactions_table).values(
            ammount=ammount,
            metadata=json.dumps(metadata),
        )
        session.execute(insert)
    return redirect(url_for('transactions.transactions_index'))

def delete_keys(cls, device_auth_data_id, keys):
    delete_query = cls.__table__.delete().\
        where(cls.device_auth_data_id == device_auth_data_id).\
        where(cls.app_key_hex.in_(keys))
    session.execute(delete_query)

def foo():
    session.execute('SELECT 1')
    return "ok"

def get_response(self, response_id):
    session.execute("SELECT * FROM responses WHERE id=?", [response_id])
    return self.zip_response(session.fetchone())

def __init__(self, history=50):
    self.history_max_lines = int(history)
    # Init history: fetch the newest rows, then reverse so we can append
    # rather than insert.
    session.execute(
        'SELECT * FROM history ORDER BY added DESC LIMIT ?',
        [self.history_max_lines])
    self.history = list(reversed(session.fetchall()))

def get_phrase_by_text(self, text):
    session.execute("SELECT * FROM phrase WHERE phrase=?", [text])
    return self.zip_phrase(session.fetchone())

def tearDown(self):
    session.execute("drop table books;")
    session.commit()

def get_response_by_text(self, text, phrase_id):
    session.execute("SELECT * FROM responses WHERE response=? AND phrase_id=?",
                    [text, phrase_id])
    return self.zip_response(session.fetchone())

# Iterate over all the word categories and load up each yml file.
words = {}
for file_name in listdir('./better_words/words'):
    category, extension = splitext(file_name)
    if extension == '.yml':
        with open('./better_words/words/' + file_name, 'r') as stream:
            words[category] = yaml.safe_load(stream)
words = parse_yaml(words)

# Purge old word data from the DB.
session.query(Category).delete()
session.query(Word).delete()
session.query(Suggestion).delete()
session.query(Link).delete()
session.execute('DELETE FROM link_to_word_associations')
session.execute('DELETE FROM suggestion_to_word_associations')

for category, value in words._asdict().items():
    # Create the category, and store its response.
    c = Category(slug=category, response=value.response)
    session.add(c)
    # Create each word entry for within the category.
    for entry in value.entries:
        prefix = category + '_' + entry.type + '_'
        suggestion_models = []
        for suggestion in entry.suggestions:
            model = Suggestion(id=create_id(suggestion, prefix), word=suggestion)

def init_postgis(tmpdir, dataset, key=None):
    print("INITIALIZING POSTGIS SQL")
    nm = dataset["dataset"]
    dsid = getDatasetId(nm)
    dsname = dsid

    # session.execute(f"UPDATE segment SET hull=null WHERE segment.dsid= {dsid}")
    # session.execute(f"UPDATE segment SET kde_density=null WHERE segment.dsid= {dsid}")
    # cells = sq(Segment).filter(Segment.dsid==dsname).filter(Segment.n_umis>20)
    # valid_cells = pd.Series(cells, index=[c.id for c in cells])
    # def make_convex_hull_safe(x):
    #     if len(x.umis) < 3:
    #         return None
    #     try:
    #         return sp_spatial.ConvexHull([[u.x, u.y] for u in x.umis])
    #     except Exception as e:
    #         return None
    # hulls = valid_cells.apply(make_convex_hull_safe)
    # cells_and_hulls = pd.concat([valid_cells.rename("cell"), hulls.rename("hull")], axis=1)
    # for k, r in cells_and_hulls.iterrows():
    #     hull = r.hull
    #     points = hull.points
    #     print(points)
    #     indices = hull.simplices
    #     poly_string = "POLYGON((" + ", ".join(
    #         [f"{p[0]} {p[1]}" for p in hull.points[np.concatenate([hull.vertices, [hull.vertices[0]]])]]) + "))"
    #     r.cell.hull = poly_string
    #     session.add(r.cell)

    raw_sql = text("""
        UPDATE umi SET xumi_xy=ST_SetSRID(ST_MakePoint(x, y),4326)
        FROM segment WHERE segment.id = umi.seg AND umi.dsid=:name;

        UPDATE segment SET hull = ch.hull
        FROM(
            SELECT umi.seg as seg, ST_ConvexHull(ST_Collect(xumi_xy)) as hull
            FROM umi JOIN segment ON segment.id= umi.seg
            WHERE segment.n_umis>20 AND segment.dsid=:name
            GROUP BY umi.seg
        ) as ch
        WHERE ch.seg = segment.id AND segment.dsid=:name;
    """).params(name=dsid)
    session.execute(raw_sql)

    def array2raster(newRasterfn, rasterOrigin, pixelWidth, pixelHeight, array):
        cols = array.shape[1]
        rows = array.shape[0]
        originX = rasterOrigin[0]
        originY = rasterOrigin[1]
        driver = gdal.GetDriverByName('GTiff')
        outRaster = driver.Create(newRasterfn, cols, rows, 1, gdal.GDT_Byte)
        outRaster.SetGeoTransform((originX, pixelWidth, 0, originY, 0, pixelHeight))
        outband = outRaster.GetRasterBand(1)
        outband.WriteArray(array)
        outRasterSRS = osr.SpatialReference()
        outRasterSRS.ImportFromEPSG(4326)
        outRaster.SetProjection(outRasterSRS.ExportToWkt())
        outband.FlushCache()

    cells = sq(Segment).filter(Segment.dsid == dsname).filter(Segment.n_umis > 20)
    for i, c in enumerate(cells):
        # if c.id != 300228: continue
        # c0 = c
        print("SETTING")
        coords = np.array([(u.x, u.y) for u in c.umis]).T
        points_kde = stats.gaussian_kde(coords)
        sf = 20
        r = max(c.eval0, c.eval1) * sf
        n = 50
        h = c.hull
        if isinstance(h, str):
            h = WKBElement(h)
        pts = to_shape(h).exterior.xy
        pts_df = pd.DataFrame(pts).T.rename({0: "x", 1: "y"}, axis="columns")
        xrange = [pts_df.x.min(), pts_df.x.max()]
        yrange = [pts_df.y.min(), pts_df.y.max()]
        X, Y = np.meshgrid(np.linspace(xrange[0], xrange[1], n),
                           np.linspace(yrange[0], yrange[1], n))
        grid = np.vstack([X.ravel(), Y.ravel()]).T
        vals = points_kde.evaluate(grid.T)
        vsquare = vals.reshape(n, n)
        array = (vsquare / np.max(vsquare) * 255).astype(int)
        reversed_arr = array[::-1]  # reverse array so the tif looks like the array
        rasterOrigin = (xrange[0], yrange[0])
        pixelWidth = (xrange[1] - xrange[0]) / n
        pixelHeight = (yrange[1] - yrange[0]) / n
        newRasterfn = 'test.tif'
        # Convert array to raster.
        array2raster(newRasterfn, rasterOrigin, pixelWidth, pixelHeight, array)
        hexbits = open(newRasterfn, "rb").read().hex()
        sub = r"\x" + f"{hexbits}"
        query = f"""UPDATE segment SET kde_density=ST_FromGDALRaster('{sub}') WHERE id={c.id}"""
        session.execute(query)
    session.commit()

    raw_sql = text("""
        UPDATE umi SET kde_val= ST_NearestValue(kde_density, ST_SetSRID(umi.xumi_xy,4326))
        FROM segment WHERE segment.id = umi.seg AND umi.dsid=:name;

        UPDATE segment SET hull1 = ch.hull
        FROM(
            SELECT umi.seg as seg, ST_ConvexHull(ST_Collect(xumi_xy)) as hull
            FROM umi JOIN segment ON segment.id= umi.seg
            WHERE segment.kde_density is not null AND umi.kde_val >= 1 AND segment.dsid=:name
            GROUP BY umi.seg
        ) as ch
        WHERE ch.seg = segment.id AND segment.dsid=:name;

        UPDATE segment SET hull128 = ch.hull
        FROM(
            SELECT umi.seg as seg, ST_ConvexHull(ST_Collect(xumi_xy)) as hull
            FROM umi JOIN segment ON segment.id= umi.seg
            WHERE segment.kde_density is not null AND umi.kde_val >= 128 AND segment.dsid=:name
            GROUP BY umi.seg
        ) as ch
        WHERE ch.seg = segment.id AND segment.dsid=:name;

        UPDATE segment SET hull12 = ch.hull
        FROM(
            SELECT umi.seg as seg, ST_ConvexHull(ST_Collect(xumi_xy)) as hull
            FROM umi JOIN segment ON segment.id= umi.seg
            WHERE segment.kde_density is not null AND umi.kde_val >= 12 AND segment.dsid=:name
            GROUP BY umi.seg
        ) as ch
        WHERE ch.seg = segment.id AND segment.dsid=:name;

        UPDATE segment SET center = ch.centroid
        FROM(
            SELECT umi.seg as seg, ST_Centroid(ST_Collect(xumi_xy)) as centroid
            FROM umi JOIN segment ON segment.id= umi.seg
            WHERE segment.kde_density is not null AND segment.dsid=:name
            GROUP BY umi.seg
        ) as ch
        WHERE ch.seg = segment.id AND segment.dsid=:name;

        UPDATE segment SET area12 = ST_Area(hull12);

        update segment set rval=(5915587277.0*id)%255/256 WHERE segment.dsid=:name;
        update segment set gval=(5915587277.0*id)/256%255/256 WHERE segment.dsid=:name;
        update segment set bval=(5915587277.0*id)/256/256%255/256 WHERE segment.dsid=:name;
    """).params(name=dsid)
    session.execute(raw_sql)

    raw_sql2 = text("""
        UPDATE dataset SET raster_2k_red = ST_MapAlgebra(
            ST_AddBand(
                ST_MakeEmptyRaster(2000,2000, -20, -20, .02, .02, 0, 0, 4326),
                1, '8BUI'::text, 5, 0),
            images.union_img, '[rast1]*0+[rast2]', null, 'UNION')
        FROM (
            SELECT rs.dsid as dsid, ST_Union(resampled, 'SUM') as union_img
            FROM (
                SELECT ST_MapAlgebra(
                    ST_Resample(kde_density,
                        ST_MakeEmptyRaster(2000,2000, -20, -20, .02, .02, 0, 0, 4326),
                        'Bilinear'),
                    1, null, FORMAT('[rast] * %s ', segment.rval)) AS resampled, dsid
                FROM segment
                WHERE segment.kde_density IS NOT null AND segment.dsid=:name
            ) AS rs
            GROUP BY rs.dsid
        ) AS images
        WHERE images.dsid = dataset.id AND dataset.id=:name;

        UPDATE dataset SET raster_2k_green = ST_MapAlgebra(
            ST_AddBand(
                ST_MakeEmptyRaster(2000,2000, -20, -20, .02, .02, 0, 0, 4326),
                1, '8BUI'::text, 5, 0),
            images.union_img, '[rast1]*0+[rast2]', null, 'UNION')
        FROM (
            SELECT rs.dsid as dsid, ST_Union(resampled, 'SUM') as union_img
            FROM (
                SELECT ST_MapAlgebra(
                    ST_Resample(kde_density,
                        ST_MakeEmptyRaster(2000,2000, -20, -20, .02, .02, 0, 0, 4326),
                        'Bilinear'),
                    1, null, FORMAT('[rast] * %s ', segment.gval)) AS resampled, dsid
                FROM segment
                WHERE segment.kde_density IS NOT null AND segment.dsid=:name
            ) AS rs
            GROUP BY rs.dsid
        ) AS images
        WHERE images.dsid = dataset.id AND dataset.id=:name;

        UPDATE dataset SET raster_2k_blue = ST_MapAlgebra(
            ST_AddBand(
                ST_MakeEmptyRaster(2000,2000, -20, -20, .02, .02, 0, 0, 4326),
                1, '8BUI'::text, 5, 0),
            images.union_img, '[rast1]*0+[rast2]', null, 'UNION')
        FROM (
            SELECT rs.dsid as dsid, ST_Union(resampled, 'SUM') as union_img
            FROM (
                SELECT ST_MapAlgebra(
                    ST_Resample(kde_density,
                        ST_MakeEmptyRaster(2000,2000, -20, -20, .02, .02, 0, 0, 4326),
                        'Bilinear'),
                    1, null, FORMAT('[rast] * %s ', segment.bval)) AS resampled, dsid
                FROM segment
                WHERE segment.kde_density IS NOT null AND segment.dsid=:name
            ) AS rs
            GROUP BY rs.dsid
        ) AS images
        WHERE images.dsid = dataset.id AND dataset.id=:name;

        UPDATE dataset SET raster_2k_all = ST_AddBand(
            ST_AddBand(raster_2k_red, raster_2k_green),
            raster_2k_blue)
        WHERE dataset.id=:name;
    """).params(name=dsid)
    session.execute(raw_sql2)

    raw_sql3 = text("""
        UPDATE segment SET points = sq.new_geo
        FROM (
            SELECT segment.id as seg_id, ST_Collect(umi.xumi_xy) as new_geo
            FROM segment JOIN umi ON umi.seg = segment.id
            WHERE segment.dsid=:name
            GROUP BY segment.id
        ) AS sq
        WHERE segment.id=sq.seg_id;

        UPDATE segment SET points_xym_total_reads = sq.new_geo
        FROM (
            SELECT segment.id AS seg_id,
                ST_Collect(ST_MakePointM(ST_X(umi.xumi_xy),ST_Y(umi.xumi_xy),umi.total_reads)) as new_geo
            FROM segment JOIN umi ON umi.seg = segment.id
            WHERE segment.dsid=:name
            GROUP BY segment.id
        ) AS sq
        WHERE segment.id = sq.seg_id;

        UPDATE segment SET points_xym_kde = sq.new_geo
        FROM (
            SELECT segment.id AS seg_id,
                ST_Collect(ST_MakePointM(ST_X(umi.xumi_xy),ST_Y(umi.xumi_xy),umi.kde_val)) as new_geo
            FROM segment JOIN umi ON umi.seg = segment.id
            WHERE segment.dsid=:name
            GROUP BY segment.id
        ) AS sq
        WHERE segment.id = sq.seg_id;
    """).params(name=dsid)
    session.execute(raw_sql3)

    raw_sql4 = text("""
        UPDATE segment SET points12 = sq.new_geo
        FROM (
            SELECT segment.id as seg_id, ST_Collect(umi.xumi_xy) as new_geo
            FROM segment JOIN umi ON umi.seg = segment.id
            WHERE umi.kde_val > 12 AND segment.dsid=:name
            GROUP BY segment.id
        ) AS sq
        WHERE segment.id=sq.seg_id;

        UPDATE segment SET points128 = sq.new_geo
        FROM (
            SELECT segment.id as seg_id, ST_Collect(umi.xumi_xy) as new_geo
            FROM segment JOIN umi ON umi.seg = segment.id
            WHERE umi.kde_val > 128 AND segment.dsid=:name
            GROUP BY segment.id
        ) AS sq
        WHERE segment.id=sq.seg_id;
    """).params(name=dsid)
    session.execute(raw_sql4)
    session.commit()

    def alpha_shape(points, alpha):
        """
        Compute the alpha shape (concave hull) of a set of points.

        @param points: Iterable container of points.
        @param alpha: alpha value to influence the gooeyness of the border.
            Smaller numbers don't fall inward as much as larger numbers.
            Too large, and you lose everything!
        """
        if len(points) < 4:
            # When you have a triangle, there is no sense
            # in computing an alpha shape.
            return geometry.MultiPoint(list(points)).convex_hull
        coords = np.array([point.coords[0] for point in points])
        tri = Delaunay(coords)
        triangles = coords[tri.vertices]
        # Side lengths of each Delaunay triangle.
        a = ((triangles[:, 0, 0] - triangles[:, 1, 0]) ** 2 +
             (triangles[:, 0, 1] - triangles[:, 1, 1]) ** 2) ** 0.5
        b = ((triangles[:, 1, 0] - triangles[:, 2, 0]) ** 2 +
             (triangles[:, 1, 1] - triangles[:, 2, 1]) ** 2) ** 0.5
        c = ((triangles[:, 2, 0] - triangles[:, 0, 0]) ** 2 +
             (triangles[:, 2, 1] - triangles[:, 0, 1]) ** 2) ** 0.5
        s = (a + b + c) / 2.0
        areas = (s * (s - a) * (s - b) * (s - c)) ** 0.5  # Heron's formula
        circums = a * b * c / (4.0 * areas)
        # Keep only triangles whose circumradius is below 1/alpha.
        filtered = triangles[circums < (1.0 / alpha)]
        edge1 = filtered[:, (0, 1)]
        edge2 = filtered[:, (1, 2)]
        edge3 = filtered[:, (2, 0)]
        edge_points = np.unique(np.concatenate((edge1, edge2, edge3)), axis=0).tolist()
        m = geometry.MultiLineString(edge_points)
        triangles = list(polygonize(m))
        return cascaded_union(triangles), edge_points

    for i, c in enumerate(sq(Segment).filter(Segment.dsid == dsid)
                          .filter(Segment.n_umis > 20).all()):
        shp = to_shape(c.points)
        alpha = .1 / np.mean(np.var(np.array([p.coords[0] for p in shp]).T, 1)) ** .5
        if i % 20 == 0:
            print(i, alpha)
        concave_hull, edge_points = alpha_shape(shp, alpha=alpha)
        if concave_hull.geom_type == 'MultiPolygon':
            concave_hull = max(concave_hull, key=lambda a: a.area)
        wkb_element = from_shape(concave_hull)
        c.hull = wkb_element

        shp = to_shape(c.points12)
        alpha = .5 / np.mean(np.var(np.array([p.coords[0] for p in shp]).T, 1)) ** .5
        concave_hull, edge_points = alpha_shape(shp, alpha=alpha)
        if concave_hull.geom_type == 'MultiPolygon':
            concave_hull = max(concave_hull, key=lambda a: a.area)
        wkb_element = from_shape(concave_hull)
        c.hull12 = wkb_element

        shp = to_shape(c.points128)
        alpha = .5 / np.mean(np.var(np.array([p.coords[0] for p in shp]).T, 1)) ** .5
        concave_hull, edge_points = alpha_shape(shp, alpha=alpha)
        if concave_hull.geom_type == 'MultiPolygon':
            concave_hull = max(concave_hull, key=lambda a: a.area)
        wkb_element = from_shape(concave_hull)
        c.hull128 = wkb_element
        session.add(c)
        # TODO: put back in the commits when this code is ready to run!
        # session.commit()
    return 0

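# A minimal usage sketch for the alpha_shape helper above, assuming it is
# lifted to module scope; the point cloud here is synthetic.
import numpy as np
from shapely import geometry

rng = np.random.default_rng(0)
pts = [geometry.Point(x, y) for x, y in rng.normal(size=(200, 2))]
hull, edges = alpha_shape(pts, alpha=0.5)  # smaller alpha -> closer to the convex hull
print(hull.geom_type, round(hull.area, 2))
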
def get_phrase(self, phrase_id):
    session.execute("SELECT * FROM phrase WHERE id=?", [phrase_id])
    return self.zip_phrase(session.fetchone())