def query(request, basequery, sortsql='1 asc'):
    '''
    Generator producing one inventory-report row per buffer and per bucket.

    For every buffer matched by basequery (including its child buffers in
    the lft/rght hierarchy) and every bucket of the report horizon, yields
    a dict with the starting inventory, produced quantity, consumed
    quantity and ending inventory.

    Parameters:
      request:   Django request; provides the database alias and the report
                 horizon (report_bucket, report_startdate, report_enddate).
      basequery: Django queryset selecting the buffers to report on.
      sortsql:   SQL fragment used in the order-by clause (default '1 asc').
    '''
    cursor = connections[request.database].cursor()
    basesql, baseparams = basequery.query.get_compiler(basequery.db).as_sql(
        with_col_aliases=False
        )

    # Assure the buffer hierarchy (lft/rght values) is up to date before
    # joining on it.
    Buffer.rebuildHierarchy(database=basequery.db)

    # Execute a query to get the onhand value at the start of our horizon.
    # For each buffer, pick the onhand of the flowplan with the highest id
    # before the report start date, aggregated over the child buffers.
    # NOTE(review): report_startdate is interpolated into the SQL string and
    # relies on its str() form being a valid SQL date literal - confirm it
    # is always a server-generated date, never user-controlled text.
    startohdict = {}
    query = '''
      select buffers.name, sum(oh.onhand)
      from (%s) buffers
      inner join buffer
        on buffer.lft between buffers.lft and buffers.rght
      inner join (
        select out_flowplan.thebuffer as thebuffer, out_flowplan.onhand as onhand
        from out_flowplan,
          (select thebuffer, max(id) as id
           from out_flowplan
           where flowdate < '%s'
           group by thebuffer
          ) maxid
        where maxid.thebuffer = out_flowplan.thebuffer
          and maxid.id = out_flowplan.id
        ) oh
        on oh.thebuffer = buffer.name
      group by buffers.name
      ''' % (basesql, request.report_startdate)
    cursor.execute(query, baseparams)
    for row in cursor.fetchall():
        startohdict[row[0]] = float(row[1])

    # Execute the actual query.
    # Fix: the two quantity columns were aliased the wrong way around
    # ("consumed" held the sum of positive, ie produced, flows and vice
    # versa). The aliases now match the positional use below:
    # row[6] = produced (sum of positive flowplan quantities),
    # row[7] = consumed (negated sum of negative flowplan quantities).
    # The python code reads columns by position, so behavior is unchanged.
    query = '''
      select buf.name as row1, buf.item_id as row2, buf.location_id as row3,
        d.bucket as col1, d.startdate as col2, d.enddate as col3,
        coalesce(sum(%s),0.0) as produced,
        coalesce(-sum(%s),0.0) as consumed
      from (%s) buf
      -- Multiply with buckets
      cross join (
        select name as bucket, startdate, enddate
        from common_bucketdetail
        where bucket_id = '%s' and enddate > '%s' and startdate < '%s'
        ) d
      -- Include child buffers
      inner join buffer
        on buffer.lft between buf.lft and buf.rght
      -- Consumed and produced quantities
      left join out_flowplan
        on buffer.name = out_flowplan.thebuffer
        and d.startdate <= out_flowplan.flowdate
        and d.enddate > out_flowplan.flowdate
        and out_flowplan.flowdate >= '%s'
        and out_flowplan.flowdate < '%s'
      -- Grouping and sorting
      group by buf.name, buf.item_id, buf.location_id, buf.onhand,
        d.bucket, d.startdate, d.enddate
      order by %s, d.startdate
      ''' % (
        sql_max('out_flowplan.quantity', '0.0'),
        sql_min('out_flowplan.quantity', '0.0'),
        basesql, request.report_bucket, request.report_startdate,
        request.report_enddate, request.report_startdate,
        request.report_enddate, sortsql
        )
    cursor.execute(query, baseparams)

    # Build the python result. Rows arrive sorted by buffer, then by bucket
    # start date, so the running inventory can be carried from row to row.
    prevbuf = None
    for row in cursor.fetchall():
        if row[0] != prevbuf:
            # First bucket of a new buffer: start from the pre-horizon onhand.
            prevbuf = row[0]
            startoh = startohdict.get(prevbuf, 0)
            endoh = startoh + float(row[6] - row[7])
        else:
            # Subsequent bucket: carry over the previous ending inventory.
            startoh = endoh
            endoh += float(row[6] - row[7])
        yield {
            'buffer': row[0],
            'item': row[1],
            'location': row[2],
            'bucket': row[3],
            'startdate': python_date(row[4]),
            'enddate': python_date(row[5]),
            'startoh': round(startoh, 1),
            'produced': round(row[6], 1),
            'consumed': round(row[7], 1),
            'endoh': round(endoh, 1),
        }
def query(request, basequery, sortsql='1 asc'):
    '''
    Generator producing one utilization-report row per resource and per
    bucket: availability, unavailability, load and setup time in the
    bucket, plus the average utilization over the whole report horizon.

    Parameters:
      request:   Django request; provides the database alias and the report
                 horizon (report_bucket, report_startdate, report_enddate).
      basequery: Django queryset selecting the resources to report on.
      sortsql:   SQL fragment used in the order-by clause (default '1 asc').
    '''
    basesql, baseparams = basequery.query.get_compiler(basequery.db).as_sql(
        with_col_aliases=True)
    # Get the time units
    # units[0] is interpolated with %f below, so it is a numeric scaling
    # factor; presumably it converts the stored time values into the report's
    # chosen time unit - confirm against OverviewReport.getUnits.
    units = OverviewReport.getUnits(request)
    # Assure the item hierarchy is up to date
    Resource.rebuildHierarchy(database=basequery.db)
    # Execute the query
    cursor = connections[request.database].cursor()
    # NOTE(review): the report dates and bucket name are interpolated into
    # the SQL string rather than passed as bind parameters; this relies on
    # them being server-generated values, never user-controlled text.
    query = '''
      select res.name as row1, res.location_id as row2,
        coalesce(max(plan_summary.avg_util),0) as avgutil,
        d.bucket as col1, d.startdate as col2,
        coalesce(sum(out_resourceplan.available),0)
          * (case when res.type = 'buckets' then 1 else %f end) as available,
        coalesce(sum(out_resourceplan.unavailable),0)
          * (case when res.type = 'buckets' then 1 else %f end) as unavailable,
        coalesce(sum(out_resourceplan.load),0)
          * (case when res.type = 'buckets' then 1 else %f end) as loading,
        coalesce(sum(out_resourceplan.setup),0)
          * (case when res.type = 'buckets' then 1 else %f end) as setup
      from (%s) res
      -- Multiply with buckets
      cross join (
        select name as bucket, startdate, enddate
        from common_bucketdetail
        where bucket_id = '%s' and enddate > '%s' and startdate < '%s'
        ) d
      -- Include child resources
      inner join %s res2
        on res2.lft between res.lft and res.rght
      -- Utilization info
      left join out_resourceplan
        on res2.name = out_resourceplan.theresource
        and d.startdate <= out_resourceplan.startdate
        and d.enddate > out_resourceplan.startdate
        and out_resourceplan.startdate >= '%s'
        and out_resourceplan.startdate < '%s'
      -- Average utilization info
      left join (
        select theresource,
          ( coalesce(sum(out_resourceplan.load),0)
            + coalesce(sum(out_resourceplan.setup),0) )
          * 100.0 / coalesce(%s,1) as avg_util
        from out_resourceplan
        where out_resourceplan.startdate >= '%s'
          and out_resourceplan.startdate < '%s'
        group by theresource
        ) plan_summary
        on res2.name = plan_summary.theresource
      -- Grouping and sorting
      group by res.name, res.location_id, res.type, d.bucket, d.startdate
      order by %s, d.startdate
      ''' % (
        units[0], units[0], units[0], units[0],
        basesql, request.report_bucket, request.report_startdate,
        request.report_enddate,
        # Quote the table name through the backend, since "resource" may
        # collide with a reserved word on some databases.
        connections[basequery.db].ops.quote_name('resource'),
        request.report_startdate, request.report_enddate,
        # Denominator floor avoids a division by zero when nothing is available.
        sql_max('sum(out_resourceplan.available)', '0.0001'),
        request.report_startdate, request.report_enddate, sortsql
        )
    cursor.execute(query, baseparams)

    # Build the python result
    for row in cursor.fetchall():
        # Per-bucket utilization = loading / available * 100, guarding
        # against buckets with no availability at all.
        if row[5] != 0:
            util = row[7] * 100 / row[5]
        else:
            util = 0
        yield {
            'resource': row[0],
            'location': row[1],
            'avgutil': round(row[2], 2),
            'bucket': row[3],
            'startdate': python_date(row[4]),
            'available': round(row[5], 1),
            'unavailable': round(row[6], 1),
            'load': round(row[7], 1),
            'setup': round(row[8], 1),
            'utilization': round(util, 2)
        }
def query(request, basequery, sortsql='1 asc'):
    '''
    Generator producing one inventory-report row per buffer and per bucket.

    For every buffer matched by basequery (including its child buffers in
    the lft/rght hierarchy) and every bucket of the report horizon, yields
    a dict with the starting inventory, produced quantity, consumed
    quantity and ending inventory.

    Parameters:
      request:   Django request; provides the database alias and the report
                 horizon (report_bucket, report_startdate, report_enddate).
      basequery: Django queryset selecting the buffers to report on.
      sortsql:   SQL fragment used in the order-by clause (default '1 asc').
    '''
    cursor = connections[request.database].cursor()
    basesql, baseparams = basequery.query.get_compiler(
        basequery.db).as_sql(with_col_aliases=False)

    # Assure the buffer hierarchy (lft/rght values) is up to date before
    # joining on it.
    Buffer.rebuildHierarchy(database=basequery.db)

    # Execute a query to get the onhand value at the start of our horizon.
    # For each buffer, pick the onhand of the flowplan with the highest id
    # before the report start date, aggregated over the child buffers.
    # NOTE(review): report_startdate is interpolated into the SQL string and
    # relies on its str() form being a valid SQL date literal - confirm it
    # is always a server-generated date, never user-controlled text.
    startohdict = {}
    query = '''
      select buffers.name, sum(oh.onhand)
      from (%s) buffers
      inner join buffer
        on buffer.lft between buffers.lft and buffers.rght
      inner join (
        select out_flowplan.thebuffer as thebuffer, out_flowplan.onhand as onhand
        from out_flowplan,
          (select thebuffer, max(id) as id
           from out_flowplan
           where flowdate < '%s'
           group by thebuffer
          ) maxid
        where maxid.thebuffer = out_flowplan.thebuffer
          and maxid.id = out_flowplan.id
        ) oh
        on oh.thebuffer = buffer.name
      group by buffers.name
      ''' % (basesql, request.report_startdate)
    cursor.execute(query, baseparams)
    for row in cursor.fetchall():
        startohdict[row[0]] = float(row[1])

    # Execute the actual query.
    # Fix: the two quantity columns were aliased the wrong way around
    # ("consumed" held the sum of positive, ie produced, flows and vice
    # versa). The aliases now match the positional use below:
    # row[6] = produced (sum of positive flowplan quantities),
    # row[7] = consumed (negated sum of negative flowplan quantities).
    # The python code reads columns by position, so behavior is unchanged.
    query = '''
      select buf.name as row1, buf.item_id as row2, buf.location_id as row3,
        d.bucket as col1, d.startdate as col2, d.enddate as col3,
        coalesce(sum(%s),0.0) as produced,
        coalesce(-sum(%s),0.0) as consumed
      from (%s) buf
      -- Multiply with buckets
      cross join (
        select name as bucket, startdate, enddate
        from common_bucketdetail
        where bucket_id = '%s' and enddate > '%s' and startdate < '%s'
        ) d
      -- Include child buffers
      inner join buffer
        on buffer.lft between buf.lft and buf.rght
      -- Consumed and produced quantities
      left join out_flowplan
        on buffer.name = out_flowplan.thebuffer
        and d.startdate <= out_flowplan.flowdate
        and d.enddate > out_flowplan.flowdate
        and out_flowplan.flowdate >= '%s'
        and out_flowplan.flowdate < '%s'
      -- Grouping and sorting
      group by buf.name, buf.item_id, buf.location_id, buf.onhand,
        d.bucket, d.startdate, d.enddate
      order by %s, d.startdate
      ''' % (
        sql_max('out_flowplan.quantity', '0.0'),
        sql_min('out_flowplan.quantity', '0.0'),
        basesql, request.report_bucket, request.report_startdate,
        request.report_enddate, request.report_startdate,
        request.report_enddate, sortsql
        )
    cursor.execute(query, baseparams)

    # Build the python result. Rows arrive sorted by buffer, then by bucket
    # start date, so the running inventory can be carried from row to row.
    prevbuf = None
    for row in cursor.fetchall():
        if row[0] != prevbuf:
            # First bucket of a new buffer: start from the pre-horizon onhand.
            prevbuf = row[0]
            startoh = startohdict.get(prevbuf, 0)
            endoh = startoh + float(row[6] - row[7])
        else:
            # Subsequent bucket: carry over the previous ending inventory.
            startoh = endoh
            endoh += float(row[6] - row[7])
        yield {
            'buffer': row[0],
            'item': row[1],
            'location': row[2],
            'bucket': row[3],
            'startdate': python_date(row[4]),
            'enddate': python_date(row[5]),
            'startoh': round(startoh, 1),
            'produced': round(row[6], 1),
            'consumed': round(row[7], 1),
            'endoh': round(endoh, 1),
        }
def query(request, basequery, sortsql='1 asc'):
    '''
    Generator producing one utilization-report row per resource and per
    bucket: availability, unavailability, load and setup time in the
    bucket, plus the average utilization over the whole report horizon.

    Parameters:
      request:   Django request; provides the database alias and the report
                 horizon (report_bucket, report_startdate, report_enddate).
      basequery: Django queryset selecting the resources to report on.
      sortsql:   SQL fragment used in the order-by clause (default '1 asc').
    '''
    basesql, baseparams = basequery.query.get_compiler(
        basequery.db).as_sql(with_col_aliases=True)
    # Get the time units
    # units[0] is interpolated with %f below, so it is a numeric scaling
    # factor; presumably it converts the stored time values into the report's
    # chosen time unit - confirm against OverviewReport.getUnits.
    units = OverviewReport.getUnits(request)
    # Assure the item hierarchy is up to date
    Resource.rebuildHierarchy(database=basequery.db)
    # Execute the query
    cursor = connections[request.database].cursor()
    # NOTE(review): the report dates and bucket name are interpolated into
    # the SQL string rather than passed as bind parameters; this relies on
    # them being server-generated values, never user-controlled text.
    query = '''
      select res.name as row1, res.location_id as row2,
        coalesce(max(plan_summary.avg_util),0) as avgutil,
        d.bucket as col1, d.startdate as col2,
        coalesce(sum(out_resourceplan.available),0)
          * (case when res.type = 'buckets' then 1 else %f end) as available,
        coalesce(sum(out_resourceplan.unavailable),0)
          * (case when res.type = 'buckets' then 1 else %f end) as unavailable,
        coalesce(sum(out_resourceplan.load),0)
          * (case when res.type = 'buckets' then 1 else %f end) as loading,
        coalesce(sum(out_resourceplan.setup),0)
          * (case when res.type = 'buckets' then 1 else %f end) as setup
      from (%s) res
      -- Multiply with buckets
      cross join (
        select name as bucket, startdate, enddate
        from common_bucketdetail
        where bucket_id = '%s' and enddate > '%s' and startdate < '%s'
        ) d
      -- Include child resources
      inner join %s res2
        on res2.lft between res.lft and res.rght
      -- Utilization info
      left join out_resourceplan
        on res2.name = out_resourceplan.theresource
        and d.startdate <= out_resourceplan.startdate
        and d.enddate > out_resourceplan.startdate
        and out_resourceplan.startdate >= '%s'
        and out_resourceplan.startdate < '%s'
      -- Average utilization info
      left join (
        select theresource,
          ( coalesce(sum(out_resourceplan.load),0)
            + coalesce(sum(out_resourceplan.setup),0) )
          * 100.0 / coalesce(%s,1) as avg_util
        from out_resourceplan
        where out_resourceplan.startdate >= '%s'
          and out_resourceplan.startdate < '%s'
        group by theresource
        ) plan_summary
        on res2.name = plan_summary.theresource
      -- Grouping and sorting
      group by res.name, res.location_id, res.type, d.bucket, d.startdate
      order by %s, d.startdate
      ''' % (
        units[0], units[0], units[0], units[0],
        basesql, request.report_bucket, request.report_startdate,
        request.report_enddate,
        # Quote the table name through the backend, since "resource" may
        # collide with a reserved word on some databases.
        connections[basequery.db].ops.quote_name('resource'),
        request.report_startdate, request.report_enddate,
        # Denominator floor avoids a division by zero when nothing is available.
        sql_max('sum(out_resourceplan.available)', '0.0001'),
        request.report_startdate, request.report_enddate, sortsql
        )
    cursor.execute(query, baseparams)

    # Build the python result
    for row in cursor.fetchall():
        # Per-bucket utilization = loading / available * 100, guarding
        # against buckets with no availability at all.
        if row[5] != 0:
            util = row[7] * 100 / row[5]
        else:
            util = 0
        yield {
            'resource': row[0],
            'location': row[1],
            'avgutil': round(row[2], 2),
            'bucket': row[3],
            'startdate': python_date(row[4]),
            'available': round(row[5], 1),
            'unavailable': round(row[6], 1),
            'load': round(row[7], 1),
            'setup': round(row[8], 1),
            'utilization': round(util, 2)
        }