def loadResources(cursor):
    """
    Load all records of the "resource" table into the frePPLe engine.

    Rows are read in increasing 'lvl' order so that owner resources are
    always created before the resources referring to them. Each row is
    mapped to a frepple resource object of the matching type; an error on
    an individual row is printed and the import continues with the next row.

    :param cursor: an open Django database cursor.
    """
    print('Importing resources...')
    cnt = 0
    starttime = time()
    # Refresh the lft/rght/lvl nested-set columns before relying on the
    # "order by lvl" below.
    Resource.rebuildHierarchy(database=cursor.db.alias)
    # BUGFIX: the original read connections[database], but no name 'database'
    # exists in this scope (NameError on first call). Use the cursor's own
    # connection alias, consistent with the rebuildHierarchy() call above.
    cursor.execute('''
        SELECT name, description, maximum, maximum_calendar_id, location_id,
           type, cost, maxearly, setup, setupmatrix_id, category, subcategory,
           owner_id
        FROM %s
        order by lvl asc, name
        ''' % connections[cursor.db.alias].ops.quote_name('resource'))
    for i, j, t, k, l, m, n, o, p, q, r, s, u in cursor.fetchall():
        cnt += 1
        try:
            if m == "infinite":
                x = frepple.resource_infinite(name=i, description=j, category=r, subcategory=s)
            elif not m or m == "default":
                x = frepple.resource_default(name=i, description=j, category=r, subcategory=s)
                # Capacity fields only apply to finite-capacity resources.
                if k:
                    x.maximum_calendar = frepple.calendar(name=k)
                if o:
                    x.maxearly = o
                if t:
                    x.maximum = t
            else:
                raise ValueError("Resource type '%s' not recognized" % m)
            # Fields shared by all resource types.
            if l:
                x.location = frepple.location(name=l)
            if n:
                x.cost = n
            if p:
                x.setup = p
            if q:
                x.setupmatrix = frepple.setupmatrix(name=q)
            # The owner resource was already created, thanks to the lvl ordering.
            if u:
                x.owner = frepple.resource(name=u)
        except Exception as e:
            print("Error:", e)
    print('Loaded %d resources in %.2f seconds' % (cnt, time() - starttime))
def loadResources(self):
    """
    Load filtered records of the "resource" table into the frePPLe engine.

    Rows are read in increasing 'lvl' order so that owner resources are
    always created before the resources referring to them. Each row is
    mapped to a frepple resource object matching its 'type' column; an
    error on an individual row is printed and the import continues.
    """
    print('Importing resources...')
    cnt = 0
    starttime = time()
    # Refresh the lft/rght/lvl nested-set columns before relying on the
    # ORDER BY lvl below.
    Resource.rebuildHierarchy(database=self.database)
    # self.filter_where optionally restricts the import to a subset of rows.
    self.cursor.execute('''
        SELECT
          name, description, maximum, maximum_calendar_id, location_id,
          type, cost, maxearly, setup, setupmatrix_id, category, subcategory,
          owner_id, source
        FROM %s %s
        ORDER BY lvl ASC, name
        ''' % (connections[self.cursor.db.alias].ops.quote_name('resource'), self.filter_where)
        )
    for i in self.cursor.fetchall():
        cnt += 1
        try:
            if i[5] == "infinite":
                x = frepple.resource_infinite(
                    name=i[0], description=i[1], category=i[10], subcategory=i[11],
                    source=i[13]
                    )
            elif i[5] == "buckets":
                x = frepple.resource_buckets(
                    name=i[0], description=i[1], category=i[10], subcategory=i[11],
                    source=i[13]
                    )
                if i[3]:
                    x.maximum_calendar = frepple.calendar(name=i[3])
                if i[7]:
                    # NOTE(review): assigned raw here, while the "default"
                    # branch below converts with .total_seconds() -- confirm
                    # whether bucketized resources really expect the raw value.
                    x.maxearly = i[7]
            elif not i[5] or i[5] == "default":
                x = frepple.resource_default(
                    name=i[0], description=i[1], category=i[10], subcategory=i[11],
                    source=i[13]
                    )
                if i[3]:
                    x.maximum_calendar = frepple.calendar(name=i[3])
                if i[7]:
                    x.maxearly = i[7].total_seconds()
                if i[2]:
                    x.maximum = i[2]
            else:
                raise ValueError("Resource type '%s' not recognized" % i[5])
            # Fields shared by all resource types.
            if i[4]:
                x.location = frepple.location(name=i[4])
            if i[6]:
                x.cost = i[6]
            if i[8]:
                x.setup = i[8]
            if i[9]:
                x.setupmatrix = frepple.setupmatrix(name=i[9])
            # The owner resource was already created, thanks to the lvl ordering.
            if i[12]:
                x.owner = frepple.resource(name=i[12])
        except Exception as e:
            print("Error:", e)
    print('Loaded %d resources in %.2f seconds' % (cnt, time() - starttime))
def loadResources(self):
    """
    Load filtered records of the "resource" table into the frePPLe engine.

    Rows are read in increasing 'lvl' order so that owner resources are
    always created before the resources referring to them. Each row is
    mapped to a frepple resource object matching its 'type' column; an
    error on an individual row is printed and the import continues.
    """
    print('Importing resources...')
    cnt = 0
    starttime = time()
    # Refresh the lft/rght/lvl nested-set columns before relying on the
    # ORDER BY lvl below.
    Resource.rebuildHierarchy(database=self.database)
    # self.filter_where optionally restricts the import to a subset of rows.
    self.cursor.execute('''
        SELECT
          name, description, maximum, maximum_calendar_id, location_id,
          type, cost, maxearly, setup, setupmatrix_id, category, subcategory,
          owner_id, source
        FROM %s %s
        ORDER BY lvl ASC, name
        ''' % (connections[self.cursor.db.alias].ops.quote_name('resource'), self.filter_where)
        )
    for i in self.cursor.fetchall():
        cnt += 1
        try:
            if i[5] == "infinite":
                x = frepple.resource_infinite(
                    name=i[0], description=i[1], category=i[10], subcategory=i[11],
                    source=i[13]
                    )
            elif i[5] == "buckets":
                x = frepple.resource_buckets(
                    name=i[0], description=i[1], category=i[10], subcategory=i[11],
                    source=i[13]
                    )
                if i[3]:
                    x.maximum_calendar = frepple.calendar(name=i[3])
                if i[7]:
                    x.maxearly = i[7]
            elif not i[5] or i[5] == "default":
                x = frepple.resource_default(
                    name=i[0], description=i[1], category=i[10], subcategory=i[11],
                    source=i[13]
                    )
                if i[3]:
                    x.maximum_calendar = frepple.calendar(name=i[3])
                if i[7]:
                    # NOTE(review): other revisions of this loader convert
                    # maxearly with .total_seconds() here -- confirm which
                    # representation the engine expects in this version.
                    x.maxearly = i[7]
                if i[2]:
                    x.maximum = i[2]
            else:
                raise ValueError("Resource type '%s' not recognized" % i[5])
            # Fields shared by all resource types.
            if i[4]:
                x.location = frepple.location(name=i[4])
            if i[6]:
                x.cost = i[6]
            if i[8]:
                x.setup = i[8]
            if i[9]:
                x.setupmatrix = frepple.setupmatrix(name=i[9])
            # The owner resource was already created, thanks to the lvl ordering.
            if i[12]:
                x.owner = frepple.resource(name=i[12])
        except Exception as e:
            print("Error:", e)
    print('Loaded %d resources in %.2f seconds' % (cnt, time() - starttime))
def loadResources(cursor):
    """
    Load all records of the "resource" table into the frePPLe engine.

    Rows arrive ordered by their 'lvl' hierarchy level, guaranteeing that
    an owner resource exists before any resource referencing it. A failure
    on one row is reported and does not stop the import.

    :param cursor: an open Django database cursor.
    """
    print('Importing resources...')
    count = 0
    start = time()
    # Bring the nested-set columns (lft/rght/lvl) up to date first.
    Resource.rebuildHierarchy(database=cursor.db.alias)
    statement = (
        '''SELECT name, description, maximum, maximum_calendar_id, location_id,
        type, cost, maxearly, setup, setupmatrix_id, category, subcategory,
        owner_id FROM %s order by lvl asc, name'''
        % connections[cursor.db.alias].ops.quote_name('resource')
        )
    cursor.execute(statement)
    for (name, description, maximum, max_calendar, location, res_type,
         cost, maxearly, setup, setupmatrix, category, subcategory,
         owner) in cursor.fetchall():
        count += 1
        try:
            if res_type == "infinite":
                res = frepple.resource_infinite(
                    name=name, description=description,
                    category=category, subcategory=subcategory
                    )
            elif res_type == "default" or not res_type:
                res = frepple.resource_default(
                    name=name, description=description,
                    category=category, subcategory=subcategory
                    )
                # Capacity settings only make sense on finite resources.
                if max_calendar:
                    res.maximum_calendar = frepple.calendar(name=max_calendar)
                if maxearly:
                    res.maxearly = maxearly
                if maximum:
                    res.maximum = maximum
            else:
                raise ValueError("Resource type '%s' not recognized" % res_type)
            # Attributes common to every resource type.
            if location:
                res.location = frepple.location(name=location)
            if cost:
                res.cost = cost
            if setup:
                res.setup = setup
            if setupmatrix:
                res.setupmatrix = frepple.setupmatrix(name=setupmatrix)
            # Owners were created earlier thanks to the lvl ordering.
            if owner:
                res.owner = frepple.resource(name=owner)
        except Exception as e:
            print("Error:", e)
    print('Loaded %d resources in %.2f seconds' % (count, time() - start))
def query(reportclass, request, basequery, sortsql='1 asc'):
    """
    Generate the resource utilization report data.

    Crosses the filtered resource set with the report time buckets and
    aggregates the out_resourceplan records into available, unavailable,
    load and setup time per resource per bucket. Yields one dictionary
    per resource/bucket combination.

    :param reportclass: report class providing attr_sql (extra attribute columns).
    :param request: Django request carrying database and report horizon fields.
    :param basequery: queryset whose SQL restricts the resources shown.
    :param sortsql: SQL fragment for the ordering of the result rows.
    """
    basesql, baseparams = basequery.query.get_compiler(
        basequery.db).as_sql(with_col_aliases=False)
    # Get the time units
    units = OverviewReport.getUnits(request)
    # Assure the item hierarchy is up to date
    Resource.rebuildHierarchy(database=basequery.db)
    # Execute the query
    cursor = connections[request.database].cursor()
    query = '''
      select res.name, res.description, res.category, res.subcategory,
        res.type, res.maximum, res.maximum_calendar_id, res.cost, res.maxearly,
        res.setupmatrix_id, res.setup, location.name, location.description,
        location.category, location.subcategory, location.available_id,
        coalesce(max(plan_summary.avg_util),0) as avgutil,
        res.available_id available_calendar,
        %s
        d.bucket as col1, d.startdate as col2,
        coalesce(sum(out_resourceplan.available),0) * (case when res.type = 'buckets' then 1 else %f end) as available,
        coalesce(sum(out_resourceplan.unavailable),0) * (case when res.type = 'buckets' then 1 else %f end) as unavailable,
        coalesce(sum(out_resourceplan.load),0) * (case when res.type = 'buckets' then 1 else %f end) as loading,
        coalesce(sum(out_resourceplan.setup),0) * (case when res.type = 'buckets' then 1 else %f end) as setup
      from (%s) res
      left outer join location
        on res.location_id = location.name
      -- Multiply with buckets
      cross join (
        select name as bucket, startdate, enddate
        from common_bucketdetail
        where bucket_id = '%s' and enddate > '%s' and startdate < '%s'
        ) d
      -- Utilization info
      left join out_resourceplan
        on res.name = out_resourceplan.resource
        and d.startdate <= out_resourceplan.startdate
        and d.enddate > out_resourceplan.startdate
        and out_resourceplan.startdate >= '%s'
        and out_resourceplan.startdate < '%s'
      -- Average utilization info
      left join (
        select
          resource,
          ( coalesce(sum(out_resourceplan.load),0) + coalesce(sum(out_resourceplan.setup),0) )
            * 100.0 / coalesce(greatest(sum(out_resourceplan.available), 0.0001),1) as avg_util
        from out_resourceplan
        where out_resourceplan.startdate >= '%s'
          and out_resourceplan.startdate < '%s'
        group by resource
        ) plan_summary
        on res.name = plan_summary.resource
      -- Grouping and sorting
      group by res.name, res.description, res.category, res.subcategory,
        res.type, res.maximum, res.maximum_calendar_id, res.available_id,
        res.cost, res.maxearly, res.setupmatrix_id, res.setup, location.name,
        location.description, location.category, location.subcategory,
        location.available_id, %s d.bucket, d.startdate
      order by %s, d.startdate
      ''' % (
        reportclass.attr_sql, units[0], units[0], units[0], units[0],
        basesql, request.report_bucket, request.report_startdate,
        request.report_enddate, request.report_startdate,
        request.report_enddate, request.report_startdate,
        request.report_enddate, reportclass.attr_sql, sortsql
        )
    cursor.execute(query, baseparams)

    # Build the python result
    for row in cursor.fetchall():
        numfields = len(row)
        # Utilization = load / available time of the bucket.
        if row[numfields - 4] != 0:
            util = row[numfields - 2] * 100 / row[numfields - 4]
        else:
            util = 0
        result = {
            'resource': row[0], 'description': row[1], 'category': row[2],
            'subcategory': row[3], 'type': row[4], 'maximum': row[5],
            'maximum_calendar': row[6], 'cost': row[7], 'maxearly': row[8],
            'setupmatrix': row[9], 'setup': row[10],
            'location__name': row[11], 'location__description': row[12],
            'location__category': row[13], 'location__subcategory': row[14],
            'location__available': row[15],
            'avgutil': round(row[16], 2),
            'available_calendar': row[17],
            'bucket': row[numfields - 6],
            'startdate': row[numfields - 5].date(),
            'available': round(row[numfields - 4], 1),
            'unavailable': round(row[numfields - 3], 1),
            'load': round(row[numfields - 2], 1),
            # BUGFIX: this key was 'setup', duplicating the resource-setup
            # entry above and silently discarding row[10]. Renamed to
            # 'setuptime', matching the later revision of this report.
            'setuptime': round(row[numfields - 1], 1),
            'utilization': round(util, 2)
            }
        # BUGFIX: attribute columns start right after the 18 fixed columns
        # (index 17 is available_calendar, already consumed above), so the
        # first attribute lives at index 18, not 17.
        idx = 18
        for f in getAttributeFields(Resource):
            result[f.field_name] = row[idx]
            idx += 1
        for f in getAttributeFields(Location):
            result[f.field_name] = row[idx]
            idx += 1
        yield result
def query(request, basequery, sortsql='1 asc'):
    """
    Generate the resource utilization report data (summary variant).

    Crosses the filtered resource set with the report time buckets,
    includes all child resources of each selected resource, and sums the
    out_resourceplan records into available, unavailable, load and setup
    time per resource per bucket. Yields one dictionary per
    resource/bucket combination.

    :param request: Django request carrying database and report horizon fields.
    :param basequery: queryset whose SQL restricts the resources shown.
    :param sortsql: SQL fragment for the ordering of the result rows.
    """
    basesql, baseparams = basequery.query.get_compiler(basequery.db).as_sql(with_col_aliases=True)
    # Get the time units
    units = OverviewReport.getUnits(request)
    # Assure the item hierarchy is up to date
    Resource.rebuildHierarchy(database=basequery.db)
    # Execute the query
    cursor = connections[request.database].cursor()
    query = '''
      select res.name as row1, res.location_id as row2,
        coalesce(max(plan_summary.avg_util),0) as avgutil,
        d.bucket as col1, d.startdate as col2,
        coalesce(sum(out_resourceplan.available),0) * (case when res.type = 'buckets' then 1 else %f end) as available,
        coalesce(sum(out_resourceplan.unavailable),0) * (case when res.type = 'buckets' then 1 else %f end) as unavailable,
        coalesce(sum(out_resourceplan.load),0) * (case when res.type = 'buckets' then 1 else %f end) as loading,
        coalesce(sum(out_resourceplan.setup),0) * (case when res.type = 'buckets' then 1 else %f end) as setup
      from (%s) res
      -- Multiply with buckets
      cross join (
        select name as bucket, startdate, enddate
        from common_bucketdetail
        where bucket_id = '%s' and enddate > '%s' and startdate < '%s'
        ) d
      -- Include child resources
      inner join %s res2
      on res2.lft between res.lft and res.rght
      -- Utilization info
      left join out_resourceplan
      on res2.name = out_resourceplan.theresource
      and d.startdate <= out_resourceplan.startdate
      and d.enddate > out_resourceplan.startdate
      and out_resourceplan.startdate >= '%s'
      and out_resourceplan.startdate < '%s'
      -- Average utilization info
      left join (
        select theresource,
          ( coalesce(sum(out_resourceplan.load),0) + coalesce(sum(out_resourceplan.setup),0) )
          * 100.0 / coalesce(%s,1) as avg_util
        from out_resourceplan
        where out_resourceplan.startdate >= '%s'
        and out_resourceplan.startdate < '%s'
        group by theresource
        ) plan_summary
      on res2.name = plan_summary.theresource
      -- Grouping and sorting
      group by res.name, res.location_id, res.type, d.bucket, d.startdate
      order by %s, d.startdate
      ''' % (
        units[0], units[0], units[0],
        units[0], basesql, request.report_bucket, request.report_startdate,
        request.report_enddate,
        connections[basequery.db].ops.quote_name('resource'),
        request.report_startdate, request.report_enddate,
        sql_max('sum(out_resourceplan.available)', '0.0001'),
        request.report_startdate, request.report_enddate, sortsql
        )
    cursor.execute(query, baseparams)

    # Build the python result
    for row in cursor.fetchall():
        # Utilization of the bucket = load / available time.
        if row[5] != 0:
            util = row[7] * 100 / row[5]
        else:
            util = 0
        yield {
            'resource': row[0], 'location': row[1],
            'avgutil': round(row[2], 2),
            'bucket': row[3], 'startdate': python_date(row[4]),
            'available': round(row[5], 1),
            'unavailable': round(row[6], 1),
            'load': round(row[7], 1),
            'setup': round(row[8], 1),
            'utilization': round(util, 2)
            }
def run(cls, database=DEFAULT_DB_ALIAS, **kwargs):
    """
    Load all (optionally filtered) records of the "resource" table into
    the frePPLe engine.

    Rows are streamed through a chunked cursor in increasing 'lvl' order,
    so owner resources are always created before the resources referring
    to them. An error on an individual row is logged and the import
    continues with the next row.

    :param database: alias of the Django database to read from.
    """
    import frepple

    # Optional filter restricting the rows to import.
    # NOTE(review): filter_and is computed but never used in this method --
    # possibly kept for symmetry with sibling loaders; confirm before removing.
    if cls.filter:
        filter_and = "and %s " % cls.filter
        filter_where = "where %s " % cls.filter
    else:
        filter_and = ""
        filter_where = ""

    with connections[database].chunked_cursor() as cursor:
        cnt = 0
        starttime = time()
        # Refresh the lft/rght/lvl nested-set columns before relying on the
        # ORDER BY lvl below.
        Resource.rebuildHierarchy(database=database)
        cursor.execute('''
            SELECT
              name, description, maximum, maximum_calendar_id, location_id,
              type, cost, maxearly, setup, setupmatrix_id, category, subcategory,
              owner_id, source, available_id
            FROM %s %s
            ORDER BY lvl ASC, name
            ''' % (connections[cursor.db.alias].ops.quote_name('resource'), filter_where)
            )
        for i in cursor:
            cnt += 1
            try:
                if i[5] == "infinite":
                    x = frepple.resource_infinite(
                        name=i[0], description=i[1], category=i[10], subcategory=i[11],
                        source=i[13]
                        )
                elif i[5] == "buckets":
                    x = frepple.resource_buckets(
                        name=i[0], description=i[1], category=i[10], subcategory=i[11],
                        source=i[13]
                        )
                    if i[3]:
                        x.maximum_calendar = frepple.calendar(name=i[3])
                    if i[7] is not None:
                        # NOTE(review): raw value here, while the "default"
                        # branch below uses .total_seconds() -- confirm the
                        # expected representation for bucketized resources.
                        x.maxearly = i[7]
                elif not i[5] or i[5] == "default":
                    x = frepple.resource_default(
                        name=i[0], description=i[1], category=i[10], subcategory=i[11],
                        source=i[13]
                        )
                    if i[3]:
                        x.maximum_calendar = frepple.calendar(name=i[3])
                    if i[7] is not None:
                        x.maxearly = i[7].total_seconds()
                    if i[2] is not None:
                        x.maximum = i[2]
                else:
                    raise ValueError("Resource type '%s' not recognized" % i[5])
                # Fields shared by all resource types.
                if i[4]:
                    x.location = frepple.location(name=i[4])
                if i[6]:
                    x.cost = i[6]
                if i[8]:
                    x.setup = i[8]
                if i[9]:
                    x.setupmatrix = frepple.setupmatrix(name=i[9])
                # The owner resource was already created, thanks to the lvl ordering.
                if i[12]:
                    x.owner = frepple.resource(name=i[12])
                if i[14]:
                    x.available = frepple.calendar(name=i[14])
            except Exception as e:
                logger.error("**** %s ****" % e)
        logger.info('Loaded %d resources in %.2f seconds' % (cnt, time() - starttime))
def query(reportclass, request, basequery, sortsql="1 asc"):
    """
    Generate the resource utilization report data.

    Crosses the filtered resource set with the report time buckets and
    aggregates the out_resourceplan records into available, unavailable,
    load and setup time per resource per bucket. Rows are streamed through
    a chunked cursor; one dictionary is yielded per resource/bucket
    combination.

    :param reportclass: report class providing attr_sql (extra attribute columns).
    :param request: Django request carrying database and report horizon fields.
    :param basequery: queryset whose SQL restricts the resources shown.
    :param sortsql: SQL fragment for the ordering of the result rows.
    """
    basesql, baseparams = basequery.query.get_compiler(
        basequery.db).as_sql(with_col_aliases=False)
    # Get the time units
    units = OverviewReport.getUnits(request)
    # Assure the item hierarchy is up to date
    Resource.rebuildHierarchy(database=basequery.db)
    # Execute the query
    query = """
      select res.name, res.description, res.category, res.subcategory,
        res.type, res.constrained, res.maximum, res.maximum_calendar_id,
        res.cost, res.maxearly, res.setupmatrix_id, res.setup,
        location.name, location.description, location.category,
        location.subcategory, location.available_id, res.avgutil,
        res.available_id available_calendar, res.owner_id,
        %s
        d.bucket as col1, d.startdate as col2,
        coalesce(sum(out_resourceplan.available),0) / (case when res.type = 'buckets' then 1 else %f end) as available,
        coalesce(sum(out_resourceplan.unavailable),0) / (case when res.type = 'buckets' then 1 else %f end) as unavailable,
        coalesce(sum(out_resourceplan.load),0) / (case when res.type = 'buckets' then 1 else %f end) as loading,
        coalesce(sum(out_resourceplan.setup),0) / (case when res.type = 'buckets' then 1 else %f end) as setup
      from (%s) res
      left outer join location
        on res.location_id = location.name
      -- Multiply with buckets
      cross join (
        select name as bucket, startdate, enddate
        from common_bucketdetail
        where bucket_id = '%s' and enddate > '%s' and startdate < '%s'
        ) d
      -- Utilization info
      left join out_resourceplan
        on res.name = out_resourceplan.resource
        and d.startdate <= out_resourceplan.startdate
        and d.enddate > out_resourceplan.startdate
        and out_resourceplan.startdate >= '%s'
        and out_resourceplan.startdate < '%s'
      -- Grouping and sorting
      group by res.name, res.description, res.category, res.subcategory,
        res.type, res.maximum, res.maximum_calendar_id, res.available_id,
        res.cost, res.maxearly, res.setupmatrix_id, res.setup, location.name,
        location.description, location.category, location.subcategory,
        location.available_id, res.avgutil,
        res.owner_id, res.constrained, %s d.bucket, d.startdate
      order by %s, d.startdate
      """ % (
        reportclass.attr_sql,
        units[0], units[0], units[0], units[0],
        basesql,
        request.report_bucket, request.report_startdate, request.report_enddate,
        request.report_startdate, request.report_enddate,
        reportclass.attr_sql,
        sortsql,
        )

    # Build the python result
    with connections[request.database].chunked_cursor() as cursor_chunked:
        cursor_chunked.execute(query, baseparams)
        for row in cursor_chunked:
            numfields = len(row)
            # Utilization = load / available time of the bucket.
            if row[numfields - 4] != 0:
                util = round(row[numfields - 2] * 100 / row[numfields - 4], 2)
            else:
                util = 0
            result = {
                "resource": row[0],
                "description": row[1],
                "category": row[2],
                "subcategory": row[3],
                "type": row[4],
                "constrained": row[5],
                "maximum": row[6],
                "maximum_calendar": row[7],
                "cost": row[8],
                "maxearly": row[9],
                "setupmatrix": row[10],
                "setup": row[11],
                "location__name": row[12],
                "location__description": row[13],
                "location__category": row[14],
                "location__subcategory": row[15],
                "location__available": row[16],
                "avgutil": round(row[17], 2),
                "available_calendar": row[18],
                "owner": row[19],
                # The last 6 columns are the per-bucket values.
                "bucket": row[numfields - 6],
                "startdate": row[numfields - 5],
                "available": row[numfields - 4],
                "unavailable": row[numfields - 3],
                "load": row[numfields - 2],
                "setuptime": row[numfields - 1],
                "utilization": util,
                }
            # Attribute columns follow the 20 fixed columns.
            idx = 20
            for f in getAttributeFields(Resource):
                result[f.field_name] = row[idx]
                idx += 1
            for f in getAttributeFields(Location):
                result[f.field_name] = row[idx]
                idx += 1
            yield result
def run(cls, database=DEFAULT_DB_ALIAS, **kwargs):
    """
    Recompute the planning metric columns stored on the item and resource
    tables (late/unplanned demand counts and values, overload counts).

    Each of the two updates is wrapped in its own try block, so a failure
    in the item metrics does not prevent the resource metrics update.

    :param database: alias of the Django database to update.
    """
    import frepple

    with connections[database].cursor() as cursor:
        # Update item metrics
        try:
            # Demand problems beyond this horizon are ignored.
            try:
                window = frepple.settings.current + timedelta(days=int(
                    Parameter.getValue("metrics.demand_window", database, "999")))
            except Exception:
                print("Warning: invalid parameter 'metrics.demand_window'")
                # Fallback: effectively unbounded horizon.
                window = datetime(2030, 12, 31)
            Item.createRootObject(database=database)
            # Collect every ancestor/descendant pair of the item hierarchy,
            # and the late/unplanned demand problems within the window.
            cursor.execute(
                """
                create temporary table item_hierarchy (parent character varying(300), child character varying(300));

                insert into item_hierarchy
                select parent.name, item.name
                from item
                inner join item parent
                  on item.lft > parent.lft and item.lft < parent.rght;

                create index on item_hierarchy (child);

                create temporary table out_problem_tmp as
                select
                  item.name as item_id, out_problem.name, out_problem.weight,
                  out_problem.weight*coalesce(item.cost,0) as weight_cost
                from out_problem
                inner join demand on demand.name = out_problem.owner
                inner join item on item.name = demand.item_id
                where out_problem.name in ('unplanned', 'late')
                and out_problem.startdate < %s;
                """,
                (window, ),
                )
            # Aggregate the problems per leaf item (lft = rght - 1), then
            # roll the leaf figures up to every parent item.
            cursor.execute("""
                create temporary table metrics as
                select
                  item.name as item_id,
                  coalesce(sum(case when out_problem_tmp.name = 'late' then 1 end),0) as latedemandcount,
                  coalesce(sum(case when out_problem_tmp.name = 'late' then out_problem_tmp.weight end),0) as latedemandquantity,
                  coalesce(sum(case when out_problem_tmp.name = 'late' then out_problem_tmp.weight_cost end),0) as latedemandvalue,
                  coalesce(sum(case when out_problem_tmp.name = 'unplanned' then 1 end),0) as unplanneddemandcount,
                  coalesce(sum(case when out_problem_tmp.name = 'unplanned' then out_problem_tmp.weight end),0) as unplanneddemandquantity,
                  coalesce(sum(case when out_problem_tmp.name = 'unplanned' then out_problem_tmp.weight_cost end),0) as unplanneddemandvalue
                from item
                left outer join out_problem_tmp
                  on out_problem_tmp.item_id = item.name
                where item.lft = item.rght - 1
                group by item.name;

                create unique index on metrics (item_id);

                insert into metrics
                select
                  parent,
                  coalesce(sum(latedemandcount),0),
                  coalesce(sum(latedemandquantity),0),
                  coalesce(sum(latedemandvalue),0),
                  coalesce(sum(unplanneddemandcount),0),
                  coalesce(sum(unplanneddemandquantity),0),
                  coalesce(sum(unplanneddemandvalue),0)
                from item_hierarchy
                left outer join metrics
                  on item_hierarchy.child = metrics.item_id
                group by parent;
                """)
            # Write back only the rows whose metrics actually changed, to
            # avoid needless row updates.
            cursor.execute("""
                update item set
                  latedemandcount = metrics.latedemandcount,
                  latedemandquantity = metrics.latedemandquantity,
                  latedemandvalue = metrics.latedemandvalue,
                  unplanneddemandcount = metrics.unplanneddemandcount,
                  unplanneddemandquantity = metrics.unplanneddemandquantity,
                  unplanneddemandvalue = metrics.unplanneddemandvalue
                from metrics
                where item.name = metrics.item_id
                and (
                  item.latedemandcount is distinct from metrics.latedemandcount
                  or item.latedemandquantity is distinct from metrics.latedemandquantity
                  or item.latedemandvalue is distinct from metrics.latedemandvalue
                  or item.unplanneddemandcount is distinct from metrics.unplanneddemandcount
                  or item.unplanneddemandquantity is distinct from metrics.unplanneddemandquantity
                  or item.unplanneddemandvalue is distinct from metrics.unplanneddemandvalue
                  );
                """)
            cursor.execute("""
                drop table item_hierarchy;
                drop table out_problem_tmp;
                drop table metrics;
                """)
        except Exception as e:
            print("Error updating item metrics: %s" % e)
        # Update resource metrics
        try:
            Resource.rebuildHierarchy(database)
            # Count overload problems per leaf resource and roll them up to
            # every ancestor; write back only the changed rows.
            cursor.execute("""
                with resource_hierarchy as (
                  select child.name child, parent.name parent
                  from resource child
                  inner join resource parent
                    on child.lft between parent.lft and parent.rght
                  where child.lft = child.rght-1
                  ),
                cte as (
                  select parent, count(out_problem.id) as overloadcount
                  from resource_hierarchy
                  left outer join out_problem
                    on out_problem.name = 'overload'
                    and out_problem.owner = resource_hierarchy.child
                  group by parent
                  )
                update resource
                set overloadcount = cte.overloadcount
                from cte
                where cte.parent = resource.name
                and resource.overloadcount is distinct from cte.overloadcount;
                """)
        except Exception as e:
            print("Error updating resource metrics: %s" % e)
def query(request, basequery, sortsql='1 asc'):
    """
    Generate the resource utilization report data (summary variant).

    Crosses the filtered resource set with the report time buckets,
    includes all child resources of each selected resource, and sums the
    out_resourceplan records into available, unavailable, load and setup
    time per resource per bucket. Yields one dictionary per
    resource/bucket combination.

    :param request: Django request carrying database and report horizon fields.
    :param basequery: queryset whose SQL restricts the resources shown.
    :param sortsql: SQL fragment for the ordering of the result rows.
    """
    basesql, baseparams = basequery.query.get_compiler(
        basequery.db).as_sql(with_col_aliases=True)
    # Get the time units
    units = OverviewReport.getUnits(request)
    # Assure the item hierarchy is up to date
    Resource.rebuildHierarchy(database=basequery.db)
    # Execute the query
    cursor = connections[request.database].cursor()
    query = '''
      select res.name as row1, res.location_id as row2,
        coalesce(max(plan_summary.avg_util),0) as avgutil,
        d.bucket as col1, d.startdate as col2,
        coalesce(sum(out_resourceplan.available),0) * (case when res.type = 'buckets' then 1 else %f end) as available,
        coalesce(sum(out_resourceplan.unavailable),0) * (case when res.type = 'buckets' then 1 else %f end) as unavailable,
        coalesce(sum(out_resourceplan.load),0) * (case when res.type = 'buckets' then 1 else %f end) as loading,
        coalesce(sum(out_resourceplan.setup),0) * (case when res.type = 'buckets' then 1 else %f end) as setup
      from (%s) res
      -- Multiply with buckets
      cross join (
        select name as bucket, startdate, enddate
        from common_bucketdetail
        where bucket_id = '%s' and enddate > '%s' and startdate < '%s'
        ) d
      -- Include child resources
      inner join %s res2
      on res2.lft between res.lft and res.rght
      -- Utilization info
      left join out_resourceplan
      on res2.name = out_resourceplan.theresource
      and d.startdate <= out_resourceplan.startdate
      and d.enddate > out_resourceplan.startdate
      and out_resourceplan.startdate >= '%s'
      and out_resourceplan.startdate < '%s'
      -- Average utilization info
      left join (
        select theresource,
          ( coalesce(sum(out_resourceplan.load),0) + coalesce(sum(out_resourceplan.setup),0) )
          * 100.0 / coalesce(%s,1) as avg_util
        from out_resourceplan
        where out_resourceplan.startdate >= '%s'
        and out_resourceplan.startdate < '%s'
        group by theresource
        ) plan_summary
      on res2.name = plan_summary.theresource
      -- Grouping and sorting
      group by res.name, res.location_id, res.type, d.bucket, d.startdate
      order by %s, d.startdate
      ''' % (
        units[0], units[0], units[0],
        units[0], basesql, request.report_bucket, request.report_startdate,
        request.report_enddate,
        connections[basequery.db].ops.quote_name('resource'),
        request.report_startdate, request.report_enddate,
        sql_max('sum(out_resourceplan.available)', '0.0001'),
        request.report_startdate, request.report_enddate, sortsql
        )
    cursor.execute(query, baseparams)

    # Build the python result
    for row in cursor.fetchall():
        # Utilization of the bucket = load / available time.
        if row[5] != 0:
            util = row[7] * 100 / row[5]
        else:
            util = 0
        yield {
            'resource': row[0], 'location': row[1],
            'avgutil': round(row[2], 2),
            'bucket': row[3], 'startdate': python_date(row[4]),
            'available': round(row[5], 1),
            'unavailable': round(row[6], 1),
            'load': round(row[7], 1),
            'setup': round(row[8], 1),
            'utilization': round(util, 2)
            }
def query(reportclass, request, basequery, sortsql='1 asc'):
    """
    Generate the resource utilization report data.

    Crosses the filtered resource set with the report time buckets and
    aggregates the out_resourceplan records into available, unavailable,
    load and setup time per resource per bucket. Rows are streamed through
    a chunked cursor; one dictionary is yielded per resource/bucket
    combination.

    :param reportclass: report class providing attr_sql (extra attribute columns).
    :param request: Django request carrying database and report horizon fields.
    :param basequery: queryset whose SQL restricts the resources shown.
    :param sortsql: SQL fragment for the ordering of the result rows.
    """
    basesql, baseparams = basequery.query.get_compiler(basequery.db).as_sql(with_col_aliases=False)
    # Get the time units
    units = OverviewReport.getUnits(request)
    # Assure the item hierarchy is up to date
    Resource.rebuildHierarchy(database=basequery.db)
    # Execute the query
    query = '''
      select res.name, res.description, res.category, res.subcategory,
        res.type, res.maximum, res.maximum_calendar_id, res.cost, res.maxearly,
        res.setupmatrix_id, res.setup, location.name, location.description,
        location.category, location.subcategory, location.available_id,
        res.avgutil, res.available_id available_calendar, res.owner_id,
        %s
        d.bucket as col1, d.startdate as col2,
        coalesce(sum(out_resourceplan.available),0) / (case when res.type = 'buckets' then 1 else %f end) as available,
        coalesce(sum(out_resourceplan.unavailable),0) / (case when res.type = 'buckets' then 1 else %f end) as unavailable,
        coalesce(sum(out_resourceplan.load),0) / (case when res.type = 'buckets' then 1 else %f end) as loading,
        coalesce(sum(out_resourceplan.setup),0) / (case when res.type = 'buckets' then 1 else %f end) as setup
      from (%s) res
      left outer join location
        on res.location_id = location.name
      -- Multiply with buckets
      cross join (
        select name as bucket, startdate, enddate
        from common_bucketdetail
        where bucket_id = '%s' and enddate > '%s' and startdate < '%s'
        ) d
      -- Utilization info
      left join out_resourceplan
        on res.name = out_resourceplan.resource
        and d.startdate <= out_resourceplan.startdate
        and d.enddate > out_resourceplan.startdate
        and out_resourceplan.startdate >= '%s'
        and out_resourceplan.startdate < '%s'
      -- Grouping and sorting
      group by res.name, res.description, res.category, res.subcategory,
        res.type, res.maximum, res.maximum_calendar_id, res.available_id,
        res.cost, res.maxearly, res.setupmatrix_id, res.setup, location.name,
        location.description, location.category, location.subcategory,
        location.available_id, res.avgutil, res.owner_id, %s
        d.bucket, d.startdate
      order by %s, d.startdate
      ''' % (
        reportclass.attr_sql,
        units[0], units[0], units[0], units[0],
        basesql,
        request.report_bucket, request.report_startdate, request.report_enddate,
        request.report_startdate, request.report_enddate,
        reportclass.attr_sql,
        sortsql
        )

    # Build the python result
    with connections[request.database].chunked_cursor() as cursor_chunked:
        cursor_chunked.execute(query, baseparams)
        for row in cursor_chunked:
            numfields = len(row)
            # Utilization = load / available time of the bucket.
            if row[numfields - 4] != 0:
                util = round(row[numfields - 2] * 100 / row[numfields - 4], 2)
            else:
                util = 0
            result = {
                'resource': row[0], 'description': row[1], 'category': row[2],
                'subcategory': row[3], 'type': row[4], 'maximum': row[5],
                'maximum_calendar': row[6], 'cost': row[7], 'maxearly': row[8],
                'setupmatrix': row[9], 'setup': row[10],
                'location__name': row[11], 'location__description': row[12],
                'location__category': row[13], 'location__subcategory': row[14],
                'location__available': row[15],
                'avgutil': round(row[16], 2),
                'available_calendar': row[17],
                'owner': row[18],
                # The last 6 columns are the per-bucket values.
                'bucket': row[numfields - 6],
                'startdate': row[numfields - 5].date(),
                'available': row[numfields - 4],
                'unavailable': row[numfields - 3],
                'load': row[numfields - 2],
                # BUGFIX: this key was 'setup', duplicating the resource-setup
                # entry above and silently discarding row[10]. Renamed to
                # 'setuptime', consistent with the newer revision of this report.
                'setuptime': row[numfields - 1],
                'utilization': util
                }
            # Attribute columns follow the 19 fixed columns.
            idx = 19
            for f in getAttributeFields(Resource):
                result[f.field_name] = row[idx]
                idx += 1
            for f in getAttributeFields(Location):
                result[f.field_name] = row[idx]
                idx += 1
            yield result