def exportResourceplans(self):
    # Export per-day resource utilization into the out_resourceplan table,
    # staged through a temporary file and bulk-loaded with PostgreSQL COPY.
    if self.verbosity:
        logger.info("Exporting resourceplans...")
    starttime = time()
    cursor = connections[self.database].cursor()
    # Determine start and end date of the reporting horizon.
    # The start date is 30 days before the start of the earliest loadplan in
    # the entire plan; the end date is 30 days after the end of the latest
    # loadplan. If no loadplans exist at all we use the current date +- 1 month.
    # Note: the cluster filter applies only to this horizon computation; the
    # export loop below covers all resources.
    startdate = datetime.max
    enddate = datetime.min
    for i in frepple.resources():
        if self.cluster != -1 and self.cluster != i.cluster:
            continue
        for j in i.loadplans:
            if j.startdate < startdate:
                startdate = j.startdate
            if j.enddate > enddate:
                enddate = j.enddate
    if startdate == datetime.max:
        startdate = frepple.settings.current
    if enddate == datetime.min:
        enddate = frepple.settings.current
    startdate = (startdate - timedelta(days=30)).date()
    enddate = (enddate + timedelta(days=30)).date()
    if enddate > date(2030, 12, 30):
        # This is the max frePPLe can represent.
        enddate = date(2030, 12, 30)
    # Build a list of daily horizon buckets.
    buckets = []
    while startdate < enddate:
        buckets.append(startdate)
        startdate += timedelta(days=1)
    # Loop over all reporting buckets of all resources.
    with tempfile.TemporaryFile(mode="w+t", encoding='utf-8') as tmp:
        for i in frepple.resources():
            for j in i.plan(buckets):
                print(("%s\t%s\t%s\t%s\t%s\t%s\t%s" % (
                    i.name,
                    str(j['start']),
                    round(j['available'], 8),
                    round(j['unavailable'], 8),
                    round(j['setup'], 8),
                    round(j['load'], 8),
                    round(j['free'], 8)
                )), file=tmp)
        # Rewind and bulk-load the staged rows.
        tmp.seek(0)
        cursor.copy_from(
            tmp,
            'out_resourceplan',
            columns=('resource', 'startdate', 'available', 'unavailable', 'setup', 'load', 'free')
        )
        # NOTE(review): redundant — the 'with' block already closes the file.
        tmp.close()
    #update owner records with sum of children quantities
    if self.verbosity:
        logger.info('Exported resourceplans in %.2f seconds' % (time() - starttime))
def exportResourceplans(cursor):
    """Export per-day resource utilization into the out_resourceplan table.

    The reporting horizon runs from 30 days before the earliest loadplan to
    30 days after the latest one (falling back to the current plan date when
    no loadplans exist), capped at 2030-12-30, the maximum date frePPLe can
    represent.
    """
    print("Exporting resourceplans...")
    starttime = time()
    # Determine start and end date of the reporting horizon.
    startdate = datetime.max
    enddate = datetime.min
    for i in frepple.resources():
        for j in i.loadplans:
            if j.startdate < startdate:
                startdate = j.startdate
            if j.enddate > enddate:
                enddate = j.enddate
    if startdate == datetime.max:
        startdate = frepple.settings.current
    if enddate == datetime.min:
        enddate = frepple.settings.current
    startdate = startdate - timedelta(days=30)
    enddate = enddate + timedelta(days=30)
    # Truncate to day boundaries.
    startdate = datetime(startdate.year, startdate.month, startdate.day)
    if enddate > datetime(2030, 12, 30):
        # This is the max frePPLe can represent.
        enddate = datetime(2030, 12, 30)
    else:
        enddate = datetime(enddate.year, enddate.month, enddate.day)
    # Build a list of daily horizon buckets.
    buckets = []
    while startdate < enddate:
        buckets.append(startdate)
        startdate += timedelta(days=1)
    # One bulk insert per resource, one row per bucket.
    # Fix: the insert now runs once per resource (it previously sat inside the
    # bucket loop, re-inserting the full plan once per bucket) and the leftover
    # debug print of each bucket start is removed.
    cnt = 0
    for i in frepple.resources():
        cursor.executemany(
            "insert into out_resourceplan \
            (theresource,startdate,available,unavailable,setup,%s,free) \
            values (%%s,%%s,%%s,%%s,%%s,%%s,%%s)" % connections[database].ops.quote_name('load'),
            [(
                i.name,
                str(j['start']),
                round(j['available'], settings.DECIMAL_PLACES),
                round(j['unavailable'], settings.DECIMAL_PLACES),
                round(j['setup'], settings.DECIMAL_PLACES),
                round(j['load'], settings.DECIMAL_PLACES),
                round(j['free'], settings.DECIMAL_PLACES)
            ) for j in i.plan(buckets)]
        )
        cnt += 1
    # Finalize
    cursor.execute("select count(*) from out_resourceplan")
    print('Exported %d resourceplans in %.2f seconds' % (cursor.fetchone()[0], time() - starttime))
def exportResourceplans(process):
    # Export per-day resource utilization through a PostgreSQL COPY stream
    # written to the stdin of a database subprocess.
    # NOTE(review): 'encoding' is read from an enclosing/module scope — confirm.
    print("Exporting resourceplans...")
    starttime = time()
    # Determine start and end date of the reporting horizon.
    # The start date is 30 days before the earliest loadplan; the end date is
    # 30 days after the latest loadplan.
    # If no loadplans exist at all we use the current date +- 1 month.
    startdate = datetime.max
    enddate = datetime.min
    for i in frepple.resources():
        for j in i.loadplans:
            if j.startdate < startdate:
                startdate = j.startdate
            if j.enddate > enddate:
                enddate = j.enddate
    if startdate == datetime.max:
        startdate = frepple.settings.current
    if enddate == datetime.min:
        enddate = frepple.settings.current
    startdate = (startdate - timedelta(days=30)).date()
    enddate = (enddate + timedelta(days=30)).date()
    if enddate > date(2030, 12, 30):
        # This is the max frePPLe can represent.
        enddate = date(2030, 12, 30)
    # Build a list of daily horizon buckets.
    buckets = []
    while startdate < enddate:
        buckets.append(startdate)
        startdate += timedelta(days=1)
    # Loop over all reporting buckets of all resources.
    process.stdin.write(
        "COPY out_resourceplan (theresource,startdate,available,unavailable,setup,load,free) FROM STDIN;\n".encode(
            encoding
        )
    )
    for i in frepple.resources():
        for j in i.plan(buckets):
            process.stdin.write(
                (
                    "%s\t%s\t%s\t%s\t%s\t%s\t%s\n"
                    % (
                        i.name,
                        str(j["start"]),
                        round(j["available"], 4),
                        round(j["unavailable"], 4),
                        round(j["setup"], 4),
                        round(j["load"], 4),
                        round(j["free"], 4),
                    )
                ).encode(encoding)
            )
    # End-of-data marker terminating the COPY stream.
    process.stdin.write("\\.\n".encode(encoding))
    print("Exported resourceplans in %.2f seconds" % (time() - starttime))
def exportResourceplans(cursor):
    # Export per-day resource utilization via row inserts, committing every
    # 100 resources to keep transactions small.
    print("Exporting resourceplans...")
    starttime = time()
    # Reporting horizon: 30 days before the earliest loadplan up to 30 days
    # after the latest one; current date +- 1 month when no loadplans exist.
    startdate = datetime.max
    enddate = datetime.min
    for i in frepple.resources():
        for j in i.loadplans:
            if j.startdate < startdate:
                startdate = j.startdate
            if j.enddate > enddate:
                enddate = j.enddate
    if startdate == datetime.max:
        startdate = frepple.settings.current
    if enddate == datetime.min:
        enddate = frepple.settings.current
    startdate = startdate - timedelta(days=30)
    startdate = datetime(startdate.year, startdate.month, startdate.day)
    enddate = enddate + timedelta(days=30)
    enddate = datetime(enddate.year, enddate.month, enddate.day)
    # Build a list of daily horizon buckets.
    buckets = []
    while startdate < enddate:
        buckets.append(startdate)
        startdate += timedelta(days=1)
    # Loop over all reporting buckets of all resources.
    cnt = 0
    try:
        for i in frepple.resources():
            cursor.executemany(
                "insert into out_resourceplan \
                (theresource,startdate,available,unavailable,setup,%s,free) \
                values (%%s,%%s,%%s,%%s,%%s,%%s,%%s)" % connections[database].ops.quote_name('load'),
                [(
                    i.name,
                    str(j['start']),
                    round(j['available'], settings.DECIMAL_PLACES),
                    round(j['unavailable'], settings.DECIMAL_PLACES),
                    round(j['setup'], settings.DECIMAL_PLACES),
                    round(j['load'], settings.DECIMAL_PLACES),
                    round(j['free'], settings.DECIMAL_PLACES)
                ) for j in i.plan(buckets)
                ])
            cnt += 1
            if cnt % 100 == 0:
                # Intermediate commit to avoid one huge transaction.
                transaction.commit(using=database)
    except Exception as e:
        # Best-effort export: report the failure and keep what was committed.
        print(e)
    finally:
        transaction.commit(using=database)
    # Finalize
    transaction.commit(using=database)
    cursor.execute("select count(*) from out_resourceplan")
    print('Exported %d resourceplans in %.2f seconds' % (cursor.fetchone()[0], time() - starttime))
def exportResourceplans():
    """Export per-day resource utilization to resources.csv.

    The reporting horizon runs from 5 weeks before the earliest loadplan to
    5 weeks after the latest one, truncated to day boundaries; when no
    loadplans exist the current plan date is used for both ends.
    """
    print("Exporting resourceplans...")
    starttime = time()
    # Fix: the file handle was never closed; use a context manager.
    with open("resources.csv", "w", newline="", encoding="utf-8") as datafile:
        writer = csv.writer(datafile, quoting=csv.QUOTE_ALL)
        writer.writerow(
            ("#resource", "startdate", "available", "unavailable", "setup", "load", "free")
        )
        # Determine start and end date of the reporting horizon.
        startdate = datetime.max
        enddate = datetime.min
        for i in frepple.resources():
            for j in i.loadplans:
                if j.startdate < startdate:
                    startdate = j.startdate
                if j.enddate > enddate:
                    enddate = j.enddate
        # Fix: the original tested "if not startdate" / "if not enddate",
        # which is never true for datetime.max / datetime.min, so the
        # empty-plan fallback could never fire. Compare the sentinels instead.
        if startdate == datetime.max:
            startdate = frepple.settings.current
        if enddate == datetime.min:
            enddate = frepple.settings.current
        startdate -= timedelta(weeks=5)
        enddate += timedelta(weeks=5)
        startdate = startdate.replace(hour=0, minute=0, second=0)
        enddate = enddate.replace(hour=0, minute=0, second=0)
        # Build a list of daily horizon buckets.
        buckets = []
        while startdate < enddate:
            buckets.append(startdate)
            startdate += timedelta(days=1)
        # Loop over all reporting buckets of all resources.
        for i in frepple.resources():
            for j in i.plan(buckets):
                writer.writerow(
                    (
                        i.name,
                        j["start"],
                        j["available"],
                        j["unavailable"],
                        j["setup"],
                        j["load"],
                        j["free"],
                    )
                )
    print("Exported resourceplans in %.2f seconds" % (time() - starttime))
def exportResourceplans(self, process):
    # Export per-day resource utilization for the selected cluster (or all
    # clusters when self.cluster == -1) through a PostgreSQL COPY stream.
    if self.verbosity:
        print("Exporting resourceplans...")
    starttime = time()
    # Determine start and end date of the reporting horizon.
    # The start date is 30 days before the earliest loadplan; the end date is
    # 30 days after the latest loadplan.
    # If no loadplans exist at all we use the current date +- 1 month.
    startdate = datetime.max
    enddate = datetime.min
    for i in frepple.resources():
        if self.cluster != -1 and self.cluster != i.cluster:
            continue
        for j in i.loadplans:
            if j.startdate < startdate:
                startdate = j.startdate
            if j.enddate > enddate:
                enddate = j.enddate
    if startdate == datetime.max:
        startdate = frepple.settings.current
    if enddate == datetime.min:
        enddate = frepple.settings.current
    startdate = (startdate - timedelta(days=30)).date()
    enddate = (enddate + timedelta(days=30)).date()
    if enddate > date(2030, 12, 30):
        # This is the max frePPLe can represent.
        enddate = date(2030, 12, 30)
    # Build a list of daily horizon buckets.
    buckets = []
    while startdate < enddate:
        buckets.append(startdate)
        startdate += timedelta(days=1)
    # Loop over all reporting buckets of all resources.
    process.stdin.write(
        'COPY out_resourceplan (resource,startdate,available,unavailable,setup,load,free) FROM STDIN;\n'
        .encode(self.encoding))
    for i in frepple.resources():
        for j in i.plan(buckets):
            process.stdin.write(
                ("%s\t%s\t%s\t%s\t%s\t%s\t%s\n" % (
                    i.name,
                    str(j['start']),
                    round(j['available'], 6),
                    round(j['unavailable'], 6),
                    round(j['setup'], 6),
                    round(j['load'], 6),
                    round(j['free'], 6)
                )).encode(self.encoding))
    # End-of-data marker terminating the COPY stream.
    process.stdin.write('\\.\n'.encode(self.encoding))
    if self.verbosity:
        print('Exported resourceplans in %.2f seconds' % (time() - starttime))
def exportResources(self, cursor):
    """Synchronize frePPLe resources into the 'resource' database table.

    Inserts resources not yet present, updates the existing ones, and finally
    fills in the owner links. Hidden resources, and resources not matching the
    optional source filter, are skipped. Runs inside one atomic transaction.
    """
    with transaction.atomic(using=self.database, savepoint=False):
        print("Exporting resources...")
        starttime = time()
        # Names already present decide insert vs. update below.
        cursor.execute("select name from resource")
        primary_keys = set([i[0] for i in cursor.fetchall()])
        cursor.executemany(
            "insert into resource \
            (name,description,maximum,maximum_calendar_id,location_id,type,cost, \
            maxearly,setup,setupmatrix_id,category,subcategory,source,lastmodified) \
            values(%s,%s,%s,%s,%s,%s,%s,%s * interval '1 second',%s,%s,%s,%s,%s,%s)",
            [
                (
                    i.name,
                    i.description,
                    i.maximum,
                    i.maximum_calendar and i.maximum_calendar.name or None,
                    i.location and i.location.name or None,
                    i.__class__.__name__[9:],  # class name minus its fixed 9-char prefix
                    round(i.cost, 6),
                    # Fix: round maxearly like the update statement below does,
                    # so inserts and updates store consistent values.
                    round(i.maxearly, 6),
                    i.setup,
                    i.setupmatrix and i.setupmatrix.name or None,
                    i.category,
                    i.subcategory,
                    i.source,
                    self.timestamp
                )
                for i in frepple.resources()
                if i.name not in primary_keys and not i.hidden and (not self.source or self.source == i.source)
            ])
        cursor.executemany(
            "update resource \
            set description=%s, maximum=%s, maximum_calendar_id=%s, location_id=%s, \
            type=%s, cost=%s, maxearly=%s * interval '1 second', setup=%s, setupmatrix_id=%s, \
            category=%s, subcategory=%s, source=%s, lastmodified=%s \
            where name=%s",
            [
                (
                    i.description,
                    i.maximum,
                    i.maximum_calendar and i.maximum_calendar.name or None,
                    i.location and i.location.name or None,
                    i.__class__.__name__[9:],
                    round(i.cost, 6),
                    round(i.maxearly, 6),
                    i.setup,
                    i.setupmatrix and i.setupmatrix.name or None,
                    i.category,
                    i.subcategory,
                    i.source,
                    self.timestamp,
                    i.name
                )
                for i in frepple.resources()
                if i.name in primary_keys and not i.hidden and (not self.source or self.source == i.source)
            ])
        # Link child resources to their owners in a final pass.
        cursor.executemany(
            "update resource set owner_id=%s where name=%s",
            [
                (i.owner.name, i.name)
                for i in frepple.resources()
                if i.owner and not i.hidden and (not self.source or self.source == i.source)
            ])
        print('Exported resources in %.2f seconds' % (time() - starttime))
def exportResources(self, cursor):
    # Synchronize frePPLe resources into the 'resource' table: insert new
    # ones, update existing ones, then fill in the owner links. The table
    # name is quoted via the connection's ops to stay backend-portable.
    with transaction.atomic(using=self.database, savepoint=False):
        print("Exporting resources...")
        starttime = time()
        cursor.execute("SELECT name FROM %s" % connections[self.database].ops.quote_name('resource'))
        # Names already present decide insert vs. update below.
        primary_keys = set([i[0] for i in cursor.fetchall()])
        cursor.executemany(
            '''insert into %s
            (name,description,maximum,maximum_calendar_id,location_id,type,cost,
            maxearly,setup,setupmatrix_id,category,subcategory,source,lastmodified)
            values(%%s,%%s,%%s,%%s,%%s,%%s,%%s,%%s,%%s,%%s,%%s,%%s,%%s,%%s)''' % connections[self.database].ops.quote_name('resource'),
            [
                (
                    i.name,
                    i.description,
                    i.maximum,
                    i.maximum_calendar and i.maximum_calendar.name or None,
                    i.location and i.location.name or None,
                    i.__class__.__name__[9:],  # class name minus its fixed 9-char prefix
                    round(i.cost, settings.DECIMAL_PLACES),
                    round(i.maxearly, settings.DECIMAL_PLACES),
                    i.setup,
                    i.setupmatrix and i.setupmatrix.name or None,
                    i.category,
                    i.subcategory,
                    i.source,
                    self.timestamp
                )
                for i in frepple.resources()
                if i.name not in primary_keys and not i.hidden and (not self.source or self.source == i.source)
            ])
        cursor.executemany(
            '''update %s
            set description=%%s, maximum=%%s, maximum_calendar_id=%%s, location_id=%%s,
            type=%%s, cost=%%s, maxearly=%%s, setup=%%s, setupmatrix_id=%%s,
            category=%%s, subcategory=%%s, source=%%s, lastmodified=%%s
            where name=%%s''' % connections[self.database].ops.quote_name('resource'),
            [
                (
                    i.description,
                    i.maximum,
                    i.maximum_calendar and i.maximum_calendar.name or None,
                    i.location and i.location.name or None,
                    i.__class__.__name__[9:],
                    round(i.cost, settings.DECIMAL_PLACES),
                    round(i.maxearly, settings.DECIMAL_PLACES),
                    i.setup,
                    i.setupmatrix and i.setupmatrix.name or None,
                    i.category,
                    i.subcategory,
                    i.source,
                    self.timestamp,
                    i.name
                )
                for i in frepple.resources()
                if i.name in primary_keys and not i.hidden and (not self.source or self.source == i.source)
            ])
        # Link child resources to their owners in a final pass.
        cursor.executemany(
            "update %s set owner_id=%%s where name=%%s" % connections[self.database].ops.quote_name('resource'),
            [
                (i.owner.name, i.name)
                for i in frepple.resources()
                if i.owner and not i.hidden and (not self.source or self.source == i.source)
            ])
        print('Exported resources in %.2f seconds' % (time() - starttime))
def exportOperationPlanResources(self):
    # Export capacity-consuming loadplans into the operationplanresource
    # table, staged through a temp file and bulk-loaded with PostgreSQL COPY.
    # NOTE(review): this variant keys rows on operationplan.id; a sibling
    # variant uses operationplan.reference — confirm which schema applies.
    if self.verbosity:
        logger.info("Exporting operationplan resources...")
    starttime = time()
    cursor = connections[self.database].cursor()
    currentTime = self.timestamp
    with tempfile.TemporaryFile(mode="w+t", encoding='utf-8') as tmp:
        for i in frepple.resources():
            if self.cluster != -1 and self.cluster != i.cluster:
                continue
            for j in i.loadplans:
                if j.quantity >= 0:
                    # Only capacity consumption (negative quantities) is exported.
                    continue
                if not j.operationplan.id:
                    print(
                        "Warning: skip exporting uninitialized operationplan: ",
                        j.operationplan.operation.name,
                        j.operationplan.quantity,
                        j.operationplan.start,
                        j.operationplan.end
                    )
                else:
                    # \N is the COPY representation of SQL NULL.
                    print(("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s" % (
                        j.operationplan.id,
                        j.resource.name,
                        round(-j.quantity, 8),
                        str(j.startdate),
                        str(j.enddate),
                        j.setup and j.setup or "\\N",
                        j.status,
                        currentTime
                    )), file=tmp)
        tmp.seek(0)
        cursor.copy_from(
            tmp,
            'operationplanresource',
            columns=('operationplan_id', 'resource_id', 'quantity', 'startdate', 'enddate', 'setup', 'status', 'lastmodified')
        )
        # NOTE(review): redundant — the 'with' block already closes the file.
        tmp.close()
    if self.verbosity:
        logger.info('Exported operationplan resources in %.2f seconds' % (time() - starttime))
def getData():
    # Generator yielding one vertical-tab-separated line per capacity-consuming
    # loadplan, suitable for bulk loading.
    # NOTE(review): references 'self' and 'clean_value' without defining them —
    # this function is presumably nested inside an export method; confirm.
    for i in frepple.resources():
        if self.cluster != -1 and self.cluster != i.cluster:
            continue
        for j in i.loadplans:
            if j.quantity >= 0:
                # Only capacity consumption (negative quantities) is exported.
                continue
            if not j.operationplan.reference:
                print(
                    "Warning: skip exporting uninitialized operationplan: ",
                    j.operationplan.operation.name,
                    j.operationplan.quantity,
                    j.operationplan.start,
                    j.operationplan.end,
                )
            else:
                yield "%s\v%s\v%s\v%s\v%s\v%s\v%s\v%s\n" % (
                    clean_value(j.operationplan.reference),
                    clean_value(j.resource.name),
                    round(-j.quantity, 8),
                    str(j.startdate),
                    str(j.enddate),
                    clean_value(j.setup),
                    j.status,
                    self.timestamp,
                )
def exportLoadplans(process):
    # Stream all capacity-consuming loadplans into out_loadplan through a
    # PostgreSQL COPY stream fed to the subprocess stdin.
    # NOTE(review): 'encoding' is read from an enclosing/module scope — confirm.
    print("Exporting loadplans...")
    starttime = time()
    process.stdin.write(
        "COPY out_loadplan (operationplan_id, theresource, quantity, startdate, enddate, setup) FROM STDIN;\n".encode(
            encoding
        )
    )
    for i in frepple.resources():
        for j in i.loadplans:
            if j.quantity < 0:
                # Consumption is exported with the sign flipped; \N is SQL NULL.
                process.stdin.write(
                    (
                        "%s\t%s\t%s\t%s\t%s\t%s\n"
                        % (
                            j.operationplan.id,
                            j.resource.name,
                            round(-j.quantity, 4),
                            str(j.startdate),
                            str(j.enddate),
                            j.setup and j.setup or "\\N",
                        )
                    ).encode(encoding)
                )
    # End-of-data marker terminating the COPY stream.
    process.stdin.write("\\.\n".encode(encoding))
    print("Exported loadplans in %.2f seconds" % (time() - starttime))
def getData(timestamp, cluster=-1):
    """Generate one vertical-tab-separated line per capacity-consuming loadplan.

    Args:
        timestamp: export timestamp appended to every record.
        cluster: restrict the export to one cluster; -1 (default) exports all.

    Yields:
        str: a "\\v"-separated record terminated by a newline.
    """
    import frepple

    for i in frepple.resources():
        if cluster != -1 and cluster != i.cluster:
            continue
        for j in i.loadplans:
            if j.quantity >= 0:
                # Only capacity consumption (negative quantities) is exported.
                continue
            elif not j.operationplan.reference:
                # Fix: logger.warn() is a deprecated alias of logger.warning().
                logger.warning(
                    "Warning: skip exporting uninitialized operationplan: %s %s %s %s"
                    % (
                        j.operationplan.operation.name,
                        j.operationplan.quantity,
                        j.operationplan.start,
                        j.operationplan.end,
                    ))
            else:
                yield "%s\v%s\v%s\v%s\v%s\v%s\v%s\v%s\n" % (
                    clean_value(j.operationplan.reference),
                    clean_value(j.resource.name),
                    round(-j.quantity, 8),
                    str(j.startdate),
                    str(j.enddate),
                    clean_value(j.setup),
                    j.status,
                    timestamp,
                )
def exportOperationPlanResources(self):
    # Export capacity-consuming loadplans into the operationplanresource
    # table, staged through a temp file and bulk-loaded with PostgreSQL COPY.
    # Rows are keyed on operationplan.reference.
    if self.verbosity:
        logger.info("Exporting operationplan resources...")
    starttime = time()
    cursor = connections[self.database].cursor()
    currentTime = self.timestamp
    with tempfile.TemporaryFile(mode="w+t", encoding='utf-8') as tmp:
        for i in frepple.resources():
            if self.cluster != -1 and self.cluster != i.cluster:
                continue
            for j in i.loadplans:
                if j.quantity >= 0:
                    # Only capacity consumption (negative quantities) is exported.
                    continue
                if not j.operationplan.reference:
                    print(
                        "Warning: skip exporting uninitialized operationplan: ",
                        j.operationplan.operation.name,
                        j.operationplan.quantity,
                        j.operationplan.start,
                        j.operationplan.end
                    )
                else:
                    # \N is the COPY representation of SQL NULL.
                    print(("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s" % (
                        j.operationplan.reference,
                        j.resource.name,
                        round(-j.quantity, 8),
                        str(j.startdate),
                        str(j.enddate),
                        j.setup and j.setup or "\\N",
                        j.status,
                        currentTime
                    )), file=tmp)
        tmp.seek(0)
        cursor.copy_from(
            tmp,
            'operationplanresource',
            columns=('operationplan_id', 'resource_id', 'quantity', 'startdate', 'enddate', 'setup', 'status', 'lastmodified')
        )
        # NOTE(review): redundant — the 'with' block already closes the file.
        tmp.close()
    if self.verbosity:
        logger.info('Exported operationplan resources in %.2f seconds' % (time() - starttime))
def exportResourceplans():
    """Export per-day resource utilization to resources.csv.

    The reporting horizon runs from 5 weeks before the earliest loadplan to
    5 weeks after the latest one, truncated to day boundaries; when no
    loadplans exist the current plan date is used for both ends.
    """
    print("Exporting resourceplans...")
    starttime = time()
    # Fix: the file handle was never closed; use a context manager.
    with open("resources.csv", "w", newline="", encoding="utf-8") as datafile:
        writer = csv.writer(datafile, quoting=csv.QUOTE_ALL)
        writer.writerow((
            '#resource', 'startdate', 'available', 'unavailable', 'setup', 'load', 'free'
        ))
        # Determine start and end date of the reporting horizon.
        startdate = datetime.max
        enddate = datetime.min
        for i in frepple.resources():
            for j in i.loadplans:
                if j.startdate < startdate:
                    startdate = j.startdate
                if j.enddate > enddate:
                    enddate = j.enddate
        # Fix: the original tested "if not startdate" / "if not enddate",
        # which is never true for datetime.max / datetime.min, so the
        # empty-plan fallback could never fire. Compare the sentinels instead.
        if startdate == datetime.max:
            startdate = frepple.settings.current
        if enddate == datetime.min:
            enddate = frepple.settings.current
        startdate -= timedelta(weeks=5)
        enddate += timedelta(weeks=5)
        startdate = startdate.replace(hour=0, minute=0, second=0)
        enddate = enddate.replace(hour=0, minute=0, second=0)
        # Build a list of daily horizon buckets.
        buckets = []
        while startdate < enddate:
            buckets.append(startdate)
            startdate += timedelta(days=1)
        # Loop over all reporting buckets of all resources.
        for i in frepple.resources():
            for j in i.plan(buckets):
                writer.writerow((
                    i.name,
                    j['start'],
                    j['available'],
                    j['unavailable'],
                    j['setup'],
                    j['load'],
                    j['free']
                ))
    print('Exported resourceplans in %.2f seconds' % (time() - starttime))
def exportData(filename):
    # Write the plan as a GNU MathProg (GLPK) data file: demands, 12 monthly
    # buckets, resources with fixed capacity, and the demand load factors.
    with open(filename, "wt") as output:
        print("param timerate := 0.97;\n", file=output)
        print("param : demands : reqqty prio due :=", file=output)
        for b in frepple.demands():
            if b.quantity > 0:
                # @todo Export of due date works for monthly buckets and a maximum horizon of 1 year only
                print(b.name.replace(' ', '').replace(':', ''), b.quantity, b.priority, b.due.month, file=output)
        print(";\n", file=output)
        print("param numbuckets := 12;", file=output)
        print("set buckets := 1 2 3 4 5 6 7 8 9 10 11 12;", file=output)
        print("set resources := ", file=output)
        for b in frepple.resources():
            # Names are squashed (spaces/colons stripped) to be valid symbols.
            print(b.name.replace(' ', '').replace(':', ''), file=output)
        print(";\n", file=output)
        print("param availablecapacity", file=output)
        print(": 1 2 3 4 5 6 7 8 9 10 11 12 := ", file=output)
        res = []
        for b in frepple.resources():
            # @todo need a more correct way to extract the capacity per bucket.
            res.append(b.name.replace(' ', '').replace(':', ''))
            print(b.name.replace(' ', '').replace(':', ''), "120 120 120 120 120 120 120 120 120 120 120 120", file=output)
        print(";\n", file=output)
        print("param : loads : loadfactor :=", file=output)
        for b in frepple.demands():
            if b.quantity > 0:
                oper = b.operation or b.item.operation
                if oper:
                    for fl in oper.flows:
                        if fl.quantity < 0:
                            # Emit a load line for every resource used by this flow.
                            findResources(output, b, fl)
        print(";\n", file=output)
        print("end;\n", file=output)
def exportResourceplans(process):
    # Export per-day resource utilization through a PostgreSQL COPY stream.
    # NOTE(review): only i.name is encoded while the surrounding %-format
    # builds a str — under Python 3 this would embed "b'...'" in the output;
    # this variant presumably targets Python 2. Confirm before reuse.
    print("Exporting resourceplans...")
    starttime = time()
    # Determine start and end date of the reporting horizon.
    # The start date is 30 days before the earliest loadplan; the end date is
    # 30 days after the latest loadplan.
    # If no loadplans exist at all we use the current date +- 1 month.
    startdate = datetime.max
    enddate = datetime.min
    for i in frepple.resources():
        for j in i.loadplans:
            if j.startdate < startdate:
                startdate = j.startdate
            if j.enddate > enddate:
                enddate = j.enddate
    if startdate == datetime.max:
        startdate = frepple.settings.current
    if enddate == datetime.min:
        enddate = frepple.settings.current
    startdate = (startdate - timedelta(days=30)).date()
    enddate = (enddate + timedelta(days=30)).date()
    # Build a list of daily horizon buckets.
    buckets = []
    while startdate < enddate:
        buckets.append(startdate)
        startdate += timedelta(days=1)
    # Loop over all reporting buckets of all resources.
    process.stdin.write('COPY out_resourceplan (theresource,startdate,available,unavailable,setup,load,free) FROM STDIN;\n')
    for i in frepple.resources():
        for j in i.plan(buckets):
            process.stdin.write("%s\t%s\t%s\t%s\t%s\t%s\t%s\n" % (
                i.name.encode(encoding),
                str(j['start']),
                round(j['available'], settings.DECIMAL_PLACES),
                round(j['unavailable'], settings.DECIMAL_PLACES),
                round(j['setup'], settings.DECIMAL_PLACES),
                round(j['load'], settings.DECIMAL_PLACES),
                round(j['free'], settings.DECIMAL_PLACES)
            ))
    # End-of-data marker terminating the COPY stream.
    process.stdin.write('\\.\n')
    print('Exported resourceplans in %.2f seconds' % (time() - starttime))
def getData():
    """Yield one vertical-tab-separated record per resource and reporting bucket."""
    for res in frepple.resources():
        for bucket in res.plan(buckets):
            fields = (
                clean_value(res.name),
                str(bucket["start"]),
                round(bucket["available"], 8),
                round(bucket["unavailable"], 8),
                round(bucket["setup"], 8),
                round(bucket["load"], 8),
                round(bucket["free"], 8),
            )
            yield "%s\v%s\v%s\v%s\v%s\v%s\v%s\n" % fields
def exportLoadplans():
    # Dump loadplans with positive quantity to loadplans.csv.
    # NOTE(review): binary mode "wb" with csv.writer and .encode() on values
    # is a Python 2 idiom; under Python 3 this raises. Confirm target version.
    print("Exporting loadplans...")
    starttime = time()
    writer = csv.writer(open("loadplans.csv", "wb"), quoting=csv.QUOTE_ALL)
    writer.writerow(('#operationplan id', 'resource', 'quantity', 'start date', 'end date', 'setup'))
    for i in frepple.resources():
        for j in i.loadplans:
            if j.quantity > 0:
                writer.writerow((
                    j.operationplan.id,
                    j.resource.name.encode(encoding, "ignore"),
                    j.quantity,
                    j.startdate,
                    j.enddate,
                    j.setup and j.setup.encode(encoding, "ignore") or None
                ))
    print('Exported loadplans in %.2f seconds' % (time() - starttime))
def exportLoadplans(process):
    # Stream loadplans with positive quantity into out_loadplan through a
    # PostgreSQL COPY stream fed to the subprocess stdin.
    # NOTE(review): str/bytes mixing (only names are encoded) is a Python 2
    # idiom; confirm the target interpreter before reuse.
    print("Exporting loadplans...")
    starttime = time()
    process.stdin.write('COPY out_loadplan (operationplan_id, theresource, quantity, startdate, enddate, setup) FROM STDIN;\n')
    for i in frepple.resources():
        for j in i.loadplans:
            if j.quantity > 0:
                # \N is the COPY representation of SQL NULL.
                process.stdin.write("%s\t%s\t%s\t%s\t%s\t%s\n" % (
                    j.operationplan.id,
                    j.resource.name.encode(encoding),
                    round(j.quantity, settings.DECIMAL_PLACES),
                    str(j.startdate),
                    str(j.enddate),
                    j.setup and j.setup.encode(encoding) or "\\N"
                ))
    # End-of-data marker terminating the COPY stream.
    process.stdin.write('\\.\n')
    print('Exported loadplans in %.2f seconds' % (time() - starttime))
def truncate(self, process):
    # Empty the plan output tables before a new export by writing SQL to the
    # stdin of a database subprocess. A full truncate is done for a complete
    # export; for a single cluster only the matching rows are deleted, using
    # a temporary cluster_keys table filled successively with the cluster's
    # item, resource and operation names.
    if self.verbosity:
        print("Emptying database plan tables...")
    starttime = time()
    if self.cluster == -1:
        # Complete export for the complete model
        process.stdin.write("truncate table out_demandpegging;\n".encode(self.encoding))
        process.stdin.write("truncate table out_problem, out_resourceplan, out_constraint;\n".encode(self.encoding))
        process.stdin.write("truncate table out_loadplan, out_flowplan, out_operationplan;\n".encode(self.encoding))
        process.stdin.write("truncate table out_demand;\n".encode(self.encoding))
        # Only proposed (or status-less) transactional records are removed;
        # confirmed/approved records are kept.
        process.stdin.write("delete from purchase_order where status='proposed' or status is null;\n".encode(self.encoding))
        process.stdin.write("delete from distribution_order where status='proposed' or status is null;\n".encode(self.encoding))
        process.stdin.write("delete from operationplan where status='proposed' or status is null;\n".encode(self.encoding))
    else:
        # Partial export for a single cluster
        process.stdin.write('create temporary table cluster_keys (name character varying(300), constraint cluster_key_pkey primary key (name));\n'.encode(self.encoding))
        # Pass 1: keys are the cluster's item names.
        for i in frepple.items():
            if i.cluster == self.cluster:
                # adapt() quotes the name safely for inclusion in the SQL text.
                process.stdin.write(("insert into cluster_keys (name) values (%s);\n" % adapt(i.name).getquoted().decode(self.encoding)).encode(self.encoding))
        process.stdin.write('''delete from out_demandpegging where demand in ( select demand.name from demand inner join cluster_keys on cluster_keys.name = demand.item_id );\n'''.encode(self.encoding))
        process.stdin.write("delete from out_demand where item in (select name from cluster_keys);\n".encode(self.encoding))
        process.stdin.write("delete from out_constraint where demand in (select demand.name from demand inner join cluster_keys on cluster_keys.name = demand.item_id);\n".encode(self.encoding))
        process.stdin.write("delete from out_flowplan where thebuffer in (select buffer.name from buffer inner join cluster_keys on cluster_keys.name = buffer.item_id);\n".encode(self.encoding))
        process.stdin.write('''delete from out_problem where entity = 'demand' and owner in ( select demand.name from demand inner join cluster_keys on cluster_keys.name = demand.item_id );\n'''.encode(self.encoding))
        process.stdin.write("delete from out_problem where entity = 'material' and owner in (select buffer.name from buffer inner join cluster_keys on cluster_keys.name = buffer.item_id);\n".encode(self.encoding))
        process.stdin.write("delete from purchase_order using cluster_keys where (status='proposed' or status is null) and purchase_order.item_id = cluster_keys.name;\n".encode(self.encoding))
        process.stdin.write("delete from distribution_order using cluster_keys where (status='proposed' or status is null) and distribution_order.item_id = cluster_keys.name;\n".encode(self.encoding))
        process.stdin.write("truncate table cluster_keys;\n".encode(self.encoding))
        # Pass 2: keys are the cluster's resource names.
        for i in frepple.resources():
            if i.cluster == self.cluster:
                process.stdin.write(("insert into cluster_keys (name) values (%s);\n" % adapt(i.name).getquoted().decode(self.encoding)).encode(self.encoding))
        process.stdin.write("delete from out_problem where entity = 'demand' and owner in (select demand.name from demand inner join cluster_keys on cluster_keys.name = demand.item_id);\n".encode(self.encoding))
        process.stdin.write('delete from out_loadplan using cluster_keys where theresource = cluster_keys.name;\n'.encode(self.encoding))
        process.stdin.write('delete from out_resourceplan using cluster_keys where theresource = cluster_keys.name;\n'.encode(self.encoding))
        process.stdin.write("delete from out_problem using cluster_keys where entity = 'capacity' and owner = cluster_keys.name;\n".encode(self.encoding))
        process.stdin.write('truncate table cluster_keys;\n'.encode(self.encoding))
        # Pass 3: keys are the cluster's operation names.
        for i in frepple.operations():
            if i.cluster == self.cluster:
                process.stdin.write(("insert into cluster_keys (name) values (%s);\n" % adapt(i.name).getquoted().decode(self.encoding)).encode(self.encoding))
        process.stdin.write("delete from out_problem using cluster_keys where entity = 'operation' and owner = cluster_keys.name;\n".encode(self.encoding))
        process.stdin.write("delete from out_operationplan using cluster_keys where operation = cluster_keys.name;\n".encode(self.encoding))
        process.stdin.write("delete from operationplan using cluster_keys where (status='proposed' or status is null) and operationplan.operation_id = cluster_keys.name;\n".encode(self.encoding))
        process.stdin.write("drop table cluster_keys;\n".encode(self.encoding))
    if self.verbosity:
        print("Emptied plan tables in %.2f seconds" % (time() - starttime))
def exportData(filename):
    # Write the plan as a GNU MathProg (GLPK) data file: demands, 12 monthly
    # buckets, resources with fixed capacity, and the demand load factors.
    with open(filename,"wt") as output:
        print("param timerate := 0.97;\n", file=output)
        print("param : demands : reqqty prio due :=", file=output)
        for b in frepple.demands():
            if b.quantity > 0:
                # @todo Export of due date works for monthly buckets and a maximum horizon of 1 year only
                print(b.name.replace(' ','').replace(':',''), b.quantity, b.priority, b.due.month, file=output)
        print(";\n", file=output)
        print("param numbuckets := 12;", file=output)
        print("set buckets := 1 2 3 4 5 6 7 8 9 10 11 12;", file=output)
        print("set resources := ", file=output)
        for b in frepple.resources():
            # Names are squashed (spaces/colons stripped) to be valid symbols.
            print(b.name.replace(' ','').replace(':',''), file=output)
        print(";\n", file=output)
        print("param availablecapacity", file=output)
        print(": 1 2 3 4 5 6 7 8 9 10 11 12 := ", file=output)
        res = []
        for b in frepple.resources():
            # @todo need a more correct way to extract the capacity per bucket.
            res.append(b.name.replace(' ','').replace(':',''))
            print(b.name.replace(' ','').replace(':',''), "120 120 120 120 120 120 120 120 120 120 120 120", file=output)
        print(";\n", file=output)
        print("param : loads : loadfactor :=", file=output)
        for b in frepple.demands():
            if b.quantity > 0:
                oper = b.operation or b.item.operation
                if oper:
                    for fl in oper.flows:
                        if fl.quantity < 0:
                            # Emit a load line for every resource used by this flow.
                            findResources(output, b, fl)
        print(";\n", file=output)
        print("end;\n", file=output)
def exportLoadplans():
    # Dump every capacity-consuming loadplan (negative quantity, exported
    # with the sign flipped) to loadplans.csv, keyed on the operationplan
    # reference.
    print("Exporting loadplans...")
    starttime = time()
    writer = csv.writer(
        open("loadplans.csv", "w", newline="", encoding="utf-8"),
        quoting=csv.QUOTE_ALL,
    )
    writer.writerow(('#operationplan', 'resource', 'quantity', 'start date', 'end date', 'setup'))
    for res in frepple.resources():
        for lp in res.loadplans:
            if lp.quantity >= 0:
                continue  # only capacity consumption is exported
            writer.writerow((
                lp.operationplan.reference,
                lp.resource.name,
                -lp.quantity,
                lp.startdate,
                lp.enddate,
                lp.setup and lp.setup or None,
            ))
    print('Exported loadplans in %.2f seconds' % (time() - starttime))
def exportLoadplans():
    # Dump every capacity-consuming loadplan (negative quantity, exported
    # with the sign flipped) to loadplans.csv, keyed on the operationplan id.
    print("Exporting loadplans...")
    starttime = time()
    writer = csv.writer(open("loadplans.csv", "w", newline="", encoding="utf-8"), quoting=csv.QUOTE_ALL)
    writer.writerow((
        '#operationplan id', 'resource', 'quantity', 'start date', 'end date', 'setup'
    ))
    for i in frepple.resources():
        for j in i.loadplans:
            if j.quantity < 0:
                writer.writerow((
                    j.operationplan.id,
                    j.resource.name,
                    -j.quantity,
                    j.startdate,
                    j.enddate,
                    j.setup and j.setup or None
                ))
    print('Exported loadplans in %.2f seconds' % (time() - starttime))
def exportLoadplans(process):
    # Stream all capacity-consuming loadplans into out_loadplan through a
    # PostgreSQL COPY stream fed to the subprocess stdin.
    # NOTE(review): 'encoding' is read from an enclosing/module scope — confirm.
    print("Exporting loadplans...")
    starttime = time()
    process.stdin.write(
        'COPY out_loadplan (operationplan_id, theresource, quantity, startdate, enddate, setup) FROM STDIN;\n'
        .encode(encoding))
    for i in frepple.resources():
        for j in i.loadplans:
            if j.quantity < 0:
                # Consumption is exported with the sign flipped; \N is SQL NULL.
                process.stdin.write(
                    ("%s\t%s\t%s\t%s\t%s\t%s\n" % (
                        j.operationplan.id,
                        j.resource.name,
                        round(-j.quantity, 4),
                        str(j.startdate),
                        str(j.enddate),
                        j.setup and j.setup or "\\N"
                    )).encode(encoding))
    # End-of-data marker terminating the COPY stream.
    process.stdin.write('\\.\n'.encode(encoding))
    print('Exported loadplans in %.2f seconds' % (time() - starttime))
def exportLoadplans(cursor):
    '''
    Insert all consuming loadplans (quantity < 0) into the out_loadplan
    table, one executemany batch per resource, then report the row count.
    '''
    print("Exporting loadplans...")
    starttime = time()
    # Fix: removed the dead "cnt" counter - it was incremented per resource
    # but never read (the printed total comes from the count(*) query below).
    for i in frepple.resources():
        cursor.executemany(
            "insert into out_loadplan "
            "(operationplan_id, theresource, quantity, startdate, enddate, setup) "
            "values (%s,%s,%s,%s,%s,%s)",
            [(
                j.operationplan.id,
                j.resource.name,
                # sign-flipped: consumption is stored as a positive quantity
                round(-j.quantity, settings.DECIMAL_PLACES),
                str(j.startdate),
                str(j.enddate),
                j.setup
            ) for j in i.loadplans if j.quantity < 0]
        )
    cursor.execute("select count(*) from out_loadplan")
    print('Exported %d loadplans in %.2f seconds' % (cursor.fetchone()[0], time() - starttime))
def exportLoadplans(self, process):
    '''
    Stream all consuming loadplans (quantity < 0) of the selected cluster
    into a database COPY subprocess. Rows are tab-separated and encoded
    with self.encoding; setup falls back to the COPY NULL marker.
    '''
    if self.verbosity:
        print("Exporting loadplans...")
    starttime = time()
    emit = process.stdin.write
    emit('COPY out_loadplan (operationplan_id, theresource, quantity, startdate, enddate, setup) FROM STDIN;\n'.encode(self.encoding))
    for rsrc in frepple.resources():
        # Honor the cluster filter (-1 means "export everything")
        if self.cluster != -1 and self.cluster != rsrc.cluster:
            continue
        for lp in rsrc.loadplans:
            if lp.quantity >= 0:
                continue
            row = "%s\t%s\t%s\t%s\t%s\t%s\n" % (
                lp.operationplan.id,
                lp.resource.name,
                round(-lp.quantity, 4),
                str(lp.startdate),
                str(lp.enddate),
                lp.setup and lp.setup or "\\N",
            )
            emit(row.encode(self.encoding))
    emit('\\.\n'.encode(self.encoding))
    if self.verbosity:
        print('Exported loadplans in %.2f seconds' % (time() - starttime))
def exportLoadplans():
    '''
    Write all consuming loadplans (quantity < 0) to "loadplans.csv",
    keyed by the operationplan reference.
    '''
    print("Exporting loadplans...")
    starttime = time()
    # Bug fix: the csv file was opened inline and never closed, so buffered
    # rows could be lost. A context manager guarantees flush + close.
    with open("loadplans.csv", "w", newline="", encoding="utf-8") as fh:
        writer = csv.writer(fh, quoting=csv.QUOTE_ALL)
        writer.writerow(("#operationplan", "resource", "quantity", "start date", "end date", "setup"))
        for i in frepple.resources():
            for j in i.loadplans:
                if j.quantity < 0:
                    writer.writerow((
                        j.operationplan.reference,
                        j.resource.name,
                        -j.quantity,
                        j.startdate,
                        j.enddate,
                        j.setup and j.setup or None,
                    ))
    print("Exported loadplans in %.2f seconds" % (time() - starttime))
def exportOperationPlanResources(self, process):
    '''
    Stream operationplan-resource records of the selected cluster into a
    database COPY subprocess: header, tab-separated rows (one per consuming
    loadplan), and the end-of-data marker, all encoded with self.encoding.
    '''
    if self.verbosity:
        print("Exporting operationplan resources...")
    starttime = time()
    emit = process.stdin.write
    emit('COPY operationplanresource (operationplan_id, resource, quantity, startdate, enddate, setup) FROM STDIN;\n'.encode(self.encoding))
    for rsrc in frepple.resources():
        # Honor the cluster filter (-1 means "export everything")
        if self.cluster != -1 and self.cluster != rsrc.cluster:
            continue
        for lp in rsrc.loadplans:
            if lp.quantity >= 0:
                continue
            row = "%s\t%s\t%s\t%s\t%s\t%s\n" % (
                lp.operationplan.id,
                lp.resource.name,
                round(-lp.quantity, 4),
                str(lp.startdate),
                str(lp.enddate),
                lp.setup and lp.setup or "\\N",
            )
            emit(row.encode(self.encoding))
    emit('\\.\n'.encode(self.encoding))
    if self.verbosity:
        print('Exported operationplan resources in %.2f seconds' % (time() - starttime))
def exportOperationPlanResources(self, process):
    '''
    Emit operationplan-resource records through the ``process`` callable in
    PostgreSQL COPY syntax (header, tab-separated rows, end-of-data marker).
    Only consuming loadplans (quantity < 0) of the selected cluster are sent;
    every row also carries the loadplan status and the export timestamp.
    '''
    if self.verbosity:
        print("Exporting operationplan resources...")
    starttime = time()
    process('COPY operationplanresource (operationplan_id, resource, quantity, startdate, enddate, setup, status, lastmodified) FROM STDIN;\n')
    currentTime = self.timestamp
    for rsrc in frepple.resources():
        # Honor the cluster filter (-1 means "export everything")
        if self.cluster != -1 and self.cluster != rsrc.cluster:
            continue
        for lp in rsrc.loadplans:
            if lp.quantity >= 0:
                continue
            process("%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n" % (
                lp.operationplan.id,
                lp.resource.name,
                round(-lp.quantity, 6),
                str(lp.startdate),
                str(lp.enddate),
                lp.setup and lp.setup or "\\N",
                lp.status,
                currentTime,
            ))
    process('\\.\n')
    if self.verbosity:
        print('Exported operationplan resources in %.2f seconds' % (time() - starttime))
def truncate(self, process):
    '''
    Empty the plan output tables before a new export, by streaming SQL
    statements into the database COPY/psql subprocess ``process``.

    Two modes:
      * self.cluster == -1: wipe the plan tables for the whole model.
      * otherwise: delete only the records belonging to one cluster, using
        a temporary cluster_keys table that is filled successively with the
        item, resource and operation names of that cluster.

    NOTE(review): statement order matters here - cluster_keys is reused
    (truncated) between the item, resource and operation passes.
    '''
    if self.verbosity:
        print("Emptying database plan tables...")
    starttime = time()
    if self.cluster == -1:
        # Complete export for the complete model
        process.stdin.write("truncate table out_problem, out_resourceplan, out_constraint;\n".encode(self.encoding))
        process.stdin.write("truncate table operationplanmaterial, operationplanresource;\n".encode(self.encoding))
        # Above line is a temporary solution until we have a correct version of this block of code
        # process.stdin.write('''
        #   delete from operationplanmaterial
        #   using operationplan
        #   where operationplanmaterial.operationplan_id = operationplan.id
        #   and ((operationplan.status='proposed' or operationplan.status is null) or operationplan.type = 'STCK');\n
        #   '''.encode(self.encoding))
        # process.stdin.write('''
        #   delete from operationplanresource
        #   using operationplan
        #   where operationplanresource.operationplan_id = operationplan.id
        #   and ((operationplan.status='proposed' or operationplan.status is null) or operationplan.type = 'STCK');\n
        #   '''.encode(self.encoding))
        process.stdin.write('''
            delete from operationplan
            where (status='proposed' or status is null) or type = 'STCK';\n
            '''.encode(self.encoding))
    else:
        # Partial export for a single cluster
        process.stdin.write('create temporary table cluster_keys (name character varying(300), constraint cluster_key_pkey primary key (name));\n'.encode(self.encoding))
        # Pass 1: keys = item names of the cluster
        for i in frepple.items():
            if i.cluster == self.cluster:
                process.stdin.write(("insert into cluster_keys (name) values (%s);\n" % adapt(i.name).getquoted().decode(self.encoding)).encode(self.encoding))
        process.stdin.write("delete from out_constraint where demand in (select demand.name from demand inner join cluster_keys on cluster_keys.name = demand.item_id);\n".encode(self.encoding))
        process.stdin.write('''
            delete from operationplanmaterial
            where buffer in (select buffer.name from buffer inner join cluster_keys on cluster_keys.name = buffer.item_id);\n
            '''.encode(self.encoding))
        process.stdin.write('''
            delete from out_problem
            where entity = 'demand' and owner in (
              select demand.name from demand inner join cluster_keys on cluster_keys.name = demand.item_id
              );\n
            '''.encode(self.encoding))
        process.stdin.write('''
            delete from out_problem
            where entity = 'material'
            and owner in (select buffer.name from buffer inner join cluster_keys on cluster_keys.name = buffer.item_id);\n
            '''.encode(self.encoding))
        process.stdin.write('''
            delete from operationplan
            using cluster_keys
            where (status='proposed' or status is null or type='STCK')
            and item_id = cluster_keys.name;\n
            '''.encode(self.encoding))
        process.stdin.write("truncate table cluster_keys;\n".encode(self.encoding))
        # Pass 2: keys = resource names of the cluster
        for i in frepple.resources():
            if i.cluster == self.cluster:
                process.stdin.write(("insert into cluster_keys (name) values (%s);\n" % adapt(i.name).getquoted().decode(self.encoding)).encode(self.encoding))
        process.stdin.write("delete from out_problem where entity = 'demand' and owner in (select demand.name from demand inner join cluster_keys on cluster_keys.name = demand.item_id);\n".encode(self.encoding))
        process.stdin.write('delete from operationplanresource using cluster_keys where resource = cluster_keys.name;\n'.encode(self.encoding))
        process.stdin.write('delete from out_resourceplan using cluster_keys where resource = cluster_keys.name;\n'.encode(self.encoding))
        process.stdin.write("delete from out_problem using cluster_keys where entity = 'capacity' and owner = cluster_keys.name;\n".encode(self.encoding))
        process.stdin.write('truncate table cluster_keys;\n'.encode(self.encoding))
        # Pass 3: keys = operation names of the cluster
        for i in frepple.operations():
            if i.cluster == self.cluster:
                process.stdin.write(("insert into cluster_keys (name) values (%s);\n" % adapt(i.name).getquoted().decode(self.encoding)).encode(self.encoding))
        process.stdin.write("delete from out_problem using cluster_keys where entity = 'operation' and owner = cluster_keys.name;\n".encode(self.encoding))
        process.stdin.write("delete from operationplan using cluster_keys where (status='proposed' or status is null) and operationplan.operation_id = cluster_keys.name;\n".encode(self.encoding))  # TODO not correct in new data model
        process.stdin.write("drop table cluster_keys;\n".encode(self.encoding))
    if self.verbosity:
        print("Emptied plan tables in %.2f seconds" % (time() - starttime))
def printModel(filename):
    '''
    A function that prints out all models to a file.

    Bug fixes vs the previous version:
      * the output file is now opened with a context manager, so the handle
        is always flushed and closed, even when an error occurs mid-dump;
      * the plan description is printed as text - the old
        ``.encode('utf-8')`` made print() emit a bytes repr (b'...').
    '''
    with open(filename, "wt") as output:
        # Global settings
        print("Echoing global settings", file=output)
        print("Plan name:", frepple.settings.name, file=output)
        print("Plan description:", frepple.settings.description, file=output)
        print("Plan current:", frepple.settings.current, file=output)
        # Solvers
        print("\nEchoing solvers:", file=output)
        for b in frepple.solvers():
            print(" Solver:", b.name, b.loglevel, getattr(b, 'constraints', None), file=output)
        # Calendars
        print("\nEchoing calendars:", file=output)
        for b in frepple.calendars():
            print(" Calendar:", b.name, getattr(b, 'default', None), file=output)
            for j in b.buckets:
                print(" Bucket:", getattr(j, 'value', None), j.start, j.end, j.priority, file=output)
        # Customers
        print("\nEchoing customers:", file=output)
        for b in frepple.customers():
            print(" Customer:", b.name, b.description, b.category, b.subcategory, b.owner, file=output)
        # Locations
        print("\nEchoing locations:", file=output)
        for b in frepple.locations():
            print(" Location:", b.name, b.description, b.category, b.subcategory, b.owner, file=output)
        # Items
        print("\nEchoing items:", file=output)
        for b in frepple.items():
            print(" Item:", b.name, b.description, b.category, b.subcategory, b.owner, b.operation, file=output)
        # Resources
        print("\nEchoing resources:", file=output)
        for b in frepple.resources():
            print(" Resource:", b.name, b.description, b.category, b.subcategory, b.owner, file=output)
            for l in b.loads:
                print(" Load:", l.operation.name, l.quantity, l.effective_start, l.effective_end, file=output)
            for l in b.loadplans:
                print(" Loadplan:", l.operationplan.id, l.operationplan.operation.name, l.quantity, l.startdate, l.enddate, file=output)
        # Buffers
        print("\nEchoing buffers:", file=output)
        for b in frepple.buffers():
            print(" Buffer:", b.name, b.description, b.category, b.subcategory, b.owner, file=output)
            for l in b.flows:
                print(" Flow:", l.operation.name, l.quantity, l.effective_start, l.effective_end, file=output)
            for l in b.flowplans:
                print(" Flowplan:", l.operationplan.id, l.operationplan.operation.name, l.quantity, l.date, file=output)
        # Operations
        print("\nEchoing operations:", file=output)
        for b in frepple.operations():
            print(" Operation:", b.name, b.description, b.category, b.subcategory, file=output)
            for l in b.loads:
                print(" Load:", l.resource.name, l.quantity, l.effective_start, l.effective_end, file=output)
            for l in b.flows:
                print(" Flow:", l.buffer.name, l.quantity, l.effective_start, l.effective_end, file=output)
            if isinstance(b, frepple.operation_alternate):
                for l in b.alternates:
                    print(" Alternate:", l.name, file=output)
            if isinstance(b, frepple.operation_routing):
                for l in b.steps:
                    print(" Step:", l.name, file=output)
        # Demands
        print("\nEchoing demands:", file=output)
        for b in frepple.demands():
            print(" Demand:", b.name, b.due, b.item.name, b.quantity, file=output)
            for i in b.operationplans:
                print(" Operationplan:", i.id, i.operation.name, i.quantity, i.end, file=output)
        # Operationplans
        print("\nEchoing operationplans:", file=output)
        for b in frepple.operationplans():
            print(" Operationplan:", b.operation.name, b.quantity, b.start, b.end, file=output)
            for s in b.operationplans:
                print(" ", s.operation.name, s.quantity, s.start, s.end, file=output)
        # Problems
        print("\nPrinting problems", file=output)
        for i in frepple.problems():
            print(" Problem:", i.entity, i.name, i.description, i.start, i.end, i.weight, file=output)
def truncate(self):
    '''
    Empty the plan output tables before a new export, executing the SQL
    directly on a database cursor (reference-keyed operationplan model).

    Two modes:
      * self.cluster == -1: wipe the plan tables for the whole model, after
        detaching children from proposed parent operationplans.
      * otherwise: delete only the records belonging to one cluster, using
        a temporary cluster_keys table filled successively with the item,
        resource and operation names of that cluster.

    NOTE(review): statement order matters - cluster_keys is reused
    (truncated) between the item, resource and operation passes.
    '''
    cursor = connections[self.database].cursor()
    if self.verbosity:
        logger.info("Emptying database plan tables...")
    starttime = time()
    if self.cluster == -1:
        # Complete export for the complete model
        cursor.execute("truncate table out_problem, out_resourceplan, out_constraint")
        # Detach children whose parent is proposed (or status-less) before
        # those parents are deleted below.
        cursor.execute('''
            update operationplan set owner_id = null
            where owner_id is not null
            and exists (
              select 1 from operationplan op2
              where op2.reference = operationplan.owner_id
              and (op2.status is null or op2.status = 'proposed')
              )
            ''')
        cursor.execute('''
            truncate operationplanmaterial, operationplanresource
            ''')
        cursor.execute('''
            delete from operationplan
            where (status='proposed' or status is null) or type = 'STCK'
            ''')
    else:
        # Partial export for a single cluster
        cursor.execute('create temporary table cluster_keys (name character varying(300), constraint cluster_key_pkey primary key (name))')
        # Pass 1: keys = item names of the cluster
        for i in frepple.items():
            if i.cluster == self.cluster:
                cursor.execute(("insert into cluster_keys (name) values (%s);\n" % adapt(i.name).getquoted().decode(self.encoding)))
        cursor.execute("delete from out_constraint where demand in (select demand.name from demand inner join cluster_keys on cluster_keys.name = demand.item_id)")
        cursor.execute('''
            delete from operationplanmaterial
            using cluster_keys
            where operationplan_id in (
              select reference from operationplan
              inner join cluster_keys on cluster_keys.name = operationplan.item_id
              union
              select reference from operationplan
              where owner_id in (
                select reference from operationplan parent_opplan
                inner join cluster_keys on cluster_keys.name = parent_opplan.item_id
                )
              )
            ''')
        cursor.execute('''
            delete from out_problem
            where entity = 'demand' and owner in (
              select demand.name from demand inner join cluster_keys on cluster_keys.name = demand.item_id
              )
            ''')
        cursor.execute('''
            delete from out_problem
            where entity = 'material'
            and owner in (select buffer.name from buffer inner join cluster_keys on cluster_keys.name = buffer.item_id)
            ''')
        cursor.execute('''
            delete from operationplanresource
            where operationplan_id in (
              select reference from operationplan
              inner join cluster_keys on cluster_keys.name = operationplan.item_id
              where status = 'proposed' or status is null or type='STCK'
              union
              select reference from operationplan
              where owner_id in (
                select reference from operationplan parent_opplan
                inner join cluster_keys on cluster_keys.name = parent_opplan.item_id
                )
              and (status = 'proposed' or status is null)
              )
            ''')
        # Delete child operationplans of proposed parents first, then the
        # cluster's own proposed/stock operationplans.
        cursor.execute('''
            delete from operationplan
            using cluster_keys
            where owner_id in (
              select oplan_parent.reference
              from operationplan as oplan_parent
              where (oplan_parent.status='proposed' or oplan_parent.status is null or oplan_parent.type='STCK')
              and oplan_parent.item_id = cluster_keys.name
              )
            ''')
        cursor.execute('''
            delete from operationplan
            using cluster_keys
            where (status='proposed' or status is null or type='STCK')
            and item_id = cluster_keys.name
            ''')
        # TODO next subqueries are not efficient - the exists condition triggers a sequential scan
        cursor.execute("delete from out_constraint where exists (select 1 from forecast inner join cluster_keys on cluster_keys.name = forecast.item_id and out_constraint.demand like forecast.name || ' - %')")
        cursor.execute("delete from out_problem where entity = 'demand' and exists (select 1 from forecast inner join cluster_keys on cluster_keys.name = forecast.item_id and out_problem.owner like forecast.name || ' - %')")
        cursor.execute("truncate table cluster_keys")
        # Pass 2: keys = resource names of the cluster
        for i in frepple.resources():
            if i.cluster == self.cluster:
                cursor.execute(("insert into cluster_keys (name) values (%s)" % adapt(i.name).getquoted().decode(self.encoding)))
        cursor.execute("delete from out_problem where entity = 'demand' and owner in (select demand.name from demand inner join cluster_keys on cluster_keys.name = demand.item_id)")
        cursor.execute('delete from operationplanresource using cluster_keys where resource_id = cluster_keys.name')
        cursor.execute('delete from out_resourceplan using cluster_keys where resource = cluster_keys.name')
        cursor.execute("delete from out_problem using cluster_keys where entity = 'capacity' and owner = cluster_keys.name")
        cursor.execute('truncate table cluster_keys')
        # Pass 3: keys = operation names of the cluster
        for i in frepple.operations():
            if i.cluster == self.cluster:
                cursor.execute(("insert into cluster_keys (name) values (%s)" % adapt(i.name).getquoted().decode(self.encoding)))
        cursor.execute("delete from out_problem using cluster_keys where entity = 'operation' and owner = cluster_keys.name")
        cursor.execute("delete from operationplan using cluster_keys where (status='proposed' or status is null) and operationplan.name = cluster_keys.name")
        cursor.execute("drop table cluster_keys")
    if self.verbosity:
        logger.info("Emptied plan tables in %.2f seconds" % (time() - starttime))
def exportResourceplans(self):
    '''
    Export per-bucket resource utilization into the out_resourceplan table
    via a temporary file and a COPY FROM operation.
    '''
    if self.verbosity:
        logger.info("Exporting resourceplans...")
    starttime = time()
    cursor = connections[self.database].cursor()

    # Determine start and end date of the reporting horizon
    # The start date is computed as 5 weeks before the start of the earliest loadplan in
    # the entire plan.
    # The end date is computed as 5 weeks after the end of the latest loadplan in
    # the entire plan.
    # If no loadplans exist at all we use the current date +- 1 month.
    startdate = datetime.max
    enddate = datetime.min
    for i in frepple.resources():
        if self.cluster != -1 and self.cluster != i.cluster:
            continue
        for j in i.loadplans:
            if j.startdate < startdate:
                startdate = j.startdate
            if j.enddate > enddate:
                enddate = j.enddate
    if startdate == datetime.max:
        startdate = frepple.settings.current
    if enddate == datetime.min:
        enddate = frepple.settings.current
    startdate = (startdate - timedelta(days=30)).date()
    enddate = (enddate + timedelta(days=30)).date()
    if enddate > date(2030, 12, 30):
        # This is the max frePPLe can represent.
        enddate = date(2030, 12, 30)

    # Build a list of horizon buckets.
    # Fix: added "order by startdate" - without it SQL gives no ordering
    # guarantee, and the bucket list passed to resource.plan() must be
    # chronological (the sibling exporter already orders this query).
    cursor.execute('''
        select startdate
        from common_bucketdetail
        where startdate between %s and %s
        and bucket_id = (select name from common_bucket order by level desc limit 1)
        order by startdate
        ''', (startdate, enddate))
    buckets = [rec[0] for rec in cursor.fetchall()]

    # Loop over all reporting buckets of all resources
    with tempfile.TemporaryFile(mode="w+t", encoding='utf-8') as tmp:
        for i in frepple.resources():
            for j in i.plan(buckets):
                print(("%s\t%s\t%s\t%s\t%s\t%s\t%s" % (
                    i.name,
                    str(j['start']),
                    round(j['available'], 8),
                    round(j['unavailable'], 8),
                    round(j['setup'], 8),
                    round(j['load'], 8),
                    round(j['free'], 8)
                )), file=tmp)
        tmp.seek(0)
        cursor.copy_from(
            tmp,
            'out_resourceplan',
            columns=('resource', 'startdate', 'available', 'unavailable', 'setup', 'load', 'free')
        )
        # Fix: removed the redundant tmp.close() - the "with" block already
        # closes the temporary file on exit.
    # TODO update owner records with sum of children quantities
    if self.verbosity:
        logger.info('Exported resourceplans in %.2f seconds' % (time() - starttime))
def exportResourceplans(self):
    '''
    Export per-bucket (daily) resource utilization into out_resourceplan via
    a temporary file + COPY FROM, then roll child totals up to owner
    resources with a CTE update.
    '''
    if self.verbosity:
        print("Exporting resourceplans...")
    starttime = time()
    cursor = connections[self.database].cursor()

    # Determine start and end date of the reporting horizon
    # The start date is computed as 5 weeks before the start of the earliest loadplan in
    # the entire plan.
    # The end date is computed as 5 weeks after the end of the latest loadplan in
    # the entire plan.
    # If no loadplans exist at all we use the current date +- 1 month.
    startdate = datetime.max
    enddate = datetime.min
    for i in frepple.resources():
        if self.cluster != -1 and self.cluster != i.cluster:
            continue
        for j in i.loadplans:
            if j.startdate < startdate:
                startdate = j.startdate
            if j.enddate > enddate:
                enddate = j.enddate
    if startdate == datetime.max:
        startdate = frepple.settings.current
    if enddate == datetime.min:
        enddate = frepple.settings.current
    startdate = (startdate - timedelta(days=30)).date()
    enddate = (enddate + timedelta(days=30)).date()
    if enddate > date(2030, 12, 30):
        # This is the max frePPLe can represent.
        enddate = date(2030, 12, 30)

    # Build a list of horizon buckets: one per day
    buckets = []
    while startdate < enddate:
        buckets.append(startdate)
        startdate += timedelta(days=1)

    # Loop over all reporting buckets of all resources
    with tempfile.TemporaryFile(mode="w+t", encoding='utf-8') as tmp:
        for i in frepple.resources():
            for j in i.plan(buckets):
                print(("%s\t%s\t%s\t%s\t%s\t%s\t%s" % (
                    i.name,
                    str(j['start']),
                    round(j['available'], 6),
                    round(j['unavailable'], 6),
                    round(j['setup'], 6),
                    round(j['load'], 6),
                    round(j['free'], 6)
                )), file=tmp)
        tmp.seek(0)
        cursor.copy_from(
            tmp,
            'out_resourceplan',
            columns=('resource', 'startdate', 'available', 'unavailable', 'setup', 'load', 'free')
        )
        # Fix: removed the redundant tmp.close() - the "with" block already
        # closes the temporary file on exit.

    # Update owner records with the sum of their children's quantities
    # (leaf resources are those with no resource owning them).
    cursor.execute('''
        with cte as (
          select parent.name resource, out_resourceplan.startdate,
          sum(out_resourceplan.available) available,
          sum(out_resourceplan.unavailable) unavailable,
          sum(out_resourceplan.setup) setup,
          sum(out_resourceplan.load) "load",
          sum(out_resourceplan.free) free
          from resource parent
          inner join resource child
            on child.lft > parent.lft
            and child.rght < parent.rght
            and not exists (select 1 from resource where owner_id = child.name)
          inner join out_resourceplan
            on out_resourceplan.resource = child.name
          group by parent.name, startdate)
        update out_resourceplan
        set available = cte.available,
            unavailable = cte.unavailable,
            setup = cte.setup,
            load = cte.load,
            free = cte.free
        from cte
        where out_resourceplan.resource = cte.resource
        and out_resourceplan.startdate = cte.startdate
        ''')
    if self.verbosity:
        print('Exported resourceplans in %.2f seconds' % (time() - starttime))
def exportResourceplans(self):
    '''
    Export per-bucket resource utilization into out_resourceplan, streaming
    rows straight from a generator into COPY FROM (no temporary file).
    '''
    if self.verbosity:
        logger.info("Exporting resourceplans...")
    starttime = time()
    # Determine start and end date of the reporting horizon
    # The start date is computed as 5 weeks before the start of the earliest loadplan in
    # the entire plan.
    # The end date is computed as 5 weeks after the end of the latest loadplan in
    # the entire plan.
    # If no loadplans exist at all we use the current date +- 1 month.
    cursor = connections[self.database].cursor()
    startdate = datetime.max
    enddate = datetime.min
    for i in frepple.resources():
        if self.cluster != -1 and self.cluster != i.cluster:
            continue
        for j in i.loadplans:
            if j.startdate < startdate:
                startdate = j.startdate
            if j.enddate > enddate:
                enddate = j.enddate
    if startdate == datetime.max:
        startdate = frepple.settings.current
    if enddate == datetime.min:
        enddate = frepple.settings.current
    startdate = (startdate - timedelta(days=30)).date()
    enddate = (enddate + timedelta(days=30)).date()
    if enddate > date(2030, 12, 30):
        # This is the max frePPLe can represent.
        enddate = date(2030, 12, 30)
    # Build a list of horizon buckets.
    # Fix: added "order by startdate" - without it SQL gives no ordering
    # guarantee, and the bucket list passed to resource.plan() must be
    # chronological (the run() variant of this exporter already orders it).
    cursor.execute(
        """
        select startdate
        from common_bucketdetail
        where startdate between %s and %s
        and bucket_id = (select name from common_bucket order by level desc limit 1)
        order by startdate
        """,
        (startdate, enddate),
    )
    buckets = [rec[0] for rec in cursor.fetchall()]

    def getData():
        # Loop over all reporting buckets of all resources
        for i in frepple.resources():
            for j in i.plan(buckets):
                yield "%s\v%s\v%s\v%s\v%s\v%s\v%s\n" % (
                    clean_value(i.name),
                    str(j["start"]),
                    round(j["available"], 8),
                    round(j["unavailable"], 8),
                    round(j["setup"], 8),
                    round(j["load"], 8),
                    round(j["free"], 8),
                )

    cursor.copy_from(
        CopyFromGenerator(getData()),
        "out_resourceplan",
        columns=(
            "resource",
            "startdate",
            "available",
            "unavailable",
            "setup",
            "load",
            "free",
        ),
        size=1024,
        sep="\v",
    )
    if self.verbosity:
        logger.info("Exported resourceplans in %.2f seconds" % (time() - starttime))
def run(cls, cluster=-1, database=DEFAULT_DB_ALIAS, **kwargs):
    '''
    Export per-bucket resource utilization into out_resourceplan, streaming
    rows from a generator into COPY FROM with '\\v' as column separator.
    '''
    import frepple

    # Set the timestamp for the export tasks in this thread
    cls.parent.timestamp = datetime.now()

    # Determine start and end date of the reporting horizon
    # The start date is computed as 5 weeks before the start of the earliest loadplan in
    # the entire plan.
    # The end date is computed as 5 weeks after the end of the latest loadplan in
    # the entire plan.
    # If no loadplans exist at all we use the current date +- 1 month.
    cursor = connections[database].cursor()
    horizon_start = datetime.max
    horizon_end = datetime.min
    for rsrc in frepple.resources():
        if cluster == -1 or cluster == rsrc.cluster:
            for lp in rsrc.loadplans:
                if lp.startdate < horizon_start:
                    horizon_start = lp.startdate
                if lp.enddate > horizon_end:
                    horizon_end = lp.enddate
    if horizon_start == datetime.max:
        horizon_start = frepple.settings.current
    if horizon_end == datetime.min:
        horizon_end = frepple.settings.current
    horizon_start = (horizon_start - timedelta(days=30)).date()
    horizon_end = (horizon_end + timedelta(days=30)).date()
    if horizon_end > date(2030, 12, 30):
        # This is the max frePPLe can represent.
        horizon_end = date(2030, 12, 30)

    # Collect the reporting buckets of the most granular bucket definition
    cursor.execute(
        """
        select startdate
        from common_bucketdetail
        where startdate between %s and %s
        and bucket_id = (select name from common_bucket order by level desc limit 1)
        order by startdate
        """,
        (horizon_start, horizon_end),
    )
    buckets = [row[0] for row in cursor.fetchall()]

    def bucket_rows():
        # Loop over all reporting buckets of all resources
        for rsrc in frepple.resources():
            for bkt in rsrc.plan(buckets):
                yield "%s\v%s\v%s\v%s\v%s\v%s\v%s\n" % (
                    clean_value(rsrc.name),
                    str(bkt["start"]),
                    round(bkt["available"], 8),
                    round(bkt["unavailable"], 8),
                    round(bkt["setup"], 8),
                    round(bkt["load"], 8),
                    round(bkt["free"], 8),
                )

    cursor.copy_from(
        CopyFromGenerator(bucket_rows()),
        "out_resourceplan",
        columns=(
            "resource",
            "startdate",
            "available",
            "unavailable",
            "setup",
            "load",
            "free",
        ),
        size=1024,
        sep="\v",
    )
def run(cls, cluster=-1, database=DEFAULT_DB_ALIAS, **kwargs):
    '''
    Empty the plan output tables before a new export.

    Two modes:
      * cluster == -1: wipe the plan tables for the whole model, after
        detaching children from proposed parent operationplans.
      * otherwise: delete only the records of one cluster, using a
        temporary cluster_keys table filled successively with the item,
        resource and operation names of that cluster.

    Bug fix: one execute() call used a quadruple quote (``\"\"\"\"``), which
    made the SQL string start with a stray double quote and fail at runtime;
    it is now a proper triple-quoted string.
    '''
    import frepple

    cursor = connections[database].cursor()
    if cluster == -1:
        # Complete export for the complete model
        cursor.execute(
            "truncate table out_problem, out_resourceplan, out_constraint")
        # Detach children whose parent is proposed (or status-less) before
        # those parents are deleted below.
        cursor.execute("""
            update operationplan set owner_id = null
            where owner_id is not null
            and exists (
              select 1 from operationplan op2
              where op2.reference = operationplan.owner_id
              and (op2.status is null or op2.status = 'proposed')
              )
            """)
        cursor.execute("""
            truncate operationplanmaterial, operationplanresource
            """)
        cursor.execute("""
            delete from operationplan
            where (status='proposed' or status is null) or type = 'STCK'
            """)
    else:
        # Partial export for a single cluster
        cursor.execute(
            "create temporary table cluster_keys (name character varying(300), constraint cluster_key_pkey primary key (name))"
        )
        # Pass 1: keys = item names of the cluster
        for i in frepple.items():
            if i.cluster == cluster:
                cursor.execute(
                    ("insert into cluster_keys (name) values (%s);\n"
                     % adapt(i.name).getquoted().decode("UTF8")))
        cursor.execute("""
            delete from out_constraint
            where demand in (
              select demand.name
              from demand
              inner join cluster_keys on cluster_keys.name = demand.item_id
              )
            """)
        cursor.execute("""
            delete from out_problem
            where entity = 'demand' and owner in (
              select demand.name
              from demand
              inner join cluster_keys on cluster_keys.name = demand.item_id
              )
            """)
        cursor.execute("""
            delete from out_problem
            where entity = 'material'
            and owner in (
              select buffer.item_id || ' @ ' || buffer.location_id
              from buffer
              inner join cluster_keys on cluster_keys.name = buffer.item_id
              )
            """)
        # Delete child operationplans of proposed parents first, then the
        # cluster's own proposed/stock operationplans.
        cursor.execute("""
            delete from operationplan
            using cluster_keys
            where owner_id in (
              select oplan_parent.reference
              from operationplan as oplan_parent
              where (oplan_parent.status='proposed' or oplan_parent.status is null or oplan_parent.type='STCK')
              and oplan_parent.item_id = cluster_keys.name
              )
            """)
        cursor.execute("""
            delete from operationplan
            using cluster_keys
            where (status='proposed' or status is null or type='STCK')
            and item_id = cluster_keys.name
            """)
        cursor.execute("truncate table cluster_keys")
        # Pass 2: keys = resource names of the cluster
        for i in frepple.resources():
            if i.cluster == cluster:
                cursor.execute(
                    ("insert into cluster_keys (name) values (%s)"
                     % adapt(i.name).getquoted().decode("UTF8")))
        cursor.execute("""
            delete from out_problem
            where entity = 'demand' and owner in (
              select demand.name
              from demand
              inner join cluster_keys on cluster_keys.name = demand.item_id
              )
            """)
        cursor.execute(
            "delete from operationplanresource using cluster_keys where resource_id = cluster_keys.name"
        )
        cursor.execute(
            "delete from out_resourceplan using cluster_keys where resource = cluster_keys.name"
        )
        cursor.execute(
            "delete from out_problem using cluster_keys where entity = 'capacity' and owner = cluster_keys.name"
        )
        cursor.execute("truncate table cluster_keys")
        # Pass 3: keys = operation names of the cluster
        for i in frepple.operations():
            if i.cluster == cluster:
                cursor.execute(
                    ("insert into cluster_keys (name) values (%s)"
                     % adapt(i.name).getquoted().decode("UTF8")))
        # Fixed: previously opened with """" (four quotes), injecting a
        # stray leading double quote into the SQL statement.
        cursor.execute("""
            delete from out_problem
            using cluster_keys
            where entity = 'operation' and owner = cluster_keys.name
            """)
        cursor.execute("""
            delete from operationplan
            using cluster_keys
            where (status='proposed' or status is null)
            and operationplan.name = cluster_keys.name
            """)
        cursor.execute("drop table cluster_keys")
def printModel(filename):
    '''
    A function that prints out all models to a file.

    Dumps, in order: global settings, solvers, calendars (with buckets),
    customers, locations, items, resources (with loads and loadplans),
    buffers (with flows and flowplans), operations (with loads, flows,
    alternates and routing steps), demands (with their operationplans),
    all operationplans (with children), and finally all plan problems.
    '''
    # Open the output file; the context manager guarantees flush + close.
    with open(filename, "wt", encoding='utf-8') as output:
        # Global settings
        print("Echoing global settings", file=output)
        print("Plan name:", frepple.settings.name, file=output)
        print("Plan description:", frepple.settings.description, file=output)
        print("Plan current:", frepple.settings.current, file=output)
        # Solvers
        print("\nEchoing solvers:", file=output)
        for b in frepple.solvers():
            # 'constraints' only exists on some solver types, hence getattr
            print(" Solver:", b.name, b.loglevel, getattr(b, 'constraints', None), file=output)
        # Calendars
        print("\nEchoing calendars:", file=output)
        for b in frepple.calendars():
            print(" Calendar:", b.name, getattr(b, 'default', None), file=output)
            for j in b.buckets:
                print(" Bucket:", getattr(j, 'value', None), j.start, j.end, j.priority, file=output)
        # Customers
        print("\nEchoing customers:", file=output)
        for b in frepple.customers():
            print(" Customer:", b.name, b.description, b.category, b.subcategory, b.owner, file=output)
        # Locations
        print("\nEchoing locations:", file=output)
        for b in frepple.locations():
            print(" Location:", b.name, b.description, b.category, b.subcategory, b.owner, file=output)
        # Items
        print("\nEchoing items:", file=output)
        for b in frepple.items():
            print(" Item:", b.name, b.description, b.category, b.subcategory, b.owner, b.operation, file=output)
        # Resources, with their load definitions and planned loadplans
        print("\nEchoing resources:", file=output)
        for b in frepple.resources():
            print(" Resource:", b.name, b.description, b.category, b.subcategory, b.owner, file=output)
            for l in b.loads:
                print(" Load:", l.operation.name, l.quantity, l.effective_start, l.effective_end, file=output)
            for l in b.loadplans:
                print(" Loadplan:", l.operationplan.id, l.operationplan.operation.name, l.quantity, l.startdate, l.enddate, file=output)
        # Buffers, with their flow definitions and planned flowplans
        print("\nEchoing buffers:", file=output)
        for b in frepple.buffers():
            print(" Buffer:", b.name, b.description, b.category, b.subcategory, b.owner, file=output)
            for l in b.flows:
                print(" Flow:", l.operation.name, l.quantity, l.effective_start, l.effective_end, file=output)
            for l in b.flowplans:
                print(" Flowplan:", l.operationplan.id, l.operationplan.operation.name, l.quantity, l.date, file=output)
        # Operations
        print("\nEchoing operations:", file=output)
        for b in frepple.operations():
            print(" Operation:", b.name, b.description, b.category, b.subcategory, file=output)
            for l in b.loads:
                print(" Load:", l.resource.name, l.quantity, l.effective_start, l.effective_end, file=output)
            for l in b.flows:
                print(" Flow:", l.buffer.name, l.quantity, l.effective_start, l.effective_end, file=output)
            if isinstance(b, frepple.operation_alternate):
                # Alternates are tuples: (operation, priority, eff.start, eff.end)
                # NOTE(review): tuple layout assumed from the indexing - confirm
                for l in b.alternates:
                    print(" Alternate:", l[0].name, l[1], l[2], l[3], file=output)
            if isinstance(b, frepple.operation_routing):
                for l in b.steps:
                    print(" Step:", l.name, file=output)
        # Demands and the operationplans satisfying them
        print("\nEchoing demands:", file=output)
        for b in frepple.demands():
            print(" Demand:", b.name, b.due, b.item.name, b.quantity, file=output)
            for i in b.operationplans:
                print(" Operationplan:", i.id, i.operation.name, i.quantity, i.end, file=output)
        # Operationplans, with their child operationplans
        print("\nEchoing operationplans:", file=output)
        for b in frepple.operationplans():
            print(" Operationplan:", b.operation.name, b.quantity, b.start, b.end, file=output)
            for s in b.operationplans:
                print(" ", s.operation.name, s.quantity, s.start, s.end, file=output)
        # Problems
        print("\nPrinting problems", file=output)
        for i in frepple.problems():
            print(" Problem:", i.entity, i.name, i.description, i.start, i.end, i.weight, file=output)
def truncate(self):
    """
    Empty the plan output tables before a new export.

    For a complete export (self.cluster == -1) the out_* tables are
    truncated and all proposed operationplans (plus STCK records) are
    deleted, together with their material and resource detail rows.

    For a partial export of a single cluster, a temporary key table is
    filled with the names of the cluster's items / resources / operations,
    and only the plan output belonging to those keys is deleted.
    """
    cursor = connections[self.database].cursor()
    if self.verbosity:
        logger.info("Emptying database plan tables...")
    starttime = time()
    if self.cluster == -1:
        # Complete export for the complete model
        cursor.execute(
            "truncate table out_problem, out_resourceplan, out_constraint")
        # Detach children from proposed parents before deleting the parents.
        cursor.execute('''
            update operationplan
            set owner_id = null
            where owner_id is not null
            and exists (
              select 1 from operationplan op2
              where op2.id = operationplan.owner_id
              and (op2.status is null or op2.status = 'proposed')
              )
            ''')
        # Delete detail rows first to respect the FK to operationplan.
        cursor.execute('''
            delete from operationplanmaterial
            using operationplan
            where operationplanmaterial.operationplan_id = operationplan.id
            and ((operationplan.status='proposed' or operationplan.status is null)
              or operationplan.type = 'STCK'
              or operationplanmaterial.status = 'proposed'
              or operationplanmaterial.status is null)
            ''')
        cursor.execute('''
            delete from operationplanresource
            using operationplan
            where operationplanresource.operationplan_id = operationplan.id
            and ((operationplan.status='proposed' or operationplan.status is null)
              or operationplan.type = 'STCK'
              or operationplanresource.status = 'proposed'
              or operationplanresource.status is null)
            ''')
        cursor.execute('''
            delete from operationplan
            where (status='proposed' or status is null) or type = 'STCK'
            ''')
    else:
        # Partial export for a single cluster
        cursor.execute(
            'create temporary table cluster_keys (name character varying(300), constraint cluster_key_pkey primary key (name))'
        )
        # Parameterized inserts instead of manual adapt() quoting: safer,
        # and independent of the client encoding.
        cursor.executemany(
            "insert into cluster_keys (name) values (%s)",
            [(i.name,) for i in frepple.items() if i.cluster == self.cluster]
        )
        cursor.execute(
            "delete from out_constraint where demand in (select demand.name from demand inner join cluster_keys on cluster_keys.name = demand.item_id)"
        )
        # Delete detail rows of operationplans of the cluster's items,
        # including children whose parent belongs to the cluster.
        cursor.execute('''
            delete from operationplanmaterial
            using cluster_keys
            where operationplan_id in (
              select id from operationplan
              inner join cluster_keys on cluster_keys.name = operationplan.item_id
              union
              select id from operationplan
              where owner_id in (
                select id from operationplan parent_opplan
                inner join cluster_keys on cluster_keys.name = parent_opplan.item_id
                )
              )
            ''')
        cursor.execute('''
            delete from out_problem
            where entity = 'demand'
            and owner in (
              select demand.name
              from demand
              inner join cluster_keys on cluster_keys.name = demand.item_id
              )
            ''')
        cursor.execute('''
            delete from out_problem
            where entity = 'material'
            and owner in (select buffer.name from buffer inner join cluster_keys on cluster_keys.name = buffer.item_id)
            ''')
        cursor.execute('''
            delete from operationplanresource
            where operationplan_id in (
              select id from operationplan
              inner join cluster_keys on cluster_keys.name = operationplan.item_id
              union
              select id from operationplan
              where owner_id in (
                select id from operationplan parent_opplan
                inner join cluster_keys on cluster_keys.name = parent_opplan.item_id
                )
              )
            ''')
        # Delete child operationplans of proposed/stock parents first,
        # then the parents themselves.
        cursor.execute('''
            delete from operationplan
            using cluster_keys
            where owner_id in (
              select oplan_parent.id
              from operationplan as oplan_parent
              where (oplan_parent.status='proposed' or oplan_parent.status is null or oplan_parent.type='STCK')
              and oplan_parent.item_id = cluster_keys.name
              )
            ''')
        cursor.execute('''
            delete from operationplan
            using cluster_keys
            where (status='proposed' or status is null or type='STCK')
            and item_id = cluster_keys.name
            ''')
        # TODO next subqueries are not efficient - the exists condition triggers a sequential scan
        cursor.execute(
            "delete from out_constraint where exists (select 1 from forecast inner join cluster_keys on cluster_keys.name = forecast.item_id and out_constraint.demand like forecast.name || ' - %')"
        )
        cursor.execute(
            "delete from out_problem where entity = 'demand' and exists (select 1 from forecast inner join cluster_keys on cluster_keys.name = forecast.item_id and out_problem.owner like forecast.name || ' - %')"
        )
        cursor.execute("truncate table cluster_keys")

        # Second pass: keys are now resource names.
        cursor.executemany(
            "insert into cluster_keys (name) values (%s)",
            [(i.name,) for i in frepple.resources() if i.cluster == self.cluster]
        )
        # NOTE(review): joins resource keys against demand.item_id, as in the
        # original code — looks suspicious, confirm against the data model.
        cursor.execute(
            "delete from out_problem where entity = 'demand' and owner in (select demand.name from demand inner join cluster_keys on cluster_keys.name = demand.item_id)"
        )
        cursor.execute(
            'delete from operationplanresource using cluster_keys where resource_id = cluster_keys.name'
        )
        cursor.execute(
            'delete from out_resourceplan using cluster_keys where resource = cluster_keys.name'
        )
        cursor.execute(
            "delete from out_problem using cluster_keys where entity = 'capacity' and owner = cluster_keys.name"
        )
        cursor.execute('truncate table cluster_keys')

        # Third pass: keys are now operation names.
        cursor.executemany(
            "insert into cluster_keys (name) values (%s)",
            [(i.name,) for i in frepple.operations() if i.cluster == self.cluster]
        )
        cursor.execute(
            "delete from out_problem using cluster_keys where entity = 'operation' and owner = cluster_keys.name"
        )
        cursor.execute(
            "delete from operationplan using cluster_keys where (status='proposed' or status is null) and operationplan.name = cluster_keys.name"
        )
        cursor.execute("drop table cluster_keys")
    if self.verbosity:
        logger.info("Emptied plan tables in %.2f seconds" % (time() - starttime))
def exportResourceplans(self):
    """
    Export the resource utilization plan into the out_resourceplan table.

    Computes a reporting horizon from the loadplans in the plan, collects
    the matching reporting buckets from common_bucketdetail, and bulk-loads
    one row per resource per bucket with the COPY protocol.
    """
    if self.verbosity:
        logger.info("Exporting resourceplans...")
    starttime = time()
    cursor = connections[self.database].cursor()

    # Determine start and end date of the reporting horizon.
    # The start date is 30 days before the start of the earliest loadplan,
    # and the end date 30 days after the end of the latest loadplan, over
    # the exported cluster (or the whole plan for a complete export).
    # If no loadplans exist at all we use the current date +- 1 month.
    startdate = datetime.max
    enddate = datetime.min
    for i in frepple.resources():
        if self.cluster != -1 and self.cluster != i.cluster:
            continue
        for j in i.loadplans:
            if j.startdate < startdate:
                startdate = j.startdate
            if j.enddate > enddate:
                enddate = j.enddate
    if startdate == datetime.max:
        startdate = frepple.settings.current
    if enddate == datetime.min:
        enddate = frepple.settings.current
    startdate = (startdate - timedelta(days=30)).date()
    enddate = (enddate + timedelta(days=30)).date()
    if enddate > date(2030, 12, 30):
        # This is the max frePPLe can represent.
        enddate = date(2030, 12, 30)

    # Build the list of horizon buckets from the lowest-level bucket set.
    cursor.execute(
        """
        select startdate
        from common_bucketdetail
        where startdate between %s and %s
        and bucket_id = (select name from common_bucket order by level desc limit 1)
        """,
        (startdate, enddate),
    )
    buckets = [rec[0] for rec in cursor.fetchall()]

    # Loop over all reporting buckets of all resources, writing one
    # tab-separated line per bucket, then bulk-load with COPY.
    with tempfile.TemporaryFile(mode="w+t", encoding="utf-8") as tmp:
        for i in frepple.resources():
            # Bug fix: in a partial export only the resources of the
            # exported cluster may be written — the truncate step only
            # deleted those rows, so exporting the rest would collide
            # with data still present in out_resourceplan.
            if self.cluster != -1 and self.cluster != i.cluster:
                continue
            for j in i.plan(buckets):
                # NOTE(review): COPY text format treats tab, newline and
                # backslash specially — assumes resource names contain
                # none of these; confirm upstream validation.
                print(
                    "%s\t%s\t%s\t%s\t%s\t%s\t%s"
                    % (
                        i.name,
                        str(j["start"]),
                        round(j["available"], 8),
                        round(j["unavailable"], 8),
                        round(j["setup"], 8),
                        round(j["load"], 8),
                        round(j["free"], 8),
                    ),
                    file=tmp,
                )
        tmp.seek(0)
        cursor.copy_from(
            tmp,
            "out_resourceplan",
            columns=(
                "resource",
                "startdate",
                "available",
                "unavailable",
                "setup",
                "load",
                "free",
            ),
        )
        # Redundant tmp.close() removed: the context manager closes the file.
    # update owner records with sum of children quantities
    if self.verbosity:
        logger.info("Exported resourceplans in %.2f seconds" % (time() - starttime))
def truncate(self, process):
    # Purpose: empty the plan output tables by streaming SQL statements into
    # the stdin of an external database client process. The pipe expects
    # bytes, so every statement is encoded with self.encoding; literal
    # values are quoted with psycopg2's adapt() since parameterized
    # execution is not available over a raw pipe.
    if self.verbosity:
        print("Emptying database plan tables...")
    starttime = time()
    if self.cluster == -1:
        # Complete export for the complete model
        process.stdin.write(
            "truncate table out_problem, out_resourceplan, out_constraint;\n"
            .encode(self.encoding))
        # Delete detail rows first to respect the FK to operationplan.
        process.stdin.write('''
            delete from operationplanmaterial
            using operationplan
            where operationplanmaterial.operationplan_id = operationplan.id
            and ((operationplan.status='proposed' or operationplan.status is null)
              or operationplan.type = 'STCK'
              or operationplanmaterial.status = 'proposed');\n
            '''.encode(self.encoding))
        process.stdin.write('''
            delete from operationplanresource
            using operationplan
            where operationplanresource.operationplan_id = operationplan.id
            and ((operationplan.status='proposed' or operationplan.status is null)
              or operationplan.type = 'STCK'
              or operationplanresource.status = 'proposed');\n
            '''.encode(self.encoding))
        process.stdin.write('''
            delete from operationplan
            where (status='proposed' or status is null) or type = 'STCK';\n
            '''.encode(self.encoding))
    else:
        # Partial export for a single cluster: collect key names in a
        # temporary table, reused for items, resources and operations in turn.
        process.stdin.write(
            'create temporary table cluster_keys (name character varying(300), constraint cluster_key_pkey primary key (name));\n'
            .encode(self.encoding))
        # First pass: keys are the names of the cluster's items.
        for i in frepple.items():
            if i.cluster == self.cluster:
                process.stdin.write(
                    ("insert into cluster_keys (name) values (%s);\n"
                     % adapt(i.name).getquoted().decode(
                         self.encoding)).encode(self.encoding))
        process.stdin.write(
            "delete from out_constraint where demand in (select demand.name from demand inner join cluster_keys on cluster_keys.name = demand.item_id);\n"
            .encode(self.encoding))
        process.stdin.write('''
            delete from operationplanmaterial
            where buffer in (select buffer.name from buffer inner join cluster_keys on cluster_keys.name = buffer.item_id);\n
            '''.encode(self.encoding))
        process.stdin.write('''
            delete from out_problem
            where entity = 'demand'
            and owner in (
              select demand.name
              from demand
              inner join cluster_keys on cluster_keys.name = demand.item_id
              );\n
            '''.encode(self.encoding))
        process.stdin.write('''
            delete from out_problem
            where entity = 'material'
            and owner in (select buffer.name from buffer inner join cluster_keys on cluster_keys.name = buffer.item_id);\n
            '''.encode(self.encoding))
        process.stdin.write('''
            delete from operationplan
            using cluster_keys
            where (status='proposed' or status is null or type='STCK')
            and item_id = cluster_keys.name;\n
            '''.encode(self.encoding))
        process.stdin.write("truncate table cluster_keys;\n".encode(
            self.encoding))
        # Second pass: keys are the names of the cluster's resources.
        for i in frepple.resources():
            if i.cluster == self.cluster:
                process.stdin.write(
                    ("insert into cluster_keys (name) values (%s);\n"
                     % adapt(i.name).getquoted().decode(
                         self.encoding)).encode(self.encoding))
        # NOTE(review): joins resource keys against demand.item_id —
        # looks suspicious, confirm against the data model.
        process.stdin.write(
            "delete from out_problem where entity = 'demand' and owner in (select demand.name from demand inner join cluster_keys on cluster_keys.name = demand.item_id);\n"
            .encode(self.encoding))
        process.stdin.write(
            'delete from operationplanresource using cluster_keys where resource = cluster_keys.name;\n'
            .encode(self.encoding))
        process.stdin.write(
            'delete from out_resourceplan using cluster_keys where resource = cluster_keys.name;\n'
            .encode(self.encoding))
        process.stdin.write(
            "delete from out_problem using cluster_keys where entity = 'capacity' and owner = cluster_keys.name;\n"
            .encode(self.encoding))
        process.stdin.write('truncate table cluster_keys;\n'.encode(
            self.encoding))
        # Third pass: keys are the names of the cluster's operations.
        for i in frepple.operations():
            if i.cluster == self.cluster:
                process.stdin.write(
                    ("insert into cluster_keys (name) values (%s);\n"
                     % adapt(i.name).getquoted().decode(
                         self.encoding)).encode(self.encoding))
        process.stdin.write(
            "delete from out_problem using cluster_keys where entity = 'operation' and owner = cluster_keys.name;\n"
            .encode(self.encoding))
        process.stdin.write(
            "delete from operationplan using cluster_keys where (status='proposed' or status is null) and operationplan.operation_id = cluster_keys.name;\n"
            .encode(self.encoding))  # TODO not correct in new data model
        process.stdin.write("drop table cluster_keys;\n".encode(
            self.encoding))
    if self.verbosity:
        print("Emptied plan tables in %.2f seconds" % (time() - starttime))