def run(cls, cluster=-1, database=DEFAULT_DB_ALIAS, timestamp=None, **kwargs):
    """Export material flow plans into the operationplanmaterial table.

    The rows produced by cls.getData are streamed straight into the
    database with a PostgreSQL COPY, using "\v" as the column separator.

    :param cluster: restrict the export to a single cluster (-1 = all)
    :param database: Django database alias to write to
    :param timestamp: export timestamp; defaults to cls.parent.timestamp
    """
    # Fix: dropped the dead "updates" list — it was never appended to,
    # so the trailing 'if len(updates) > 0: cursor.execute(...)' could
    # never run.
    cursor = connections[database].cursor()
    cursor.copy_from(
        CopyFromGenerator(
            cls.getData(
                timestamp=timestamp or cls.parent.timestamp,
                cluster=cluster,
                **kwargs,
            )
        ),
        "operationplanmaterial",
        columns=(
            "operationplan_id",
            "item_id",
            "location_id",
            "quantity",
            "flowdate",
            "onhand",
            "minimum",
            "periodofcover",
            "status",
            "lastmodified",
        ),
        size=1024,
        sep="\v",
    )
def run(cls, cluster=-1, database=DEFAULT_DB_ALIAS, timestamp=None, resources=None, **kwargs):
    """Bulk-load resource load plans into the operationplanresource table.

    :param cluster: restrict the export to a single cluster (-1 = all)
    :param database: Django database alias to write to
    :param timestamp: export timestamp; defaults to cls.parent.timestamp
    :param resources: optional subset of resources to export
    """
    # Build the row generator first, then stream it with a COPY.
    rows = cls.getData(
        timestamp=timestamp or cls.parent.timestamp,
        cluster=cluster,
        resources=resources,
        **kwargs,
    )
    cursor = connections[database].cursor()
    cursor.copy_from(
        CopyFromGenerator(rows),
        "operationplanresource",
        columns=(
            "operationplan_id",
            "resource_id",
            "quantity",
            "startdate",
            "enddate",
            "setup",
            "status",
            "lastmodified",
        ),
        size=1024,
        sep="\v",
    )
def run(cls, cluster=-1, database=DEFAULT_DB_ALIAS, **kwargs):
    """Bulk-load planning problems into the out_problem table.

    :param cluster: restrict the export to a single cluster (-1 = all)
    :param database: Django database alias to write to
    """
    generator = CopyFromGenerator(cls.getData(cluster))
    cursor = connections[database].cursor()
    cursor.copy_from(
        generator,
        "out_problem",
        columns=(
            "entity",
            "name",
            "owner",
            "description",
            "startdate",
            "enddate",
            "weight",
        ),
        size=1024,
        sep="\v",
    )
def run(cls, cluster=-1, database=DEFAULT_DB_ALIAS, **kwargs):
    """Export material flow plans and rebuild the cumulative onhand.

    First COPY the rows from cls.getData into operationplanmaterial,
    then recompute the running onhand per item+location for all items
    that are not "make to order" (MTO items keep per-batch inventory,
    so they are excluded from the aggregation).

    :param cluster: restrict the export to a single cluster (-1 = all)
    :param database: Django database alias to write to
    """
    # Fix: dropped the dead "updates" list — it was never appended to,
    # so the 'if len(updates) > 0: cursor.execute(...)' branch was
    # unreachable.
    cursor = connections[database].cursor()
    cursor.copy_from(
        CopyFromGenerator(
            cls.getData(timestamp=cls.parent.timestamp, cluster=cluster)
        ),
        "operationplanmaterial",
        columns=(
            "operationplan_id",
            "item_id",
            "location_id",
            "quantity",
            "flowdate",
            "onhand",
            "minimum",
            "periodofcover",
            "status",
            "lastmodified",
        ),
        size=1024,
        sep="\v",
    )
    # correct on hand for non-MTO items
    cursor.execute("""
        with cte as (
            select id,
                   sum(quantity) over(partition by item_id, location_id order by flowdate) as onhand
            from operationplanmaterial
            inner join item on item.name = operationplanmaterial.item_id
            where item.type is distinct from 'make to order'
        )
        update operationplanmaterial
        set onhand = cte.onhand
        from cte
        where operationplanmaterial.id = cte.id
        and operationplanmaterial.onhand != cte.onhand
        """)
def run(cls, cluster=-1, database=DEFAULT_DB_ALIAS, **kwargs):
    """Export the resource plan per reporting bucket to out_resourceplan.

    The reporting horizon runs from roughly one month before the start
    of the earliest loadplan to roughly one month after the end of the
    latest one; with no loadplans at all the horizon is centered on the
    current date.

    :param cluster: restrict the horizon computation to one cluster (-1 = all)
    :param database: Django database alias to write to
    """
    import frepple

    # Set the timestamp for the export tasks in this thread.
    cls.parent.timestamp = datetime.now()

    # Scan all loadplans to find the extremes of the horizon, starting
    # from impossible sentinel values.
    horizon_start = datetime.max
    horizon_end = datetime.min
    cursor = connections[database].cursor()
    for res in frepple.resources():
        if cluster != -1 and cluster != res.cluster:
            continue
        for ldplan in res.loadplans:
            if ldplan.startdate < horizon_start:
                horizon_start = ldplan.startdate
            if ldplan.enddate > horizon_end:
                horizon_end = ldplan.enddate
    # No loadplans found: fall back to the current date.
    if horizon_start == datetime.max:
        horizon_start = frepple.settings.current
    if horizon_end == datetime.min:
        horizon_end = frepple.settings.current
    # Pad by a month on each side and clamp the end date.
    horizon_start = (horizon_start - timedelta(days=30)).date()
    horizon_end = (horizon_end + timedelta(days=30)).date()
    if horizon_end > date(2030, 12, 30):
        # This is the max frePPLe can represent.
        horizon_end = date(2030, 12, 30)

    # Collect the bucket start dates of the most granular bucket
    # (highest level) within the horizon.
    cursor.execute(
        """
        select startdate from common_bucketdetail
        where startdate between %s and %s
        and bucket_id = (select name from common_bucket order by level desc limit 1)
        order by startdate
        """,
        (horizon_start, horizon_end),
    )
    buckets = [rec[0] for rec in cursor.fetchall()]

    def getData():
        # One output record per resource per reporting bucket.
        # NOTE(review): unlike the horizon scan above, this loop does not
        # filter on the cluster argument — confirm that is intentional.
        for res in frepple.resources():
            for bucket in res.plan(buckets):
                yield "%s\v%s\v%s\v%s\v%s\v%s\v%s\n" % (
                    clean_value(res.name),
                    str(bucket["start"]),
                    round(bucket["available"], 8),
                    round(bucket["unavailable"], 8),
                    round(bucket["setup"], 8),
                    round(bucket["load"], 8),
                    round(bucket["free"], 8),
                )

    cursor.copy_from(
        CopyFromGenerator(getData()),
        "out_resourceplan",
        columns=(
            "resource",
            "startdate",
            "available",
            "unavailable",
            "setup",
            "load",
            "free",
        ),
        size=1024,
        sep="\v",
    )
def run(cls, cluster=-1, database=DEFAULT_DB_ALIAS, **kwargs):
    """Merge the engine's operationplans into the operationplan table.

    Strategy: COPY everything into a temporary table, then
    1) update existing rows (matched on reference),
    2) delete confirmed/approved/completed MOs that disappeared from the plan,
    3) insert the rows that don't exist yet,
    4) refresh the demand table's delivery-related fields.

    :param cluster: restrict the export to a single cluster (-1 = all)
    :param database: Django database alias to write to
    """
    # Export operationplans to a temporary table
    cursor = connections[database].cursor()
    # NOTE(review): "plan json" here, while the sibling exporter's temp
    # table uses "plan jsonb" — confirm whether this should be jsonb too.
    cursor.execute("""
        create temporary table tmp_operationplan (
            name character varying(1000),
            type character varying(5) NOT NULL,
            status character varying(20),
            quantity numeric(20,8) NOT NULL,
            startdate timestamp with time zone,
            enddate timestamp with time zone,
            criticality numeric(20,8),
            delay numeric,
            plan json,
            source character varying(300),
            lastmodified timestamp with time zone NOT NULL,
            operation_id character varying(300),
            owner_id character varying(300),
            item_id character varying(300),
            destination_id character varying(300),
            origin_id character varying(300),
            location_id character varying(300),
            supplier_id character varying(300),
            demand_id character varying(300),
            due timestamp with time zone,
            color numeric(20,8),
            reference character varying(300) NOT NULL,
            batch character varying(300)
        )
        """)
    # Stream the plan data into the temp table (no columns argument:
    # the COPY must supply every column in table order).
    cursor.copy_from(
        CopyFromGenerator(
            cls.getData(cls.parent.timestamp, cluster=cluster)),
        table="tmp_operationplan",
        size=1024,
        sep="\v",
    )
    # Merge temp table into the actual table
    # Note: delay is stored as seconds in the temp table and converted
    # to an interval here.
    cursor.execute("""
        update operationplan
        set name=tmp.name, type=tmp.type, status=tmp.status,
            quantity=tmp.quantity, startdate=tmp.startdate, enddate=tmp.enddate,
            criticality=tmp.criticality, delay=tmp.delay * interval '1 second',
            plan=tmp.plan, source=tmp.source, lastmodified=tmp.lastmodified,
            operation_id=tmp.operation_id, owner_id=tmp.owner_id,
            item_id=tmp.item_id, destination_id=tmp.destination_id,
            origin_id=tmp.origin_id, location_id=tmp.location_id,
            supplier_id=tmp.supplier_id, demand_id=tmp.demand_id,
            due=tmp.due, color=tmp.color, batch=tmp.batch
        from tmp_operationplan as tmp
        where operationplan.reference = tmp.reference;
        """)
    # Remove manufacturing orders that no longer exist in the plan.
    cursor.execute("""
        delete from operationplan
        where status in ('confirmed','approved','completed')
        and type = 'MO'
        and not exists (select 1 from tmp_operationplan where reference = operationplan.reference)
        """)
    # Insert the operationplans that are new in this plan run.
    cursor.execute("""
        insert into operationplan
        (name,type,status,quantity,startdate,enddate,
        criticality,delay,plan,source,lastmodified,
        operation_id,owner_id,
        item_id,destination_id,origin_id,
        location_id,supplier_id,
        demand_id,due,color,reference,batch)
        select name,type,status,quantity,startdate,enddate,
        criticality,delay * interval '1 second',plan,source,lastmodified,
        operation_id,owner_id,
        item_id,destination_id,origin_id,
        location_id,supplier_id,
        demand_id,due,color,reference,batch
        from tmp_operationplan
        where not exists (
            select 1
            from operationplan
            where operationplan.reference = tmp_operationplan.reference
        );
        """)
    # update demand table specific fields
    # Aggregate top-level deliveries (owner_id is null) per demand.
    cursor.execute("""
        with cte as (
            select demand_id, sum(quantity) plannedquantity,
                   max(enddate) deliverydate, max(enddate)-due as delay
            from operationplan
            where demand_id is not null and owner_id is null
            group by demand_id, due
        )
        update demand
        set delay = cte.delay,
            plannedquantity = cte.plannedquantity,
            deliverydate = cte.deliverydate
        from cte
        where cte.demand_id = demand.name
        """)
    # Clear the fields for demands without any delivery left.
    cursor.execute("""
        update demand
        set delay = null, plannedquantity = null, deliverydate = null
        where (delay is not null or plannedquantity is not null or deliverydate is not null)
        and not exists(
            select 1 from operationplan
            where owner_id is null and operationplan.demand_id = demand.name
        )
        """)
    # Open/quoted demands with no deliveries are explicitly unplanned (0).
    cursor.execute("""
        update demand
        set plannedquantity = 0
        where status in ('open','quote') and plannedquantity is null
        """)
def run(cls, cluster=-1, database=DEFAULT_DB_ALIAS, **kwargs):
    """Compute the period-of-cover (in days) for every item.

    The value is taken from the first of three cases that applies:
    1) backlogged demand exceeds the inventory -> 0 days,
    2) normal case: derived from the latest operationplanmaterial record
       before the current date,
    3) no inventory and no backlog -> date of the next consumer,
    falling back to 999 days (the "infinite" cover sentinel).

    :param cluster: restrict the computation to one cluster (-1 = all)
    :param database: Django database alias to write to
    """
    import frepple

    currentdate = frepple.settings.current
    cursor = connections[database].cursor()
    if cluster != -1:
        # Materialize the cluster's item names so the big update below
        # can join against them.
        cursor.execute("""
            create temp table cluster_item_tmp as select name from item where false;
            """)
        cursor.copy_from(
            CopyFromGenerator(cls.getItemsFromCluster(cluster)),
            "cluster_item_tmp",
        )
        cursor.execute("create unique index on cluster_item_tmp (name);")
    # The SQL template is interpolated twice:
    # - three "%s" slots receive the optional cluster join fragment
    #   (Python %-formatting, done immediately below),
    # - five "%%s" placeholders become "%s" after that formatting and are
    #   bound to currentdate by the database driver.
    cursor.execute(
        """
        -- Query assumes there is only 1 location
        -- all quantities are then aggregated
        update item set periodofcover = floor(extract(epoch from
            coalesce(
                -- backlogged demand exceeds the inventory: 0 days of inventory
                (
                    select '0 days'::interval
                    from operationplanmaterial
                    %s
                    inner join operationplan
                        on operationplanmaterial.operationplan_id = operationplan.reference
                    where operationplanmaterial.item_id = item.name
                    and (
                        (operationplanmaterial.quantity < 0
                            and operationplan.type = 'DLVR'
                            and operationplan.due < %%s)
                        or (operationplanmaterial.quantity > 0
                            and operationplan.status = 'closed'
                            and operationplan.type = 'STCK')
                        or (operationplanmaterial.quantity > 0
                            and operationplan.status in ('approved','confirmed','completed')
                            and flowdate <= %%s + interval '1 second')
                    )
                    having sum(operationplanmaterial.quantity) <0
                    limit 1
                ),
                -- Normal case
                (
                    select case
                        when periodofcover = 999 * 24 * 3600 then '999 days'::interval
                        when onhand > 0.00001 then
                            date_trunc('day', least(
                                periodofcover * '1 sec'::interval + flowdate - %%s,
                                '999 days'::interval))
                        else null
                    end
                    from operationplanmaterial
                    %s
                    where flowdate < %%s
                    and operationplanmaterial.item_id = item.name
                    order by flowdate desc, id desc
                    limit 1
                ),
                -- No inventory and no backlog: use the date of next consumer
                (
                    select greatest('0 days'::interval, least(
                        date_trunc('day', justify_interval(flowdate - %%s - coalesce(operationplan.delay, '0 day'::interval))),
                        '999 days'::interval
                    ))
                    from operationplanmaterial
                    %s
                    inner join operationplan
                        on operationplanmaterial.operationplan_id = operationplan.reference
                    where operationplanmaterial.quantity < 0
                    and operationplanmaterial.item_id = item.name
                    order by flowdate asc, id asc
                    limit 1
                ),
                '999 days'::interval
            ))/86400)
        """
        % (
            "inner join cluster_item_tmp on cluster_item_tmp.name = operationplanmaterial.item_id"
            if cluster != -1
            else "",
            "inner join cluster_item_tmp on cluster_item_tmp.name = operationplanmaterial.item_id"
            if cluster != -1
            else "",
            "inner join cluster_item_tmp on cluster_item_tmp.name = operationplanmaterial.item_id"
            if cluster != -1
            else "",
        ),
        # One bind value per %%s placeholder above: 5 in total.
        ((currentdate, ) * 5),
    )
    if cluster != -1:
        cursor.execute("drop table cluster_item_tmp;")
def run(cls, cluster=-1, database=DEFAULT_DB_ALIAS, **kwargs):
    """Merge the engine's operationplans, including custom attributes.

    Like the plain operationplan exporter, but the temporary table gets
    one extra column per custom OperationPlan attribute, and proposed
    records are COPY-ed straight into the operationplan table.

    :param cluster: restrict the export to a single cluster (-1 = all)
    :param database: Django database alias to write to
    """
    # Custom attributes; each attr is a tuple whose [0] is the field name
    # and [2] the type. "forecast" is excluded here — presumably handled
    # elsewhere (TODO confirm).
    cls.attrs = [
        x
        for x in getAttributes(OperationPlan)
        if x[0] != "forecast"
    ]
    # Export operationplans to a temporary table
    cursor = connections[database].cursor()
    sql = """
        create temporary table tmp_operationplan (
            name character varying(1000),
            type character varying(5) NOT NULL,
            status character varying(20),
            quantity numeric(20,8) NOT NULL,
            startdate timestamp with time zone,
            enddate timestamp with time zone,
            criticality numeric(20,8),
            delay numeric,
            plan jsonb,
            source character varying(300),
            lastmodified timestamp with time zone NOT NULL,
            operation_id character varying(300),
            owner_id character varying(300),
            item_id character varying(300),
            destination_id character varying(300),
            origin_id character varying(300),
            location_id character varying(300),
            supplier_id character varying(300),
            demand_id character varying(300),
            due timestamp with time zone,
            color numeric(20,8),
            reference character varying(300) NOT NULL,
            batch character varying(300),
            quantity_completed numeric(20,8)
        """
    # Append a column per custom attribute; the DDL is closed afterwards.
    for attr in cls.attrs:
        if attr[2] == "boolean":
            sql += ", %s boolean" % attr[0]
        elif attr[2] == "duration":
            sql += ", %s interval" % attr[0]
        elif attr[2] == "integer":
            sql += ", %s integer" % attr[0]
        elif attr[2] == "number":
            sql += ", %s numeric(15,6)" % attr[0]
        elif attr[2] == "string":
            sql += ", %s character varying(300)" % attr[0]
        elif attr[2] == "time":
            sql += ", %s time without time zone" % attr[0]
        elif attr[2] == "date":
            sql += ", %s date" % attr[0]
        elif attr[2] == "datetime":
            sql += ", %s timestamp with time zone" % attr[0]
        else:
            raise Exception("Unknown attribute type %s" % attr[2])
    sql += ")"
    cursor.execute(sql)
    # Stream the non-proposed operationplans into the temp table.
    cursor.copy_from(
        CopyFromGenerator(
            cls.getData(
                cls.parent.timestamp,
                cluster=cluster,
                accepted_status=[
                    "confirmed", "approved", "completed", "closed"
                ],
            )),
        table="tmp_operationplan",
        size=1024,
        sep="\v",
    )
    # Extra forecast-related SQL fragments; empty in this edition
    # (presumably filled in by the forecast-enabled variant — confirm).
    forecastfield0 = ""
    forecastfield1 = ""
    # Merge temp table into the actual table
    sql = ("""
        update operationplan
        set name=tmp.name, type=tmp.type, status=tmp.status,
            quantity=tmp.quantity, startdate=tmp.startdate, enddate=tmp.enddate,
            criticality=tmp.criticality, delay=tmp.delay * interval '1 second',
            plan=tmp.plan, source=tmp.source, lastmodified=tmp.lastmodified,
            operation_id=tmp.operation_id, owner_id=tmp.owner_id,
            item_id=tmp.item_id, destination_id=tmp.destination_id,
            origin_id=tmp.origin_id, location_id=tmp.location_id,
            supplier_id=tmp.supplier_id, demand_id=tmp.demand_id,
            due=tmp.due,%s
            color=tmp.color, batch=tmp.batch,
            quantity_completed=tmp.quantity_completed
        """ % forecastfield0)
    # Also update every custom attribute column.
    for a in cls.attrs:
        sql += ", %s=tmp.%s" % (a[0], a[0])
    sql += """
        from tmp_operationplan as tmp
        where operationplan.reference = tmp.reference
        """
    cursor.execute(sql)
    # Make sure any deleted confirmed MO from Plan Editor gets deleted in the database
    # Only MO can currently be deleted through Plan Editor
    cursor.execute("""
        delete from operationplan
        where status in ('confirmed','approved','completed','closed')
        and type = 'MO'
        and not exists (select 1 from tmp_operationplan where reference = operationplan.reference)
        """)
    # Insert the rows that are new in this plan run.
    # NOTE(review): unlike the update above and the COPY below, this
    # insert does not carry the custom attribute columns — confirm that
    # is intentional.
    cursor.execute("""
        insert into operationplan
        (name,type,status,quantity,startdate,enddate,
        criticality,delay,plan,source,lastmodified,
        operation_id,owner_id,
        item_id,destination_id,origin_id,
        location_id,supplier_id,
        demand_id,due,color,reference,batch,quantity_completed%s)
        select name,type,status,quantity,startdate,enddate,
        criticality,delay * interval '1 second',plan,source,lastmodified,
        operation_id,owner_id,
        item_id,destination_id,origin_id,
        location_id,supplier_id,
        demand_id,due,color,reference,batch,quantity_completed%s
        from tmp_operationplan
        where not exists (
            select 1
            from operationplan
            where operationplan.reference = tmp_operationplan.reference
        );
        """ % (forecastfield1, forecastfield1))
    # directly injecting proposed records in operationplan table
    cursor.copy_from(
        CopyFromGenerator(
            cls.getData(
                cls.parent.timestamp,
                cluster=cluster,
                accepted_status=["proposed"],
            )),
        table="operationplan",
        size=1024,
        sep="\v",
        columns=[
            "name",
            "type",
            "status",
            "quantity",
            "startdate",
            "enddate",
            "criticality",
            "delay",
            "plan",
            "source",
            "lastmodified",
            "operation_id",
            "owner_id",
            "item_id",
            "destination_id",
            "origin_id",
            "location_id",
            "supplier_id",
            "demand_id",
            "due",
            "color",
            "reference",
            "batch",
            "quantity_completed",
        ]
        + [a[0] for a in cls.attrs],
    )
    # update demand table specific fields
    # Aggregate top-level deliveries (owner_id is null) per demand.
    cursor.execute("""
        with cte as (
            select demand_id, sum(quantity) plannedquantity,
                   max(enddate) deliverydate, max(enddate)-due as delay
            from operationplan
            where demand_id is not null and owner_id is null
            group by demand_id, due
        )
        update demand
        set delay = cte.delay,
            plannedquantity = cte.plannedquantity,
            deliverydate = cte.deliverydate
        from cte
        where cte.demand_id = demand.name
        """)
    # Clear the fields for demands without any delivery left.
    cursor.execute("""
        update demand
        set delay = null, plannedquantity = null, deliverydate = null
        where (delay is not null or plannedquantity is not null or deliverydate is not null)
        and not exists(
            select 1 from operationplan
            where owner_id is null and operationplan.demand_id = demand.name
        )
        """)
    # Open/quoted demands with no deliveries are explicitly unplanned (0).
    cursor.execute("""
        update demand
        set plannedquantity = 0
        where status in ('open','quote') and plannedquantity is null
        """)