def handle(self, *args, **options):
    """
    Entry point of the management command.

    Picks up or creates a Task record in the frePPLe database, runs the
    actual business logic, and records success or failure on the task.

    Raises CommandError when the database alias or the task identifier
    passed with --task is invalid.
    """
    # Resolve the target database alias; fall back to the default one.
    if "database" in options:
        database = options["database"] or DEFAULT_DB_ALIAS
    else:
        database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)

    task = None
    now = datetime.now()
    try:
        # Initialize the task: either pick up an existing "Waiting" task
        # record (when launched from the web UI) or create a fresh one.
        if options.get("task"):
            try:
                task = Task.objects.all().using(database).get(pk=options["task"])
            except Exception:
                raise CommandError("Task identifier not found")
            if (task.started or task.finished or task.status != "Waiting"
                    or task.name != "my_command"):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
        else:
            task = Task(name="my_command", submitted=now, started=now, status="0%")
        task.save(using=database)

        # Here goes the real business logic
        print("This command was called with argument %s" % options["my_arg"])

        # The task has finished successfully
        task.message = "My task message"
        task.processid = None
        task.status = "Done"
        task.finished = datetime.now()
        task.save(using=database)
    except Exception as e:
        # The task failed: re-read the record (the run may have updated it
        # in the database) and mark it failed before propagating the error.
        if task:
            task = Task.objects.all().using(database).get(pk=task.id)
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
            task.processid = None
            task.save(using=database)
        # Bare raise preserves the original traceback (unlike "raise e").
        raise
def handle(self, **options):
    """
    Erase the contents of the frePPLe database.

    Without the --models option all erasable django tables are flushed.
    With --models only the tables of the listed models ("app.model"
    format, comma-separated) are erased.

    Raises CommandError for an unknown database, user, task identifier
    or model name.
    """
    # Pick up options
    if 'database' in options:
        database = options['database'] or DEFAULT_DB_ALIAS
    else:
        database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if 'user' in options and options['user']:
        try:
            user = User.objects.all().using(database).get(username=options['user'])
        except Exception:
            # Narrowed from a bare "except:" so SystemExit/KeyboardInterrupt
            # aren't swallowed.
            raise CommandError("User '%s' not found" % options['user'])
    else:
        user = None
    if 'models' in options and options['models']:
        models = options['models'].split(',')
    else:
        models = None

    now = datetime.now()
    task = None
    try:
        # Initialize the task
        if 'task' in options and options['task']:
            try:
                task = Task.objects.all().using(database).get(pk=options['task'])
            except Exception:
                raise CommandError("Task identifier not found")
            if task.started or task.finished or task.status != "Waiting" or task.name != 'empty database':
                raise CommandError("Invalid task identifier")
            task.status = '0%'
            task.started = now
        else:
            task = Task(name='empty database', submitted=now, started=now, status='0%', user=user)
        task.save(using=database)

        # Create a database connection
        cursor = connections[database].cursor()

        # Get a list of all django tables in the database
        tables = set(connections[database].introspection.django_table_names(only_existing=True))

        # Validate the user list of tables
        if models:
            models2tables = set()
            for m in models:
                try:
                    app_label, model_name = m.split('.', 1)
                    mdl = apps.get_model(app_label, model_name)
                except Exception:
                    raise CommandError("Invalid model to erase: %s" % m)
                if mdl in EXCLUDE_FROM_BULK_OPERATIONS:
                    continue
                tbl = mdl._meta.db_table
                if tbl not in tables:
                    # Model exists but has no table in this database.
                    # (Previously signalled with a bare "raise" that produced
                    # a RuntimeError only to be re-wrapped below.)
                    raise CommandError("Invalid model to erase: %s" % m)
                models2tables.add(tbl)
            tables = models2tables
        else:
            for i in EXCLUDE_FROM_BULK_OPERATIONS:
                tables.discard(i._meta.db_table)

        # Some tables need to be handled a bit special
        if "setupmatrix" in tables:
            tables.add("setuprule")
        if 'operationplan' in tables:
            tables.add('operationplanmaterial')
            tables.add('operationplanresource')
    # Never erase authentication, session, logging and scenario bookkeeping.
        tables.discard('auth_group_permissions')
        tables.discard('auth_permission')
        tables.discard('auth_group')
        tables.discard('django_session')
        tables.discard('common_user')
        tables.discard('common_user_groups')
        tables.discard('common_user_user_permissions')
        tables.discard('django_admin_log')
        tables.discard('django_content_type')
        tables.discard('execute_log')
        tables.discard('common_scenario')

        # Delete all records from the tables.
        with transaction.atomic(using=database, savepoint=False):
            if "common_bucket" in tables:
                cursor.execute('update common_user set horizonbuckets = null')
            for stmt in connections[database].ops.sql_flush(no_style(), tables, []):
                cursor.execute(stmt)
            if models:
                # Operationplans of a given type are erased together with
                # their material and resource records.
                if 'input.purchaseorder' in models:
                    cursor.execute('''
                        delete from operationplanresource
                        where operationplan_id in (
                          select operationplan.id from operationplan where type = 'PO'
                          )
                        ''')
                    cursor.execute('''
                        delete from operationplanmaterial
                        where operationplan_id in (
                          select operationplan.id from operationplan where type = 'PO'
                          )
                        ''')
                    cursor.execute("delete from operationplan where type = 'PO'")
                if 'input.distributionorder' in models:
                    cursor.execute('''
                        delete from operationplanresource
                        where operationplan_id in (
                          select operationplan.id from operationplan where type = 'DO'
                          )
                        ''')
                    cursor.execute('''
                        delete from operationplanmaterial
                        where operationplan_id in (
                          select operationplan.id from operationplan where type = 'DO'
                          )
                        ''')
                    cursor.execute("delete from operationplan where type = 'DO'")
                if 'input.manufacturingorder' in models:
                    cursor.execute('''
                        delete from operationplanmaterial
                        where operationplan_id in (
                          select operationplan.id from operationplan where type = 'MO'
                          )
                        ''')
                    cursor.execute('''
                        delete from operationplanresource
                        where operationplan_id in (
                          select operationplan.id from operationplan where type = 'MO'
                          )
                        ''')
                    cursor.execute("delete from operationplan where type = 'MO'")

        # Task update
        task.status = 'Done'
        task.finished = datetime.now()
        task.save(using=database)
    except Exception as e:
        if task:
            task.status = 'Failed'
            task.message = '%s' % e
            task.finished = datetime.now()
            task.save(using=database)
        raise CommandError('%s' % e)
def handle(self, **options):
    """
    Generate a sample frePPLe model of configurable size.

    The options control the number of end items (cluster), demands per
    item, bill-of-material depth (level), resources, and purchased
    components. Progress is tracked on a Task record. The random seed is
    fixed, so repeated runs with the same options produce the same model.

    Raises CommandError when the database is not empty, or when any of
    the database/user/task/date options is invalid.
    """
    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    # Pick up the options
    verbosity = int(options["verbosity"])
    cluster = int(options["cluster"])
    demand = int(options["demand"])
    forecast_per_item = int(options["forecast_per_item"])
    level = int(options["level"])
    resource = int(options["resource"])
    resource_size = int(options["resource_size"])
    components = int(options["components"])
    components_per = int(options["components_per"])
    if components <= 0:
        # No components at all implies none per operation either.
        components_per = 0
    deliver_lt = int(options["deliver_lt"])
    procure_lt = int(options["procure_lt"])
    if options["currentdate"]:
        currentdate = options["currentdate"]
    else:
        currentdate = datetime.strftime(date.today(), "%Y-%m-%d")
    database = options["database"]
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if options["user"]:
        try:
            user = User.objects.all().using(database).get(username=options["user"])
        except Exception:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        user = None

    random.seed(100)  # Initialize random seed to get reproducible results

    now = datetime.now()
    task = None
    try:
        # Initialize the task
        if options["task"]:
            try:
                task = Task.objects.all().using(database).get(pk=options["task"])
            except Exception:
                raise CommandError("Task identifier not found")
            if (task.started or task.finished or task.status != "Waiting"
                    or task.name not in ("frepple_createmodel", "createmodel")):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
        else:
            task = Task(
                name="createmodel",
                submitted=now,
                started=now,
                status="0%",
                user=user,
            )
        task.arguments = (
            "--cluster=%s --demand=%s --forecast_per_item=%s --level=%s --resource=%s "
            "--resource_size=%s --components=%s --components_per=%s --deliver_lt=%s --procure_lt=%s"
            % (
                cluster,
                demand,
                forecast_per_item,
                level,
                resource,
                resource_size,
                components,
                components_per,
                deliver_lt,
                procure_lt,
            ))
        task.save(using=database)

        # Pick up the startdate
        try:
            startdate = datetime.strptime(currentdate, "%Y-%m-%d")
        except Exception:
            raise CommandError("current date is not matching format YYYY-MM-DD")

        # Check whether the database is empty
        if (Buffer.objects.using(database).count() > 0
                or Item.objects.using(database).count() > 0):
            raise CommandError("Database must be empty before creating a model")

        # Plan start date
        if verbosity > 0:
            print("Updating current date...")
        param = Parameter.objects.using(database).get_or_create(name="currentdate")[0]
        param.value = datetime.strftime(startdate, "%Y-%m-%d %H:%M:%S")
        param.save(using=database)

        # Planning horizon
        # minimum 10 daily buckets, weekly buckets till 40 days after current
        if verbosity > 0:
            print("Updating buckets...")
        management.call_command("createbuckets", user=user, database=database)
        task.status = "2%"
        task.save(using=database)

        # Weeks calendar
        if verbosity > 0:
            print("Creating weeks calendar...")
        with transaction.atomic(using=database):
            weeks = Calendar.objects.using(database).create(name="Weeks", defaultvalue=0)
            for i in (BucketDetail.objects.using(database).filter(bucket="week").all()):
                CalendarBucket(
                    startdate=i.startdate,
                    enddate=i.enddate,
                    value=1,
                    calendar=weeks,
                ).save(using=database)
            task.status = "4%"
            task.save(using=database)

        # Working days calendar
        if verbosity > 0:
            print("Creating working days...")
        with transaction.atomic(using=database):
            workingdays = Calendar.objects.using(database).create(
                name="Working Days", defaultvalue=0)
            minmax = (BucketDetail.objects.using(database).filter(
                bucket="week").aggregate(Min("startdate"), Max("startdate")))
            # One bucket covering the whole weekly horizon, weekends excluded.
            CalendarBucket(
                startdate=minmax["startdate__min"],
                enddate=minmax["startdate__max"],
                value=1,
                calendar=workingdays,
                priority=1,
                saturday=False,
                sunday=False,
            ).save(using=database)
            task.status = "6%"
            task.save(using=database)

        # Parent location
        loc = Location.objects.using(database).create(name="Factory", available=workingdays)

        # Create a random list of categories to choose from
        categories = ["cat A", "cat B", "cat C", "cat D", "cat E", "cat F", "cat G"]

        # Create customers
        if verbosity > 0:
            print("Creating customers...")
        with transaction.atomic(using=database):
            cust = []
            for i in range(100):
                c = Customer.objects.using(database).create(name="Cust %03d" % i)
                cust.append(c)
            task.status = "8%"
            task.save(using=database)

        # Create resources and their calendars
        if verbosity > 0:
            print("Creating resources and calendars...")
        with transaction.atomic(using=database):
            res = []
            for i in range(resource):
                cal = Calendar.objects.using(database).create(
                    name="capacity for res %03d" % i,
                    category="capacity",
                    defaultvalue=0,
                )
                CalendarBucket.objects.using(database).create(
                    startdate=startdate, value=resource_size, calendar=cal)
                r = Resource.objects.using(database).create(
                    name="Res %03d" % i, maximum_calendar=cal, location=loc)
                res.append(r)
            task.status = "10%"
            task.save(using=database)
            # Randomize the order in which resources are assigned below.
            random.shuffle(res)

        # Create the components
        if verbosity > 0:
            print("Creating raw materials...")
        with transaction.atomic(using=database):
            comps = []
            compsupplier = Supplier.objects.using(database).create(name="component supplier")
            for i in range(components):
                it = Item.objects.using(database).create(
                    name="Component %04d" % i,
                    category="Procured",
                    cost=str(round(random.uniform(0, 100))),
                )
                # Normally distributed procurement lead time, clamped positive.
                ld = abs(round(random.normalvariate(procure_lt, procure_lt / 3)))
                Buffer.objects.using(database).create(
                    location=loc,
                    category="Procured",
                    item=it,
                    minimum=20,
                    onhand=str(round(forecast_per_item * random.uniform(1, 3) * ld / 30)),
                )
                ItemSupplier.objects.using(database).create(
                    item=it,
                    location=loc,
                    supplier=compsupplier,
                    leadtime=timedelta(days=ld),
                    sizeminimum=80,
                    sizemultiple=10,
                    priority=1,
                    cost=it.cost,
                )
                comps.append(it)
            task.status = "12%"
            task.save(using=database)

        # Loop over all clusters
        durations = [timedelta(days=i) for i in range(1, 6)]
        progress = 88.0 / cluster
        for i in range(cluster):
            with transaction.atomic(using=database):
                if verbosity > 0:
                    print("Creating supply chain for end item %d..." % i)

                # Item
                it = Item.objects.using(database).create(
                    name="Itm %05d" % i,
                    category=random.choice(categories),
                    cost=str(round(random.uniform(100, 200))),
                )

                # Level 0 buffer
                buf = Buffer.objects.using(database).create(item=it, location=loc, category="00")

                # Demand
                for j in range(demand):
                    Demand.objects.using(database).create(
                        name="Dmd %05d %05d" % (i, j),
                        item=it,
                        location=loc,
                        quantity=int(random.uniform(1, 6)),
                        # Exponential distribution of due dates, with an average of deliver_lt days.
                        due=startdate + timedelta(days=round(
                            random.expovariate(float(1) / deliver_lt / 24)) / 24),
                        # Orders have higher priority than forecast
                        priority=random.choice([1, 2]),
                        customer=random.choice(cust),
                        category=random.choice(categories),
                    )

                # Create upstream operations and buffers
                ops = []
                previtem = it
                for k in range(level):
                    if k == 1 and res:
                        # Create a resource load for operations on level 1
                        oper = Operation.objects.using(database).create(
                            name="Oper %05d L%02d" % (i, k),
                            type="time_per",
                            location=loc,
                            duration_per=timedelta(days=1),
                            sizemultiple=1,
                            item=previtem,
                        )
                        if resource < cluster and i < resource:
                            # When there are more cluster than resources, we try to assure
                            # that each resource is loaded by at least 1 operation.
                            OperationResource.objects.using(database).create(
                                resource=res[i], operation=oper)
                        else:
                            OperationResource.objects.using(database).create(
                                resource=random.choice(res), operation=oper)
                    else:
                        oper = Operation.objects.using(database).create(
                            name="Oper %05d L%02d" % (i, k),
                            duration=random.choice(durations),
                            sizemultiple=1,
                            location=loc,
                            item=previtem,
                        )
                    ops.append(oper)
                    # Some inventory in random buffers
                    if random.uniform(0, 1) > 0.8:
                        buf.onhand = int(random.uniform(5, 20))
                        buf.save(using=database)
                    OperationMaterial.objects.using(database).create(
                        operation=oper, item=previtem, quantity=1, type="end")
                    if k != level - 1:
                        # Consume from the next level in the bill of material
                        it_tmp = Item.objects.using(database).create(
                            name="Itm %05d L%02d" % (i, k + 1),
                            category=random.choice(categories),
                            cost=str(round(random.uniform(100, 200))),
                        )
                        buf = Buffer.objects.using(database).create(
                            item=it_tmp, location=loc, category="%02d" % (k + 1))
                        OperationMaterial.objects.using(database).create(
                            operation=oper, item=it_tmp, quantity=-1)
                        previtem = it_tmp

                # Consume raw materials / components
                c = []
                for j in range(components_per):
                    o = random.choice(ops)
                    b = random.choice(comps)
                    while (o, b) in c:
                        # A flow with the same operation and buffer already exists
                        o = random.choice(ops)
                        b = random.choice(comps)
                    c.append((o, b))
                    OperationMaterial.objects.using(database).create(
                        operation=o,
                        item=b,
                        quantity=random.choice([-1, -1, -1, -2, -3]),
                    )

                # Commit the current cluster
                task.status = "%d%%" % (12 + progress * (i + 1))
                task.save(using=database)

        # Task update
        task.status = "Done"
        task.finished = datetime.now()

    except Exception as e:
        if task:
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
            task.save(using=database)
        raise e

    finally:
        # Persist the final task state and restore the debug setting,
        # whatever the outcome of the run.
        if task:
            task.save(using=database)
        settings.DEBUG = tmp_debug
def handle(self, *args, **options):
    """
    Restore a PostgreSQL dump into the frePPLe database.

    The first positional argument is the name of a dump file located in
    settings.FREPPLE_LOGDIR. The restore is delegated to the psql
    command line tool.

    Raises CommandError for an unknown database, user or task, or when
    the dump file is missing.
    """
    # Pick up the options
    if 'database' in options:
        database = options['database'] or DEFAULT_DB_ALIAS
    else:
        database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if 'user' in options and options['user']:
        try:
            user = User.objects.all().using(database).get(username=options['user'])
        except Exception:
            raise CommandError("User '%s' not found" % options['user'])
    else:
        user = None

    now = datetime.now()
    task = None
    try:
        # Initialize the task
        if 'task' in options and options['task']:
            try:
                task = Task.objects.all().using(database).get(pk=options['task'])
            except Exception:
                raise CommandError("Task identifier not found")
            if task.started or task.finished or task.status != "Waiting" or task.name != 'restore database':
                raise CommandError("Invalid task identifier")
            task.status = '0%'
            task.started = now
        else:
            task = Task(name='restore database', submitted=now, started=now,
                        status='0%', user=user)
        task.arguments = args and args[0] or None
        task.save(using=database)

        # Validate options
        if not args:
            raise CommandError("No dump file specified")
        if not os.path.isfile(os.path.join(settings.FREPPLE_LOGDIR, args[0])):
            raise CommandError("Dump file not found")

        # Run the restore command
        # Commenting the next line is a little more secure, but requires you to create a .pgpass file.
        if settings.DATABASES[database]['PASSWORD']:
            os.environ['PGPASSWORD'] = settings.DATABASES[database]['PASSWORD']
        cmd = ["psql"]
        if settings.DATABASES[database]['USER']:
            cmd.append("--username=%s" % settings.DATABASES[database]['USER'])
        if settings.DATABASES[database]['HOST']:
            cmd.append("--host=%s" % settings.DATABASES[database]['HOST'])
        if settings.DATABASES[database]['PORT']:
            cmd.append("--port=%s " % settings.DATABASES[database]['PORT'])
        cmd.append(settings.DATABASES[database]['NAME'])
        cmd.append('<%s' % os.path.abspath(
            os.path.join(settings.FREPPLE_LOGDIR, args[0])))
        # Join into a single string: with shell=True a *list* argument would
        # hand everything after the first item to the shell itself instead of
        # to psql, and the < input redirection would be lost. The shell is
        # needed to interpret the < character.
        ret = subprocess.call(" ".join(cmd), shell=True)
        if ret:
            raise Exception("Run of psql failed")

        # Task update
        # We need to recreate a new task record, since the previous one is lost during the restoration.
        task = Task(
            name='restore database',
            submitted=task.submitted,
            started=task.started,
            arguments=task.arguments,
            status='Done',
            finished=datetime.now(),
            user=task.user,
        )
    except Exception as e:
        if task:
            task.status = 'Failed'
            task.message = '%s' % e
            task.finished = datetime.now()
        # Bare raise preserves the original traceback.
        raise
    finally:
        # Commit it all, even in case of exceptions
        if task:
            task.save(using=database)
def handle(self, **options):
    """
    Launch the frePPLe planning engine for this database.

    The run is either synchronous (waits for the engine subprocess) or,
    with --background, asynchronous with a polling loop that watches the
    task record until the engine finishes or disappears. Plan parameters
    and file locations are passed to the engine through environment
    variables (FREPPLE_* and PYTHONPATH/PATH).

    Raises CommandError for an invalid database, user or task, and
    re-raises any engine failure after marking the task as Failed.
    """
    # Pick up the options
    now = datetime.now()
    if "database" in options:
        database = options["database"] or DEFAULT_DB_ALIAS
    else:
        database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if "user" in options and options["user"]:
        try:
            user = User.objects.all().using(database).get(username=options["user"])
        except Exception:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        user = None
    timestamp = now.strftime("%Y%m%d%H%M%S")
    # Log file name distinguishes the default database from scenarios.
    if database == DEFAULT_DB_ALIAS:
        logfile = "frepple-%s.log" % timestamp
    else:
        logfile = "frepple_%s-%s.log" % (database, timestamp)

    task = None
    try:
        # Initialize the task
        setattr(_thread_locals, "database", database)
        if "task" in options and options["task"]:
            try:
                task = Task.objects.all().using(database).get(pk=options["task"])
            except Exception:
                raise CommandError("Task identifier not found")
            if (task.started or task.finished or task.status != "Waiting"
                    or task.name not in ("runplan", "odoo_import", "odoo_export")):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
            task.logfile = logfile
        else:
            task = Task(
                name="runplan",
                submitted=now,
                started=now,
                status="0%",
                user=user,
                logfile=logfile,
            )

        # Validate options
        if "constraint" in options:
            constraint = int(options["constraint"])
            if constraint < 0 or constraint > 15:
                raise ValueError("Invalid constraint: %s" % options["constraint"])
        else:
            constraint = 15
        if "plantype" in options:
            plantype = int(options["plantype"])
        else:
            plantype = 1

        # Reset environment variables
        # TODO avoid having to delete the environment variables. Use options directly?
        for label in freppledb.common.commands.PlanTaskRegistry.getLabels():
            if "env" in options:
                # Options specified
                if label[0] in os.environ:
                    del os.environ[label[0]]
            else:
                # No options specified - default to activate them all
                os.environ[label[0]] = "1"

        # Set environment variables
        if options["env"]:
            task.arguments = "--constraint=%d --plantype=%d --env=%s" % (
                constraint,
                plantype,
                options["env"],
            )
            for i in options["env"].split(","):
                j = i.split("=")
                if len(j) == 1:
                    os.environ[j[0]] = "1"
                else:
                    os.environ[j[0]] = j[1]
        else:
            task.arguments = "--constraint=%d --plantype=%d" % (
                constraint,
                plantype,
            )
        if options["background"]:
            task.arguments += " --background"

        # Log task
        # Different from the other tasks the frepple engine will write the processid
        task.save(using=database)

        # Locate commands.py
        cmd = freppledb.common.commands.__file__

        def setlimits():
            # Applied in the child process (preexec_fn) to cap memory and
            # cpu usage of the planning engine on Linux.
            import resource
            if settings.MAXMEMORYSIZE:
                resource.setrlimit(
                    resource.RLIMIT_AS,
                    (
                        settings.MAXMEMORYSIZE * 1024 * 1024,
                        (settings.MAXMEMORYSIZE + 10) * 1024 * 1024,
                    ),
                )
            if settings.MAXCPUTIME:
                resource.setrlimit(
                    resource.RLIMIT_CPU,
                    (settings.MAXCPUTIME, settings.MAXCPUTIME + 5),
                )
            # Limiting the file size is a bit tricky as this limit not only applies to the log
            # file, but also to temp files during the export
            # if settings.MAXTOTALLOGFILESIZE:
            #   resource.setrlimit(
            #     resource.RLIMIT_FSIZE,
            #     (settings.MAXTOTALLOGFILESIZE * 1024 * 1024, (settings.MAXTOTALLOGFILESIZE + 1) * 1024 * 1024)
            #   )

        # Make sure the forecast engine uses the same correct timezone
        os.environ["PGTZ"] = settings.TIME_ZONE

        # Prepare environment
        os.environ["FREPPLE_PLANTYPE"] = str(plantype)
        os.environ["FREPPLE_CONSTRAINT"] = str(constraint)
        os.environ["FREPPLE_TASKID"] = str(task.id)
        os.environ["FREPPLE_DATABASE"] = database
        os.environ["FREPPLE_LOGFILE"] = logfile
        os.environ["FREPPLE_PROCESSNAME"] = settings.DATABASES[database]["NAME"].replace("demo", "")
        os.environ["PATH"] = (settings.FREPPLE_HOME + os.pathsep +
                              os.environ["PATH"] + os.pathsep +
                              settings.FREPPLE_APP)
        if os.path.isfile(os.path.join(settings.FREPPLE_HOME, "libfrepple.so")):
            os.environ["LD_LIBRARY_PATH"] = settings.FREPPLE_HOME
        if "DJANGO_SETTINGS_MODULE" not in os.environ:
            os.environ["DJANGO_SETTINGS_MODULE"] = "freppledb.settings"
        os.environ["PYTHONPATH"] = os.path.normpath(settings.FREPPLE_APP)
        libdir = os.path.join(os.path.normpath(settings.FREPPLE_HOME), "lib")
        if os.path.isdir(libdir):
            # Folders used by the Windows version
            os.environ["PYTHONPATH"] += os.pathsep + libdir
            if os.path.isfile(os.path.join(libdir, "library.zip")):
                os.environ["PYTHONPATH"] += os.pathsep + os.path.join(libdir, "library.zip")

        if options["background"]:
            # Execute as background process on Windows
            if os.name == "nt":
                startupinfo = subprocess.STARTUPINFO()
                startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
                subprocess.Popen(
                    ["frepple", cmd],
                    creationflags=0x08000000,
                    startupinfo=startupinfo,
                )
            else:
                # Execute as background process on Linux
                subprocess.Popen(["frepple", cmd], preexec_fn=setlimits)
        else:
            if os.name == "nt":
                # Execute in foreground on Windows
                startupinfo = subprocess.STARTUPINFO()
                startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
                ret = subprocess.call(["frepple", cmd], startupinfo=startupinfo)
            else:
                # Execute in foreground on Linux
                ret = subprocess.call(["frepple", cmd], preexec_fn=setlimits)
            if ret != 0 and ret != 2:
                # Return code 0 is a successful run
                # Return code is 2 is a run cancelled by a user. That's shown in the status field.
                raise Exception("Failed with exit code %d" % ret)

        if options["background"]:
            # Wait for the background task to be ready
            while True:
                sleep(5)
                t = Task.objects.using(database).get(pk=task.id)
                if t.status in ["100%", "Canceled", "Failed", "Done"]:
                    break
                if not self.process_exists(t.processid):
                    # Engine process died without updating its task record.
                    t.status = "Failed"
                    t.processid = None
                    t.save(update_fields=["processid", "status"], using=database)
                    break
        else:
            # Reread the task from the database and update it
            task = Task.objects.all().using(database).get(pk=task.id)
            task.processid = None
            task.status = "Done"
            task.finished = datetime.now()
            task.save(using=database)
    except Exception as e:
        if task:
            task = Task.objects.all().using(database).get(pk=task.id)
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
            task.processid = None
            task.save(using=database)
        raise e
    finally:
        setattr(_thread_locals, "database", None)
def handle(self, **options):
    '''
    Uploads approved operationplans to the ERP system.

    Connects to both the frePPLe scenario database and the ERP database,
    exports purchase, distribution and manufacturing orders, and tracks
    progress on a Task record.

    Raises CommandError for an unknown database, user or task.
    '''
    # Select the correct frePPLe scenario database
    self.database = options['database']
    if self.database not in settings.DATABASES.keys():
        raise CommandError("No database settings known for '%s'" % self.database)
    self.cursor_frepple = connections[self.database].cursor()

    # FrePPle user running this task
    if options['user']:
        try:
            self.user = User.objects.all().using(self.database).get(username=options['user'])
        except Exception:
            raise CommandError("User '%s' not found" % options['user'])
    else:
        self.user = None

    # FrePPLe task identifier
    if options['task']:
        try:
            self.task = Task.objects.all().using(self.database).get(pk=options['task'])
        except Exception:
            raise CommandError("Task identifier not found")
        if self.task.started or self.task.finished or self.task.status != "Waiting" or self.task.name != 'export to erp':
            raise CommandError("Invalid task identifier")
    else:
        now = datetime.now()
        self.task = Task(name='export to erp', submitted=now, started=now,
                         status='0%', user=self.user)
    self.task.save(using=self.database)

    try:
        # Open database connection
        print("Connecting to the ERP database")
        with getERPconnection() as erp_connection:
            self.cursor_erp = erp_connection.cursor(self.database)
            try:
                self.extractPurchaseOrders()
                self.task.status = '33%'
                self.task.save(using=self.database)

                self.extractDistributionOrders()
                self.task.status = '66%'
                self.task.save(using=self.database)

                self.extractManufacturingOrders()
                self.task.status = '100%'
                self.task.save(using=self.database)

                # Optional extra planning output the ERP might be interested in:
                #  - planned delivery date of sales orders
                #  - safety stock (Enterprise Edition only)
                #  - reorder quantities (Enterprise Edition only)
                #  - forecast (Enterprise Edition only)
                self.task.status = 'Done'
            finally:
                self.cursor_erp.close()
    except Exception as e:
        self.task.status = 'Failed'
        self.task.message = 'Failed: %s' % e
    # Record the completion time on success as well: previously only the
    # failure path set the finished timestamp, so a "Done" task was saved
    # without one.
    self.task.finished = datetime.now()
    self.task.save(using=self.database)
    self.cursor_frepple.close()
def handle(self, **options):
    """
    Load one or more XML data files into the frePPLe database by
    spawning the frepple executable with the loadxml.py script.

    Raises CommandError for an unknown database, user or task, and a
    plain Exception when the spawned process exits with an error code.
    """
    # Pick up the options
    database = options['database']
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if options['user']:
        try:
            user = User.objects.all().using(database).get(username=options['user'])
        except Exception:
            raise CommandError("User '%s' not found" % options['user'])
    else:
        user = None

    now = datetime.now()
    task = None
    try:
        # Initialize the task
        if options['task']:
            try:
                task = Task.objects.all().using(database).get(pk=options['task'])
            except Exception:
                raise CommandError("Task identifier not found")
            if task.started or task.finished or task.status != "Waiting" or task.name not in ('frepple_loadxml', 'loadxml'):
                raise CommandError("Invalid task identifier")
            task.status = '0%'
            task.started = now
        else:
            task = Task(name='loadxml', submitted=now, started=now, status='0%', user=user)
        task.arguments = ' '.join(options['file'])
        task.processid = os.getpid()
        task.save(using=database)

        # Execute
        # TODO: if frePPLe is available as a module, we don't really need to spawn another process.
        os.environ['FREPPLE_HOME'] = settings.FREPPLE_HOME.replace('\\', '\\\\')
        os.environ['FREPPLE_APP'] = settings.FREPPLE_APP
        os.environ['FREPPLE_DATABASE'] = database
        os.environ['PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ['PATH'] + os.pathsep + settings.FREPPLE_APP
        os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
        if 'DJANGO_SETTINGS_MODULE' not in os.environ:
            os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
        if os.path.exists(os.path.join(os.environ['FREPPLE_HOME'], 'python36.zip')):
            # For the py2exe executable
            os.environ['PYTHONPATH'] = os.path.join(
                os.environ['FREPPLE_HOME'],
                'python%d%d.zip' % (sys.version_info[0], sys.version_info[1])
            ) + os.pathsep + os.path.normpath(os.environ['FREPPLE_APP'])
        else:
            # Other executables
            os.environ['PYTHONPATH'] = os.path.normpath(os.environ['FREPPLE_APP'])

        # Build the command line as an argument list. The previous code
        # joined hand-quoted strings and ran them without shell=True, which
        # makes the OS look for an executable named after the whole command
        # line and fails on POSIX. A list needs no quoting and handles file
        # names with spaces correctly.
        cmdline = ['frepple']
        cmdline.extend(options['file'])
        cmdline.append(os.path.join(settings.FREPPLE_APP, 'freppledb', 'execute', 'loadxml.py'))
        proc = subprocess.run(cmdline)
        if proc.returncode:
            raise Exception('Exit code of the batch run is %d' % proc.returncode)

        # Task update
        task.status = 'Done'
        task.finished = datetime.now()
    except Exception as e:
        if task:
            task.status = 'Failed'
            task.message = '%s' % e
            task.finished = datetime.now()
        # Bare raise preserves the original traceback.
        raise
    finally:
        if task:
            task.processid = None
            task.save(using=database)
def wrapTask(request, action):
    """
    Translate a POST request from the execution screen into a Task record
    and kick off a worker process to execute it.

    Known actions (plan run, database flush, fixture load, scenario
    copy/release/promote/update, bucket creation) get dedicated argument
    handling; any other action falls through to a generic wrapper that
    looks up the management command and forwards all posted parameters.

    Returns the created Task, or None when the action was handled
    synchronously (e.g. scenario release/update).
    Raises Exception for non-POST requests, missing permissions or an
    unknown task name.
    """
    # Allow only post
    if request.method != "POST":
        raise Exception("Only post requests allowed")
    # Parse the posted parameters as arguments for an asynchronous task to add to the queue.    TODO MAKE MODULAR WITH SEPERATE TASK CLASS
    worker_database = request.database

    now = datetime.now()
    task = None
    args = request.POST or request.GET

    # A
    if action in ("frepple_run", "runplan"):
        if not request.user.has_perm("auth.generate_plan"):
            raise Exception("Missing execution privileges")
        # Sum the posted constraint flags into a single bitmask value.
        constraint = 0
        for value in args.getlist("constraint"):
            try:
                constraint += int(value)
            except Exception:
                pass
        task = Task(name="runplan", submitted=now, status="Waiting", user=request.user)
        task.arguments = "--constraint=%s --plantype=%s" % (
            constraint,
            args.get("plantype", 1),
        )
        env = []
        for value in args.getlist("env"):
            env.append(value)
        if env:
            task.arguments = "%s --env=%s" % (task.arguments, ",".join(env))
        task.save(using=request.database)
    # C
    elif action in ("frepple_flush", "empty"):
        if not request.user.has_perm("auth.run_db"):
            raise Exception("Missing execution privileges")
        task = Task(name="empty", submitted=now, status="Waiting", user=request.user)
        models = ",".join(args.getlist("models"))
        if models:
            task.arguments = "--models=%s" % (models)
        task.save(using=request.database)
    # D
    elif action == "loaddata":
        if not request.user.has_perm("auth.run_db"):
            raise Exception("Missing execution privileges")
        task = Task(
            name="loaddata",
            submitted=now,
            status="Waiting",
            user=request.user,
            arguments=args["fixture"],
        )
        task.save(using=request.database)

        # Also run the workflow upon loading of manufacturing_demo or distribution_demo
        if args.get("regenerateplan", False) == "true":
            active_modules = "supply"
            task = Task(name="runplan", submitted=now, status="Waiting", user=request.user)
            task.arguments = "--constraint=15 --plantype=1 --env=%s --background" % (
                active_modules,
            )
            task.save(using=request.database)
    # E
    elif action in ("frepple_copy", "scenario_copy"):
        worker_database = DEFAULT_DB_ALIAS
        if "copy" in args:
            if not request.user.has_perm("auth.copy_scenario"):
                raise Exception("Missing execution privileges")
            source = args.get("source", request.database)
            worker_database = source
            destination = args.get("destination", False)
            if destination and destination != DEFAULT_DB_ALIAS:
                force = args.get("force", False)
                arguments = "%s %s" % (source, destination)
                if force:
                    arguments += " --force"
                task = Task(
                    name="scenario_copy",
                    submitted=now,
                    status="Waiting",
                    user=request.user,
                    arguments=arguments,
                )
                task.save(using=source)
        elif "release" in args:
            # Note: release is immediate and synchronous.
            if not request.user.has_perm("auth.release_scenario"):
                raise Exception("Missing execution privileges")
            sc = Scenario.objects.using(DEFAULT_DB_ALIAS).get(name=request.database)
            if sc.status != "Free" and sc.name != DEFAULT_DB_ALIAS:
                sc.status = "Free"
                sc.lastrefresh = now
                sc.save(using=DEFAULT_DB_ALIAS)
        elif "promote" in args:
            if not request.user.has_perm("auth.promote_scenario"):
                raise Exception("Missing execution privileges")
            source = args.get("source", request.database)
            worker_database = source
            destination = args.get("destination", False)
            # Promotion only targets the default (production) database.
            if destination and destination == DEFAULT_DB_ALIAS:
                arguments = "--promote %s %s" % (source, destination)
                task = Task(
                    name="scenario_copy",
                    submitted=now,
                    status="Waiting",
                    user=request.user,
                    arguments=arguments,
                )
                task.save(using=source)
        elif "update" in args:
            # Note: update is immediate and synchronous.
            if not request.user.has_perm("auth.release_scenario"):
                raise Exception("Missing execution privileges")
            sc = Scenario.objects.using(DEFAULT_DB_ALIAS).get(name=request.database)
            sc.description = args.get("description", None)
            sc.save(using=DEFAULT_DB_ALIAS)
        else:
            raise Exception("Invalid scenario task")
    # G
    elif action in ("frepple_createbuckets", "createbuckets"):
        if not request.user.has_perm("auth.run_db"):
            raise Exception("Missing execution privileges")
        task = Task(name="createbuckets", submitted=now, status="Waiting", user=request.user)
        arguments = []
        start = args.get("start", None)
        if start:
            arguments.append("--start=%s" % start)
        end = args.get("end", None)
        if end:
            arguments.append("--end=%s" % end)
        weekstart = args.get("weekstart", None)
        if weekstart:
            arguments.append("--weekstart=%s" % weekstart)
        format_day = args.get("format-day", None)
        if format_day:
            arguments.append('--format-day="%s"' % format_day)
        format_week = args.get("format-week", None)
        if format_week:
            arguments.append('--format-week="%s"' % format_week)
        format_month = args.get("format-month", None)
        if format_month:
            arguments.append('--format-month="%s"' % format_month)
        format_quarter = args.get("format-quarter", None)
        if format_quarter:
            arguments.append('--format-quarter="%s"' % format_quarter)
        format_year = args.get("format-year", None)
        if format_year:
            arguments.append('--format-year="%s"' % format_year)
        if arguments:
            task.arguments = " ".join(arguments)
        task.save(using=request.database)
    else:
        # Generic task wrapper
        # Find the command and verify we have permissions to run it
        command = None
        for commandname, appname in get_commands().items():
            if commandname == action:
                try:
                    c = getattr(
                        import_module("%s.management.commands.%s" % (appname, commandname)),
                        "Command",
                    )
                    if c.index >= 0:
                        if getattr(c, "getHTML", None) and c.getHTML(request):
                            # Command class has getHTML method
                            command = c
                            break
                        else:
                            for p in c.__bases__:
                                # Parent command class has getHTML method
                                if getattr(p, "getHTML", None) and p.getHTML(request):
                                    command = c
                                    break
                            if command:
                                break
                except Exception:
                    pass  # Silently ignore failures
        if not command:
            raise Exception("Invalid task name '%s'" % action)
        # Create a task
        arguments = []
        for arg, val in args.lists():
            if arg != "csrfmiddlewaretoken":
                arguments.append("--%s=%s" % (arg, ",".join(val)))
        task = Task(name=action, submitted=now, status="Waiting", user=request.user)
        if arguments:
            task.arguments = " ".join(arguments)
        task.save(using=request.database)

    # Launch a worker process, making sure it inherits the right
    # environment variables from this parent
    os.environ["FREPPLE_CONFIGDIR"] = settings.FREPPLE_CONFIGDIR
    if task and not checkActive(worker_database):
        if os.path.isfile(os.path.join(settings.FREPPLE_APP, "frepplectl.py")):
            if "python" in sys.executable:
                # Development layout
                Popen([
                    sys.executable,  # Python executable
                    os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
                    "runworker",
                    "--database=%s" % worker_database,
                ])
            else:
                # Deployment on Apache web server
                Popen(
                    [
                        "python",
                        os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
                        "runworker",
                        "--database=%s" % worker_database,
                    ],
                    creationflags=0x08000000,
                )
        elif sys.executable.find("freppleserver.exe") >= 0:
            # Py2exe executable
            Popen(
                [
                    sys.executable.replace("freppleserver.exe", "frepplectl.exe"),  # frepplectl executable
                    "runworker",
                    "--database=%s" % worker_database,
                ],
                creationflags=0x08000000,
            )  # Do not create a console window
        else:
            # Linux standard installation
            Popen(["frepplectl", "runworker", "--database=%s" % worker_database])
    return task
def handle(self, **options):
    """
    Generate the standard time buckets (year, quarter, month, week, day)
    in the common_bucket / common_bucketdetail tables of the selected database.

    Options used: start, end (YYYY-MM-DD strings), weekstart (0-6),
    database, user, task. Raises CommandError on invalid input.
    """
    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    # Pick up the options
    if 'start' in options:
        start = options['start'] or '2011-1-1'
    else:
        start = '2011-1-1'
    if 'end' in options:
        end = options['end'] or '2019-1-1'
    else:
        end = '2019-1-1'
    if 'weekstart' in options:
        weekstart = int(options['weekstart'])
        if weekstart < 0 or weekstart > 6:
            raise CommandError("Invalid weekstart %s" % weekstart)
    else:
        weekstart = 1
    if 'database' in options:
        database = options['database'] or DEFAULT_DB_ALIAS
    else:
        database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if 'user' in options and options['user']:
        try:
            user = User.objects.all().using(database).get(
                username=options['user'])
        # Was a bare "except:": don't swallow SystemExit/KeyboardInterrupt
        except Exception:
            raise CommandError("User '%s' not found" % options['user'])
    else:
        user = None

    now = datetime.now()
    task = None
    try:
        # Initialize the task
        if 'task' in options and options['task']:
            try:
                task = Task.objects.all().using(database).get(
                    pk=options['task'])
            except Exception:
                raise CommandError("Task identifier not found")
            if task.started or task.finished or task.status != "Waiting" or task.name != 'generate buckets':
                raise CommandError("Invalid task identifier")
            task.status = '0%'
            task.started = now
        else:
            task = Task(
                name='generate buckets', submitted=now, started=now,
                status='0%', user=user,
                arguments="--start=%s --end=%s --weekstart=%s" % (start, end, weekstart))
        task.save(using=database)

        # Validate the date arguments
        try:
            curdate = datetime.strptime(start, '%Y-%m-%d')
            enddate = datetime.strptime(end, '%Y-%m-%d')
        except Exception:
            raise CommandError("Date is not matching format YYYY-MM-DD")

        with transaction.atomic(using=database, savepoint=False):
            # Delete previous contents
            connections[database].cursor().execute(
                "delete from common_bucketdetail where bucket_id in ('year','quarter','month','week','day')"
            )
            connections[database].cursor().execute(
                "delete from common_bucket where name in ('year','quarter','month','week','day')"
            )

            # Create buckets
            y = Bucket(name='year', description='Yearly time buckets', level=1)
            q = Bucket(name='quarter', description='Quarterly time buckets', level=2)
            m = Bucket(name='month', description='Monthly time buckets', level=3)
            w = Bucket(name='week', description='Weeky time buckets', level=4)
            d = Bucket(name='day', description='Daily time buckets', level=5)
            y.save(using=database)
            q.save(using=database)
            m.save(using=database)
            w.save(using=database)
            d.save(using=database)

            # Loop over all days in the chosen horizon
            prev_year = None
            prev_quarter = None
            prev_month = None
            prev_week = None
            while curdate < enddate:
                month = int(curdate.strftime("%m"))  # an integer in the range 1 - 12
                quarter = (month - 1) // 3 + 1  # an integer in the range 1 - 4
                year = int(curdate.strftime("%Y"))
                dayofweek = int(curdate.strftime("%w"))  # day of the week, 0 = sunday, 1 = monday, ...
                year_start = datetime(year, 1, 1)
                year_end = datetime(year + 1, 1, 1)
                week_start = curdate - timedelta((dayofweek + 6) % 7 + 1 - weekstart)
                week_end = curdate - timedelta((dayofweek + 6) % 7 - 6 - weekstart)
                # Clip weeks at year boundaries so buckets nest inside years
                if week_start < year_start:
                    week_start = year_start
                if week_end > year_end:
                    week_end = year_end

                # Create buckets, only when we enter a new year/quarter/month/week
                if year != prev_year:
                    prev_year = year
                    BucketDetail(
                        bucket=y,
                        name=str(year),
                        startdate=year_start,
                        enddate=year_end).save(using=database)
                if quarter != prev_quarter:
                    prev_quarter = quarter
                    BucketDetail(
                        bucket=q,
                        name="%02d Q%s" % (year - 2000, quarter),
                        startdate=date(year, quarter * 3 - 2, 1),
                        # The // 4 terms roll the end date over into January of the next year
                        enddate=date(
                            year + quarter // 4,
                            quarter * 3 + 1 - 12 * (quarter // 4),
                            1)).save(using=database)
                if month != prev_month:
                    prev_month = month
                    BucketDetail(
                        bucket=m,
                        name=curdate.strftime("%b %y"),
                        startdate=date(year, month, 1),
                        enddate=date(year + month // 12,
                                     month + 1 - 12 * (month // 12), 1),
                    ).save(using=database)
                if week_start != prev_week:
                    prev_week = week_start
                    BucketDetail(
                        bucket=w,
                        name=curdate.strftime("%y W%W"),
                        startdate=week_start,
                        enddate=week_end,
                    ).save(using=database)
                BucketDetail(
                    bucket=d,
                    name=str(curdate.date()),
                    startdate=curdate,
                    enddate=curdate + timedelta(1),
                ).save(using=database)

                # Next date
                curdate = curdate + timedelta(1)

        # Log success
        task.status = 'Done'
        task.finished = datetime.now()
    except Exception as e:
        if task:
            task.status = 'Failed'
            task.message = '%s' % e
            task.finished = datetime.now()
        # Bare raise preserves the original traceback (was "raise e")
        raise
    finally:
        if task:
            task.save(using=database)
        settings.DEBUG = tmp_debug
def handle(self, **options):
    """
    Release a scenario database: mark it 'Free' in the production database
    and stop its web service if one is running.

    Options used: user, database, task. Raises CommandError for invalid
    user, task, database or scenario status.
    """
    # Pick up the options
    if options["user"]:
        try:
            user = User.objects.all().get(username=options["user"])
        except Exception:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        user = None

    # Synchronize the scenario table with the settings
    Scenario.syncWithSettings()

    now = datetime.now()
    task = None
    database = options["database"]
    if "task" in options and options["task"]:
        try:
            task = Task.objects.all().using(database).get(
                pk=options["task"])
        except Exception:
            raise CommandError("Task identifier not found")
        if (task.started or task.finished or task.status != "Waiting"
                or task.name != "scenario_release"):
            raise CommandError("Invalid task identifier")
        task.status = "0%"
        task.started = now
    else:
        task = Task(
            name="scenario_release",
            submitted=now,
            started=now,
            status="0%",
            user=user,
        )
    task.processid = os.getpid()
    task.save(using=database)

    # Validate the arguments
    try:
        releasedScenario = None
        try:
            releasedScenario = Scenario.objects.using(
                DEFAULT_DB_ALIAS).get(pk=database)
        except Exception:
            raise CommandError(
                "No destination database defined with name '%s'" % database)
        if database == DEFAULT_DB_ALIAS:
            raise CommandError("Production scenario cannot be released.")
        if releasedScenario.status != "In use":
            raise CommandError("Scenario to release is not in use")

        # Update the scenario table, set it free in the production database
        releasedScenario.status = "Free"
        releasedScenario.lastrefresh = datetime.today()
        releasedScenario.save(using=DEFAULT_DB_ALIAS)

        # Killing webservice
        if "freppledb.webservice" in settings.INSTALLED_APPS:
            management.call_command("stopwebservice", force=True,
                                    database=database)

        # Logging message
        task.processid = None
        task.status = "Done"
        task.finished = datetime.now()

        # Update the task in the destination database
        task.message = "Scenario %s released" % (database, )
        task.save(using=database)
    except Exception as e:
        if task:
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
        # NOTE(review): this function only ever sets the status to "Free",
        # never "Busy" — this rollback branch looks copied from the copy
        # command and may be dead code; confirm before removing.
        if releasedScenario and releasedScenario.status == "Busy":
            releasedScenario.status = "Free"
            releasedScenario.save(using=DEFAULT_DB_ALIAS)
        # Bare raise preserves the original traceback (was "raise e")
        raise
    finally:
        if task:
            task.processid = None
            task.save(using=database)
def handle(self, **options):
    """
    Launch the frePPLe planning engine as a subprocess, either in the
    foreground (waiting for the result) or in the background.

    Options used: database, user, task, constraint (0-15 bitmask),
    plantype, env (comma-separated KEY=VALUE list), background.
    Raises CommandError/ValueError for invalid input; re-raises any
    failure after marking the task 'Failed'.
    """
    # Pick up the options
    now = datetime.now()
    if 'database' in options:
        database = options['database'] or DEFAULT_DB_ALIAS
    else:
        database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if 'user' in options and options['user']:
        try:
            user = User.objects.all().using(database).get(
                username=options['user'])
        # Was a bare "except:": don't swallow SystemExit/KeyboardInterrupt
        except Exception:
            raise CommandError("User '%s' not found" % options['user'])
    else:
        user = None

    timestamp = now.strftime("%Y%m%d%H%M%S")
    if database == DEFAULT_DB_ALIAS:
        logfile = 'frepple-%s.log' % timestamp
    else:
        logfile = 'frepple_%s-%s.log' % (database, timestamp)

    task = None
    try:
        # Initialize the task
        if 'task' in options and options['task']:
            try:
                task = Task.objects.all().using(database).get(
                    pk=options['task'])
            except Exception:
                raise CommandError("Task identifier not found")
            if task.started or task.finished or task.status != "Waiting" or task.name != 'frepple_run':
                raise CommandError("Invalid task identifier")
            task.status = '0%'
            task.started = now
            task.logfile = logfile
        else:
            task = Task(name='frepple_run', submitted=now, started=now,
                        status='0%', user=user, logfile=logfile)

        # Validate options
        if 'constraint' in options:
            constraint = int(options['constraint'])
            if constraint < 0 or constraint > 15:
                raise ValueError("Invalid constraint: %s" % options['constraint'])
        else:
            constraint = 15
        if 'plantype' in options:
            plantype = int(options['plantype'])
        else:
            plantype = 1

        # Reset environment variables
        # TODO avoid having to delete the environment variables. Use options directly?
        PlanTaskRegistry.autodiscover()
        for i in PlanTaskRegistry.reg:
            if 'env' in options:
                # Options specified
                if i.label and i.label[0] in os.environ:
                    del os.environ[i.label[0]]
            elif i.label:
                # No options specified - default to activate them all
                os.environ[i.label[0]] = '1'

        # Set environment variables
        if options['env']:
            task.arguments = "--constraint=%d --plantype=%d --env=%s" % (
                constraint, plantype, options['env'])
            for i in options['env'].split(','):
                j = i.split('=')
                if len(j) == 1:
                    os.environ[j[0]] = '1'
                else:
                    os.environ[j[0]] = j[1]
        else:
            task.arguments = "--constraint=%d --plantype=%d" % (constraint,
                                                                plantype)
        if options['background']:
            task.arguments += " --background"

        # Log task
        task.save(using=database)

        # Locate commands.py
        import freppledb.common.commands
        cmd = freppledb.common.commands.__file__

        # Prepare environment for the planning engine subprocess
        os.environ['FREPPLE_PLANTYPE'] = str(plantype)
        os.environ['FREPPLE_CONSTRAINT'] = str(constraint)
        os.environ['FREPPLE_TASKID'] = str(task.id)
        os.environ['FREPPLE_DATABASE'] = database
        os.environ['FREPPLE_LOGFILE'] = logfile
        os.environ['PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ[
            'PATH'] + os.pathsep + settings.FREPPLE_APP
        if os.path.isfile(os.path.join(settings.FREPPLE_HOME, 'libfrepple.so')):
            os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
        if 'DJANGO_SETTINGS_MODULE' not in os.environ:
            os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
        os.environ['PYTHONPATH'] = os.path.normpath(settings.FREPPLE_APP)

        if options['background']:
            # Execute as background process on Windows
            if os.name == 'nt':
                # 0x08000000 = CREATE_NO_WINDOW
                subprocess.Popen(['frepple', cmd], creationflags=0x08000000)
            else:
                # Execute as background process on Linux
                subprocess.Popen(['frepple', cmd])
        else:
            # Execute in foreground
            ret = subprocess.call(['frepple', cmd])
            if ret != 0 and ret != 2:
                # Return code 0 is a successful run
                # Return code is 2 is a run cancelled by a user. That's shown in the status field.
                raise Exception('Failed with exit code %d' % ret)

        # Task update
        task.status = 'Done'
        task.finished = datetime.now()
    except Exception as e:
        if task:
            task.status = 'Failed'
            task.message = '%s' % e
            task.finished = datetime.now()
        # Bare raise preserves the original traceback (was "raise e")
        raise
    finally:
        if task:
            task.save(using=database)
def handle(self, **options):
    """
    Run a rolling-horizon simulation: step through time buckets, and in
    every bucket generate demand, plan, release and receive orders, and
    ship to customers, collecting metrics along the way.

    Options used: database, user, task, horizon, step, verbosity,
    initial (fixture to load), simulator (dotted class path), pause.
    """
    # Pick up the options
    database = options["database"]
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if options["user"]:
        try:
            user = User.objects.all().using(database).get(
                username=options["user"])
        # Was a bare "except:": don't swallow SystemExit/KeyboardInterrupt
        except Exception:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        user = None

    now = datetime.now()
    task = None
    param = None
    try:
        # Initialize the task
        if options["task"]:
            try:
                task = Task.objects.all().using(database).get(
                    pk=options["task"])
            except Exception:
                raise CommandError("Task identifier not found")
            if (task.started or task.finished or task.status != "Waiting"
                    or task.name not in ("frepple_simulation", "simulation")):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
        else:
            task = Task(
                name="simulation",
                submitted=now,
                started=now,
                status="0%",
                user=user,
            )

        # Validate options
        task.arguments = ""
        horizon = int(options["horizon"])
        if horizon < 0:
            raise ValueError("Invalid horizon: %s" % options["horizon"])
        task.arguments += "--horizon=%d" % horizon
        step = int(options["step"])
        if step < 0:
            raise ValueError("Invalid step: %s" % options["step"])
        task.arguments += " --step=%d" % step
        verbosity = int(options["verbosity"])

        # Log task
        task.save(using=database)

        # Load the initial status
        if options.get("initial", None):
            if verbosity > 0:
                print("Erasing simulation database")
            management.call_command("empty", database=database,
                                    verbosity=verbosity)
            if verbosity > 0:
                print("Loading initial data")
            management.call_command(
                "loaddata",
                options.get("initial"),
                database=database,
                verbosity=verbosity,
            )

        # Get current date
        param = (Parameter.objects.all().using(database).get_or_create(
            name="currentdate")[0])
        try:
            curdate = datetime.strptime(param.value, "%Y-%m-%d %H:%M:%S")
        # Fall back to today when the parameter is missing or malformed
        except Exception:
            curdate = datetime.now()
        curdate = curdate.date()

        # Compute how many simulation steps we need
        bckt_list = []
        tmp = 0
        while tmp <= horizon:
            bckt_list.append(curdate + timedelta(days=tmp))
            tmp += step
        bckt_list_len = len(bckt_list)

        # Create the simulator class
        if options.get("simulator", None):
            cls = load_class(options["simulator"])
            simulator = cls(database=database, verbosity=verbosity)
        else:
            simulator = Simulator(database=database, verbosity=verbosity)
        simulator.buckets = 1

        # Loop over all dates in the simulation horizon.
        # Each iteration handles the bucket [strt, nd); the first date only
        # seeds "nd", so buckets are consecutive pairs from bckt_list.
        idx = 0
        strt = None
        nd = None
        for bckt in bckt_list:
            if nd:
                strt = nd
                nd = bckt
            else:
                nd = bckt
                continue

            # Start message
            task.status = "%.0f%%" % (100.0 * idx / bckt_list_len)
            task.message = "Simulating bucket from %s to %s " % (strt, nd)
            task.save(using=database)
            idx += 1
            simulator.buckets += 1
            if verbosity > 0:
                print(
                    "\nStart simulating bucket from %s to %s (%s out of %s)"
                    % (strt, nd, idx, bckt_list_len))

            # Update currentdate parameter
            param.value = strt.strftime("%Y-%m-%d %H:%M:%S")
            param.save(using=database)

            # Initialization of the bucket
            if verbosity > 1:
                print(" Starting the bucket")
            with transaction.atomic(using=database):
                simulator.start_bucket(strt, nd)

            # Generate new demand records
            if verbosity > 1:
                print(" Receive new orders from customers")
            with transaction.atomic(using=database):
                simulator.generate_customer_demand(strt, nd)

            # Generate the constrained plan
            if verbosity > 1:
                print(" Generating plan...")
            management.call_command("runplan", database=database,
                                    env="supply")

            if options["pause"]:
                print(
                    "\nYou can analyze the plan in the bucket in the user interface now..."
                )
                input("\nPress Enter to continue the simulation...\n")

            # Release new purchase orders
            if verbosity > 1:
                print(" Create new purchase orders")
            with transaction.atomic(using=database):
                simulator.create_purchase_orders(strt, nd)

            # Release new manufacturing orders
            if verbosity > 1:
                print(" Create new manufacturing orders")
            with transaction.atomic(using=database):
                simulator.create_manufacturing_orders(strt, nd)

            # Release new distribution orders
            if verbosity > 1:
                print(" Create new distribution orders")
            with transaction.atomic(using=database):
                simulator.create_distribution_orders(strt, nd)

            # Receive open purchase orders
            if verbosity > 1:
                print(" Receive open purchase orders")
            with transaction.atomic(using=database):
                simulator.receive_purchase_orders(strt, nd)

            # Receive open distribution orders
            if verbosity > 1:
                print(" Receive open distribution orders")
            with transaction.atomic(using=database):
                simulator.receive_distribution_orders(strt, nd)

            # Finish open manufacturing orders
            if verbosity > 1:
                print(" Finish open manufacturing orders")
            with transaction.atomic(using=database):
                simulator.finish_manufacturing_orders(strt, nd)

            # Ship demand to customers
            if verbosity > 1:
                print(" Ship orders to customers")
            with transaction.atomic(using=database):
                simulator.ship_customer_demand(strt, nd)

            # Finish of the bucket
            if verbosity > 1:
                print(" Ending the bucket")
            with transaction.atomic(using=database):
                simulator.end_bucket(strt, nd)

        # Report statistics from the simulation.
        # The simulator class collected these results during its run.
        if verbosity > 1:
            print("Displaying final simulation metrics")
        with transaction.atomic(using=database):
            simulator.show_metrics()

        # Task update
        task.status = "Done"
        task.message = "Simulated from %s till %s" % (bckt_list[0],
                                                      bckt_list[-1])
        task.finished = datetime.now()
    except Exception as e:
        if task:
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
        # Bare raise preserves the original traceback (was "raise e")
        raise
    finally:
        # Final task status
        if task:
            task.save(using=database)
def wrapTask(request, action):
    """
    Translate a posted web request into a Task record for the worker queue,
    performing the matching permission check for each action, and spawn a
    worker process for the target database if none is active yet.

    Returns the created Task, or None for actions handled synchronously.
    Raises Exception on non-POST requests, missing privileges or unknown
    actions.
    """
    # Allow only post
    if request.method != 'POST':
        raise Exception('Only post requests allowed')
    # Parse the posted parameters as arguments for an asynchronous task to add to the queue.    TODO MAKE MODULAR WITH SEPERATE TASK CLASS
    worker_database = request.database
    now = datetime.now()
    task = None
    args = request.POST or request.GET

    # A
    if action in ('frepple_run', 'runplan'):
        if not request.user.has_perm('auth.generate_plan'):
            raise Exception('Missing execution privileges')
        constraint = 0
        for value in args.getlist('constraint'):
            try:
                constraint += int(value)
            # Was a bare "except:": only skip values that aren't integers
            except (ValueError, TypeError):
                pass
        task = Task(name='runplan', submitted=now, status='Waiting',
                    user=request.user)
        task.arguments = "--constraint=%s --plantype=%s" % (
            constraint, args.get('plantype', 1))
        env = []
        for value in args.getlist('env'):
            env.append(value)
        if env:
            task.arguments = "%s --env=%s" % (task.arguments, ','.join(env))
        request.session['env'] = env
        task.save(using=request.database)
        # Update the session object
        request.session['plantype'] = args.get('plantype')
        request.session['constraint'] = constraint
    # C
    elif action in ('frepple_flush', 'empty'):
        if not request.user.has_perm('auth.run_db'):
            raise Exception('Missing execution privileges')
        task = Task(name='empty', submitted=now, status='Waiting',
                    user=request.user)
        models = ','.join(args.getlist('models'))
        if models:
            task.arguments = "--models=%s" % (models)
        task.save(using=request.database)
    # D
    elif action == 'loaddata':
        if not request.user.has_perm('auth.run_db'):
            raise Exception('Missing execution privileges')
        task = Task(name='loaddata', submitted=now, status='Waiting',
                    user=request.user, arguments=args['fixture'])
        task.save(using=request.database)
    # E
    elif action in ('frepple_copy', 'scenario_copy'):
        worker_database = DEFAULT_DB_ALIAS
        if 'copy' in args:
            if not request.user.has_perm('auth.copy_scenario'):
                raise Exception('Missing execution privileges')
            source = args.get('source', DEFAULT_DB_ALIAS)
            worker_database = source
            destination = args.getlist('destination')
            force = args.get('force', False)
            for sc in Scenario.objects.all():
                arguments = "%s %s" % (source, sc.name)
                if force:
                    arguments += ' --force'
                if args.get(sc.name, 'off') == 'on' or sc.name in destination:
                    task = Task(name='scenario_copy', submitted=now,
                                status='Waiting', user=request.user,
                                arguments=arguments)
                    task.save(using=source)
        elif 'release' in args:
            # Note: release is immediate and synchronous.
            if not request.user.has_perm('auth.release_scenario'):
                raise Exception('Missing execution privileges')
            for sc in Scenario.objects.all().using(DEFAULT_DB_ALIAS):
                if args.get(sc.name, 'off') == 'on' and sc.status != 'Free':
                    sc.status = 'Free'
                    sc.lastrefresh = now
                    sc.save(using=DEFAULT_DB_ALIAS)
                    if request.database == sc.name:
                        # Erasing the database that is currently selected.
                        request.prefix = ''
        elif 'update' in args:
            # Note: update is immediate and synchronous.
            if not request.user.has_perm('auth.release_scenario'):
                raise Exception('Missing execution privileges')
            for sc in Scenario.objects.all().using(DEFAULT_DB_ALIAS):
                if args.get(sc.name, 'off') == 'on':
                    sc.description = args.get('description', None)
                    sc.save(using=DEFAULT_DB_ALIAS)
        else:
            raise Exception('Invalid scenario task')
    # G
    elif action in ('frepple_createbuckets', 'createbuckets'):
        if not request.user.has_perm('auth.run_db'):
            raise Exception('Missing execution privileges')
        task = Task(name='createbuckets', submitted=now, status='Waiting',
                    user=request.user)
        arguments = []
        start = args.get('start', None)
        if start:
            arguments.append("--start=%s" % start)
        end = args.get('end', None)
        if end:
            arguments.append("--end=%s" % end)
        weekstart = args.get('weekstart', None)
        if weekstart:
            arguments.append("--weekstart=%s" % weekstart)
        if arguments:
            task.arguments = " ".join(arguments)
        task.save(using=request.database)
    else:
        # Generic task wrapper

        # Find the command and verify we have permissions to run it
        command = None
        for commandname, appname in get_commands().items():
            if commandname == action:
                try:
                    c = getattr(
                        import_module('%s.management.commands.%s' %
                                      (appname, commandname)), 'Command')
                    if c.index >= 0:
                        if getattr(c, 'getHTML', None) and c.getHTML(request):
                            # Command class has getHTML method
                            command = c
                            break
                        else:
                            for p in c.__bases__:
                                # Parent command class has getHTML method
                                if getattr(p, 'getHTML', None) and p.getHTML(request):
                                    command = c
                                    break
                            if command:
                                break
                except Exception:
                    pass  # Silently ignore failures
        if not command:
            raise Exception("Invalid task name '%s'" % action)
        # Create a task
        arguments = []
        for arg, val in args.lists():
            if arg != 'csrfmiddlewaretoken':
                arguments.append('--%s=%s' % (arg, ','.join(val)))
        task = Task(name=action, submitted=now, status='Waiting',
                    user=request.user)
        if arguments:
            task.arguments = " ".join(arguments)
        task.save(using=request.database)

    # Launch a worker process, making sure it inherits the right
    # environment variables from this parent
    os.environ['FREPPLE_CONFIGDIR'] = settings.FREPPLE_CONFIGDIR
    if task and not checkActive(worker_database):
        if os.path.isfile(os.path.join(settings.FREPPLE_APP, "frepplectl.py")):
            if "python" in sys.executable:
                # Development layout
                Popen([
                    sys.executable,  # Python executable
                    os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
                    "runworker",
                    "--database=%s" % worker_database
                ])
            else:
                # Deployment on Apache web server
                Popen([
                    "python",
                    os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
                    "runworker",
                    "--database=%s" % worker_database
                ], creationflags=0x08000000)
        elif sys.executable.find('freppleserver.exe') >= 0:
            # Py2exe executable
            Popen(
                [
                    sys.executable.replace(
                        'freppleserver.exe',
                        'frepplectl.exe'),  # frepplectl executable
                    "runworker",
                    "--database=%s" % worker_database
                ],
                creationflags=0x08000000)  # Do not create a console window
        else:
            # Linux standard installation
            Popen(
                ["frepplectl", "runworker",
                 "--database=%s" % worker_database])
    return task
def handle(self, **options):
    """
    Copy (or promote) one scenario database into another with
    pg_dump piped into pg_restore/psql, then update the scenario and
    task bookkeeping tables.

    Options used: source, destination, force, promote, description,
    user, task. Raises CommandError for invalid arguments or scenario
    states; any copy failure marks the destination 'Free' again.
    """
    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    # Pick up options
    force = options["force"]
    promote = options["promote"]
    test = "FREPPLE_TEST" in os.environ
    if options["user"]:
        try:
            user = User.objects.all().get(username=options["user"])
        except Exception:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        user = None

    # Synchronize the scenario table with the settings
    Scenario.syncWithSettings()

    # Initialize the task
    source = options["source"]
    try:
        sourcescenario = Scenario.objects.using(DEFAULT_DB_ALIAS).get(
            pk=source)
    except Exception:
        raise CommandError("No source database defined with name '%s'" %
                           source)
    now = datetime.now()
    task = None
    if "task" in options and options["task"]:
        try:
            task = Task.objects.all().using(source).get(pk=options["task"])
        except Exception:
            raise CommandError("Task identifier not found")
        if (task.started or task.finished or task.status != "Waiting"
                or task.name not in ("frepple_copy", "scenario_copy")):
            raise CommandError("Invalid task identifier")
        task.status = "0%"
        task.started = now
    else:
        task = Task(name="scenario_copy", submitted=now, started=now,
                    status="0%", user=user)
    task.processid = os.getpid()
    task.save(using=source)

    # Validate the arguments
    destination = options["destination"]
    destinationscenario = None
    try:
        task.arguments = "%s %s" % (source, destination)
        if options["description"]:
            # BUGFIX: a separating space was missing before --description,
            # which glued it onto the destination name in task.arguments
            task.arguments += ' --description="%s"' % options[
                "description"].replace('"', '\\"')
        if force:
            task.arguments += " --force"
        task.save(using=source)
        try:
            destinationscenario = Scenario.objects.using(
                DEFAULT_DB_ALIAS).get(pk=destination)
        except Exception:
            raise CommandError(
                "No destination database defined with name '%s'" % destination)
        if source == destination:
            raise CommandError("Can't copy a schema on itself")
        if sourcescenario.status != "In use":
            raise CommandError("Source scenario is not in use")
        if destinationscenario.status != "Free" and not force and not promote:
            raise CommandError("Destination scenario is not free")
        if promote and (destination != DEFAULT_DB_ALIAS
                        or source == DEFAULT_DB_ALIAS):
            raise CommandError(
                "Incorrect source or destination database with promote flag"
            )

        # Logging message - always logging in the default database
        destinationscenario.status = "Busy"
        destinationscenario.save(using=DEFAULT_DB_ALIAS)

        # Copying the data
        # Commenting the next line is a little more secure, but requires you to create a .pgpass file.
        if settings.DATABASES[source]["PASSWORD"]:
            os.environ["PGPASSWORD"] = settings.DATABASES[source][
                "PASSWORD"]
        if os.name == "nt":
            # On windows restoring with pg_restore over a pipe is broken :-(
            cmd = "pg_dump -c -Fp %s%s%s%s%s | psql %s%s%s%s"
        else:
            cmd = "pg_dump -Fc %s%s%s%s%s | pg_restore -n public -Fc -c --if-exists %s%s%s -d %s"
        commandline = cmd % (
            settings.DATABASES[source]["USER"] and
            ("-U %s " % settings.DATABASES[source]["USER"]) or "",
            settings.DATABASES[source]["HOST"] and
            ("-h %s " % settings.DATABASES[source]["HOST"]) or "",
            settings.DATABASES[source]["PORT"] and
            ("-p %s " % settings.DATABASES[source]["PORT"]) or "",
            # When promoting into production, skip the user/permission and
            # preference tables so production accounts stay untouched
            """ -T common_user
            -T common_scenario
            -T auth_group
            -T auth_group_permission
            -T auth_permission
            -T common_user_groups
            -T common_user_user_permissions
            -T common_preferences
            -T reportmanager_report
            """ if destination == DEFAULT_DB_ALIAS else "",
            test and settings.DATABASES[source]["TEST"]["NAME"]
            or settings.DATABASES[source]["NAME"],
            settings.DATABASES[destination]["USER"] and
            ("-U %s " % settings.DATABASES[destination]["USER"]) or "",
            settings.DATABASES[destination]["HOST"] and
            ("-h %s " % settings.DATABASES[destination]["HOST"]) or "",
            settings.DATABASES[destination]["PORT"] and
            ("-p %s " % settings.DATABASES[destination]["PORT"]) or "",
            test and settings.DATABASES[destination]["TEST"]["NAME"]
            or settings.DATABASES[destination]["NAME"],
        )
        # NOTE(review): shell=True with a string built from settings; the
        # values come from the local configuration, not user input, but a
        # list-based invocation would be more robust — confirm before changing.
        with subprocess.Popen(
                commandline,
                shell=True,
                stdout=subprocess.DEVNULL,
                stderr=subprocess.STDOUT,
        ) as p:
            try:
                task.processid = p.pid
                task.save(using=source)
                p.wait()
            except Exception:
                p.kill()
                p.wait()
                # Consider the destination database free again
                destinationscenario.status = "Free"
                destinationscenario.lastrefresh = datetime.today()
                destinationscenario.save(using=DEFAULT_DB_ALIAS)
                raise Exception("Database copy failed")

        # Update the scenario table
        destinationscenario.status = "In use"
        destinationscenario.lastrefresh = datetime.today()
        if options["description"]:
            destinationscenario.description = options["description"]
        destinationscenario.save(using=DEFAULT_DB_ALIAS)

        # Give access to the destination scenario to:
        #  a) the user doing the copy
        #  b) all superusers from the source schema
        # unless it's a promotion
        if destination != DEFAULT_DB_ALIAS:
            User.objects.using(destination).filter(
                is_superuser=True).update(is_active=True)
            User.objects.using(destination).filter(
                is_superuser=False).update(is_active=False)
            if user:
                User.objects.using(destination).filter(
                    username=user.username).update(is_active=True)

        # Logging message
        task.processid = None
        task.status = "Done"
        task.finished = datetime.now()

        # Update the task in the destination database
        task.message = "Scenario %s from %s" % (
            "promoted" if promote else "copied",
            source,
        )
        task.save(using=destination)
        task.message = "Scenario copied to %s" % destination

        # Delete any waiting tasks in the new copy.
        # This is needed for situations where the same source is copied to
        # multiple destinations at the same moment.
        Task.objects.all().using(destination).filter(
            id__gt=task.id).delete()
    except Exception as e:
        if task:
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
        if destinationscenario and destinationscenario.status == "Busy":
            destinationscenario.status = "Free"
            destinationscenario.save(using=DEFAULT_DB_ALIAS)
        # Bare raise preserves the original traceback (was "raise e")
        raise
    finally:
        if task:
            task.processid = None
            task.save(using=source)
        settings.DEBUG = tmp_debug
def handle(self, *args, **options):
    """
    Export the SQL statements configured in self.statements as CSV
    (optionally gzipped) files into the database's upload/export folder,
    logging progress to a dedicated log file and tracking it in a Task.

    Options used: database, user, task. Individual export failures are
    counted and logged; the task ends 'Failed' when any occurred.
    """
    # Pick up the options
    now = datetime.now()
    self.database = options['database']
    if self.database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" %
                           self.database)
    if options['user']:
        try:
            self.user = User.objects.all().using(
                self.database).get(username=options['user'])
        # Was a bare "except:": don't swallow SystemExit/KeyboardInterrupt
        except Exception:
            raise CommandError("User '%s' not found" % options['user'])
    else:
        self.user = None

    timestamp = now.strftime("%Y%m%d%H%M%S")
    if self.database == DEFAULT_DB_ALIAS:
        logfile = 'exporttofolder-%s.log' % timestamp
    else:
        logfile = 'exporttofolder_%s-%s.log' % (self.database, timestamp)

    task = None
    self.logfile = None
    errors = 0
    try:
        # Initialize the task
        if options['task']:
            try:
                task = Task.objects.all().using(
                    self.database).get(pk=options['task'])
            except Exception:
                raise CommandError("Task identifier not found")
            if task.started or task.finished or task.status != "Waiting" or task.name != 'export to folder':
                raise CommandError("Invalid task identifier")
            task.status = '0%'
            task.started = now
            task.logfile = logfile
        else:
            task = Task(name='export to folder', submitted=now, started=now,
                        status='0%', user=self.user, logfile=logfile)
        task.arguments = ' '.join(['"%s"' % i for i in args])
        task.save(using=self.database)

        # Execute
        if os.path.isdir(
                settings.DATABASES[self.database]['FILEUPLOADFOLDER']):

            # Open the logfile
            # The log file remains in the upload folder as different folders can be specified
            # We do not want t create one log file per folder
            if not os.path.isdir(
                    os.path.join(
                        settings.DATABASES[self.database]
                        ['FILEUPLOADFOLDER'], 'export')):
                try:
                    os.makedirs(
                        os.path.join(
                            settings.DATABASES[self.database]
                            ['FILEUPLOADFOLDER'], 'export'))
                except OSError as exception:
                    # A concurrent process may have created it already
                    if exception.errno != errno.EEXIST:
                        raise

            self.logfile = open(
                os.path.join(settings.FREPPLE_LOGDIR, logfile), "a")
            print("%s Started export to folder\n" % datetime.now(),
                  file=self.logfile)

            cursor = connections[self.database].cursor()

            task.status = '0%'
            task.save(using=self.database)

            i = 0
            cnt = len(self.statements)
            for filename, export, sqlquery in self.statements:
                print("%s Started export of %s" % (datetime.now(), filename),
                      file=self.logfile)

                # make sure export folder exists
                exportFolder = os.path.join(
                    settings.DATABASES[self.database]['FILEUPLOADFOLDER'],
                    export)
                if not os.path.isdir(exportFolder):
                    os.makedirs(exportFolder)

                try:
                    if filename.lower().endswith(".gz"):
                        csv_datafile = gzip.open(
                            os.path.join(exportFolder, filename), "w")
                    else:
                        csv_datafile = open(
                            os.path.join(exportFolder, filename), "w")
                    cursor.copy_expert(sqlquery, csv_datafile)
                    csv_datafile.close()
                    i += 1
                # A single failed export is logged and counted but doesn't
                # abort the remaining exports
                except Exception:
                    errors += 1
                    print("%s Failed to export to %s" %
                          (datetime.now(), filename), file=self.logfile)
                    if task:
                        task.message = 'Failed to export %s' % filename

                task.status = str(int(i / cnt * 100)) + '%'
                task.save(using=self.database)

            print("%s Exported %s file(s)\n" %
                  (datetime.now(), cnt - errors), file=self.logfile)

        else:
            errors += 1
            print("%s Failed, folder does not exist" % datetime.now(),
                  file=self.logfile)
            task.message = "Destination folder does not exist"
            task.save(using=self.database)

    except Exception as e:
        if self.logfile:
            print("%s Failed" % datetime.now(), file=self.logfile)
        errors += 1
        if task:
            task.message = 'Failed to export'
        logger.error("Failed to export: %s" % e)

    finally:
        # Any error path increments "errors", so "cnt" is always bound in
        # the success branch below
        if task:
            if not errors:
                task.status = '100%'
                task.message = "Exported %s data files" % (cnt)
            else:
                task.status = 'Failed'
                # task.message = "Exported %s data files, %s failed" % (cnt-errors, errors)
            task.finished = datetime.now()
            task.save(using=self.database)
        if self.logfile:
            print('%s End of export to folder\n' % datetime.now(),
                  file=self.logfile)
            self.logfile.close()
def handle(self, *args, **options):
    """Export data to files in the database's upload folder.

    Newer variant of the export command: progress goes through the module
    logger (with a per-run FileHandler), exports are described by dicts in
    ``self.statements`` (keys: filename, folder, and either report or sql),
    and optional ``self.pre_sql_statements`` / ``self.post_sql_statements``
    run before/after the exports.
    """
    # Pick up the options
    now = datetime.now()
    self.database = options["database"]
    if self.database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % self.database)
    if options["user"]:
        try:
            self.user = (User.objects.all().using(
                self.database).get(username=options["user"]))
        except Exception:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        self.user = None

    # One log file per run, suffixed with the scenario name for non-default
    # databases so parallel scenario runs don't clobber each other.
    timestamp = now.strftime("%Y%m%d%H%M%S")
    if self.database == DEFAULT_DB_ALIAS:
        logfile = "exporttofolder-%s.log" % timestamp
    else:
        logfile = "exporttofolder_%s-%s.log" % (self.database, timestamp)

    # Route the module logger to the run-specific log file.
    # NOTE(review): the handler is never removed after the run — repeated
    # invocations in one process accumulate handlers; confirm intended.
    try:
        handler = logging.FileHandler(os.path.join(settings.FREPPLE_LOGDIR, logfile), encoding="utf-8")
        # handler.setFormatter(logging.Formatter(settings.LOGGING['formatters']['simple']['format']))
        logger.addHandler(handler)
        logger.propagate = False
    except Exception as e:
        print("%s Failed to open logfile %s: %s" % (datetime.now().replace(microsecond=0), logfile, e))

    task = None
    errors = 0
    try:
        # Initialize the task
        setattr(_thread_locals, "database", self.database)
        if options["task"]:
            # Re-use an existing task record, but only if it is still an
            # untouched "Waiting" task of the right command.
            try:
                task = (Task.objects.all().using(
                    self.database).get(pk=options["task"]))
            except Exception:
                raise CommandError("Task identifier not found")
            if (task.started or task.finished or task.status != "Waiting"
                    or task.name not in ("frepple_exporttofolder", "exporttofolder")):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
            task.logfile = logfile
        else:
            task = Task(
                name="exporttofolder",
                submitted=now,
                started=now,
                status="0%",
                user=self.user,
                logfile=logfile,
            )
        task.arguments = " ".join(['"%s"' % i for i in args])
        task.processid = os.getpid()
        task.save(using=self.database)

        # Execute
        if os.path.isdir(
                settings.DATABASES[self.database]["FILEUPLOADFOLDER"]):
            # Make sure the 'export' subfolder exists.
            if not os.path.isdir(
                    os.path.join(
                        settings.DATABASES[self.database]
                        ["FILEUPLOADFOLDER"], "export")):
                try:
                    os.makedirs(
                        os.path.join(
                            settings.DATABASES[self.database]
                            ["FILEUPLOADFOLDER"],
                            "export",
                        ))
                except OSError as exception:
                    # Tolerate a concurrent creation of the same folder
                    if exception.errno != errno.EEXIST:
                        raise

            logger.info("%s Started export to folder" %
                        datetime.now().replace(microsecond=0))

            cursor = connections[self.database].cursor()

            task.status = "0%"
            task.save(using=self.database)

            i = 0
            cnt = len(self.statements)

            # Calling all the pre-sql statements
            idx = 1
            for stmt in self.pre_sql_statements:
                try:
                    logger.info(
                        "%s Executing pre-statement %s"
                        % (datetime.now().replace(microsecond=0), idx))
                    cursor.execute(stmt)
                    if cursor.rowcount > 0:
                        logger.info("%s %s record(s) modified" % (
                            datetime.now().replace(microsecond=0),
                            cursor.rowcount,
                        ))
                except Exception:
                    # A failing pre-statement is logged but doesn't stop the run
                    errors += 1
                    logger.error(
                        "%s An error occurred when executing statement %s"
                        % (datetime.now().replace(microsecond=0), idx))
                idx += 1

            for cfg in self.statements:
                # Validate filename
                filename = cfg.get("filename", None)
                if not filename:
                    raise Exception("Missing filename in export configuration")
                folder = cfg.get("folder", None)
                if not folder:
                    raise Exception(
                        "Missing folder in export configuration for %s"
                        % filename)

                # Report progress
                logger.info(
                    "%s Started export of %s"
                    % (datetime.now().replace(microsecond=0), filename))
                if task:
                    task.message = "Exporting %s" % filename
                    task.save(using=self.database)

                # Make sure export folder exists
                exportFolder = os.path.join(
                    settings.DATABASES[self.database]["FILEUPLOADFOLDER"],
                    folder)
                if not os.path.isdir(exportFolder):
                    os.makedirs(exportFolder)

                try:
                    reportclass = cfg.get("report", None)
                    sql = cfg.get("sql", None)
                    if reportclass:
                        # Export from report class

                        # Create a dummy request
                        factory = RequestFactory()
                        request = factory.get("/dummy/", cfg.get("data", {}))
                        if self.user:
                            request.user = self.user
                        else:
                            request.user = User.objects.all().get(
                                username="******")
                        request.database = self.database
                        request.LANGUAGE_CODE = settings.LANGUAGE_CODE
                        request.prefs = cfg.get("prefs", None)

                        # Initialize the report
                        if hasattr(reportclass, "initialize"):
                            reportclass.initialize(request)
                        # rows/crosses may be plain attributes or callables
                        # taking the request.
                        if hasattr(reportclass, "rows"):
                            if callable(reportclass.rows):
                                request.rows = reportclass.rows(request)
                            else:
                                request.rows = reportclass.rows
                        if hasattr(reportclass, "crosses"):
                            if callable(reportclass.crosses):
                                request.crosses = reportclass.crosses(request)
                            else:
                                request.crosses = reportclass.crosses
                        if reportclass.hasTimeBuckets:
                            reportclass.getBuckets(request)

                        # Write the report file, dispatching on extension
                        datafile = open(
                            os.path.join(exportFolder, filename), "wb")
                        if filename.endswith(".xlsx"):
                            reportclass._generate_spreadsheet_data(
                                request, [request.database], datafile,
                                **cfg.get("data", {}))
                        elif filename.endswith(".csv"):
                            for r in reportclass._generate_csv_data(
                                    request, [request.database],
                                    **cfg.get("data", {})):
                                datafile.write(
                                    r.encode(settings.CSV_CHARSET)
                                    if isinstance(r, str) else r)
                        else:
                            raise Exception(
                                "Unknown output format for %s" % filename)
                    elif sql:
                        # Exporting using SQL
                        if filename.lower().endswith(".gz"):
                            datafile = gzip.open(
                                os.path.join(exportFolder, filename), "w")
                        else:
                            datafile = open(
                                os.path.join(exportFolder, filename), "w")
                        cursor.copy_expert(sql, datafile)
                    else:
                        raise Exception("Unknown export type for %s" % filename)
                    datafile.close()
                    i += 1

                except Exception as e:
                    # One failed export doesn't abort the remaining ones
                    errors += 1
                    logger.error("%s Failed to export to %s: %s" %
                                 (datetime.now().replace(microsecond=0),
                                  filename, e))
                    if task:
                        task.message = "Failed to export %s" % filename

                task.status = str(int(i / cnt * 100)) + "%"
                task.save(using=self.database)

            logger.info(
                "%s Exported %s file(s)"
                % (datetime.now().replace(microsecond=0), cnt - errors))

            # Calling all the post-sql statements
            idx = 1
            for stmt in self.post_sql_statements:
                try:
                    logger.info(
                        "%s Executing post-statement %s"
                        % (datetime.now().replace(microsecond=0), idx))
                    cursor.execute(stmt)
                    if cursor.rowcount > 0:
                        logger.info("%s %s record(s) modified" % (
                            datetime.now().replace(microsecond=0),
                            cursor.rowcount,
                        ))
                except Exception:
                    errors += 1
                    logger.error(
                        "%s An error occured when executing statement %s"
                        % (datetime.now().replace(microsecond=0), idx))
                idx += 1

        else:
            errors += 1
            logger.error("%s Failed, folder does not exist" %
                         datetime.now().replace(microsecond=0))
            task.message = "Destination folder does not exist"
            task.save(using=self.database)

    except Exception as e:
        # Task failure is recorded in the finally block; no re-raise.
        logger.error("%s Failed to export: %s" %
                     (datetime.now().replace(microsecond=0), e))
        errors += 1
        if task:
            task.message = "Failed to export"

    finally:
        logger.info("%s End of export to folder\n" %
                    datetime.now().replace(microsecond=0))
        # When errors == 0 the main branch completed, so cnt is bound here.
        if task:
            if not errors:
                task.status = "100%"
                task.message = "Exported %s data files" % (cnt)
            else:
                task.status = "Failed"
                # task.message = "Exported %s data files, %s failed" % (cnt-errors, errors)
            task.finished = datetime.now()
            task.processid = None
            task.save(using=self.database)
        setattr(_thread_locals, "database", None)
def handle(self, **options):
    """Generate a synthetic demo model in an empty frePPLe database.

    Creates calendars, a location, customers, resources, procured
    components, and per-cluster supply chains (item, buffers, demands,
    operations, resource loads, material flows). The random generator is
    seeded, so repeated runs with the same options produce the same model.
    Progress is tracked in a Task record; the database must be empty.
    """
    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    # Pick up the options (each with a hard-coded default when absent)
    if 'verbosity' in options:
        verbosity = int(options['verbosity'])
    else:
        verbosity = 1
    if 'cluster' in options:
        cluster = int(options['cluster'])
    else:
        cluster = 100
    if 'demand' in options:
        demand = int(options['demand'])
    else:
        demand = 30
    if 'forecast_per_item' in options:
        forecast_per_item = int(options['forecast_per_item'])
    else:
        forecast_per_item = 50
    if 'level' in options:
        level = int(options['level'])
    else:
        level = 5
    if 'resource' in options:
        resource = int(options['resource'])
    else:
        resource = 60
    if 'resource_size' in options:
        resource_size = int(options['resource_size'])
    else:
        resource_size = 5
    if 'components' in options:
        components = int(options['components'])
    else:
        components = 200
    if 'components_per' in options:
        components_per = int(options['components_per'])
    else:
        components_per = 5
    if components == 0:
        # No components at all implies no component consumption per cluster
        components_per = 0
    if 'deliver_lt' in options:
        deliver_lt = int(options['deliver_lt'])
    else:
        deliver_lt = 30
    if 'procure_lt' in options:
        procure_lt = int(options['procure_lt'])
    else:
        procure_lt = 40
    if 'currentdate' in options:
        currentdate = options['currentdate'] or datetime.strftime(
            date.today(), '%Y-%m-%d')
    else:
        currentdate = datetime.strftime(date.today(), '%Y-%m-%d')
    if 'database' in options:
        database = options['database'] or DEFAULT_DB_ALIAS
    else:
        database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if 'user' in options and options['user']:
        try:
            user = User.objects.all().using(database).get(
                username=options['user'])
        except:
            raise CommandError("User '%s' not found" % options['user'])
    else:
        user = None

    random.seed(100)  # Initialize random seed to get reproducible results

    now = datetime.now()
    task = None
    try:
        # Initialize the task
        if 'task' in options and options['task']:
            try:
                task = Task.objects.all().using(database).get(
                    pk=options['task'])
            except:
                raise CommandError("Task identifier not found")
            if task.started or task.finished or task.status != "Waiting" or task.name != 'generate model':
                raise CommandError("Invalid task identifier")
            task.status = '0%'
            task.started = now
        else:
            task = Task(name='generate model', submitted=now, started=now,
                        status='0%', user=user)
        task.arguments = "--cluster=%s --demand=%s --forecast_per_item=%s --level=%s --resource=%s " \
            "--resource_size=%s --components=%s --components_per=%s --deliver_lt=%s --procure_lt=%s" % (
                cluster, demand, forecast_per_item, level, resource,
                resource_size, components, components_per, deliver_lt,
                procure_lt
            )
        task.save(using=database)

        # Pick up the startdate
        try:
            startdate = datetime.strptime(currentdate, '%Y-%m-%d')
        except:
            raise CommandError(
                "current date is not matching format YYYY-MM-DD")

        # Check whether the database is empty
        if Buffer.objects.using(database).count() > 0 or Item.objects.using(database).count() > 0:
            raise CommandError(
                "Database must be empty before creating a model")

        # Plan start date
        if verbosity > 0:
            print("Updating current date...")
        Parameter.objects.using(database).create(name="currentdate",
                                                 value=datetime.strftime(
                                                     startdate,
                                                     "%Y-%m-%d %H:%M:%S"))
        Parameter.objects.using(database).create(name="plan.loglevel",
                                                 value="3")

        # Planning horizon
        # minimum 10 daily buckets, weekly buckets till 40 days after current
        if verbosity > 0:
            print("Updating buckets...")
        management.call_command('frepple_createbuckets', user=user,
                                database=database)
        task.status = '2%'
        task.save(using=database)

        # Weeks calendar: value 1 during every weekly bucket
        if verbosity > 0:
            print("Creating weeks calendar...")
        with transaction.atomic(using=database):
            weeks = Calendar.objects.using(database).create(name="Weeks",
                                                            defaultvalue=0)
            for i in BucketDetail.objects.using(database).filter(
                    bucket="week").all():
                CalendarBucket(startdate=i.startdate, enddate=i.enddate,
                               value=1, calendar=weeks).save(using=database)
            task.status = '4%'
            task.save(using=database)

        # Working days calendar: Monday-Friday over the whole weekly horizon
        if verbosity > 0:
            print("Creating working days...")
        with transaction.atomic(using=database):
            workingdays = Calendar.objects.using(database).create(
                name="Working Days", defaultvalue=0)
            minmax = BucketDetail.objects.using(database).filter(
                bucket="week").aggregate(Min('startdate'), Max('startdate'))
            CalendarBucket(startdate=minmax['startdate__min'],
                           enddate=minmax['startdate__max'], value=1,
                           calendar=workingdays, priority=1, saturday=False,
                           sunday=False).save(using=database)
            task.status = '6%'
            task.save(using=database)

        # Parent location
        loc = Location.objects.using(database).create(
            name="Factory", available=workingdays)

        # Create a random list of categories to choose from
        categories = [
            'cat A', 'cat B', 'cat C', 'cat D', 'cat E', 'cat F', 'cat G'
        ]

        # Create customers
        if verbosity > 0:
            print("Creating customers...")
        with transaction.atomic(using=database):
            cust = []
            for i in range(100):
                c = Customer.objects.using(database).create(
                    name='Cust %03d' % i)
                cust.append(c)
            task.status = '8%'
            task.save(using=database)

        # Create resources and their calendars
        if verbosity > 0:
            print("Creating resources and calendars...")
        with transaction.atomic(using=database):
            res = []
            for i in range(resource):
                cal = Calendar.objects.using(database).create(
                    name='capacity for res %03d' % i, category='capacity',
                    defaultvalue=0)
                CalendarBucket.objects.using(database).create(
                    startdate=startdate, value=resource_size, calendar=cal)
                r = Resource.objects.using(database).create(
                    name='Res %03d' % i, maximum_calendar=cal, location=loc)
                res.append(r)
            task.status = '10%'
            task.save(using=database)
        # Randomize the order in which resources get assigned below
        random.shuffle(res)

        # Create the components
        if verbosity > 0:
            print("Creating raw materials...")
        with transaction.atomic(using=database):
            comps = []
            compsupplier = Supplier.objects.using(database).create(
                name='component supplier')
            for i in range(components):
                it = Item.objects.using(database).create(
                    name='Component %04d' % i,
                    category='Procured',
                    price=str(round(random.uniform(0, 100))))
                # Normally distributed procurement leadtime, clamped positive
                ld = abs(
                    round(random.normalvariate(procure_lt, procure_lt / 3)))
                Buffer.objects.using(database).create(
                    name='%s @ %s' % (it.name, loc.name),
                    location=loc,
                    category='Procured',
                    item=it,
                    minimum=20,
                    onhand=str(
                        round(forecast_per_item * random.uniform(1, 3) * ld /
                              30)),
                )
                ItemSupplier.objects.using(database).create(
                    item=it,
                    location=loc,
                    supplier=compsupplier,
                    leadtime=timedelta(days=ld),
                    sizeminimum=80,
                    sizemultiple=10,
                    priority=1,
                    cost=it.price)
                comps.append(it)
            task.status = '12%'
            task.save(using=database)

        # Loop over all clusters
        durations = [timedelta(days=i) for i in range(1, 6)]
        # Remaining 88% of the progress bar is spread over the clusters
        progress = 88.0 / cluster
        for i in range(cluster):
            with transaction.atomic(using=database):
                if verbosity > 0:
                    print("Creating supply chain for end item %d..." % i)

                # Item
                it = Item.objects.using(database).create(
                    name='Itm %05d' % i,
                    category=random.choice(categories),
                    price=str(round(random.uniform(100, 200))))

                # Level 0 buffer
                buf = Buffer.objects.using(database).create(
                    name='%s @ %s' % (it.name, loc.name),
                    item=it,
                    location=loc,
                    category='00')

                # Demand
                for j in range(demand):
                    Demand.objects.using(database).create(
                        name='Dmd %05d %05d' % (i, j),
                        item=it,
                        location=loc,
                        quantity=int(random.uniform(1, 6)),
                        # Exponential distribution of due dates, with an average of deliver_lt days.
                        due=startdate + timedelta(days=round(
                            random.expovariate(float(1) / deliver_lt / 24)) /
                            24),
                        # Orders have higher priority than forecast
                        priority=random.choice([1, 2]),
                        customer=random.choice(cust),
                        category=random.choice(categories))

                # Create upstream operations and buffers
                ops = []
                previtem = it
                for k in range(level):
                    if k == 1 and res:
                        # Create a resource load for operations on level 1
                        oper = Operation.objects.using(database).create(
                            name='Oper %05d L%02d' % (i, k),
                            type='time_per',
                            location=loc,
                            duration_per=timedelta(days=1),
                            sizemultiple=1,
                            item=previtem)
                        if resource < cluster and i < resource:
                            # When there are more cluster than resources, we try to assure
                            # that each resource is loaded by at least 1 operation.
                            OperationResource.objects.using(database).create(
                                resource=res[i], operation=oper)
                        else:
                            OperationResource.objects.using(database).create(
                                resource=random.choice(res), operation=oper)
                    else:
                        oper = Operation.objects.using(database).create(
                            name='Oper %05d L%02d' % (i, k),
                            duration=random.choice(durations),
                            sizemultiple=1,
                            location=loc,
                            item=previtem)
                    ops.append(oper)
                    # Some inventory in random buffers
                    if random.uniform(0, 1) > 0.8:
                        buf.onhand = int(random.uniform(5, 20))
                        buf.save(using=database)
                    OperationMaterial.objects.using(database).create(
                        operation=oper, item=previtem, quantity=1, type="end")
                    if k != level - 1:
                        # Consume from the next level in the bill of material
                        it_tmp = Item.objects.using(database).create(
                            name='Itm %05d L%02d' % (i, k + 1),
                            category=random.choice(categories),
                            price=str(round(random.uniform(100, 200))))
                        buf = Buffer.objects.using(database).create(
                            name='%s @ %s' % (it_tmp.name, loc.name),
                            item=it_tmp,
                            location=loc,
                            category='%02d' % (k + 1))
                        OperationMaterial.objects.using(database).create(
                            operation=oper, item=it_tmp, quantity=-1)
                        previtem = it_tmp

                # Consume raw materials / components
                c = []
                for j in range(components_per):
                    o = random.choice(ops)
                    b = random.choice(comps)
                    while (o, b) in c:
                        # A flow with the same operation and buffer already exists
                        o = random.choice(ops)
                        b = random.choice(comps)
                    c.append((o, b))
                    OperationMaterial.objects.using(database).create(
                        operation=o,
                        item=b,
                        quantity=random.choice([-1, -1, -1, -2, -3]))

                # Commit the current cluster
                task.status = '%d%%' % (12 + progress * (i + 1))
                task.save(using=database)

        # Task update
        task.status = 'Done'
        task.finished = datetime.now()

    except Exception as e:
        if task:
            task.status = 'Failed'
            task.message = '%s' % e
            task.finished = datetime.now()
            task.save(using=database)
        raise e

    finally:
        # Persist the final task state and restore the DEBUG setting
        if task:
            task.save(using=database)
        settings.DEBUG = tmp_debug
def handle(self, **options):
    """Erase data from the frePPLe database.

    Without --models, empties all application tables (minus a protected
    list of auth/session/user tables). With --models, only the listed
    models are erased; the special input.* order models are additionally
    deleted with hand-written SQL because sql_flush truncates whole
    tables while those share the operationplan table.
    """
    # Pick up options
    database = options['database']
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if options['user']:
        try:
            user = User.objects.all().using(database).get(
                username=options['user'])
        except:
            raise CommandError("User '%s' not found" % options['user'])
    else:
        user = None
    if options['models']:
        models = options['models'].split(',')
    else:
        models = None

    now = datetime.now()
    task = None
    try:
        # Initialize the task
        if options['task']:
            try:
                task = Task.objects.all().using(database).get(
                    pk=options['task'])
            except:
                raise CommandError("Task identifier not found")
            if task.started or task.finished or task.status != "Waiting" or task.name != 'frepple_flush':
                raise CommandError("Invalid task identifier")
            task.status = '0%'
            task.started = now
        else:
            task = Task(name='frepple_flush', submitted=now, started=now,
                        status='0%', user=user)
        task.save(using=database)

        # Create a database connection
        cursor = connections[database].cursor()

        # Get a list of all django tables in the database
        tables = set(
            connections[database].introspection.django_table_names(
                only_existing=True))
        # Content type pks used to filter the django_admin_log cleanup below
        ContentTypekeys = set()
        # Validate the user list of tables
        if models:
            models2tables = set()
            # With an explicit model list we delete admin-log entries FOR
            # those content types (positive filter).
            admin_log_positive = True
            for m in models:
                try:
                    x = m.split('.', 1)
                    x = apps.get_model(x[0], x[1])
                    if x in EXCLUDE_FROM_BULK_OPERATIONS:
                        continue
                    ContentTypekeys.add(
                        ContentType.objects.get_for_model(x).pk)
                    x = x._meta.db_table
                    if x not in tables:
                        # Deliberate bare raise: jumps to the except clause
                        # below which reports the invalid model name.
                        raise
                    models2tables.add(x)
                except Exception as e:
                    raise CommandError("Invalid model to erase: %s" % m)
            tables = models2tables
        else:
            # Erasing everything: clear the whole admin log except the
            # excluded models' entries (negative filter).
            admin_log_positive = False
            tables.discard('django_admin_log')
            for i in EXCLUDE_FROM_BULK_OPERATIONS:
                tables.discard(i._meta.db_table)
                ContentTypekeys.add(ContentType.objects.get_for_model(i).pk)
        # Some tables need to be handled a bit special
        if 'operationplan' in tables:
            tables.add('operationplanmaterial')
            tables.add('operationplanresource')
            tables.add('out_problem')
        if 'resource' in tables and 'out_resourceplan' not in tables:
            tables.add('out_resourceplan')
        if 'demand' in tables and 'out_constraint' not in tables:
            tables.add('out_constraint')
        # Never truncate authentication/session/user/preference tables
        tables.discard('auth_group_permissions')
        tables.discard('auth_permission')
        tables.discard('auth_group')
        tables.discard('django_session')
        tables.discard('common_user')
        tables.discard('common_user_groups')
        tables.discard('common_user_user_permissions')
        tables.discard('common_preference')
        tables.discard('django_content_type')
        tables.discard('execute_log')
        tables.discard('common_scenario')

        # Delete all records from the tables.
        with transaction.atomic(using=database, savepoint=False):
            if ContentTypekeys:
                if admin_log_positive:
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = any(%s)",
                        (list(ContentTypekeys), ))
                else:
                    cursor.execute(
                        "delete from django_admin_log where content_type_id != any(%s)",
                        (list(ContentTypekeys), ))
            if "common_bucket" in tables:
                # Buckets are gone, so user preferences pointing at them
                # must be reset.
                cursor.execute(
                    'update common_user set horizonbuckets = null')
            for stmt in connections[database].ops.sql_flush(
                    no_style(), tables, []):
                cursor.execute(stmt)
            if models:
                # The order models all live in the shared operationplan
                # table; erase them row-wise by type code instead.
                if 'input.purchaseorder' in models:
                    cursor.execute('''
                        delete from operationplanresource
                        where operationplan_id in (
                          select operationplan.id from operationplan
                          where type = 'PO'
                          )
                        ''')
                    cursor.execute('''
                        delete from operationplanmaterial
                        where operationplan_id in (
                          select operationplan.id from operationplan
                          where type = 'PO'
                          )
                        ''')
                    cursor.execute(
                        "delete from operationplan where type = 'PO'")
                    key = ContentType.objects.get_for_model(
                        inputmodels.PurchaseOrder,
                        for_concrete_model=False).pk
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = %s",
                        (key, ))
                if 'input.distributionorder' in models:
                    cursor.execute('''
                        delete from operationplanresource
                        where operationplan_id in (
                          select operationplan.id from operationplan
                          where type = 'DO'
                          )
                        ''')
                    cursor.execute('''
                        delete from operationplanmaterial
                        where operationplan_id in (
                          select operationplan.id from operationplan
                          where type = 'DO'
                          )
                        ''')
                    cursor.execute(
                        "delete from operationplan where type = 'DO'")
                    key = ContentType.objects.get_for_model(
                        inputmodels.DistributionOrder,
                        for_concrete_model=False).pk
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = %s",
                        (key, ))
                if 'input.manufacturingorder' in models:
                    cursor.execute('''
                        delete from operationplanmaterial
                        where operationplan_id in (
                          select operationplan.id from operationplan
                          where type = 'MO'
                          )
                        ''')
                    cursor.execute('''
                        delete from operationplanresource
                        where operationplan_id in (
                          select operationplan.id from operationplan
                          where type = 'MO'
                          )
                        ''')
                    cursor.execute(
                        "delete from operationplan where type = 'MO'")
                    key = ContentType.objects.get_for_model(
                        inputmodels.ManufacturingOrder,
                        for_concrete_model=False).pk
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = %s",
                        (key, ))
                if 'input.deliveryorder' in models:
                    cursor.execute('''
                        delete from operationplanmaterial
                        where operationplan_id in (
                          select operationplan.id from operationplan
                          where type = 'DLVR'
                          )
                        ''')
                    cursor.execute('''
                        delete from operationplanresource
                        where operationplan_id in (
                          select operationplan.id from operationplan
                          where type = 'DLVR'
                          )
                        ''')
                    cursor.execute(
                        "delete from operationplan where type = 'DLVR'")
                    key = ContentType.objects.get_for_model(
                        inputmodels.DeliveryOrder,
                        for_concrete_model=False).pk
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = %s",
                        (key, ))

        # Keep the database in shape
        cursor.execute("vacuum analyze")

        # Task update
        task.status = 'Done'
        task.finished = datetime.now()
        task.save(using=database)

    except Exception as e:
        if task:
            task.status = 'Failed'
            task.message = '%s' % e
            task.finished = datetime.now()
            task.save(using=database)
        raise CommandError('%s' % e)
def handle(self, **options):
    """Load XML data files into frePPLe by spawning the frepple executable.

    Sets up the environment variables the frepple binary expects, then runs
    ``frepple <files...> loadxml.py`` and records the outcome on a Task.

    Fixes versus the previous revision:
    - the py2exe detection hardcoded "python36.zip" while the PYTHONPATH
      below was built from sys.version_info; both now use the same name
    - ``raise e`` replaced by bare ``raise`` to preserve the traceback
    """
    # Pick up the options
    database = options["database"]
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if options["user"]:
        try:
            user = User.objects.all().using(database).get(
                username=options["user"])
        except Exception:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        user = None

    now = datetime.now()
    task = None
    try:
        # Initialize the task
        if options["task"]:
            try:
                task = Task.objects.all().using(database).get(
                    pk=options["task"])
            except Exception:
                raise CommandError("Task identifier not found")
            if (task.started or task.finished or task.status != "Waiting"
                    or task.name not in ("frepple_loadxml", "loadxml")):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
        else:
            task = Task(name="loadxml", submitted=now, started=now,
                        status="0%", user=user)
        task.arguments = " ".join(options["file"])
        task.processid = os.getpid()
        task.save(using=database)

        # Execute
        # TODO: if frePPLe is available as a module, we don't really need to spawn another process.
        os.environ["FREPPLE_HOME"] = settings.FREPPLE_HOME.replace(
            "\\", "\\\\")
        os.environ["FREPPLE_APP"] = settings.FREPPLE_APP
        os.environ["FREPPLE_DATABASE"] = database
        os.environ["PATH"] = (settings.FREPPLE_HOME + os.pathsep +
                              os.environ["PATH"] + os.pathsep +
                              settings.FREPPLE_APP)
        os.environ["LD_LIBRARY_PATH"] = settings.FREPPLE_HOME
        if "DJANGO_SETTINGS_MODULE" not in os.environ:
            os.environ["DJANGO_SETTINGS_MODULE"] = "freppledb.settings"
        # Detect the frozen (py2exe) distribution by the presence of the
        # stdlib zip matching the interpreter that runs this code.
        pyzip = "python%d%d.zip" % (sys.version_info[0], sys.version_info[1])
        if os.path.exists(os.path.join(os.environ["FREPPLE_HOME"], pyzip)):
            # For the py2exe executable
            os.environ["PYTHONPATH"] = (
                os.path.join(os.environ["FREPPLE_HOME"], pyzip) + os.pathsep +
                os.path.normpath(os.environ["FREPPLE_APP"]))
        else:
            # Other executables
            os.environ["PYTHONPATH"] = os.path.normpath(
                os.environ["FREPPLE_APP"])
        cmdline = ['"%s"' % i for i in options["file"]]
        cmdline.insert(0, "frepple")
        cmdline.append('"%s"' % os.path.join(
            settings.FREPPLE_APP, "freppledb", "execute", "loadxml.py"))
        proc = subprocess.run(" ".join(cmdline))
        if proc.returncode:
            raise Exception("Exit code of the batch run is %d" %
                            proc.returncode)

        # Task update
        task.status = "Done"
        task.finished = datetime.now()

    except Exception as e:
        if task:
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
        raise

    finally:
        # Always clear the pid and persist the final task state
        if task:
            task.processid = None
            task.save(using=database)
def handle(self, **options):
    """Restore a database dump created by the backup command via pg_restore.

    Fixes versus the previous revision:
    - ``subprocess.Popen(cmd, shell=True)`` was called with a *list*; with
      shell=True on POSIX only the first element is executed, so the
      ``<dumpfile`` redirect (and all options) were ignored. The command is
      now joined into a single shell string.
    - ``"--port=%s "`` had a trailing space inside the argument value.
    - bare ``except:`` narrowed; ``raise e`` replaced by bare ``raise``.
    """
    # Pick up the options
    database = options["database"]
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if options["user"]:
        try:
            user = User.objects.all().using(database).get(
                username=options["user"])
        except Exception:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        user = None

    now = datetime.now()
    task = None
    try:
        # Initialize the task
        if options["task"]:
            try:
                task = Task.objects.all().using(database).get(
                    pk=options["task"])
            except Exception:
                raise CommandError("Task identifier not found")
            if (task.started or task.finished or task.status != "Waiting"
                    or task.name not in ("frepple_restore", "restore")):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
        else:
            task = Task(name="restore", submitted=now, started=now,
                        status="0%", user=user)
        task.arguments = options["dump"]
        task.processid = os.getpid()
        task.save(using=database)

        # Validate options
        dumpfile = os.path.abspath(
            os.path.join(settings.FREPPLE_LOGDIR, options["dump"]))
        if not os.path.isfile(dumpfile):
            raise CommandError("Dump file not found")

        # Run the restore command
        # Commenting the next line is a little more secure, but requires you to create a .pgpass file.
        if settings.DATABASES[database]["PASSWORD"]:
            os.environ["PGPASSWORD"] = settings.DATABASES[database][
                "PASSWORD"]
        cmd = ["pg_restore", "-n", "public", "-Fc", "-c", "--if-exists"]
        if settings.DATABASES[database]["USER"]:
            cmd.append("--username=%s" %
                       settings.DATABASES[database]["USER"])
        if settings.DATABASES[database]["HOST"]:
            cmd.append("--host=%s" % settings.DATABASES[database]["HOST"])
        if settings.DATABASES[database]["PORT"]:
            cmd.append("--port=%s" % settings.DATABASES[database]["PORT"])
        cmd.append("-d")
        cmd.append(settings.DATABASES[database]["NAME"])
        cmd.append("<%s" % dumpfile)
        # Shell needs to be True in order to interpret the < character.
        # With shell=True the command must be a single string, not a list.
        with subprocess.Popen(" ".join(cmd), shell=True) as p:
            try:
                task.processid = p.pid
                task.save(using=database)
                p.wait()
            except Exception:
                # Interrupted (e.g. task cancellation): kill the restore
                p.kill()
                p.wait()
                raise Exception("Database restoration failed")

        # Task update
        # We need to recreate a new task record, since the previous one is lost during the restoration.
        task = Task(
            name="restore",
            submitted=task.submitted,
            started=task.started,
            arguments=task.arguments,
            status="Done",
            finished=datetime.now(),
            user=task.user,
        )

    except Exception as e:
        if task:
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
        raise

    finally:
        # Commit it all, even in case of exceptions
        if task:
            task.processid = None
            task.save(using=database)
def handle(self, **options):
    """Zip exported report files and email them to a list of recipients.

    Validates sender/recipients/report files, compresses the files from the
    export folder into an in-memory zip, and sends it via Django email.

    Fixes versus the previous revision:
    - ``task.name not in ("emailreport")`` tested *substring membership* in
      the string "emailreport" (missing comma), so unrelated task names like
      "email" passed validation; now a proper 1-tuple.
    - removed redundant ``zf.close()`` inside the ``with`` block.
    - ``raise e`` replaced by bare ``raise`` to preserve the traceback.
    """
    now = datetime.now()
    database = options["database"]
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)

    # Pick up options
    if options["user"]:
        try:
            user = User.objects.all().get(username=options["user"])
        except Exception:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        user = None

    task = None
    try:
        setattr(_thread_locals, "database", database)
        if "task" in options and options["task"]:
            try:
                task = Task.objects.all().using(database).get(
                    pk=options["task"])
            except Exception:
                raise CommandError("Task identifier not found")
            if (task.started or task.finished or task.status != "Waiting"
                    or task.name not in ("emailreport",)):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
        else:
            task = Task(
                name="emailreport",
                submitted=now,
                started=now,
                status="0%",
                user=user,
            )
        task.processid = os.getpid()
        task.save(using=database)

        if not settings.EMAIL_HOST:
            raise CommandError(
                "No SMTP mail server is configured in your djangosettings.py file"
            )

        sender = options["sender"]
        recipient = options["recipient"]
        report = options["report"]

        if not sender:
            raise CommandError("No sender has been defined")
        if not recipient:
            raise CommandError("No recipient has been defined")
        if not report:
            raise CommandError("No report to email has been defined")

        # Make sure the files exist in the export folder
        reports = report.split(",")
        correctedReports = []
        missingFiles = []
        for r in reports:
            if len(r.strip()) == 0:
                continue
            path = os.path.join(
                settings.DATABASES[database]["FILEUPLOADFOLDER"],
                "export",
                r.strip(),
            )
            if not os.path.isfile(path):
                missingFiles.append(r.strip())
            else:
                correctedReports.append(path)
        if len(missingFiles) > 0:
            raise CommandError(
                "Following files are missing in export folder: %s" %
                (",".join(str(x) for x in missingFiles)))
        if len(correctedReports) == 0:
            raise CommandError("No report defined in options")

        # Validate email addresses (lightweight format check only)
        recipients = recipient.split(",")
        correctedRecipients = []
        invalidEmails = []
        for r in recipients:
            if len(r.strip()) == 0:
                continue
            if not re.fullmatch(r"[^@]+@[^@]+\.[^@]+", r.strip()):
                invalidEmails.append(r.strip())
            else:
                correctedRecipients.append(r.strip())
        if len(invalidEmails) > 0:
            raise CommandError(
                "Invalid email formatting for following addresses: %s" %
                (",".join(str(x) for x in invalidEmails)))
        if len(correctedRecipients) == 0:
            raise CommandError("No recipient defined in options")

        task.arguments = "--recipient=%s --report=%s" % (recipient, report)
        task.save(using=database)

        # Create the message
        message = EmailMessage(
            subject="Exported reports",
            body="",
            from_email=sender,
            to=correctedRecipients,
        )

        # Compress all report files into an in-memory zip archive,
        # updating the task progress per file.
        b = BytesIO()
        with ZipFile(file=b, mode="w", compression=ZIP_DEFLATED) as zf:
            processedFiles = 0
            for f in correctedReports:
                task.message = "Compressing file %s" % basename(f)
                task.status = (str(
                    int(processedFiles / len(correctedReports) * 90.0)) + "%")
                task.save(using=database)
                zf.write(filename=f, arcname=basename(f))
                processedFiles = processedFiles + 1

        # Attach zip file
        task.status = "90%"
        task.message = "Sending email"
        task.save(using=database)
        message.attach("reports.zip", b.getvalue(), "application/zip")
        # Send email
        message.send()
        b.close()

        # Logging message
        task.processid = None
        task.message = ""
        task.status = "Done"
        task.finished = datetime.now()

    except Exception as e:
        if task:
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
        raise

    finally:
        setattr(_thread_locals, "database", None)
        if task:
            task.processid = None
            task.save(using=database)
def handle(self, **options):
    """Dump the scenario database to a file with pg_dump.

    Creates (or adopts) a 'backup database' Task record, runs pg_dump into
    FREPPLE_LOGDIR, and prunes backup files older than a month.

    Raises CommandError for invalid options and Exception when pg_dump fails.
    """
    # Pick up the options
    database = options['database']
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if options['user']:
        try:
            user = User.objects.all().using(database).get(
                username=options['user'])
        except Exception:  # FIX: was a bare except, which also swallowed SystemExit/KeyboardInterrupt
            raise CommandError("User '%s' not found" % options['user'])
    else:
        user = None

    now = datetime.now()
    task = None
    try:
        # Initialize the task
        if 'task' in options and options['task']:
            try:
                task = Task.objects.all().using(database).get(
                    pk=options['task'])
            except Exception:  # FIX: was a bare except
                raise CommandError("Task identifier not found")
            if task.started or task.finished or task.status != "Waiting" or task.name != 'backup database':
                raise CommandError("Invalid task identifier")
            task.status = '0%'
            task.started = now
        else:
            task = Task(name='backup database', submitted=now, started=now,
                        status='0%', user=user)

        # Choose the backup file name, e.g. database.<db>.20240131.235959.dump
        backupfile = now.strftime("database.%s.%%Y%%m%%d.%%H%%M%%S.dump" % database)
        task.message = 'Backup to file %s' % backupfile
        task.save(using=database)

        # Run the backup command
        # Commenting the next line is a little more secure, but requires you to
        # create a .pgpass file.
        os.environ['PGPASSWORD'] = settings.DATABASES[database]['PASSWORD']
        args = [
            "pg_dump",
            "-b", "-w",
            '--username=%s' % settings.DATABASES[database]['USER'],
            '--file=%s' % os.path.abspath(
                os.path.join(settings.FREPPLE_LOGDIR, backupfile))
        ]
        if settings.DATABASES[database]['HOST']:
            args.append("--host=%s" % settings.DATABASES[database]['HOST'])
        if settings.DATABASES[database]['PORT']:
            # FIX: removed the stray trailing space from "--port=%s " which
            # produced an argument like "--port=5432 ".
            args.append("--port=%s" % settings.DATABASES[database]['PORT'])
        args.append(settings.DATABASES[database]['NAME'])
        ret = subprocess.call(args)
        if ret:
            # FIX: message typo "Run of run pg_dump failed"
            raise Exception("Run of pg_dump failed")

        # Task update
        task.status = '99%'
        task.save(using=database)

        # Delete backups older than a month.
        # FIX: the dots are now escaped — the old pattern "database.*.*.*.dump"
        # treated '.' as a wildcard and could match unrelated files in the
        # log folder, deleting them after a month.
        pattern = re.compile(r"database\..*\..*\..*\.dump")
        for f in os.listdir(settings.FREPPLE_LOGDIR):
            if os.path.isfile(os.path.join(settings.FREPPLE_LOGDIR, f)):
                # Note this is NOT 100% correct on UNIX. st_ctime is not always the creation date...
                created = datetime.fromtimestamp(
                    os.stat(os.path.join(settings.FREPPLE_LOGDIR, f)).st_ctime)
                if pattern.match(f) and (now - created).days > 31:
                    try:
                        os.remove(os.path.join(settings.FREPPLE_LOGDIR, f))
                    except Exception:
                        # Best effort: a file we cannot delete is left in place.
                        pass

        # Task update
        task.status = 'Done'
        task.finished = datetime.now()
    except Exception as e:
        if task:
            task.status = 'Failed'
            task.message = '%s' % e
            task.finished = datetime.now()
        raise e
    finally:
        if task:
            task.save(using=database)
def handle(self, **options):
    """Erase data from the scenario database ("empty database" task).

    Without --models every erasable table is flushed; with --models only the
    listed models (and their dependent tables) are erased. A number of
    system/auth tables are always preserved. Progress and outcome are tracked
    in a Task record.
    """
    # Pick up options
    database = options["database"]
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if options["user"]:
        try:
            user = User.objects.all().using(database).get(username=options["user"])
        except Exception:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        user = None
    if options["models"]:
        # Comma-separated list of "<app>.<model>" labels to erase.
        models = options["models"].split(",")
    else:
        models = None
    now = datetime.now()
    task = None
    try:
        # Initialize the task
        setattr(_thread_locals, "database", database)
        if options["task"]:
            try:
                task = Task.objects.all().using(database).get(pk=options["task"])
            except Exception:
                raise CommandError("Task identifier not found")
            if (
                task.started
                or task.finished
                or task.status != "Waiting"
                # Accept both the legacy and the current command name.
                or task.name not in ("frepple_flush", "empty")
            ):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
        else:
            task = Task(
                name="empty", submitted=now, started=now, status="0%", user=user
            )
        task.arguments = "%s%s" % (
            "--user=%s " % options["user"] if options["user"] else "",
            "--models=%s " % options["models"] if options["models"] else "",
        )
        task.processid = os.getpid()
        task.save(using=database)

        # Create a database connection
        cursor = connections[database].cursor()

        # Get a list of all django tables in the database
        tables = set(
            connections[database].introspection.django_table_names(
                only_existing=True
            )
        )
        # ContentType pks of the models whose admin-log entries must be purged.
        ContentTypekeys = set()
        # Validate the user list of tables
        if models:
            hasDemand = True if "input.demand" in models else False
            hasOperation = True if "input.operation" in models else False
            hasPO = True if "input.purchaseorder" in models else False
            hasDO = True if "input.distributionorder" in models else False
            hasMO = True if "input.manufacturingorder" in models else False
            hasDeO = True if "input.deliveryorder" in models else False

            # The operationplan table is shared by several model types (PO, DO,
            # MO, DLVR); when only some of them are erased we delete the
            # matching rows with raw SQL instead of flushing the whole table.
            if not hasOperation:
                if hasDemand:
                    models.remove("input.demand")
                    # Detach operationplans from the demands before deleting them.
                    cursor.execute(
                        "update operationplan set demand_id = null where demand_id is not null"
                    )
                    cursor.execute("delete from demand")
                    key = ContentType.objects.get_for_model(
                        inputmodels.Demand, for_concrete_model=False
                    ).pk
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = %s",
                        (key,),
                    )
                if not (hasPO and hasDO and hasMO and hasDeO):
                    # Partial erase of operationplans: the detail tables cannot
                    # be flushed wholesale.
                    if "input.operationplanmaterial" in models:
                        models.remove("input.operationplanmaterial")
                    if "input.operationplanresource" in models:
                        models.remove("input.operationplanresource")
                if hasPO and not (hasDO and hasMO and hasDeO):
                    models.remove("input.purchaseorder")
                    cursor.execute("delete from operationplan where type = 'PO'")
                    key = ContentType.objects.get_for_model(
                        inputmodels.PurchaseOrder, for_concrete_model=False
                    ).pk
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = %s",
                        (key,),
                    )
                if hasDO and not (hasPO and hasMO and hasDeO):
                    models.remove("input.distributionorder")
                    cursor.execute("delete from operationplan where type = 'DO'")
                    key = ContentType.objects.get_for_model(
                        inputmodels.DistributionOrder, for_concrete_model=False
                    ).pk
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = %s",
                        (key,),
                    )
                if hasMO and not (hasPO and hasDO and hasDeO):
                    models.remove("input.manufacturingorder")
                    cursor.execute("delete from operationplan where type = 'MO'")
                    key = ContentType.objects.get_for_model(
                        inputmodels.ManufacturingOrder, for_concrete_model=False
                    ).pk
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = %s",
                        (key,),
                    )
                if hasDeO and not (hasPO and hasDO and hasMO):
                    models.remove("input.deliveryorder")
                    cursor.execute("delete from operationplan where type = 'DLVR'")
                    key = ContentType.objects.get_for_model(
                        inputmodels.DeliveryOrder, for_concrete_model=False
                    ).pk
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = %s",
                        (key,),
                    )

                if (hasPO or hasDO or hasMO or hasDeO) and not (
                    hasPO and hasDO and hasMO and hasDeO
                ):
                    # Keep the database in shape
                    cursor.execute("vacuum analyze")

            # Map the remaining model labels to their table names.
            models2tables = set()
            admin_log_positive = True
            for m in models:
                try:
                    x = m.split(".", 1)
                    x = apps.get_model(x[0], x[1])
                    if x in EXCLUDE_FROM_BULK_OPERATIONS:
                        continue
                    ContentTypekeys.add(ContentType.objects.get_for_model(x).pk)
                    x = x._meta.db_table
                    if x not in tables:
                        # Deliberate bare raise: jumps to the except clause
                        # below, which rewraps everything as a CommandError.
                        raise
                    models2tables.add(x)
                except Exception as e:
                    raise CommandError("Invalid model to erase: %s" % m)
            tables = models2tables
        else:
            # Full flush: erase everything except the excluded models, and
            # keep the admin-log rows OF the excluded models only.
            admin_log_positive = False
            tables.discard("django_admin_log")
            for i in EXCLUDE_FROM_BULK_OPERATIONS:
                tables.discard(i._meta.db_table)
                ContentTypekeys.add(ContentType.objects.get_for_model(i).pk)
        # Some tables need to be handled a bit special
        if "operationplan" in tables:
            tables.add("operationplanmaterial")
            tables.add("operationplanresource")
            tables.add("out_problem")
        if "resource" in tables and "out_resourceplan" not in tables:
            tables.add("out_resourceplan")
        if "demand" in tables and "out_constraint" not in tables:
            tables.add("out_constraint")
        if (
            "reportmanager_report" in tables
            and "reportmanager_column" not in tables
        ):
            tables.add("reportmanager_column")
        # Never erase authentication, session or scenario bookkeeping tables.
        tables.discard("auth_group_permissions")
        tables.discard("auth_permission")
        tables.discard("auth_group")
        tables.discard("django_session")
        tables.discard("common_user")
        tables.discard("common_user_groups")
        tables.discard("common_user_user_permissions")
        tables.discard("common_preference")
        tables.discard("django_content_type")
        tables.discard("execute_log")
        tables.discard("execute_schedule")
        tables.discard("common_scenario")

        # Delete all records from the tables.
        with transaction.atomic(using=database, savepoint=False):
            if ContentTypekeys:
                if admin_log_positive:
                    # Erase the admin-log rows of the erased models.
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = any(%s)",
                        (list(ContentTypekeys),),
                    )
                else:
                    # Full flush: keep only the rows of the excluded models.
                    cursor.execute(
                        "delete from django_admin_log where content_type_id != any(%s)",
                        (list(ContentTypekeys),),
                    )
            if "common_bucket" in tables:
                # Users may reference a bucket that is about to disappear.
                cursor.execute("update common_user set horizonbuckets = null")
            for stmt in connections[database].ops.sql_flush(no_style(), tables, []):
                cursor.execute(stmt)

        # Task update
        task.status = "Done"
        task.finished = datetime.now()
        task.processid = None
        task.save(using=database)
    except Exception as e:
        if task:
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
            task.processid = None
            task.save(using=database)
        raise CommandError("%s" % e)
    finally:
        setattr(_thread_locals, "database", None)
def handle(self, **options):
    """Dump the scenario database to a compressed file with pg_dump.

    Creates (or adopts) a 'backup' Task record, runs pg_dump (custom
    format, -Fc) into FREPPLE_LOGDIR while tracking its pid, and prunes
    backup files older than a month.

    Raises CommandError for invalid options and Exception when pg_dump fails.
    """
    # Pick up the options
    database = options["database"]
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if options["user"]:
        try:
            user = User.objects.all().using(database).get(
                username=options["user"])
        except Exception:  # FIX: was a bare except
            raise CommandError("User '%s' not found" % options["user"])
    else:
        user = None

    now = datetime.now()
    task = None
    try:
        # Initialize the task
        if "task" in options and options["task"]:
            try:
                task = Task.objects.all().using(database).get(
                    pk=options["task"])
            except Exception:  # FIX: was a bare except
                raise CommandError("Task identifier not found")
            if (task.started or task.finished or task.status != "Waiting"
                    or task.name not in ("frepple_backup", "backup")):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
        else:
            task = Task(name="backup", submitted=now, started=now,
                        status="0%", user=user)

        # Choose the backup file name, e.g. database.<db>.20240131.235959.dump
        backupfile = now.strftime("database.%s.%%Y%%m%%d.%%H%%M%%S.dump" % database)
        task.message = "Backup to file %s" % backupfile

        # Run the backup command
        # Commenting the next line is a little more secure, but requires you to
        # create a .pgpass file.
        os.environ["PGPASSWORD"] = settings.DATABASES[database]["PASSWORD"]
        args = [
            "pg_dump",
            "-Fc",  # custom (compressed) archive format
            "-w",
            "--username=%s" % settings.DATABASES[database]["USER"],
            "--file=%s" % os.path.abspath(
                os.path.join(settings.FREPPLE_LOGDIR, backupfile)),
        ]
        if settings.DATABASES[database]["HOST"]:
            args.append("--host=%s" % settings.DATABASES[database]["HOST"])
        if settings.DATABASES[database]["PORT"]:
            args.append("--port=%s" % settings.DATABASES[database]["PORT"])
        args.append(settings.DATABASES[database]["NAME"])
        with subprocess.Popen(args) as p:
            try:
                task.processid = p.pid
                task.save(using=database)
                p.wait()
            except BaseException:
                # BaseException on purpose: also kill pg_dump when the
                # command itself is interrupted (e.g. KeyboardInterrupt).
                p.kill()
                p.wait()
                # FIX: message typo "Run of run pg_dump failed"
                raise Exception("Run of pg_dump failed")

        # Task update
        task.processid = None
        task.status = "99%"
        task.save(using=database)

        # Delete backups older than a month.
        # FIX: the dots are now escaped — the old pattern "database.*.*.*.dump"
        # treated '.' as a wildcard and could match unrelated files in the
        # log folder, deleting them after a month.
        pattern = re.compile(r"database\..*\..*\..*\.dump")
        for f in os.listdir(settings.FREPPLE_LOGDIR):
            if os.path.isfile(os.path.join(settings.FREPPLE_LOGDIR, f)):
                # Note this is NOT 100% correct on UNIX. st_ctime is not always the creation date...
                created = datetime.fromtimestamp(
                    os.stat(os.path.join(settings.FREPPLE_LOGDIR, f)).st_ctime)
                if pattern.match(f) and (now - created).days > 31:
                    try:
                        os.remove(os.path.join(settings.FREPPLE_LOGDIR, f))
                    except Exception:
                        # Best effort: a file we cannot delete is left in place.
                        pass

        # Task update
        task.status = "Done"
        task.finished = datetime.now()
        task.processid = None
    except Exception as e:
        if task:
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
            task.processid = None
        raise e
    finally:
        if task:
            task.save(using=database)
def wrapTask(request, action):
    """Translate a POST request into a queued Task and launch a worker.

    Validates the request method and the user's execute permission, builds a
    Task record for the requested *action*, and spawns a 'frepple_runworker'
    process for the worker database if none is active yet.

    Returns the created Task, or None for the synchronous scenario actions
    (release/update) that run immediately.
    """
    # Allow only post
    if request.method != 'POST':
        raise Exception('Only post requests allowed')
    # Check user permissions
    if not request.user.has_perm('execute'):
        raise Exception('Missing execution privileges')
    # Parse the posted parameters as arguments for an asynchronous task to add to the queue.    TODO MAKE MODULAR WITH SEPERATE TASK CLASS
    worker_database = request.database
    now = datetime.now()
    task = None
    # A: generate a plan
    if action == 'frepple_run':
        if not request.user.has_perm('execute.generate_plan'):
            raise Exception('Missing execution privileges')
        # The constraint value is the sum of the checked constraint flags.
        constraint = 0
        for value in request.POST.getlist('constraint'):
            try:
                constraint += int(value)
            except:
                pass
        task = Task(name='generate plan', submitted=now, status='Waiting', user=request.user)
        task.arguments = "--constraint=%s --plantype=%s" % (
            constraint, request.POST.get('plantype'))
        # Optional odoo integration flags, remembered in the session.
        env = []
        if request.POST.get('odoo_read', None) == '1':
            env.append("odoo_read")
            request.session['odoo_read'] = True
        else:
            request.session['odoo_read'] = False
        if request.POST.get('odoo_write', None) == '1':
            env.append("odoo_write")
            request.session['odoo_write'] = True
        else:
            request.session['odoo_write'] = False
        if env:
            task.arguments = "%s --env=%s" % (task.arguments, ','.join(env))
        task.save(using=request.database)
        # Update the session object
        request.session['plantype'] = request.POST.get('plantype')
        request.session['constraint'] = constraint
    # B: generate a random test model
    elif action == 'frepple_createmodel':
        task = Task(name='generate model', submitted=now, status='Waiting', user=request.user)
        task.arguments = "--cluster=%s --demand=%s --forecast_per_item=%s --level=%s --resource=%s " \
            "--resource_size=%s --components=%s --components_per=%s --deliver_lt=%s --procure_lt=%s" % (
                request.POST['clusters'], request.POST['demands'], request.POST['fcst'], request.POST['levels'],
                request.POST['rsrc_number'], request.POST['rsrc_size'], request.POST['components'],
                request.POST['components_per'], request.POST['deliver_lt'], request.POST['procure_lt']
            )
        task.save(using=request.database)
    # C: empty the database (optionally only selected entities)
    elif action == 'frepple_flush':
        task = Task(name='empty database', submitted=now, status='Waiting', user=request.user)
        if not request.POST.get('all'):
            task.arguments = "--models=%s" % ','.join(
                request.POST.getlist('entities'))
        task.save(using=request.database)
    # D: load a dataset fixture
    elif action == 'loaddata':
        task = Task(name='load dataset', submitted=now, status='Waiting', user=request.user,
                    arguments=request.POST['datafile'])
        task.save(using=request.database)
    # E: scenario management (copy / release / update)
    elif action == 'frepple_copy':
        # Scenario tasks are queued on the default database.
        worker_database = DEFAULT_DB_ALIAS
        if 'copy' in request.POST:
            if not request.user.has_perm('execute.copy_scenario'):
                raise Exception('Missing execution privileges')
            source = request.POST.get('source', DEFAULT_DB_ALIAS)
            for sc in Scenario.objects.all():
                # Copy into every checked scenario that is currently free.
                if request.POST.get(sc.name, 'off') == 'on' and sc.status == 'Free':
                    task = Task(name='copy scenario', submitted=now, status='Waiting',
                                user=request.user, arguments="%s %s" % (source, sc.name))
                    task.save()
        elif 'release' in request.POST:
            # Note: release is immediate and synchronous.
            if not request.user.has_perm('execute.release_scenario'):
                raise Exception('Missing execution privileges')
            for sc in Scenario.objects.all():
                if request.POST.get(sc.name, 'off') == 'on' and sc.status != 'Free':
                    sc.status = 'Free'
                    sc.lastrefresh = now
                    sc.save()
                    if request.database == sc.name:
                        # Erasing the database that is currently selected.
                        request.prefix = ''
        elif 'update' in request.POST:
            # Note: update is immediate and synchronous.
            for sc in Scenario.objects.all():
                if request.POST.get(sc.name, 'off') == 'on':
                    sc.description = request.POST.get('description', None)
                    sc.save()
        else:
            raise Exception('Invalid scenario task')
    # F: back up the database
    elif action == 'frepple_backup':
        task = Task(name='backup database', submitted=now, status='Waiting', user=request.user)
        task.save(using=request.database)
    # G: generate calendar buckets
    elif action == 'frepple_createbuckets':
        task = Task(name='generate buckets', submitted=now, status='Waiting', user=request.user)
        task.arguments = "--start=%s --end=%s --weekstart=%s" % (
            request.POST['start'], request.POST['end'], request.POST['weekstart'])
        task.save(using=request.database)
    # H: import from Openbravo (only when the app is installed)
    elif action == 'openbravo_import' and 'freppledb.openbravo' in settings.INSTALLED_APPS:
        task = Task(name='Openbravo import', submitted=now, status='Waiting', user=request.user)
        task.arguments = "--delta=%s" % request.POST['delta']
        task.save(using=request.database)
    # I: export to Openbravo (only when the app is installed)
    elif action == 'openbravo_export' and 'freppledb.openbravo' in settings.INSTALLED_APPS:
        task = Task(name='Openbravo export', submitted=now, status='Waiting', user=request.user)
        task.save(using=request.database)
    else:
        # Task not recognized
        raise Exception('Invalid launching task')

    # Launch a worker process, unless one is already serving this database.
    if task and not checkActive(worker_database):
        if os.path.isfile(os.path.join(settings.FREPPLE_APP, "frepplectl.py")):
            if "python" in sys.executable:
                # Development layout
                Popen([
                    sys.executable,  # Python executable
                    os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
                    "frepple_runworker",
                    "--database=%s" % worker_database
                ])
            else:
                # Deployment on Apache web server
                Popen([
                    "python",
                    os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
                    "frepple_runworker",
                    "--database=%s" % worker_database
                ], creationflags=0x08000000)
        elif sys.executable.find('freppleserver.exe') >= 0:
            # Py2exe executable
            Popen([
                sys.executable.replace('freppleserver.exe', 'frepplectl.exe'),  # frepplectl executable
                "frepple_runworker",
                "--database=%s" % worker_database
            ], creationflags=0x08000000)  # Do not create a console window
        else:
            # Linux standard installation
            Popen([
                "frepplectl",
                "frepple_runworker",
                "--database=%s" % worker_database
            ])
    return task
def handle(self, *fixture_labels, **options):
    """Load fixtures with Django's loaddata, tracked as a frePPLe Task.

    After the standard loaddata run, fixtures whose name contains 'demo'
    get a post-treatment: all dates in the loaded data are shifted so the
    demo dataset is anchored on today's date.

    Raises CommandError for invalid options or any failure during loading.
    """
    # get the database object
    database = options["database"]
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)

    now = datetime.now()
    task = None
    try:
        setattr(_thread_locals, "database", database)
        # Initialize the task
        if options["task"]:
            try:
                task = Task.objects.all().using(database).get(pk=options["task"])
            except Exception:
                raise CommandError("Task identifier not found")
            if (
                task.started
                or task.finished
                or task.status != "Waiting"
                or task.name != "loaddata"
            ):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
            task.processid = os.getpid()
            task.save(using=database, update_fields=["started", "status", "processid"])
        else:
            if options["user"]:
                try:
                    user = (
                        User.objects.all().using(database).get(username=options["user"])
                    )
                except Exception:
                    raise CommandError("User '%s' not found" % options["user"])
            else:
                user = None
            task = Task(
                name="loaddata",
                submitted=now,
                started=now,
                status="0%",
                user=user,
                arguments=" ".join(fixture_labels),
            )
            task.processid = os.getpid()
            task.save(using=database)

        # Excecute the standard django command
        super().handle(*fixture_labels, **options)

        # if the fixture doesn't contain the 'demo' word, let's not apply
        # loaddata post-treatments
        for f in fixture_labels:
            if "demo" not in f.lower():
                # FIX: this early return previously skipped the task-completion
                # update, leaving a successful task stuck at "0%" forever.
                task.status = "Done"
                task.finished = datetime.now()
                task.processid = None
                task.save(
                    using=database,
                    update_fields=["status", "finished", "processid"],
                )
                return

        with transaction.atomic(using=database, savepoint=False):
            if self.verbosity > 2:
                print("updating fixture to current date")
            cursor = connections[database].cursor()
            currentDate = parse(
                Parameter.objects.using(database).get(name="currentdate").value
            )
            now = datetime.now()
            # Number of days to shift every date in the demo dataset.
            offset = (now - currentDate).days
            # update currentdate to now
            cursor.execute(
                """
                update common_parameter set value = 'now' where name = 'currentdate'
                """
            )
            # update demand due dates
            cursor.execute(
                """
                update demand set due = due + %s * interval '1 day'
                """,
                (offset,),
            )
            # update PO/DO/MO due dates
            cursor.execute(
                """
                update operationplan
                set startdate = startdate + %s * interval '1 day',
                    enddate = enddate + %s * interval '1 day'
                """,
                2 * (offset,),
            )
            # Update archive tables
            if "freppledb.archive" in settings.INSTALLED_APPS:
                # ax_manager table needs to be updated in the right order.
                # Otherwise we can get duplicates.
                cursor.execute(
                    "select snapshot_date from ax_manager order by snapshot_date %s"
                    % ("asc" if offset < 0 else "desc")
                )
                for ax in cursor.fetchall():
                    cursor.execute(
                        """
                        update ax_manager
                        set snapshot_date = snapshot_date + %s * interval '1 day'
                        where snapshot_date = %s
                        """,
                        (offset, ax[0]),
                    )
                cursor.execute(
                    """
                    update ax_buffer
                    set snapshot_date_id = snapshot_date_id + %s * interval '1 day'
                    """,
                    (offset,),
                )
                cursor.execute(
                    """
                    update ax_demand
                    set snapshot_date_id = snapshot_date_id + %s * interval '1 day',
                        due = due + %s * interval '1 day',
                        deliverydate = deliverydate + %s * interval '1 day'
                    """,
                    3 * (offset,),
                )
                cursor.execute(
                    """
                    update ax_operationplan
                    set snapshot_date_id = snapshot_date_id + %s * interval '1 day',
                        startdate = startdate + %s * interval '1 day',
                        enddate = enddate + %s * interval '1 day',
                        due = due + %s * interval '1 day'
                    """,
                    4 * (offset,),
                )

            # Task update
            task.status = "Done"
            task.finished = datetime.now()
            task.processid = None
            # FIX: "processid" was missing from update_fields, so the stale
            # pid was never cleared in the database.
            task.save(
                using=database, update_fields=["status", "finished", "processid"]
            )
    except Exception as e:
        if task:
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
            task.processid = None
            # FIX: include "processid" here too (see above).
            task.save(
                using=database,
                update_fields=["status", "finished", "message", "processid"],
            )
        raise CommandError("%s" % e)
    finally:
        setattr(_thread_locals, "database", None)
def handle(self, **options):
    """Regenerate the standard reporting time buckets.

    Deletes and recreates the 'year', 'quarter', 'month', 'week' and 'day'
    buckets (and their bucket details) between the --start and --end dates,
    honoring the --weekstart day. Progress is tracked in a Task record.
    """
    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    # Pick up the options
    start = options["start"]
    end = options["end"]
    weekstart = int(options["weekstart"])
    database = options["database"]
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if options["user"]:
        try:
            user = User.objects.all().using(database).get(username=options["user"])
        except:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        user = None

    now = datetime.now()
    task = None
    try:
        # Initialize the task
        if options["task"]:
            if options["task"] > 0:
                try:
                    task = (
                        Task.objects.all().using(database).get(pk=options["task"])
                    )
                except Task.DoesNotExist:
                    raise CommandError("Task identifier not found")
                if (
                    task.started
                    or task.finished
                    or task.status != "Waiting"
                    or task.name not in ("frepple_createbuckets", "createbuckets")
                ):
                    raise CommandError("Invalid task identifier")
                task.status = "0%"
                task.started = now
        else:
            task = Task(
                name="createbuckets",
                submitted=now,
                started=now,
                status="0%",
                user=user,
                arguments="--start=%s --end=%s --weekstart=%s"
                % (start, end, weekstart),
            )
        if task:
            task.processid = os.getpid()
            task.save(using=database)

        # Validate the date arguments
        try:
            curdate = datetime.strptime(start, "%Y-%m-%d")
            enddate = datetime.strptime(end, "%Y-%m-%d")
        except Exception as e:
            raise CommandError("Date is not matching format YYYY-MM-DD")

        with transaction.atomic(using=database, savepoint=False):
            # Delete previous contents
            with connections[database].cursor() as cursor:
                cursor.execute(
                    "delete from common_bucketdetail where bucket_id in ('year','quarter','month','week','day')"
                )
                cursor.execute(
                    "delete from common_bucket where name in ('year','quarter','month','week','day')"
                )

            # Create buckets
            y = Bucket(name="year", description="Yearly time buckets", level=1)
            q = Bucket(
                name="quarter", description="Quarterly time buckets", level=2
            )
            m = Bucket(name="month", description="Monthly time buckets", level=3)
            w = Bucket(name="week", description="Weeky time buckets", level=4)
            d = Bucket(name="day", description="Daily time buckets", level=5)
            y.save(using=database)
            q.save(using=database)
            m.save(using=database)
            w.save(using=database)
            d.save(using=database)

            # Loop over all days in the chosen horizon
            prev_year = None
            prev_quarter = None
            prev_month = None
            prev_week = None
            while curdate < enddate:
                month = int(
                    curdate.strftime("%m")
                )  # an integer in the range 1 - 12
                quarter = (month - 1) // 3 + 1  # an integer in the range 1 - 4
                year = int(curdate.strftime("%Y"))
                dayofweek = int(
                    curdate.strftime("%w")
                )  # day of the week, 0 = sunday, 1 = monday, ...
                year_start = datetime(year, 1, 1)
                year_end = datetime(year + 1, 1, 1)
                # Start/end of the week containing curdate, shifted by the
                # configured first day of the week.
                week_start = curdate - timedelta(
                    (dayofweek + 6) % 7 + 1 - weekstart
                )
                week_end = curdate - timedelta((dayofweek + 6) % 7 - 6 - weekstart)

                # Create a new bucket detail whenever the year/quarter/month/
                # week rolls over; a day detail is created for every date.
                if year != prev_year:
                    prev_year = year
                    BucketDetail(
                        bucket=y,
                        name=self.formatDate(curdate, options["format_year"]),
                        startdate=year_start,
                        enddate=year_end,
                    ).save(using=database)
                if quarter != prev_quarter:
                    prev_quarter = quarter
                    BucketDetail(
                        bucket=q,
                        name=self.formatDate(curdate, options["format_quarter"]),
                        startdate=date(year, quarter * 3 - 2, 1),
                        enddate=date(
                            year + quarter // 4,
                            quarter * 3 + 1 - 12 * (quarter // 4),
                            1,
                        ),
                    ).save(using=database)
                if month != prev_month:
                    prev_month = month
                    BucketDetail(
                        bucket=m,
                        name=self.formatDate(curdate, options["format_month"]),
                        startdate=date(year, month, 1),
                        enddate=date(
                            year + month // 12, month + 1 - 12 * (month // 12), 1
                        ),
                    ).save(using=database)
                if week_start != prev_week:
                    prev_week = week_start
                    # we need to avoid weeks 00
                    # we will therefore take the name of the week starting the monday
                    # included in that week
                    BucketDetail(
                        bucket=w,
                        name=self.formatDate(
                            week_start
                            + timedelta(days=(7 - week_start.weekday()) % 7),
                            options["format_week"],
                        ),
                        startdate=week_start,
                        enddate=week_end,
                    ).save(using=database)
                BucketDetail(
                    bucket=d,
                    name=self.formatDate(curdate.date(), options["format_day"]),
                    startdate=curdate,
                    enddate=curdate + timedelta(1),
                ).save(using=database)

                # Next date
                curdate = curdate + timedelta(1)

        # Log success
        if task:
            task.status = "Done"
            task.finished = datetime.now()
    except Exception as e:
        if task:
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
        raise e
    finally:
        if task:
            task.processid = None
            task.save(using=database)
        # Restore the original debug setting, whatever happened above.
        settings.DEBUG = tmp_debug
def handle(self, **options):
    """Import all data files found in the scenario's upload folder.

    Scans FILEUPLOADFOLDER for .sql/.csv/.cpy/.xlsx files (plain or
    gzipped), matches each file name to a model, sorts the files by model
    dependencies and loads them one by one. Progress, per-file messages and
    the error/warning tally are tracked in a Task record and in a dedicated
    logfile under FREPPLE_LOGDIR.
    """
    # Pick up the options
    now = datetime.now()
    self.database = options["database"]
    if self.database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % self.database)
    if options["user"]:
        try:
            self.user = (
                User.objects.all().using(self.database).get(username=options["user"])
            )
        except Exception:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        self.user = None

    # One logfile per run; the scenario name is embedded for non-default databases.
    timestamp = now.strftime("%Y%m%d%H%M%S")
    if self.database == DEFAULT_DB_ALIAS:
        logfile = "importfromfolder-%s.log" % timestamp
    else:
        logfile = "importfromfolder_%s-%s.log" % (self.database, timestamp)

    try:
        handler = logging.FileHandler(
            os.path.join(settings.FREPPLE_LOGDIR, logfile), encoding="utf-8"
        )
        # handler.setFormatter(logging.Formatter(settings.LOGGING['formatters']['simple']['format']))
        logger.addHandler(handler)
        logger.propagate = False
    except Exception as e:
        # A broken logfile is not fatal: the import continues without it.
        print("%s Failed to open logfile %s: %s" % (datetime.now(), logfile, e))

    task = None
    # errors[0] counts errors, errors[1] counts warnings.
    errors = [0, 0]
    try:
        setattr(_thread_locals, "database", self.database)
        # Initialize the task
        if options["task"]:
            try:
                task = (
                    Task.objects.all().using(self.database).get(pk=options["task"])
                )
            except Exception:
                raise CommandError("Task identifier not found")
            if (
                task.started
                or task.finished
                or task.status != "Waiting"
                or task.name not in ("frepple_importfromfolder", "importfromfolder")
            ):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
            task.logfile = logfile
        else:
            task = Task(
                name="importfromfolder",
                submitted=now,
                started=now,
                status="0%",
                user=self.user,
                logfile=logfile,
            )
        task.processid = os.getpid()
        task.save(using=self.database)

        # Choose the right self.delimiter and language: when the locale uses a
        # decimal comma, CSV fields are separated by semicolons instead.
        self.delimiter = (
            get_format("DECIMAL_SEPARATOR", settings.LANGUAGE_CODE, True) == ","
            and ";"
            or ","
        )
        translation.activate(settings.LANGUAGE_CODE)
        self.SQLrole = settings.DATABASES[self.database].get(
            "SQL_ROLE", "report_role"
        )

        # Execute
        if "FILEUPLOADFOLDER" in settings.DATABASES[
            self.database
        ] and os.path.isdir(settings.DATABASES[self.database]["FILEUPLOADFOLDER"]):
            # Open the logfile
            logger.info(
                "%s Started importfromfolder\n"
                % datetime.now().replace(microsecond=0)
            )

            all_models = [
                (ct.model_class(), ct.pk)
                for ct in ContentType.objects.all()
                if ct.model_class()
            ]
            models = []
            for ifile in os.listdir(
                settings.DATABASES[self.database]["FILEUPLOADFOLDER"]
            ):
                # Only recognized data file extensions are considered.
                if not ifile.lower().endswith(
                    (
                        ".sql",
                        ".sql.gz",
                        ".csv",
                        ".csv.gz",
                        ".cpy",
                        ".cpy.gz",
                        ".xlsx",
                    )
                ):
                    continue
                # Strip the extension and any " (n)" copy suffix to get the model name.
                filename0 = ifile.split(".")[0].split(" (")[0]

                model = None
                contenttype_id = None
                for m, ct in all_models:
                    if matchesModelName(filename0, m):
                        model = m
                        contenttype_id = ct
                        break
                if not model or model in EXCLUDE_FROM_BULK_OPERATIONS:
                    logger.info(
                        "%s Ignoring data in file: %s"
                        % (datetime.now().replace(microsecond=0), ifile)
                    )
                elif self.user and not self.user.has_perm(
                    "%s.%s"
                    % (
                        model._meta.app_label,
                        get_permission_codename("add", model._meta),
                    )
                ):
                    # Check permissions
                    logger.info(
                        "%s You don't have permissions to add: %s"
                        % (datetime.now().replace(microsecond=0), ifile)
                    )
                else:
                    deps = set([model])
                    GridReport.dependent_models(model, deps)
                    models.append((ifile, model, contenttype_id, deps))

            # Sort the list of models, based on dependencies between models
            models = GridReport.sort_models(models)

            i = 0
            cnt = len(models)
            for ifile, model, contenttype_id, dependencies in models:
                # Progress spans 10%..90% of the task.
                task.status = str(int(10 + i / cnt * 80)) + "%"
                task.message = "Processing data file %s" % ifile
                task.save(using=self.database)
                i += 1
                filetoparse = os.path.join(
                    os.path.abspath(
                        settings.DATABASES[self.database]["FILEUPLOADFOLDER"]
                    ),
                    ifile,
                )
                # Dispatch on the file extension to the matching loader.
                if ifile.lower().endswith((".sql", ".sql.gz")):
                    logger.info(
                        "%s Started executing SQL statements from file: %s"
                        % (datetime.now().replace(microsecond=0), ifile)
                    )
                    errors[0] += self.executeSQLfile(filetoparse)
                    logger.info(
                        "%s Finished executing SQL statements from file: %s"
                        % (datetime.now().replace(microsecond=0), ifile)
                    )
                elif ifile.lower().endswith((".cpy", ".cpy.gz")):
                    logger.info(
                        "%s Started uploading copy file: %s"
                        % (datetime.now().replace(microsecond=0), ifile)
                    )
                    errors[0] += self.executeCOPYfile(model, filetoparse)
                    logger.info(
                        "%s Finished uploading copy file: %s"
                        % (datetime.now().replace(microsecond=0), ifile)
                    )
                elif ifile.lower().endswith(".xlsx"):
                    logger.info(
                        "%s Started processing data in Excel file: %s"
                        % (datetime.now().replace(microsecond=0), ifile)
                    )
                    returnederrors = self.loadExcelfile(model, filetoparse)
                    errors[0] += returnederrors[0]
                    errors[1] += returnederrors[1]
                    logger.info(
                        "%s Finished processing data in file: %s"
                        % (datetime.now().replace(microsecond=0), ifile)
                    )
                else:
                    logger.info(
                        "%s Started processing data in CSV file: %s"
                        % (datetime.now().replace(microsecond=0), ifile)
                    )
                    returnederrors = self.loadCSVfile(model, filetoparse)
                    errors[0] += returnederrors[0]
                    errors[1] += returnederrors[1]
                    logger.info(
                        "%s Finished processing data in CSV file: %s"
                        % (datetime.now().replace(microsecond=0), ifile)
                    )
        else:
            errors[0] += 1
            cnt = 0
            logger.error(
                "%s Failed, folder does not exist"
                % datetime.now().replace(microsecond=0)
            )

        # Task update
        if errors[0] > 0:
            task.status = "Failed"
            if not cnt:
                task.message = "Destination folder does not exist"
            else:
                task.message = (
                    "Uploaded %s data files with %s errors and %s warnings"
                    % (cnt, errors[0], errors[1])
                )
        else:
            task.status = "Done"
            task.message = "Uploaded %s data files with %s warnings" % (
                cnt,
                errors[1],
            )
        task.finished = datetime.now()
    except KeyboardInterrupt:
        if task:
            task.status = "Cancelled"
            task.message = "Cancelled"
        logger.info("%s Cancelled\n" % datetime.now().replace(microsecond=0))
    except Exception as e:
        logger.error("%s Failed" % datetime.now().replace(microsecond=0))
        if task:
            task.status = "Failed"
            task.message = "%s" % e
        raise e
    finally:
        setattr(_thread_locals, "database", None)
        if task:
            # The final status is recomputed here from the error counter, so
            # it is consistent even when an exception interrupted the run.
            if errors[0] == 0:
                task.status = "Done"
            else:
                task.status = "Failed"
            task.processid = None
            task.finished = datetime.now()
            task.save(using=self.database)
        logger.info(
            "%s End of importfromfolder\n" % datetime.now().replace(microsecond=0)
        )
def handle(self, **options):
    """
    Run the erp2frepple extraction task.

    Validates the target database and optional user/task options, registers a
    Task record for progress reporting, then extracts all entities from the
    ERP connection into flat files in the scenario's upload folder.

    Raises CommandError on an unknown database, user, or task identifier, or
    when the upload folder is not writable.
    """
    # Select the correct frePPLe scenario database
    self.database = options["database"]
    if self.database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % self.database)

    # FrePPle user running this task
    if options["user"]:
        try:
            self.user = (
                User.objects.all()
                .using(self.database)
                .get(username=options["user"])
            )
        except Exception:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        self.user = None

    # FrePPLe task identifier: either adopt an existing "Waiting" task record
    # or create a fresh one.
    if options["task"]:
        try:
            self.task = (
                Task.objects.all().using(self.database).get(pk=options["task"])
            )
        except Exception:
            raise CommandError("Task identifier not found")
        if (
            self.task.started
            or self.task.finished
            or self.task.status != "Waiting"
            or self.task.name != "erp2frepple"
        ):
            raise CommandError("Invalid task identifier")
    else:
        now = datetime.now()
        self.task = Task(
            name="erp2frepple",
            submitted=now,
            started=now,
            status="0%",
            user=self.user,
        )
    self.task.processid = os.getpid()
    self.task.save(using=self.database)

    # Set the destination folder
    self.destination = settings.DATABASES[self.database]["FILEUPLOADFOLDER"]
    if not os.access(self.destination, os.W_OK):
        raise CommandError("Can't write to folder %s " % self.destination)

    # Open database connection
    print("Connecting to the database")
    with getERPconnection(self.database) as erp_connection:
        self.cursor = erp_connection.cursor()
        self.fk = "_id" if self.ext == "cpy" else ""

        # Ordered list of extraction steps. Progress is computed from the
        # step index, which removes the hand-maintained percentage ladder.
        # BUGFIX: the original called extractItemSupplier, extractCalendar
        # and extractCalendarBucket twice (copy-paste duplication in the
        # 84%-100% range); each extractor now runs exactly once.
        # Note: the suboperation table is deprecated. The same data can now
        # be directly loaded in the operation table.
        steps = [
            self.extractLocation,
            self.extractCustomer,
            self.extractItem,
            self.extractSupplier,
            self.extractResource,
            self.extractSalesOrder,
            self.extractOperation,
            self.extractSuboperation,
            self.extractOperationResource,
            self.extractOperationMaterial,
            self.extractItemSupplier,
            self.extractCalendar,
            self.extractCalendarBucket,
            self.extractBuffer,
        ]
        try:
            for index, step in enumerate(steps, start=1):
                step()
                # Persist progress after every step so the UI can follow along.
                self.task.status = "%d%%" % (index * 100 // len(steps))
                self.task.save(using=self.database)
            self.task.status = "Done"
        except Exception as e:
            self.task.status = "Failed"
            self.task.message = "Failed: %s" % e
        finally:
            # Always clear the process id and record the finish time,
            # whether the extraction succeeded or failed.
            self.task.processid = None
            self.task.finished = datetime.now()
            self.task.save(using=self.database)
def handle(self, **options):
    """
    Copy one frePPLe scenario database into another with pg_dump/pg_restore.

    Validates source and destination scenarios, records a Task for audit in
    both databases, pipes the data across, then activates the destination
    scenario and grants access to the copying user and source superusers.

    Raises CommandError on unknown user/scenario/task, mismatched database
    engines, copying a scenario onto itself, a source not 'In use', or a
    destination not 'Free' (unless --force).
    """
    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    # Pick up options
    force = options['force']
    test = 'FREPPLE_TEST' in os.environ
    if options['user']:
        try:
            user = User.objects.all().get(username=options['user'])
        except Exception:
            # Narrowed from a bare "except:" so ctrl-C is not swallowed.
            raise CommandError("User '%s' not found" % options['user'])
    else:
        user = None

    # Synchronize the scenario table with the settings
    Scenario.syncWithSettings()

    # Initialize the task
    source = options['source']
    try:
        sourcescenario = Scenario.objects.using(DEFAULT_DB_ALIAS).get(pk=source)
    except Exception:
        raise CommandError("No source database defined with name '%s'" % source)
    now = datetime.now()
    task = None
    if 'task' in options and options['task']:
        try:
            task = Task.objects.all().using(source).get(pk=options['task'])
        except Exception:
            raise CommandError("Task identifier not found")
        if (task.started or task.finished or task.status != "Waiting"
                or task.name not in ('frepple_copy', 'scenario_copy')):
            raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
    else:
        task = Task(name='scenario_copy', submitted=now, started=now,
                    status='0%', user=user)
    task.save(using=source)

    # Validate the arguments
    destination = options['destination']
    destinationscenario = None
    try:
        task.arguments = "%s %s" % (source, destination)
        if options['description']:
            # BUGFIX: a leading space was missing, gluing --description onto
            # the destination name in the recorded task arguments.
            task.arguments += ' --description="%s"' % (
                options['description'].replace('"', '\\"'))
        if force:
            task.arguments += " --force"
        task.save(using=source)
        try:
            destinationscenario = Scenario.objects.using(
                DEFAULT_DB_ALIAS).get(pk=destination)
        except Exception:
            raise CommandError(
                "No destination database defined with name '%s'" % destination)
        if source == destination:
            raise CommandError("Can't copy a schema on itself")
        if (settings.DATABASES[source]['ENGINE']
                != settings.DATABASES[destination]['ENGINE']):
            raise CommandError(
                "Source and destination scenarios have a different engine")
        if sourcescenario.status != 'In use':
            raise CommandError("Source scenario is not in use")
        if destinationscenario.status != 'Free' and not force:
            raise CommandError("Destination scenario is not free")

        # Logging message - always logging in the default database
        destinationscenario.status = 'Busy'
        destinationscenario.save(using=DEFAULT_DB_ALIAS)

        # Copying the data
        # Commenting the next line is a little more secure, but requires you
        # to create a .pgpass file.
        if settings.DATABASES[source]['PASSWORD']:
            os.environ['PGPASSWORD'] = settings.DATABASES[source]['PASSWORD']
        if os.name == 'nt':
            # On windows restoring with pg_restore over a pipe is broken :-(
            cmd = "pg_dump -c -Fp %s%s%s%s | psql %s%s%s%s"
        else:
            cmd = ("pg_dump -Fc %s%s%s%s | "
                   "pg_restore -n public -Fc -c --if-exists %s%s%s -d %s")
        commandline = cmd % (
            settings.DATABASES[source]['USER'] and
            ("-U %s " % settings.DATABASES[source]['USER']) or '',
            settings.DATABASES[source]['HOST'] and
            ("-h %s " % settings.DATABASES[source]['HOST']) or '',
            settings.DATABASES[source]['PORT'] and
            ("-p %s " % settings.DATABASES[source]['PORT']) or '',
            test and settings.DATABASES[source]['TEST']['NAME'] or
            settings.DATABASES[source]['NAME'],
            settings.DATABASES[destination]['USER'] and
            ("-U %s " % settings.DATABASES[destination]['USER']) or '',
            settings.DATABASES[destination]['HOST'] and
            ("-h %s " % settings.DATABASES[destination]['HOST']) or '',
            settings.DATABASES[destination]['PORT'] and
            ("-p %s " % settings.DATABASES[destination]['PORT']) or '',
            test and settings.DATABASES[destination]['TEST']['NAME'] or
            settings.DATABASES[destination]['NAME'],
        )
        # NOTE(review): shell=True with a string-built command; the pieces
        # come from trusted settings.DATABASES, not user input, but the pipe
        # between pg_dump and pg_restore requires a shell here.
        ret = subprocess.call(commandline, shell=True,
                              stdout=subprocess.DEVNULL,
                              stderr=subprocess.STDOUT)
        if ret:
            raise Exception(
                'Exit code of the database copy command is %d' % ret)

        # Update the scenario table
        destinationscenario.status = 'In use'
        destinationscenario.lastrefresh = datetime.today()
        if 'description' in options:
            destinationscenario.description = options['description']
        destinationscenario.save(using=DEFAULT_DB_ALIAS)

        # Give access to the destination scenario to:
        #  a) the user doing the copy
        #  b) all superusers from the source schema
        User.objects.using(destination).filter(is_superuser=True).update(
            is_active=True)
        User.objects.using(destination).filter(is_superuser=False).update(
            is_active=False)
        if user:
            User.objects.using(destination).filter(
                username=user.username).update(is_active=True)

        # Logging message
        task.status = 'Done'
        task.finished = datetime.now()

        # Update the task in the destination database
        task.message = "Scenario copied from %s" % source
        task.save(using=destination)
        task.message = "Scenario copied to %s" % destination

        # Delete any waiting tasks in the new copy.
        # This is needed for situations where the same source is copied to
        # multiple destinations at the same moment.
        Task.objects.all().using(destination).filter(id__gt=task.id).delete()

    except Exception as e:
        if task:
            task.status = 'Failed'
            task.message = '%s' % e
            task.finished = datetime.now()
        # Release the destination scenario if we had already claimed it.
        if destinationscenario and destinationscenario.status == 'Busy':
            destinationscenario.status = 'Free'
            destinationscenario.save(using=DEFAULT_DB_ALIAS)
        raise e

    finally:
        # The task record always ends up persisted in the source database,
        # and the debug flag is restored.
        if task:
            task.save(using=source)
        settings.DEBUG = tmp_debug