def handle(self, *args, **options):
    if "database" in options:
        database = options["database"] or DEFAULT_DB_ALIAS
    else:
        database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    task = None
    now = datetime.now()
    try:
        # Initialize the task
        if "task" in options and options["task"]:
            try:
                task = Task.objects.all().using(database).get(pk=options["task"])
            except Exception:
                raise CommandError("Task identifier not found")
            if (task.started or task.finished or task.status != "Waiting"
                    or task.name != "my_command"):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
        else:
            task = Task(name="my_command", submitted=now, started=now, status="0%")
        task.save(using=database)

        # Here goes the real business logic
        print("This command was called with argument %s" % options["my_arg"])

        # The task has finished successfully
        task.message = "My task message"
        task.processid = None
        task.status = "Done"
        task.finished = datetime.now()
        task.save(using=database)
    except Exception as e:
        # The task failed
        if task:
            task = Task.objects.all().using(database).get(pk=task.id)
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
            task.processid = None
            task.save(using=database)
        raise e
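# A minimal sketch (not part of the original listing): the handle() above reads
# options["database"], options["task"] and options["my_arg"], so the command's
# add_arguments() needs to declare them. Argument names are taken from the handle()
# body; help texts and defaults are illustrative assumptions.
def add_arguments(self, parser):
    parser.add_argument("--database", default=DEFAULT_DB_ALIAS,
                        help="Database alias to run the command against")
    parser.add_argument("--task", type=int,
                        help="Id of an existing task record to update instead of creating a new one")
    parser.add_argument("--my_arg", help="Example argument used by the business logic above")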
def handle(self, **options):
    # Pick up the options
    now = datetime.now()
    if "database" in options:
        database = options["database"] or DEFAULT_DB_ALIAS
    else:
        database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if "user" in options and options["user"]:
        try:
            user = User.objects.all().using(database).get(username=options["user"])
        except Exception:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        user = None
    timestamp = now.strftime("%Y%m%d%H%M%S")
    if database == DEFAULT_DB_ALIAS:
        logfile = "frepple-%s.log" % timestamp
    else:
        logfile = "frepple_%s-%s.log" % (database, timestamp)

    task = None
    try:
        # Initialize the task
        setattr(_thread_locals, "database", database)
        if "task" in options and options["task"]:
            try:
                task = Task.objects.all().using(database).get(pk=options["task"])
            except Exception:
                raise CommandError("Task identifier not found")
            if (task.started or task.finished or task.status != "Waiting"
                    or task.name not in ("runplan", "odoo_import", "odoo_export")):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
            task.logfile = logfile
        else:
            task = Task(name="runplan", submitted=now, started=now, status="0%",
                        user=user, logfile=logfile)

        # Validate options
        if "constraint" in options:
            constraint = int(options["constraint"])
            if constraint < 0 or constraint > 15:
                raise ValueError("Invalid constraint: %s" % options["constraint"])
        else:
            constraint = 15
        if "plantype" in options:
            plantype = int(options["plantype"])
        else:
            plantype = 1

        # Reset environment variables
        # TODO avoid having to delete the environment variables. Use options directly?
        for label in freppledb.common.commands.PlanTaskRegistry.getLabels():
            if "env" in options:
                # Options specified
                if label[0] in os.environ:
                    del os.environ[label[0]]
            else:
                # No options specified - default to activate them all
                os.environ[label[0]] = "1"

        # Set environment variables
        if options["env"]:
            task.arguments = "--constraint=%d --plantype=%d --env=%s" % (
                constraint, plantype, options["env"])
            for i in options["env"].split(","):
                j = i.split("=")
                if len(j) == 1:
                    os.environ[j[0]] = "1"
                else:
                    os.environ[j[0]] = j[1]
        else:
            task.arguments = "--constraint=%d --plantype=%d" % (constraint, plantype)
        if options["background"]:
            task.arguments += " --background"

        # Log task
        # Different from the other tasks, the frepple engine will write the processid
        task.save(using=database)

        # Locate commands.py
        cmd = freppledb.common.commands.__file__

        def setlimits():
            import resource

            if settings.MAXMEMORYSIZE:
                resource.setrlimit(
                    resource.RLIMIT_AS,
                    (settings.MAXMEMORYSIZE * 1024 * 1024,
                     (settings.MAXMEMORYSIZE + 10) * 1024 * 1024),
                )
            if settings.MAXCPUTIME:
                resource.setrlimit(
                    resource.RLIMIT_CPU,
                    (settings.MAXCPUTIME, settings.MAXCPUTIME + 5),
                )
            # Limiting the file size is a bit tricky, as this limit not only applies to the
            # log file, but also to temp files during the export
            # if settings.MAXTOTALLOGFILESIZE:
            #   resource.setrlimit(
            #     resource.RLIMIT_FSIZE,
            #     (settings.MAXTOTALLOGFILESIZE * 1024 * 1024,
            #      (settings.MAXTOTALLOGFILESIZE + 1) * 1024 * 1024)
            #   )

        # Make sure the forecast engine uses the same correct timezone
        os.environ["PGTZ"] = settings.TIME_ZONE

        # Prepare environment
        os.environ["FREPPLE_PLANTYPE"] = str(plantype)
        os.environ["FREPPLE_CONSTRAINT"] = str(constraint)
        os.environ["FREPPLE_TASKID"] = str(task.id)
        os.environ["FREPPLE_DATABASE"] = database
        os.environ["FREPPLE_LOGFILE"] = logfile
        os.environ["FREPPLE_PROCESSNAME"] = settings.DATABASES[database]["NAME"].replace("demo", "")
        os.environ["PATH"] = (settings.FREPPLE_HOME + os.pathsep + os.environ["PATH"]
                              + os.pathsep + settings.FREPPLE_APP)
        if os.path.isfile(os.path.join(settings.FREPPLE_HOME, "libfrepple.so")):
            os.environ["LD_LIBRARY_PATH"] = settings.FREPPLE_HOME
        if "DJANGO_SETTINGS_MODULE" not in os.environ:
            os.environ["DJANGO_SETTINGS_MODULE"] = "freppledb.settings"
        os.environ["PYTHONPATH"] = os.path.normpath(settings.FREPPLE_APP)
        libdir = os.path.join(os.path.normpath(settings.FREPPLE_HOME), "lib")
        if os.path.isdir(libdir):
            # Folders used by the Windows version
            os.environ["PYTHONPATH"] += os.pathsep + libdir
            if os.path.isfile(os.path.join(libdir, "library.zip")):
                os.environ["PYTHONPATH"] += os.pathsep + os.path.join(libdir, "library.zip")

        if options["background"]:
            # Execute as background process on Windows
            if os.name == "nt":
                startupinfo = subprocess.STARTUPINFO()
                startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
                subprocess.Popen(["frepple", cmd], creationflags=0x08000000,
                                 startupinfo=startupinfo)
            else:
                # Execute as background process on Linux
                subprocess.Popen(["frepple", cmd], preexec_fn=setlimits)
        else:
            if os.name == "nt":
                # Execute in foreground on Windows
                startupinfo = subprocess.STARTUPINFO()
                startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
                ret = subprocess.call(["frepple", cmd], startupinfo=startupinfo)
            else:
                # Execute in foreground on Linux
                ret = subprocess.call(["frepple", cmd], preexec_fn=setlimits)
            if ret != 0 and ret != 2:
                # Return code 0 is a successful run.
                # Return code 2 is a run cancelled by a user. That's shown in the status field.
                raise Exception("Failed with exit code %d" % ret)

        if options["background"]:
            # Wait for the background task to be ready
            while True:
                sleep(5)
                t = Task.objects.using(database).get(pk=task.id)
                if t.status in ["100%", "Canceled", "Failed", "Done"]:
                    break
                if not self.process_exists(t.processid):
                    t.status = "Failed"
                    t.processid = None
                    t.save(update_fields=["processid", "status"], using=database)
                    break
        else:
            # Reread the task from the database and update it
            task = Task.objects.all().using(database).get(pk=task.id)
            task.processid = None
            task.status = "Done"
            task.finished = datetime.now()
            task.save(using=database)
    except Exception as e:
        if task:
            task = Task.objects.all().using(database).get(pk=task.id)
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
            task.processid = None
            task.save(using=database)
        raise e
    finally:
        setattr(_thread_locals, "database", None)
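# For reference, a hedged invocation example (not from the original code): it assumes the
# command is registered as "runplan" (the name stored in the Task record above) and that
# --database, --constraint, --plantype and --env are declared options, matching the
# task.arguments string assembled in the code. The values, including the "supply" label,
# are illustrative only.
from django.core.management import call_command

call_command("runplan", database="default", constraint=15, plantype=1, env="supply")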
def handle(self, **options):
    now = datetime.now()
    database = options["database"]
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)

    # Pick up options
    if options["user"]:
        try:
            user = User.objects.all().get(username=options["user"])
        except Exception:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        user = None

    task = None
    try:
        setattr(_thread_locals, "database", database)
        if "task" in options and options["task"]:
            try:
                task = Task.objects.all().using(database).get(pk=options["task"])
            except Exception:
                raise CommandError("Task identifier not found")
            if (task.started or task.finished or task.status != "Waiting"
                    or task.name not in ("emailreport",)):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
        else:
            task = Task(name="emailreport", submitted=now, started=now, status="0%", user=user)
        task.processid = os.getpid()
        task.save(using=database)

        if not settings.EMAIL_HOST:
            raise CommandError("No SMTP mail server is configured in your djangosettings.py file")

        sender = options["sender"]
        recipient = options["recipient"]
        report = options["report"]

        if not sender:
            raise CommandError("No sender has been defined")
        if not recipient:
            raise CommandError("No recipient has been defined")
        if not report:
            raise CommandError("No report to email has been defined")

        # Make sure the files exist in the export folder
        reports = report.split(",")
        correctedReports = []
        missingFiles = []
        for r in reports:
            if len(r.strip()) == 0:
                continue
            path = os.path.join(
                settings.DATABASES[database]["FILEUPLOADFOLDER"], "export", r.strip())
            if not os.path.isfile(path):
                missingFiles.append(r.strip())
            else:
                correctedReports.append(path)

        if len(missingFiles) > 0:
            raise CommandError("Following files are missing in export folder: %s"
                               % (",".join(str(x) for x in missingFiles)))
        if len(correctedReports) == 0:
            raise CommandError("No report defined in options")

        # Validate email addresses
        recipients = recipient.split(",")
        correctedRecipients = []
        invalidEmails = []
        for r in recipients:
            if len(r.strip()) == 0:
                continue
            if not re.fullmatch(r"[^@]+@[^@]+\.[^@]+", r.strip()):
                invalidEmails.append(r.strip())
            else:
                correctedRecipients.append(r.strip())

        if len(invalidEmails) > 0:
            raise CommandError("Invalid email formatting for following addresses: %s"
                               % (",".join(str(x) for x in invalidEmails)))
        if len(correctedRecipients) == 0:
            raise CommandError("No recipient defined in options")

        task.arguments = "--recipient=%s --report=%s" % (recipient, report)
        task.save(using=database)

        # Create the message
        message = EmailMessage(
            subject="Exported reports",
            body="",
            from_email=sender,
            to=correctedRecipients,
        )

        b = BytesIO()
        with ZipFile(file=b, mode="w", compression=ZIP_DEFLATED) as zf:
            processedFiles = 0
            for f in correctedReports:
                task.message = "Compressing file %s" % basename(f)
                task.status = str(int(processedFiles / len(correctedReports) * 90.0)) + "%"
                task.save(using=database)
                zf.write(filename=f, arcname=basename(f))
                processedFiles = processedFiles + 1
            zf.close()

        # Attach the zip file
        task.status = "90%"
        task.message = "Sending email"
        task.save(using=database)
        message.attach("reports.zip", b.getvalue(), "application/zip")

        # Send the email
        message.send()
        b.close()

        # Logging message
        task.processid = None
        task.message = ""
        task.status = "Done"
        task.finished = datetime.now()
    except Exception as e:
        if task:
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
        raise e
    finally:
        setattr(_thread_locals, "database", None)
        if task:
            task.processid = None
            task.save(using=database)
def handle(self, *args, **options):
    # Pick up the options
    now = datetime.now()
    self.database = options["database"]
    if self.database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % self.database)
    if options["user"]:
        try:
            self.user = User.objects.all().using(self.database).get(username=options["user"])
        except Exception:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        self.user = None

    timestamp = now.strftime("%Y%m%d%H%M%S")
    if self.database == DEFAULT_DB_ALIAS:
        logfile = "exporttofolder-%s.log" % timestamp
    else:
        logfile = "exporttofolder_%s-%s.log" % (self.database, timestamp)

    try:
        handler = logging.FileHandler(os.path.join(settings.FREPPLE_LOGDIR, logfile),
                                      encoding="utf-8")
        # handler.setFormatter(logging.Formatter(settings.LOGGING['formatters']['simple']['format']))
        logger.addHandler(handler)
        logger.propagate = False
    except Exception as e:
        print("%s Failed to open logfile %s: %s"
              % (datetime.now().replace(microsecond=0), logfile, e))

    task = None
    errors = 0
    try:
        # Initialize the task
        setattr(_thread_locals, "database", self.database)
        if options["task"]:
            try:
                task = Task.objects.all().using(self.database).get(pk=options["task"])
            except Exception:
                raise CommandError("Task identifier not found")
            if (task.started or task.finished or task.status != "Waiting"
                    or task.name not in ("frepple_exporttofolder", "exporttofolder")):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
            task.logfile = logfile
        else:
            task = Task(name="exporttofolder", submitted=now, started=now, status="0%",
                        user=self.user, logfile=logfile)
        task.arguments = " ".join(['"%s"' % i for i in args])
        task.processid = os.getpid()
        task.save(using=self.database)

        # Execute
        if os.path.isdir(settings.DATABASES[self.database]["FILEUPLOADFOLDER"]):
            if not os.path.isdir(os.path.join(
                    settings.DATABASES[self.database]["FILEUPLOADFOLDER"], "export")):
                try:
                    os.makedirs(os.path.join(
                        settings.DATABASES[self.database]["FILEUPLOADFOLDER"], "export"))
                except OSError as exception:
                    if exception.errno != errno.EEXIST:
                        raise

            logger.info("%s Started export to folder" % datetime.now().replace(microsecond=0))

            cursor = connections[self.database].cursor()

            task.status = "0%"
            task.save(using=self.database)

            i = 0
            cnt = len(self.statements)

            # Calling all the pre-sql statements
            idx = 1
            for stmt in self.pre_sql_statements:
                try:
                    logger.info("%s Executing pre-statement %s"
                                % (datetime.now().replace(microsecond=0), idx))
                    cursor.execute(stmt)
                    if cursor.rowcount > 0:
                        logger.info("%s %s record(s) modified"
                                    % (datetime.now().replace(microsecond=0), cursor.rowcount))
                except Exception:
                    errors += 1
                    logger.error("%s An error occurred when executing statement %s"
                                 % (datetime.now().replace(microsecond=0), idx))
                idx += 1

            for cfg in self.statements:
                # Validate filename
                filename = cfg.get("filename", None)
                if not filename:
                    raise Exception("Missing filename in export configuration")
                folder = cfg.get("folder", None)
                if not folder:
                    raise Exception("Missing folder in export configuration for %s" % filename)

                # Report progress
                logger.info("%s Started export of %s"
                            % (datetime.now().replace(microsecond=0), filename))
                if task:
                    task.message = "Exporting %s" % filename
                    task.save(using=self.database)

                # Make sure the export folder exists
                exportFolder = os.path.join(
                    settings.DATABASES[self.database]["FILEUPLOADFOLDER"], folder)
                if not os.path.isdir(exportFolder):
                    os.makedirs(exportFolder)

                try:
                    reportclass = cfg.get("report", None)
                    sql = cfg.get("sql", None)
                    if reportclass:
                        # Export from report class
                        # Create a dummy request
                        factory = RequestFactory()
                        request = factory.get("/dummy/", cfg.get("data", {}))
                        if self.user:
                            request.user = self.user
                        else:
                            request.user = User.objects.all().get(username="******")
                        request.database = self.database
                        request.LANGUAGE_CODE = settings.LANGUAGE_CODE
                        request.prefs = cfg.get("prefs", None)

                        # Initialize the report
                        if hasattr(reportclass, "initialize"):
                            reportclass.initialize(request)
                        if hasattr(reportclass, "rows"):
                            if callable(reportclass.rows):
                                request.rows = reportclass.rows(request)
                            else:
                                request.rows = reportclass.rows
                        if hasattr(reportclass, "crosses"):
                            if callable(reportclass.crosses):
                                request.crosses = reportclass.crosses(request)
                            else:
                                request.crosses = reportclass.crosses
                        if reportclass.hasTimeBuckets:
                            reportclass.getBuckets(request)

                        # Write the report file
                        datafile = open(os.path.join(exportFolder, filename), "wb")
                        if filename.endswith(".xlsx"):
                            reportclass._generate_spreadsheet_data(
                                request, [request.database], datafile, **cfg.get("data", {}))
                        elif filename.endswith(".csv"):
                            for r in reportclass._generate_csv_data(
                                    request, [request.database], **cfg.get("data", {})):
                                datafile.write(
                                    r.encode(settings.CSV_CHARSET) if isinstance(r, str) else r)
                        else:
                            raise Exception("Unknown output format for %s" % filename)
                    elif sql:
                        # Exporting using SQL
                        if filename.lower().endswith(".gz"):
                            datafile = gzip.open(os.path.join(exportFolder, filename), "w")
                        else:
                            datafile = open(os.path.join(exportFolder, filename), "w")
                        cursor.copy_expert(sql, datafile)
                    else:
                        raise Exception("Unknown export type for %s" % filename)
                    datafile.close()
                    i += 1
                except Exception as e:
                    errors += 1
                    logger.error("%s Failed to export to %s: %s"
                                 % (datetime.now().replace(microsecond=0), filename, e))
                    if task:
                        task.message = "Failed to export %s" % filename

                task.status = str(int(i / cnt * 100)) + "%"
                task.save(using=self.database)

            logger.info("%s Exported %s file(s)"
                        % (datetime.now().replace(microsecond=0), cnt - errors))

            # Calling all the post-sql statements
            idx = 1
            for stmt in self.post_sql_statements:
                try:
                    logger.info("%s Executing post-statement %s"
                                % (datetime.now().replace(microsecond=0), idx))
                    cursor.execute(stmt)
                    if cursor.rowcount > 0:
                        logger.info("%s %s record(s) modified"
                                    % (datetime.now().replace(microsecond=0), cursor.rowcount))
                except Exception:
                    errors += 1
                    logger.error("%s An error occurred when executing statement %s"
                                 % (datetime.now().replace(microsecond=0), idx))
                idx += 1
        else:
            errors += 1
            logger.error("%s Failed, folder does not exist"
                         % datetime.now().replace(microsecond=0))
            task.message = "Destination folder does not exist"
            task.save(using=self.database)
    except Exception as e:
        logger.error("%s Failed to export: %s" % (datetime.now().replace(microsecond=0), e))
        errors += 1
        if task:
            task.message = "Failed to export"
    finally:
        logger.info("%s End of export to folder\n" % datetime.now().replace(microsecond=0))
        if task:
            if not errors:
                task.status = "100%"
                task.message = "Exported %s data files" % (cnt)
            else:
                task.status = "Failed"
                # task.message = "Exported %s data files, %s failed" % (cnt-errors, errors)
            task.finished = datetime.now()
            task.processid = None
            task.save(using=self.database)
        setattr(_thread_locals, "database", None)
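# A hypothetical example of the configuration this command iterates over: each entry in
# self.statements is read with cfg.get("filename"), cfg.get("folder") and either a
# "report" class (with optional "data" and "prefs" keys) or a raw "sql" statement. The
# entry below is illustrative only; the SQL must be a COPY ... TO STDOUT statement
# because it is passed to cursor.copy_expert().
statements = [
    {
        "filename": "demand.csv.gz",  # the .gz extension triggers the gzip branch above
        "folder": "export",
        "sql": "COPY (select name, item_id, quantity, due from demand) TO STDOUT WITH CSV HEADER",
    },
]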
def handle(self, *fixture_labels, **options):
    # Get the database object
    database = options["database"]
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)

    now = datetime.now()
    task = None
    try:
        setattr(_thread_locals, "database", database)
        # Initialize the task
        if options["task"]:
            try:
                task = Task.objects.all().using(database).get(pk=options["task"])
            except Exception:
                raise CommandError("Task identifier not found")
            if (task.started or task.finished or task.status != "Waiting"
                    or task.name != "loaddata"):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
            task.processid = os.getpid()
            task.save(using=database, update_fields=["started", "status", "processid"])
        else:
            if options["user"]:
                try:
                    user = User.objects.all().using(database).get(username=options["user"])
                except Exception:
                    raise CommandError("User '%s' not found" % options["user"])
            else:
                user = None
            task = Task(name="loaddata", submitted=now, started=now, status="0%",
                        user=user, arguments=" ".join(fixture_labels))
            task.processid = os.getpid()
            task.save(using=database)

        # Execute the standard django command
        super().handle(*fixture_labels, **options)

        # If the fixture doesn't contain the 'demo' word, let's not apply loaddata post-treatments
        for f in fixture_labels:
            if "demo" not in f.lower():
                return

        with transaction.atomic(using=database, savepoint=False):
            if self.verbosity > 2:
                print("updating fixture to current date")
            cursor = connections[database].cursor()
            currentDate = parse(
                Parameter.objects.using(database).get(name="currentdate").value)
            now = datetime.now()
            offset = (now - currentDate).days

            # Update currentdate to now
            cursor.execute(
                "update common_parameter set value = 'now' where name = 'currentdate'")
            # Update demand due dates
            cursor.execute(
                "update demand set due = due + %s * interval '1 day'",
                (offset,),
            )
            # Update PO/DO/MO due dates
            cursor.execute(
                """
                update operationplan
                set startdate = startdate + %s * interval '1 day',
                    enddate = enddate + %s * interval '1 day'
                """,
                2 * (offset,),
            )

            # Update archive tables
            if "freppledb.archive" in settings.INSTALLED_APPS:
                # The ax_manager table needs to be updated in the right order.
                # Otherwise we can get duplicates.
                cursor.execute(
                    "select snapshot_date from ax_manager order by snapshot_date %s"
                    % ("asc" if offset < 0 else "desc"))
                for ax in cursor.fetchall():
                    cursor.execute(
                        """
                        update ax_manager
                        set snapshot_date = snapshot_date + %s * interval '1 day'
                        where snapshot_date = %s
                        """,
                        (offset, ax[0]),
                    )
                cursor.execute(
                    "update ax_buffer set snapshot_date_id = snapshot_date_id + %s * interval '1 day'",
                    (offset,),
                )
                cursor.execute(
                    """
                    update ax_demand
                    set snapshot_date_id = snapshot_date_id + %s * interval '1 day',
                        due = due + %s * interval '1 day',
                        deliverydate = deliverydate + %s * interval '1 day'
                    """,
                    3 * (offset,),
                )
                cursor.execute(
                    """
                    update ax_operationplan
                    set snapshot_date_id = snapshot_date_id + %s * interval '1 day',
                        startdate = startdate + %s * interval '1 day',
                        enddate = enddate + %s * interval '1 day',
                        due = due + %s * interval '1 day'
                    """,
                    4 * (offset,),
                )

        # Task update
        task.status = "Done"
        task.finished = datetime.now()
        task.processid = None
        task.save(using=database, update_fields=["status", "finished"])
    except Exception as e:
        if task:
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
            task.processid = None
            task.save(using=database, update_fields=["status", "finished", "message"])
        raise CommandError("%s" % e)
    finally:
        setattr(_thread_locals, "database", None)
def handle(self, **options):
    # Pick up the options
    database = options["database"]
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if options["user"]:
        try:
            user = User.objects.all().using(database).get(username=options["user"])
        except:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        user = None

    now = datetime.now()
    task = None
    try:
        # Initialize the task
        if options["task"]:
            try:
                task = Task.objects.all().using(database).get(pk=options["task"])
            except:
                raise CommandError("Task identifier not found")
            if (task.started or task.finished or task.status != "Waiting"
                    or task.name not in ("frepple_restore", "restore")):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
        else:
            task = Task(name="restore", submitted=now, started=now, status="0%", user=user)
        task.arguments = options["dump"]
        task.processid = os.getpid()
        task.save(using=database)

        # Validate options
        dumpfile = os.path.abspath(os.path.join(settings.FREPPLE_LOGDIR, options["dump"]))
        if not os.path.isfile(dumpfile):
            raise CommandError("Dump file not found")

        # Run the restore command
        # Commenting the next line is a little more secure, but requires you to create a .pgpass file.
        if settings.DATABASES[database]["PASSWORD"]:
            os.environ["PGPASSWORD"] = settings.DATABASES[database]["PASSWORD"]
        cmd = ["pg_restore", "-n", "public", "-Fc", "-c", "--if-exists"]
        if settings.DATABASES[database]["USER"]:
            cmd.append("--username=%s" % settings.DATABASES[database]["USER"])
        if settings.DATABASES[database]["HOST"]:
            cmd.append("--host=%s" % settings.DATABASES[database]["HOST"])
        if settings.DATABASES[database]["PORT"]:
            cmd.append("--port=%s" % settings.DATABASES[database]["PORT"])
        cmd.append("-d")
        cmd.append(settings.DATABASES[database]["NAME"])
        cmd.append("<%s" % dumpfile)
        # Shell needs to be True in order to interpret the < character
        with subprocess.Popen(cmd, shell=True) as p:
            try:
                task.processid = p.pid
                task.save(using=database)
                p.wait()
            except:
                p.kill()
                p.wait()
                raise Exception("Database restoration failed")

        # Task update
        # We need to recreate a new task record, since the previous one is lost during the restoration.
        task = Task(
            name="restore",
            submitted=task.submitted,
            started=task.started,
            arguments=task.arguments,
            status="Done",
            finished=datetime.now(),
            user=task.user,
        )
    except Exception as e:
        if task:
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
        raise e
    finally:
        # Commit it all, even in case of exceptions
        if task:
            task.processid = None
            task.save(using=database)
def handle(self, **options):
    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    # Pick up options
    force = options["force"]
    promote = options["promote"]
    test = "FREPPLE_TEST" in os.environ
    if options["user"]:
        try:
            user = User.objects.all().get(username=options["user"])
        except Exception:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        user = None

    # Synchronize the scenario table with the settings
    Scenario.syncWithSettings()

    # Initialize the task
    source = options["source"]
    try:
        sourcescenario = Scenario.objects.using(DEFAULT_DB_ALIAS).get(pk=source)
    except Exception:
        raise CommandError("No source database defined with name '%s'" % source)
    now = datetime.now()
    task = None
    if "task" in options and options["task"]:
        try:
            task = Task.objects.all().using(source).get(pk=options["task"])
        except Exception:
            raise CommandError("Task identifier not found")
        if (task.started or task.finished or task.status != "Waiting"
                or task.name not in ("frepple_copy", "scenario_copy")):
            raise CommandError("Invalid task identifier")
        task.status = "0%"
        task.started = now
    else:
        task = Task(name="scenario_copy", submitted=now, started=now, status="0%", user=user)
    task.processid = os.getpid()
    task.save(using=source)

    # Validate the arguments
    destination = options["destination"]
    destinationscenario = None
    try:
        task.arguments = "%s %s" % (source, destination)
        if options["description"]:
            task.arguments += ' --description="%s"' % options["description"].replace('"', '\\"')
        if force:
            task.arguments += " --force"
        task.save(using=source)
        try:
            destinationscenario = Scenario.objects.using(DEFAULT_DB_ALIAS).get(pk=destination)
        except Exception:
            raise CommandError("No destination database defined with name '%s'" % destination)
        if source == destination:
            raise CommandError("Can't copy a schema on itself")
        if sourcescenario.status != "In use":
            raise CommandError("Source scenario is not in use")
        if destinationscenario.status != "Free" and not force and not promote:
            raise CommandError("Destination scenario is not free")
        if promote and (destination != DEFAULT_DB_ALIAS or source == DEFAULT_DB_ALIAS):
            raise CommandError("Incorrect source or destination database with promote flag")

        # Logging message - always logging in the default database
        destinationscenario.status = "Busy"
        destinationscenario.save(using=DEFAULT_DB_ALIAS)

        # Copying the data
        # Commenting the next line is a little more secure, but requires you to create a .pgpass file.
        if settings.DATABASES[source]["PASSWORD"]:
            os.environ["PGPASSWORD"] = settings.DATABASES[source]["PASSWORD"]
        if os.name == "nt":
            # On windows restoring with pg_restore over a pipe is broken :-(
            cmd = "pg_dump -c -Fp %s%s%s%s%s | psql %s%s%s%s"
        else:
            cmd = "pg_dump -Fc %s%s%s%s%s | pg_restore -n public -Fc -c --if-exists %s%s%s -d %s"
        commandline = cmd % (
            settings.DATABASES[source]["USER"] and ("-U %s " % settings.DATABASES[source]["USER"]) or "",
            settings.DATABASES[source]["HOST"] and ("-h %s " % settings.DATABASES[source]["HOST"]) or "",
            settings.DATABASES[source]["PORT"] and ("-p %s " % settings.DATABASES[source]["PORT"]) or "",
            """
            -T common_user
            -T common_scenario
            -T auth_group
            -T auth_group_permission
            -T auth_permission
            -T common_user_groups
            -T common_user_user_permissions
            -T common_preferences
            -T reportmanager_report
            """
            if destination == DEFAULT_DB_ALIAS
            else "",
            test and settings.DATABASES[source]["TEST"]["NAME"] or settings.DATABASES[source]["NAME"],
            settings.DATABASES[destination]["USER"] and ("-U %s " % settings.DATABASES[destination]["USER"]) or "",
            settings.DATABASES[destination]["HOST"] and ("-h %s " % settings.DATABASES[destination]["HOST"]) or "",
            settings.DATABASES[destination]["PORT"] and ("-p %s " % settings.DATABASES[destination]["PORT"]) or "",
            test and settings.DATABASES[destination]["TEST"]["NAME"] or settings.DATABASES[destination]["NAME"],
        )
        with subprocess.Popen(
            commandline,
            shell=True,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.STDOUT,
        ) as p:
            try:
                task.processid = p.pid
                task.save(using=source)
                p.wait()
            except Exception:
                p.kill()
                p.wait()
                # Consider the destination database free again
                destinationscenario.status = "Free"
                destinationscenario.lastrefresh = datetime.today()
                destinationscenario.save(using=DEFAULT_DB_ALIAS)
                raise Exception("Database copy failed")

        # Update the scenario table
        destinationscenario.status = "In use"
        destinationscenario.lastrefresh = datetime.today()
        if options["description"]:
            destinationscenario.description = options["description"]
        destinationscenario.save(using=DEFAULT_DB_ALIAS)

        # Give access to the destination scenario to:
        #  a) the user doing the copy
        #  b) all superusers from the source schema
        # unless it's a promotion
        if destination != DEFAULT_DB_ALIAS:
            User.objects.using(destination).filter(is_superuser=True).update(is_active=True)
            User.objects.using(destination).filter(is_superuser=False).update(is_active=False)
            if user:
                User.objects.using(destination).filter(username=user.username).update(is_active=True)

        # Logging message
        task.processid = None
        task.status = "Done"
        task.finished = datetime.now()

        # Update the task in the destination database
        task.message = "Scenario %s from %s" % ("promoted" if promote else "copied", source)
        task.save(using=destination)
        task.message = "Scenario copied to %s" % destination

        # Delete any waiting tasks in the new copy.
        # This is needed for situations where the same source is copied to
        # multiple destinations at the same moment.
        Task.objects.all().using(destination).filter(id__gt=task.id).delete()
    except Exception as e:
        if task:
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
        if destinationscenario and destinationscenario.status == "Busy":
            destinationscenario.status = "Free"
            destinationscenario.save(using=DEFAULT_DB_ALIAS)
        raise e
    finally:
        if task:
            task.processid = None
            task.save(using=source)
        settings.DEBUG = tmp_debug
def handle(self, **options):
    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    # Pick up options
    force = options["force"]
    promote = options["promote"]
    test = "FREPPLE_TEST" in os.environ
    if options["user"]:
        try:
            user = User.objects.all().get(username=options["user"])
        except Exception:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        user = None

    # Synchronize the scenario table with the settings
    Scenario.syncWithSettings()

    # Initialize the task
    source = options["source"]
    try:
        sourcescenario = Scenario.objects.using(DEFAULT_DB_ALIAS).get(pk=source)
    except Exception:
        raise CommandError("No source database defined with name '%s'" % source)
    now = datetime.now()
    task = None
    if "task" in options and options["task"]:
        try:
            task = Task.objects.all().using(source).get(pk=options["task"])
        except Exception:
            raise CommandError("Task identifier not found")
        if (task.started or task.finished or task.status != "Waiting"
                or task.name != "scenario_copy"):
            raise CommandError("Invalid task identifier")
        task.status = "0%"
        task.started = now
    else:
        task = Task(name="scenario_copy", submitted=now, started=now, status="0%", user=user)
    task.processid = os.getpid()
    task.save(using=source)

    # Validate the arguments
    destination = options["destination"]
    destinationscenario = None
    try:
        task.arguments = "%s%s %s" % (
            ("--dumpfile=%s " % options["dumpfile"]) if options["dumpfile"] else "",
            source,
            destination,
        )
        if options["description"]:
            task.arguments += ' --description="%s"' % options["description"].replace('"', '\\"')
        if force:
            task.arguments += " --force"
        task.save(using=source)
        try:
            destinationscenario = Scenario.objects.using(DEFAULT_DB_ALIAS).get(pk=destination)
        except Exception:
            raise CommandError("No destination database defined with name '%s'" % destination)
        if source == destination:
            raise CommandError("Can't copy a schema on itself")
        if sourcescenario.status != "In use":
            raise CommandError("Source scenario is not in use")
        if destinationscenario.status != "Free" and not force and not promote:
            # Make sure the destination scenario is properly built, otherwise it is considered free
            scenario_is_free = False
            try:
                User.objects.using(destination).all().count()  # fails if scenario not properly built
            except Exception:
                scenario_is_free = True
            if not scenario_is_free:
                raise CommandError("Destination scenario is not free")
        if promote and (destination != DEFAULT_DB_ALIAS or source == DEFAULT_DB_ALIAS):
            raise CommandError("Incorrect source or destination database with promote flag")

        # Check that the dump file exists
        if options["dumpfile"] and not os.path.isfile(
                os.path.join(settings.FREPPLE_LOGDIR, options["dumpfile"])):
            raise CommandError("Cannot find dump file %s" % options["dumpfile"])

        # Logging message - always logging in the default database
        destinationscenario.status = "Busy"
        destinationscenario.save(using=DEFAULT_DB_ALIAS)

        # Tables excluded from the promotion task
        excludedTables = [
            "common_user",
            "common_scenario",
            "auth_group",
            "auth_group_permission",
            "auth_permission",
            "django_content_type",
            "common_comment",
            "common_notification",
            "common_follower",
            "common_user_groups",
            "common_attribute",
            "common_user_user_permissions",
            "common_preferences",
            "reportmanager_report",
            "reportmanager_column",
            "execute_schedule",
        ]

        # Copying the data
        # Commenting the next line is a little more secure, but requires you to create a .pgpass file.
        if not options["dumpfile"]:
            if settings.DATABASES[source]["PASSWORD"]:
                os.environ["PGPASSWORD"] = settings.DATABASES[source]["PASSWORD"]
            if os.name == "nt":
                # On windows restoring with pg_restore over a pipe is broken :-(
                cmd = "pg_dump -c -Fp %s%s%s%s%s | psql %s%s%s%s"
            else:
                cmd = "pg_dump -Fc %s%s%s%s%s | pg_restore -n public -Fc -c --if-exists %s%s%s -d %s"
            commandline = cmd % (
                settings.DATABASES[source]["USER"] and ("-U %s " % settings.DATABASES[source]["USER"]) or "",
                settings.DATABASES[source]["HOST"] and ("-h %s " % settings.DATABASES[source]["HOST"]) or "",
                settings.DATABASES[source]["PORT"] and ("-p %s " % settings.DATABASES[source]["PORT"]) or "",
                ("%s " % (" -T ".join(["", *excludedTables]))) if destination == DEFAULT_DB_ALIAS else "",
                test and settings.DATABASES[source]["TEST"]["NAME"] or settings.DATABASES[source]["NAME"],
                settings.DATABASES[destination]["USER"] and ("-U %s " % settings.DATABASES[destination]["USER"]) or "",
                settings.DATABASES[destination]["HOST"] and ("-h %s " % settings.DATABASES[destination]["HOST"]) or "",
                settings.DATABASES[destination]["PORT"] and ("-p %s " % settings.DATABASES[destination]["PORT"]) or "",
                test and settings.DATABASES[destination]["TEST"]["NAME"] or settings.DATABASES[destination]["NAME"],
            )
        else:
            cmd = "pg_restore -n public -Fc -c --if-exists --no-password %s%s%s -d %s %s"
            commandline = cmd % (
                settings.DATABASES[destination]["USER"] and ("-U %s " % settings.DATABASES[destination]["USER"]) or "",
                settings.DATABASES[destination]["HOST"] and ("-h %s " % settings.DATABASES[destination]["HOST"]) or "",
                settings.DATABASES[destination]["PORT"] and ("-p %s " % settings.DATABASES[destination]["PORT"]) or "",
                test and settings.DATABASES[destination]["TEST"]["NAME"] or settings.DATABASES[destination]["NAME"],
                os.path.join(settings.FREPPLE_LOGDIR, options["dumpfile"]),
            )

        with subprocess.Popen(
            commandline,
            shell=True,
            stdout=subprocess.DEVNULL,
            stderr=subprocess.STDOUT,
        ) as p:
            try:
                task.processid = p.pid
                task.save(using=source)
                p.wait()
                # Successful copy can still leave warnings and errors.
                # To confirm the copy is ok, let's check that the scenario copy task exists
                # in the destination database.
                t = Task.objects.using(destination).filter(id=task.id).first()
                if not t or t.name != task.name or t.submitted != task.submitted:
                    destinationscenario.status = "Free"
                    destinationscenario.lastrefresh = datetime.today()
                    destinationscenario.save(using=DEFAULT_DB_ALIAS)
                    raise Exception("Database copy failed")
                t.status = "Done"
                t.finished = datetime.now()
                t.message = "Scenario copied from %s" % source
                t.save(using=destination, update_fields=["status", "finished", "message"])
            except Exception:
                p.kill()
                p.wait()
                # Consider the destination database free again
                if destination != DEFAULT_DB_ALIAS:
                    destinationscenario.status = "Free"
                    destinationscenario.lastrefresh = datetime.today()
                    destinationscenario.save(using=DEFAULT_DB_ALIAS)
                raise Exception("Database copy failed")

        # Check the permissions after restoring a backup.
        if (
            options["dumpfile"]
            and task.user
            and not User.objects.using(destination)
            .filter(username=task.user.username, is_active=True)
            .count()
        ):
            # Restoring a backup shouldn't give a user access to data he didn't have access to before...
            raise Exception(
                "Permission denied - you didn't have access rights to the scenario that was backed up"
            )

        # Update the scenario table
        destinationscenario.status = "In use"
        destinationscenario.lastrefresh = datetime.today()
        if options["description"]:
            destinationscenario.description = options["description"]
        destinationscenario.save(using=DEFAULT_DB_ALIAS)

        # Delete the parameter that marks a running worker
        if destination != DEFAULT_DB_ALIAS:
            try:
                Parameter.objects.using(destination).filter(name="Worker alive").delete()
            except BaseException:
                pass

        # Give access to the destination scenario to:
        #  a) the user doing the copy
        #  b) all active superusers from the source schema
        # unless it's a promotion
        if destination != DEFAULT_DB_ALIAS:
            User.objects.using(destination).filter(is_superuser=True, is_active=True).update(is_active=True)
            User.objects.using(destination).filter(is_superuser=False).update(is_active=False)
            if user:
                User.objects.using(destination).filter(username=user.username).update(is_active=True)

        # Delete data files present in the scenario folders
        if destination != DEFAULT_DB_ALIAS and settings.DATABASES[destination][
            "FILEUPLOADFOLDER"
        ] not in (
            settings.DATABASES[DEFAULT_DB_ALIAS]["FILEUPLOADFOLDER"],
            settings.DATABASES[source]["FILEUPLOADFOLDER"],
        ):
            FileManager.cleanFolder(0, destination)
            FileManager.cleanFolder(1, destination)

        # Logging message
        task.processid = None
        task.status = "Done"
        task.finished = datetime.now()

        # Update the task in the destination database
        dest_task = Task(
            name=task.name,
            submitted=task.submitted,
            started=task.started,
            finished=task.finished,
            arguments=task.arguments,
            status="Done",
            message=task.message,
            user=user,
        )
        if options["dumpfile"]:
            dest_task.message = "Scenario restored from %s" % options["dumpfile"]
        elif promote:
            dest_task.message = "Scenario promoted from %s" % source
        else:
            dest_task.message = "Scenario copied from %s" % source
        dest_task.save(using=destination)
        if options["dumpfile"]:
            task.message = "Scenario %s restored from %s" % (destination, options["dumpfile"])
        else:
            task.message = "Scenario copied to %s" % destination

        # Delete any waiting tasks in the new copy.
        # This is needed for situations where the same source is copied to
        # multiple destinations at the same moment.
        if not options["dumpfile"]:
            Task.objects.all().using(destination).filter(id__gt=task.id).delete()

        # Don't automate any task in the new copy
        if not promote:
            for i in ScheduledTask.objects.all().using(destination):
                i.next_run = None
                i.data.pop("starttime", None)
                i.data.pop("monday", None)
                i.data.pop("tuesday", None)
                i.data.pop("wednesday", None)
                i.data.pop("thursday", None)
                i.data.pop("friday", None)
                i.data.pop("saturday", None)
                i.data.pop("sunday", None)
                i.save(using=destination)

        if options["dumpfile"]:
            setattr(_thread_locals, "database", destination)
            call_command("migrate", database=destination)
            delattr(_thread_locals, "database")
    except Exception as e:
        if task:
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
        if destinationscenario and destinationscenario.status == "Busy":
            if destination == DEFAULT_DB_ALIAS:
                destinationscenario.status = "In use"
            else:
                destinationscenario.status = "Free"
            destinationscenario.save(using=DEFAULT_DB_ALIAS)
        raise e
    finally:
        if task:
            task.processid = None
            task.save(using=source)
        settings.DEBUG = tmp_debug
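# A hedged usage sketch (not part of the original code): launching the copy from Python.
# It assumes the command is registered as "scenario_copy" (the name stored in the Task
# records above) and that the source and destination aliases are positional arguments;
# the scenario names and description are placeholders.
from django.core.management import call_command

call_command("scenario_copy", "default", "scenario1", force=True, description="what-if experiment")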
def handle(self, **options):
    # Pick up the options
    now = datetime.now()
    self.database = options['database']
    if self.database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % self.database)
    if options['user']:
        try:
            self.user = User.objects.all().using(self.database).get(username=options['user'])
        except:
            raise CommandError("User '%s' not found" % options['user'])
    else:
        self.user = None

    timestamp = now.strftime("%Y%m%d%H%M%S")
    if self.database == DEFAULT_DB_ALIAS:
        logfile = 'importfromfolder-%s.log' % timestamp
    else:
        logfile = 'importfromfolder_%s-%s.log' % (self.database, timestamp)

    try:
        handler = logging.FileHandler(os.path.join(settings.FREPPLE_LOGDIR, logfile),
                                      encoding='utf-8')
        # handler.setFormatter(logging.Formatter(settings.LOGGING['formatters']['simple']['format']))
        logger.addHandler(handler)
        logger.propagate = False
    except Exception as e:
        print("%s Failed to open logfile %s: %s" % (datetime.now(), logfile, e))

    task = None
    errors = [0, 0]
    returnederrors = [0, 0]
    try:
        setattr(_thread_locals, 'database', self.database)
        # Initialize the task
        if options['task']:
            try:
                task = Task.objects.all().using(self.database).get(pk=options['task'])
            except:
                raise CommandError("Task identifier not found")
            if (task.started or task.finished or task.status != "Waiting"
                    or task.name not in ('frepple_importfromfolder', 'importfromfolder')):
                raise CommandError("Invalid task identifier")
            task.status = '0%'
            task.started = now
            task.logfile = logfile
        else:
            task = Task(name='importfromfolder', submitted=now, started=now, status='0%',
                        user=self.user, logfile=logfile)
        task.processid = os.getpid()
        task.save(using=self.database)

        # Choose the right self.delimiter and language
        self.delimiter = get_format('DECIMAL_SEPARATOR', settings.LANGUAGE_CODE, True) == ',' and ';' or ','
        translation.activate(settings.LANGUAGE_CODE)

        # Execute
        if 'FILEUPLOADFOLDER' in settings.DATABASES[self.database] \
                and os.path.isdir(settings.DATABASES[self.database]['FILEUPLOADFOLDER']):
            # Open the logfile
            logger.info("%s Started importfromfolder\n" % datetime.now().replace(microsecond=0))

            all_models = [(ct.model_class(), ct.pk) for ct in ContentType.objects.all() if ct.model_class()]
            models = []
            for ifile in os.listdir(settings.DATABASES[self.database]['FILEUPLOADFOLDER']):
                if not ifile.lower().endswith(('.csv', '.csv.gz', '.xlsx')):
                    continue
                filename0 = ifile.split('.')[0]
                model = None
                contenttype_id = None
                for m, ct in all_models:
                    if matchesModelName(filename0, m):
                        model = m
                        contenttype_id = ct
                        logger.info("%s Matched a model to file: %s"
                                    % (datetime.now().replace(microsecond=0), ifile))
                        break
                if not model or model in EXCLUDE_FROM_BULK_OPERATIONS:
                    logger.info("%s Ignoring data in file: %s"
                                % (datetime.now().replace(microsecond=0), ifile))
                elif self.user and not self.user.has_perm('%s.%s' % (
                        model._meta.app_label, get_permission_codename('add', model._meta))):
                    # Check permissions
                    logger.info("%s You don't have permissions to add: %s"
                                % (datetime.now().replace(microsecond=0), ifile))
                else:
                    deps = set([model])
                    GridReport.dependent_models(model, deps)
                    models.append((ifile, model, contenttype_id, deps))

            # Sort the list of models, based on dependencies between models
            models = GridReport.sort_models(models)

            i = 0
            cnt = len(models)
            for ifile, model, contenttype_id, dependencies in models:
                task.status = str(int(10 + i / cnt * 80)) + '%'
                task.message = 'Processing data file %s' % ifile
                task.save(using=self.database)
                i += 1
                filetoparse = os.path.join(
                    os.path.abspath(settings.DATABASES[self.database]['FILEUPLOADFOLDER']), ifile)
                if ifile.lower().endswith('.xlsx'):
                    logger.info("%s Started processing data in Excel file: %s"
                                % (datetime.now().replace(microsecond=0), ifile))
                    returnederrors = self.loadExcelfile(model, filetoparse)
                    errors[0] += returnederrors[0]
                    errors[1] += returnederrors[1]
                    logger.info("%s Finished processing data in file: %s"
                                % (datetime.now().replace(microsecond=0), ifile))
                else:
                    logger.info("%s Started processing data in CSV file: %s"
                                % (datetime.now().replace(microsecond=0), ifile))
                    returnederrors = self.loadCSVfile(model, filetoparse)
                    errors[0] += returnederrors[0]
                    errors[1] += returnederrors[1]
                    logger.info("%s Finished processing data in CSV file: %s"
                                % (datetime.now().replace(microsecond=0), ifile))
        else:
            errors[0] += 1
            cnt = 0
            logger.error("%s Failed, folder does not exist" % datetime.now().replace(microsecond=0))

        # Task update
        if errors[0] > 0:
            task.status = 'Failed'
            if not cnt:
                task.message = "Destination folder does not exist"
            else:
                task.message = "Uploaded %s data files with %s errors and %s warnings" % (
                    cnt, errors[0], errors[1])
        else:
            task.status = 'Done'
            task.message = "Uploaded %s data files with %s warnings" % (cnt, errors[1])
        task.finished = datetime.now()
    except KeyboardInterrupt:
        if task:
            task.status = 'Cancelled'
            task.message = 'Cancelled'
        logger.info('%s Cancelled\n' % datetime.now().replace(microsecond=0))
    except Exception as e:
        logger.error("%s Failed" % datetime.now().replace(microsecond=0))
        if task:
            task.status = 'Failed'
            task.message = '%s' % e
        raise e
    finally:
        setattr(_thread_locals, 'database', None)
        if task:
            if errors[0] == 0:
                task.status = 'Done'
            else:
                task.status = 'Failed'
            task.processid = None
            task.finished = datetime.now()
            task.save(using=self.database)
        logger.info('%s End of importfromfolder\n' % datetime.now().replace(microsecond=0))
def handle(self, **options):
    # Pick up the options
    database = options['database']
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if options['user']:
        try:
            user = User.objects.all().using(database).get(username=options['user'])
        except:
            raise CommandError("User '%s' not found" % options['user'])
    else:
        user = None

    now = datetime.now()
    task = None
    try:
        # Initialize the task
        if options['task']:
            try:
                task = Task.objects.all().using(database).get(pk=options['task'])
            except:
                raise CommandError("Task identifier not found")
            if (task.started or task.finished or task.status != "Waiting"
                    or task.name not in ('frepple_loadxml', 'loadxml')):
                raise CommandError("Invalid task identifier")
            task.status = '0%'
            task.started = now
        else:
            task = Task(name='loadxml', submitted=now, started=now, status='0%', user=user)
        task.arguments = ' '.join(options['file'])
        task.processid = os.getpid()
        task.save(using=database)

        # Execute
        # TODO: if frePPLe is available as a module, we don't really need to spawn another process.
        os.environ['FREPPLE_HOME'] = settings.FREPPLE_HOME.replace('\\', '\\\\')
        os.environ['FREPPLE_APP'] = settings.FREPPLE_APP
        os.environ['FREPPLE_DATABASE'] = database
        os.environ['PATH'] = (settings.FREPPLE_HOME + os.pathsep + os.environ['PATH']
                              + os.pathsep + settings.FREPPLE_APP)
        os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
        if 'DJANGO_SETTINGS_MODULE' not in os.environ:
            os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
        if os.path.exists(os.path.join(os.environ['FREPPLE_HOME'], 'python36.zip')):
            # For the py2exe executable
            os.environ['PYTHONPATH'] = os.path.join(
                os.environ['FREPPLE_HOME'],
                'python%d%d.zip' % (sys.version_info[0], sys.version_info[1])
            ) + os.pathsep + os.path.normpath(os.environ['FREPPLE_APP'])
        else:
            # Other executables
            os.environ['PYTHONPATH'] = os.path.normpath(os.environ['FREPPLE_APP'])
        cmdline = ['"%s"' % i for i in options['file']]
        cmdline.insert(0, 'frepple')
        cmdline.append('"%s"' % os.path.join(settings.FREPPLE_APP, 'freppledb', 'execute', 'loadxml.py'))
        proc = subprocess.run(' '.join(cmdline))
        if proc.returncode:
            raise Exception('Exit code of the batch run is %d' % proc.returncode)

        # Task update
        task.status = 'Done'
        task.finished = datetime.now()
    except Exception as e:
        if task:
            task.status = 'Failed'
            task.message = '%s' % e
            task.finished = datetime.now()
        raise e
    finally:
        if task:
            task.processid = None
            task.save(using=database)
def handle(self, **options):
    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    # Pick up options
    if options["user"]:
        try:
            user = User.objects.all().get(username=options["user"])
        except Exception:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        user = None

    sender = options["sender"]
    recipient = options["recipient"]
    report = options["report"]

    if not sender:
        raise CommandError("No sender has been defined")
    if not recipient:
        raise CommandError("No recipient has been defined")
    if not report:
        raise CommandError("No report to email has been defined")

    database = options["database"]

    # Make sure the files exist in the export folder
    reports = report.split(";")
    correctedReports = []
    missingFiles = []
    for r in reports:
        if len(r.strip()) == 0:
            continue
        path = os.path.join(settings.DATABASES[database]["FILEUPLOADFOLDER"], "export", r.strip())
        if not os.path.isfile(path):
            missingFiles.append(r.strip())
        else:
            correctedReports.append(path)

    if len(missingFiles) > 0:
        raise CommandError("Following files are missing in export folder: %s"
                           % (";".join(str(x) for x in missingFiles)))
    if len(correctedReports) == 0:
        raise CommandError("No report defined in options")

    # Validate email addresses
    recipients = recipient.split(";")
    correctedRecipients = []
    invalidEmails = []
    for r in recipients:
        if len(r.strip()) == 0:
            continue
        if not re.fullmatch(r"[^@]+@[^@]+\.[^@]+", r.strip()):
            invalidEmails.append(r.strip())
        else:
            correctedRecipients.append(r.strip())

    if len(invalidEmails) > 0:
        raise CommandError("Invalid email formatting for following addresses: %s"
                           % (";".join(str(x) for x in invalidEmails)))
    if len(correctedRecipients) == 0:
        raise CommandError("No recipient defined in options")

    now = datetime.now()
    task = None
    if "task" in options and options["task"]:
        try:
            task = Task.objects.all().using(database).get(pk=options["task"])
        except Exception:
            raise CommandError("Task identifier not found")
        if (task.started or task.finished or task.status != "Waiting"
                or task.name not in ("emailreport",)):
            raise CommandError("Invalid task identifier")
        task.status = "0%"
        task.started = now
    else:
        task = Task(name="emailreport", submitted=now, started=now, status="0%", user=user)
    task.processid = os.getpid()
    task.save(using=database)

    try:
        task.arguments = "--recipient=%s --report=%s" % (recipient, report)
        task.save(using=database)

        # Create the message
        message = EmailMessage(
            subject="Exported reports",
            body="",
            from_email=sender,
            to=correctedRecipients,
        )

        b = BytesIO()
        with ZipFile(b, mode="w") as zf:
            for f in correctedReports:
                zf.write(f, basename(f))
            zf.close()

        # Attach the zip file
        message.attach("reports.zip", b.getvalue(), "application/zip")

        # Send the email
        message.send()
        b.close()

        # Logging message
        task.processid = None
        task.status = "Done"
        task.finished = datetime.now()
    except Exception as e:
        if task:
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
        raise e
    finally:
        if task:
            task.processid = None
            task.save(using=database)
        settings.DEBUG = tmp_debug
def handle(self, *fixture_labels, **options):
    # Get the database object
    database = options["database"]
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)

    now = datetime.now()
    task = None
    try:
        setattr(_thread_locals, "database", database)
        # Initialize the task
        if options["task"]:
            try:
                task = Task.objects.all().using(database).get(pk=options["task"])
            except:
                raise CommandError("Task identifier not found")
            if (task.started or task.finished or task.status != "Waiting"
                    or task.name != "loaddata"):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
            task.processid = os.getpid()
            task.save(using=database, update_fields=["started", "status", "processid"])
        else:
            if options["user"]:
                try:
                    user = User.objects.all().using(database).get(username=options["user"])
                except:
                    raise CommandError("User '%s' not found" % options["user"])
            else:
                user = None
            task = Task(
                name="loaddata",
                submitted=now,
                started=now,
                status="0%",
                user=user,
                arguments=" ".join(fixture_labels),
            )
            task.processid = os.getpid()
            task.save(using=database)

        # Execute the standard django command
        super().handle(*fixture_labels, **options)

        # If the fixture doesn't contain the 'demo' word, let's not apply loaddata post-treatments
        for f in fixture_labels:
            if "demo" not in f.lower():
                return

        with transaction.atomic(using=database, savepoint=False):
            if self.verbosity > 2:
                print("updating fixture to current date")
            cursor = connections[database].cursor()
            cursor.execute(
                """
                select to_timestamp(value,'YYYY-MM-DD hh24:mi:ss')
                from common_parameter where name = 'currentdate'
                """
            )
            currentDate = cursor.fetchone()[0]
            now = datetime.now()
            offset = (now - currentDate).days

            # Update currentdate to now
            cursor.execute(
                "update common_parameter set value = 'now' where name = 'currentdate'")
            # Update demand due dates
            cursor.execute(
                "update demand set due = due + %s * interval '1 day'",
                (offset,),
            )
            # Update PO/DO/MO due dates
            cursor.execute(
                """
                update operationplan
                set startdate = startdate + %s * interval '1 day',
                    enddate = enddate + %s * interval '1 day'
                """,
                2 * (offset,),
            )

        # Task update
        task.status = "Done"
        task.finished = datetime.now()
        task.processid = None
        task.save(using=database, update_fields=["status", "finished"])
    except Exception as e:
        if task:
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
            task.processid = None
            task.save(using=database, update_fields=["status", "finished", "message"])
        raise CommandError("%s" % e)
    finally:
        setattr(_thread_locals, "database", None)
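# A hedged invocation example (not from the original code): load a demo fixture into a
# scenario database and let the command create its own Task record. The fixture name and
# scenario alias are placeholders; --user is the extra option this override reads on top
# of the standard loaddata options, and is assumed to be declared in add_arguments().
from django.core.management import call_command

call_command("loaddata", "manufacturing_demo", database="scenario1", user="admin")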
def handle(self, **options):
    # Pick up options
    database = options['database']
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if options['user']:
        try:
            user = User.objects.all().using(database).get(
                username=options['user'])
        except Exception:
            raise CommandError("User '%s' not found" % options['user'])
    else:
        user = None
    if options['models']:
        models = options['models'].split(',')
    else:
        models = None

    now = datetime.now()
    task = None
    try:
        # Initialize the task
        if options['task']:
            try:
                task = Task.objects.all().using(database).get(
                    pk=options['task'])
            except Exception:
                raise CommandError("Task identifier not found")
            if task.started or task.finished or task.status != "Waiting" or task.name not in (
                    'frepple_flush', 'empty'):
                raise CommandError("Invalid task identifier")
            task.status = '0%'
            task.started = now
        else:
            task = Task(name='empty', submitted=now, started=now,
                        status='0%', user=user)
        task.processid = os.getpid()
        task.save(using=database)

        # Create a database connection
        cursor = connections[database].cursor()

        # Get a list of all django tables in the database
        tables = set(
            connections[database].introspection.django_table_names(
                only_existing=True))
        ContentTypekeys = set()

        # Validate the user list of tables
        if models:
            models2tables = set()
            admin_log_positive = True
            for m in models:
                try:
                    x = m.split('.', 1)
                    x = apps.get_model(x[0], x[1])
                    if x in EXCLUDE_FROM_BULK_OPERATIONS:
                        continue
                    ContentTypekeys.add(
                        ContentType.objects.get_for_model(x).pk)
                    x = x._meta.db_table
                    if x not in tables:
                        raise Exception("Model has no table in this database")
                    models2tables.add(x)
                except Exception:
                    raise CommandError("Invalid model to erase: %s" % m)
            tables = models2tables
        else:
            admin_log_positive = False
            tables.discard('django_admin_log')
            for i in EXCLUDE_FROM_BULK_OPERATIONS:
                tables.discard(i._meta.db_table)
                ContentTypekeys.add(ContentType.objects.get_for_model(i).pk)

        # Some tables need special handling
        if 'operationplan' in tables:
            tables.add('operationplanmaterial')
            tables.add('operationplanresource')
            tables.add('out_problem')
        if 'resource' in tables and 'out_resourceplan' not in tables:
            tables.add('out_resourceplan')
        if 'demand' in tables and 'out_constraint' not in tables:
            tables.add('out_constraint')
        tables.discard('auth_group_permissions')
        tables.discard('auth_permission')
        tables.discard('auth_group')
        tables.discard('django_session')
        tables.discard('common_user')
        tables.discard('common_user_groups')
        tables.discard('common_user_user_permissions')
        tables.discard('common_preference')
        tables.discard('django_content_type')
        tables.discard('execute_log')
        tables.discard('common_scenario')

        # Delete all records from the tables.
        with transaction.atomic(using=database, savepoint=False):
            if ContentTypekeys:
                if admin_log_positive:
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = any(%s)",
                        (list(ContentTypekeys), ))
                else:
                    cursor.execute(
                        "delete from django_admin_log where content_type_id != any(%s)",
                        (list(ContentTypekeys), ))
            if "common_bucket" in tables:
                cursor.execute('update common_user set horizonbuckets = null')
            for stmt in connections[database].ops.sql_flush(
                    no_style(), tables, []):
                cursor.execute(stmt)
            if models:
                if 'input.purchaseorder' in models:
                    cursor.execute('''
                        delete from operationplanresource
                        where operationplan_id in (
                          select operationplan.id from operationplan where type = 'PO')
                        ''')
                    cursor.execute('''
                        delete from operationplanmaterial
                        where operationplan_id in (
                          select operationplan.id from operationplan where type = 'PO')
                        ''')
                    cursor.execute("delete from operationplan where type = 'PO'")
                    key = ContentType.objects.get_for_model(
                        inputmodels.PurchaseOrder, for_concrete_model=False).pk
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = %s",
                        (key, ))
                if 'input.distributionorder' in models:
                    cursor.execute('''
                        delete from operationplanresource
                        where operationplan_id in (
                          select operationplan.id from operationplan where type = 'DO')
                        ''')
                    cursor.execute('''
                        delete from operationplanmaterial
                        where operationplan_id in (
                          select operationplan.id from operationplan where type = 'DO')
                        ''')
                    cursor.execute("delete from operationplan where type = 'DO'")
                    key = ContentType.objects.get_for_model(
                        inputmodels.DistributionOrder, for_concrete_model=False).pk
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = %s",
                        (key, ))
                if 'input.manufacturingorder' in models:
                    cursor.execute('''
                        delete from operationplanmaterial
                        where operationplan_id in (
                          select operationplan.id from operationplan where type = 'MO')
                        ''')
                    cursor.execute('''
                        delete from operationplanresource
                        where operationplan_id in (
                          select operationplan.id from operationplan where type = 'MO')
                        ''')
                    cursor.execute("delete from operationplan where type = 'MO'")
                    key = ContentType.objects.get_for_model(
                        inputmodels.ManufacturingOrder, for_concrete_model=False).pk
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = %s",
                        (key, ))
                if 'input.deliveryorder' in models:
                    cursor.execute('''
                        delete from operationplanmaterial
                        where operationplan_id in (
                          select operationplan.id from operationplan where type = 'DLVR')
                        ''')
                    cursor.execute('''
                        delete from operationplanresource
                        where operationplan_id in (
                          select operationplan.id from operationplan where type = 'DLVR')
                        ''')
                    cursor.execute("delete from operationplan where type = 'DLVR'")
                    key = ContentType.objects.get_for_model(
                        inputmodels.DeliveryOrder, for_concrete_model=False).pk
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = %s",
                        (key, ))

        # Keep the database in shape
        cursor.execute("vacuum analyze")

        # Task update
        task.status = 'Done'
        task.finished = datetime.now()
        task.processid = None
        task.save(using=database)
    except Exception as e:
        if task:
            task.status = 'Failed'
            task.message = '%s' % e
            task.finished = datetime.now()
            task.processid = None
            task.save(using=database)
        raise CommandError('%s' % e)
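# The four per-type cleanups above differ only in the operationplan type code
# and the model used to purge the admin log. A sketch of the same work driven
# from a mapping is shown below; purge_orders is a hypothetical refactoring and
# assumes the same cursor, inputmodels and ContentType objects as the command
# above.
_ORDER_TYPES = {
    "input.purchaseorder": ("PO", inputmodels.PurchaseOrder),
    "input.distributionorder": ("DO", inputmodels.DistributionOrder),
    "input.manufacturingorder": ("MO", inputmodels.ManufacturingOrder),
    "input.deliveryorder": ("DLVR", inputmodels.DeliveryOrder),
}


def purge_orders(cursor, models):
    for name, (typecode, model) in _ORDER_TYPES.items():
        if name not in models:
            continue
        # Remove the dependent records first, then the operationplans themselves
        for tbl in ("operationplanresource", "operationplanmaterial"):
            cursor.execute(
                "delete from %s where operationplan_id in "
                "(select id from operationplan where type = %%s)" % tbl,
                (typecode,),
            )
        cursor.execute("delete from operationplan where type = %s", (typecode,))
        # Drop the matching admin-log entries
        key = ContentType.objects.get_for_model(model, for_concrete_model=False).pk
        cursor.execute(
            "delete from django_admin_log where content_type_id = %s", (key,)
        )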
def handle(self, **options):
    now = datetime.now()
    database = options["database"]
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)

    # Pick up options
    if options["user"]:
        try:
            user = User.objects.all().get(username=options["user"])
        except Exception:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        user = None

    task = None
    old_thread_locals = getattr(_thread_locals, "database", None)
    try:
        setattr(_thread_locals, "database", database)
        if "task" in options and options["task"]:
            try:
                task = Task.objects.all().using(database).get(
                    pk=options["task"])
            except Exception:
                raise CommandError("Task identifier not found")
            if (task.started or task.finished or task.status != "Waiting"
                    or task.name != "uploadreport"):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
        else:
            task = Task(
                name="uploadreport",
                submitted=now,
                started=now,
                status="0%",
                user=user,
            )
        task.processid = os.getpid()
        task.save(using=database)

        # Each FTP setting can be a plain value or a dict keyed by database alias
        def get_ftp_setting(setting):
            if isinstance(setting, dict):
                return setting.get(database)
            return setting

        ftp_protocol = get_ftp_setting(settings.FTP_PROTOCOL)
        ftp_host = get_ftp_setting(settings.FTP_HOST)
        ftp_port = get_ftp_setting(settings.FTP_PORT)
        ftp_user = get_ftp_setting(settings.FTP_USER)
        ftp_password = get_ftp_setting(settings.FTP_PASSWORD)
        ftp_folder = get_ftp_setting(settings.FTP_FOLDER)

        if not ftp_protocol:
            raise CommandError(
                "No protocol is configured in your djangosettings.py file")
        if not ftp_host:
            raise CommandError(
                "No FTP server is configured in your djangosettings.py file")
        if not ftp_port:
            raise CommandError(
                "No FTP port is configured in your djangosettings.py file")
        if not ftp_user:
            raise CommandError(
                "No FTP user is configured in your djangosettings.py file")
        if not ftp_password:
            raise CommandError(
                "No FTP password is configured in your djangosettings.py file")
        if not ftp_folder:
            raise CommandError(
                "No FTP folder is configured in your djangosettings.py file")

        report = options["report"]
        if not report:
            raise CommandError("No report to upload has been defined")

        # Make sure the files exist in the export folder
        reports = report.split(",")
        correctedReports = []
        missingFiles = []
        for r in reports:
            if len(r.strip()) == 0:
                continue
            path = os.path.join(
                settings.DATABASES[database]["FILEUPLOADFOLDER"],
                "export",
                r.strip(),
            )
            if not os.path.isfile(path):
                missingFiles.append(r.strip())
            else:
                correctedReports.append((path, r.strip()))

        if len(missingFiles) > 0:
            raise CommandError(
                "Following files are missing in export folder: %s"
                % (",".join(str(x) for x in missingFiles)))
        if len(correctedReports) == 0:
            raise CommandError("No valid report defined in options")

        task.arguments = "--report=%s" % (report, )
task.status = "15%" task.message = "Uploading reports" task.save(using=database) # SFTP if ftp_protocol.strip().upper() == "SFTP": cinfo = { "host": ftp_host, "username": ftp_user, "password": ftp_password, "port": ftp_port, } conn = pysftp.Connection(**cinfo) with conn.cd(ftp_folder): for r in correctedReports: conn.put(r[0]) # Closes the connection conn.close() elif ftp_protocol.strip().upper() in ["FTPS", "FTP"]: session = (FTP(ftp_host, ftp_user, ftp_password) if ftp_protocol.strip().upper() == "FTP" else FTP_TLS( ftp_host, ftp_user, ftp_password)) session.cwd(ftp_folder) for r in correctedReports: file = open(r[0], "rb") session.storbinary("STOR %s" % (r[1], ), file) file.close() session.quit() else: raise CommandError( "FTP_PROTOCOL in djangosettings.py file is not supported") # Logging message task.processid = None task.message = "" task.status = "Done" task.finished = datetime.now() except Exception as e: if task: task.status = "Failed" task.message = "%s" % e task.finished = datetime.now() raise e finally: setattr(_thread_locals, "database", old_thread_locals) if task: task.processid = None task.save(using=database)
def handle(self, **options): # Pick up the options now = datetime.now() self.database = options["database"] if self.database not in settings.DATABASES: raise CommandError("No database settings known for '%s'" % self.database) if options["user"]: try: self.user = (User.objects.all().using( self.database).get(username=options["user"])) except Exception: raise CommandError("User '%s' not found" % options["user"]) else: self.user = None timestamp = now.strftime("%Y%m%d%H%M%S") if self.database == DEFAULT_DB_ALIAS: logfile = "importworkbook-%s.log" % timestamp else: logfile = "importworkbook_%s-%s.log" % (self.database, timestamp) task = None try: setattr(_thread_locals, "database", self.database) # Initialize the task if options["task"]: try: task = (Task.objects.all().using( self.database).get(pk=options["task"])) except Exception: raise CommandError("Task identifier not found") if (task.started or task.finished or task.status != "Waiting" or task.name not in ("frepple_importworkbook", "importworkbook")): raise CommandError("Invalid task identifier") task.status = "0%" task.started = now else: task = Task( name="importworkbook", submitted=now, started=now, status="0%", user=self.user, ) task.arguments = " ".join(options["file"]) task.save(using=self.database) all_models = [(ct.model_class(), ct.pk) for ct in ContentType.objects.all() if ct.model_class()] try: with transaction.atomic(using=self.database): # Find all models in the workbook if "filename" not in locals(): filename = options["file"] for file in filename: wb = load_workbook(filename=file, read_only=True, data_only=True) models = [] for ws_name in wb.sheetnames: # Find the model model = None contenttype_id = None for m, ct in all_models: if matchesModelName(ws_name, m): model = m contenttype_id = ct break if not model or model in EXCLUDE_FROM_BULK_OPERATIONS: print( force_text( _("Ignoring data in worksheet: %s") % ws_name)) # yield '<div class="alert alert-warning">' + force_text(_("Ignoring data in worksheet: %s") % ws_name) + '</div>' elif not self.user.has_perm("%s.%s" % ( model._meta.app_label, get_permission_codename( "add", model._meta), )): # Check permissions print( force_text( _("You don't permissions to add: %s") % ws_name)) # yield '<div class="alert alert-danger">' + force_text(_("You don't permissions to add: %s") % ws_name) + '</div>' else: deps = set([model]) GridReport.dependent_models(model, deps) models.append( (ws_name, model, contenttype_id, deps)) # Sort the list of models, based on dependencies between models models = GridReport.sort_models(models) # Process all rows in each worksheet for ws_name, model, contenttype_id, dependencies in models: print( force_text( _("Processing data in worksheet: %s") % ws_name)) # yield '<strong>' + force_text(_("Processing data in worksheet: %s") % ws_name) + '</strong><br>' # yield ('<div class="table-responsive">' # '<table class="table table-condensed" style="white-space: nowrap;"><tbody>') numerrors = 0 numwarnings = 0 firsterror = True ws = wb[ws_name] for error in parseExcelWorksheet( model, ws, user=self.user, database=self.database, ping=True, ): if error[0] == logging.DEBUG: # Yield some result so we can detect disconnect clients and interrupt the upload # yield ' ' continue if firsterror and error[0] in ( logging.ERROR, logging.WARNING, ): print("%s %s %s %s %s%s%s" % ( capfirst(_("worksheet")), capfirst(_("row")), capfirst(_("field")), capfirst(_("value")), capfirst(_("error")), " / ", capfirst(_("warning")), )) # yield '<tr><th 
class="sr-only">%s</th><th>%s</th><th>%s</th><th>%s</th><th>%s%s%s</th></tr>' % ( # capfirst(_("worksheet")), capfirst(_("row")), # capfirst(_("field")), capfirst(_("value")), # capfirst(_("error")), " / ", capfirst(_("warning")) # ) firsterror = False if error[0] == logging.ERROR: print("%s %s %s %s %s: %s" % ( ws_name, error[1] if error[1] else "", error[2] if error[2] else "", error[3] if error[3] else "", capfirst(_("error")), error[4], )) # yield '<tr><td class="sr-only">%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s: %s</td></tr>' % ( # ws_name, # error[1] if error[1] else '', # error[2] if error[2] else '', # error[3] if error[3] else '', # capfirst(_('error')), # error[4] # ) numerrors += 1 elif error[1] == logging.WARNING: print("%s %s %s %s %s: %s" % ( ws_name, error[1] if error[1] else "", error[2] if error[2] else "", error[3] if error[3] else "", capfirst(_("warning")), error[4], )) # yield '<tr><td class="sr-only">%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s: %s</td></tr>' % ( # ws_name, # error[1] if error[1] else '', # error[2] if error[2] else '', # error[3] if error[3] else '', # capfirst(_('warning')), # error[4] # ) numwarnings += 1 else: print("%s %s %s %s %s %s" % ( "danger" if numerrors > 0 else "success", ws_name, error[1] if error[1] else "", error[2] if error[2] else "", error[3] if error[3] else "", error[4], )) # yield '<tr class=%s><td class="sr-only">%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td></tr>' % ( # "danger" if numerrors > 0 else 'success', # ws_name, # error[1] if error[1] else '', # error[2] if error[2] else '', # error[3] if error[3] else '', # error[4] # ) # yield '</tbody></table></div>' print("%s" % _("Done")) # yield '<div><strong>%s</strong></div>' % _("Done") except GeneratorExit: logger.warning("Connection Aborted") except Exception as e: if task: task.status = "Failed" task.message = "%s" % e task.finished = datetime.now() raise e finally: setattr(_thread_locals, "database", None) if task: task.save(using=self.database) # Task update task.status = "Done" task.finished = datetime.now() task.processid = None task.save(using=self.database, update_fields=["status", "finished"]) return _("Done")
def handle(self, **options): # Make sure the debug flag is not set! # When it is set, the django database wrapper collects a list of all sql # statements executed and their timings. This consumes plenty of memory # and cpu time. tmp_debug = settings.DEBUG settings.DEBUG = False # Pick up options force = options['force'] test = 'FREPPLE_TEST' in os.environ if options['user']: try: user = User.objects.all().get(username=options['user']) except: raise CommandError("User '%s' not found" % options['user']) else: user = None # Synchronize the scenario table with the settings Scenario.syncWithSettings() # Initialize the task source = options['source'] try: sourcescenario = Scenario.objects.using(DEFAULT_DB_ALIAS).get( pk=source) except: raise CommandError("No source database defined with name '%s'" % source) now = datetime.now() task = None if 'task' in options and options['task']: try: task = Task.objects.all().using(source).get(pk=options['task']) except: raise CommandError("Task identifier not found") if task.started or task.finished or task.status != "Waiting" or task.name not in ( 'frepple_copy', 'scenario_copy'): raise CommandError("Invalid task identifier") task.status = '0%' task.started = now else: task = Task(name='scenario_copy', submitted=now, started=now, status='0%', user=user) task.processid = os.getpid() task.save(using=source) # Validate the arguments destination = options['destination'] destinationscenario = None try: task.arguments = "%s %s" % (source, destination) if options['description']: task.arguments += '--description="%s"' % options[ 'description'].replace('"', '\\"') if force: task.arguments += " --force" task.save(using=source) try: destinationscenario = Scenario.objects.using( DEFAULT_DB_ALIAS).get(pk=destination) except: raise CommandError( "No destination database defined with name '%s'" % destination) if source == destination: raise CommandError("Can't copy a schema on itself") if settings.DATABASES[source]['ENGINE'] != settings.DATABASES[ destination]['ENGINE']: raise CommandError( "Source and destination scenarios have a different engine") if sourcescenario.status != 'In use': raise CommandError("Source scenario is not in use") if destinationscenario.status != 'Free' and not force: raise CommandError("Destination scenario is not free") # Logging message - always logging in the default database destinationscenario.status = 'Busy' destinationscenario.save(using=DEFAULT_DB_ALIAS) # Copying the data # Commenting the next line is a little more secure, but requires you to create a .pgpass file. 
if settings.DATABASES[source]['PASSWORD']: os.environ['PGPASSWORD'] = settings.DATABASES[source][ 'PASSWORD'] if os.name == 'nt': # On windows restoring with pg_restore over a pipe is broken :-( cmd = "pg_dump -c -Fp %s%s%s%s | psql %s%s%s%s" else: cmd = "pg_dump -Fc %s%s%s%s | pg_restore -n public -Fc -c --if-exists %s%s%s -d %s" commandline = cmd % ( settings.DATABASES[source]['USER'] and ("-U %s " % settings.DATABASES[source]['USER']) or '', settings.DATABASES[source]['HOST'] and ("-h %s " % settings.DATABASES[source]['HOST']) or '', settings.DATABASES[source]['PORT'] and ("-p %s " % settings.DATABASES[source]['PORT']) or '', test and settings.DATABASES[source]['TEST']['NAME'] or settings.DATABASES[source]['NAME'], settings.DATABASES[destination]['USER'] and ("-U %s " % settings.DATABASES[destination]['USER']) or '', settings.DATABASES[destination]['HOST'] and ("-h %s " % settings.DATABASES[destination]['HOST']) or '', settings.DATABASES[destination]['PORT'] and ("-p %s " % settings.DATABASES[destination]['PORT']) or '', test and settings.DATABASES[destination]['TEST']['NAME'] or settings.DATABASES[destination]['NAME'], ) with subprocess.Popen(commandline, shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT) as p: try: task.processid = p.pid task.save(using=source) p.wait() except: p.kill() p.wait() # Consider the destination database free again destinationscenario.status = 'Free' destinationscenario.lastrefresh = datetime.today() destinationscenario.save(using=DEFAULT_DB_ALIAS) raise Exception("Database copy failed") # Update the scenario table destinationscenario.status = 'In use' destinationscenario.lastrefresh = datetime.today() if 'description' in options: destinationscenario.description = options['description'] destinationscenario.save(using=DEFAULT_DB_ALIAS) # Give access to the destination scenario to: # a) the user doing the copy # b) all superusers from the source schema User.objects.using(destination).filter(is_superuser=True).update( is_active=True) User.objects.using(destination).filter(is_superuser=False).update( is_active=False) if user: User.objects.using(destination).filter( username=user.username).update(is_active=True) # Logging message task.processid = None task.status = 'Done' task.finished = datetime.now() # Update the task in the destination database task.message = "Scenario copied from %s" % source task.save(using=destination) task.message = "Scenario copied to %s" % destination # Delete any waiting tasks in the new copy. # This is needed for situations where the same source is copied to # multiple destinations at the same moment. Task.objects.all().using(destination).filter( id__gt=task.id).delete() except Exception as e: if task: task.status = 'Failed' task.message = '%s' % e task.finished = datetime.now() if destinationscenario and destinationscenario.status == 'Busy': destinationscenario.status = 'Free' destinationscenario.save(using=DEFAULT_DB_ALIAS) raise e finally: if task: task.processid = None task.save(using=source) settings.DEBUG = tmp_debug
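# The scenario copy above shells out to a "pg_dump ... | pg_restore ..."
# pipeline built as a single string with shell=True. A sketch of the same
# pipeline built without a shell is shown below; the database names are
# placeholders and host, port, user and password are left to libpq defaults
# (or a .pgpass file), so this is an illustration rather than a drop-in
# replacement.
import subprocess


def copy_schema(source_db, dest_db):
    dump = subprocess.Popen(["pg_dump", "-Fc", source_db], stdout=subprocess.PIPE)
    restore = subprocess.Popen(
        ["pg_restore", "-n", "public", "-c", "--if-exists", "-d", dest_db],
        stdin=dump.stdout,
        stdout=subprocess.DEVNULL,
    )
    dump.stdout.close()  # let pg_dump receive SIGPIPE if pg_restore exits early
    restore.communicate()
    if dump.wait() or restore.returncode:
        raise Exception("Database copy failed")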
def handle(self, **options): if options["user"]: try: user = User.objects.all().get(username=options["user"]) except Exception: raise CommandError("User '%s' not found" % options["user"]) else: user = None # Synchronize the scenario table with the settings Scenario.syncWithSettings() now = datetime.now() task = None database = options["database"] if "task" in options and options["task"]: try: task = Task.objects.all().using(database).get( pk=options["task"]) except Exception: raise CommandError("Task identifier not found") if (task.started or task.finished or task.status != "Waiting" or task.name != "scenario_release"): raise CommandError("Invalid task identifier") task.status = "0%" task.started = now else: task = Task( name="scenario_release", submitted=now, started=now, status="0%", user=user, ) task.processid = os.getpid() task.save(using=database) # Validate the arguments try: releasedScenario = None try: releasedScenario = Scenario.objects.using( DEFAULT_DB_ALIAS).get(pk=database) except Exception: raise CommandError( "No destination database defined with name '%s'" % database) if database == DEFAULT_DB_ALIAS: raise CommandError("Production scenario cannot be released.") if releasedScenario.status != "In use": raise CommandError("Scenario to release is not in use") # Update the scenario table, set it free in the production database releasedScenario.status = "Free" releasedScenario.lastrefresh = datetime.today() releasedScenario.save(using=DEFAULT_DB_ALIAS) # Killing webservice if "freppledb.webservice" in settings.INSTALLED_APPS: management.call_command("stopwebservice", force=True, database=database) # Logging message task.processid = None task.status = "Done" task.finished = datetime.now() # Update the task in the destination database task.message = "Scenario %s released" % (database, ) task.save(using=database) except Exception as e: if task: task.status = "Failed" task.message = "%s" % e task.finished = datetime.now() if releasedScenario and releasedScenario.status == "Busy": releasedScenario.status = "Free" releasedScenario.save(using=DEFAULT_DB_ALIAS) raise e finally: if task: task.processid = None task.save(using=database)
def handle(self, **options):
    # Pick up the options
    database = options["database"]
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if options["user"]:
        try:
            user = User.objects.all().using(database).get(
                username=options["user"])
        except Exception:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        user = None

    now = datetime.now()
    task = None
    try:
        # Initialize the task
        if "task" in options and options["task"]:
            try:
                task = Task.objects.all().using(database).get(
                    pk=options["task"])
            except Exception:
                raise CommandError("Task identifier not found")
            if (task.started or task.finished or task.status != "Waiting"
                    or task.name not in ("frepple_backup", "backup")):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
        else:
            task = Task(name="backup", submitted=now, started=now,
                        status="0%", user=user)

        # Choose the backup file name
        backupfile = now.strftime("database.%s.%%Y%%m%%d.%%H%%M%%S.dump" % database)
        task.message = "Backup to file %s" % backupfile

        # Run the backup command
        # Commenting the next line is a little more secure, but requires you to
        # create a .pgpass file.
        os.environ["PGPASSWORD"] = settings.DATABASES[database]["PASSWORD"]
        args = [
            "pg_dump",
            "-Fc",
            "-w",
            "--username=%s" % settings.DATABASES[database]["USER"],
            "--file=%s" % os.path.abspath(
                os.path.join(settings.FREPPLE_LOGDIR, backupfile)),
        ]
        if settings.DATABASES[database]["HOST"]:
            args.append("--host=%s" % settings.DATABASES[database]["HOST"])
        if settings.DATABASES[database]["PORT"]:
            args.append("--port=%s" % settings.DATABASES[database]["PORT"])
        args.append(settings.DATABASES[database]["NAME"])
        with subprocess.Popen(args) as p:
            try:
                task.processid = p.pid
                task.save(using=database)
                p.wait()
            except:
                p.kill()
                p.wait()
                raise Exception("Run of pg_dump failed")

        # Task update
        task.processid = None
        task.status = "99%"
        task.save(using=database)

        # Delete backups older than a month
        pattern = re.compile("database.*.*.*.dump")
        for f in os.listdir(settings.FREPPLE_LOGDIR):
            if os.path.isfile(os.path.join(settings.FREPPLE_LOGDIR, f)):
                # Note this is NOT 100% correct on UNIX. st_ctime is not always the creation date...
                created = datetime.fromtimestamp(
                    os.stat(os.path.join(settings.FREPPLE_LOGDIR, f)).st_ctime)
                if pattern.match(f) and (now - created).days > 31:
                    try:
                        os.remove(os.path.join(settings.FREPPLE_LOGDIR, f))
                    except OSError:
                        pass

        # Task update
        task.status = "Done"
        task.finished = datetime.now()
        task.processid = None
    except Exception as e:
        if task:
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
            task.processid = None
        raise e
    finally:
        if task:
            task.save(using=database)
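# The retention loop above keys off st_ctime, which (as its own comment notes)
# is not the creation time on UNIX. A sketch of the same cleanup based on the
# modification time, keeping the 31-day window, is shown below; the helper name
# and the tighter filename pattern are illustrative only.
import os
import re
from datetime import datetime


def purge_old_backups(logdir, days=31):
    pattern = re.compile(r"database\..*\.dump$")
    now = datetime.now()
    for f in os.listdir(logdir):
        path = os.path.join(logdir, f)
        if not os.path.isfile(path) or not pattern.match(f):
            continue
        modified = datetime.fromtimestamp(os.stat(path).st_mtime)
        if (now - modified).days > days:
            try:
                os.remove(path)
            except OSError:
                pass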
def handle(self, **options): # Pick up the options now = datetime.now() if 'database' in options: database = options['database'] or DEFAULT_DB_ALIAS else: database = DEFAULT_DB_ALIAS if database not in settings.DATABASES: raise CommandError("No database settings known for '%s'" % database ) if 'user' in options and options['user']: try: user = User.objects.all().using(database).get(username=options['user']) except: raise CommandError("User '%s' not found" % options['user'] ) else: user = None timestamp = now.strftime("%Y%m%d%H%M%S") if database == DEFAULT_DB_ALIAS: logfile = 'frepple-%s.log' % timestamp else: logfile = 'frepple_%s-%s.log' % (database, timestamp) task = None try: # Initialize the task if 'task' in options and options['task']: try: task = Task.objects.all().using(database).get(pk=options['task']) except: raise CommandError("Task identifier not found") if task.started or task.finished or task.status != "Waiting" or task.name not in ('runplan', 'frepple_run'): raise CommandError("Invalid task identifier") task.status = '0%' task.started = now task.logfile = logfile else: task = Task(name='runplan', submitted=now, started=now, status='0%', user=user, logfile=logfile) # Validate options if 'constraint' in options: constraint = int(options['constraint']) if constraint < 0 or constraint > 15: raise ValueError("Invalid constraint: %s" % options['constraint']) else: constraint = 15 if 'plantype' in options: plantype = int(options['plantype']) else: plantype = 1 # Reset environment variables # TODO avoid having to delete the environment variables. Use options directly? PlanTaskRegistry.autodiscover() for i in PlanTaskRegistry.reg: if 'env' in options: # Options specified if i.label and i.label[0] in os.environ: del os.environ[i.label[0]] elif i.label: # No options specified - default to activate them all os.environ[i.label[0]] = '1' # Set environment variables if options['env']: task.arguments = "--constraint=%d --plantype=%d --env=%s" % (constraint, plantype, options['env']) for i in options['env'].split(','): j = i.split('=') if len(j) == 1: os.environ[j[0]] = '1' else: os.environ[j[0]] = j[1] else: task.arguments = "--constraint=%d --plantype=%d" % (constraint, plantype) if options['background']: task.arguments += " --background" # Log task # Different from the other tasks the frepple engine will write the processid task.save(using=database) # Locate commands.py import freppledb.common.commands cmd = freppledb.common.commands.__file__ def setlimits(): import resource if settings.MAXMEMORYSIZE: resource.setrlimit( resource.RLIMIT_AS, (settings.MAXMEMORYSIZE * 1024 * 1024, (settings.MAXMEMORYSIZE + 10) * 1024 * 1024) ) if settings.MAXCPUTIME: resource.setrlimit( resource.RLIMIT_CPU, (settings.MAXCPUTIME, settings.MAXCPUTIME + 5) ) # Limiting the file size is a bit tricky as this limit not only applies to the log # file, but also to temp files during the export # if settings.MAXTOTALLOGFILESIZE: # resource.setrlimit( # resource.RLIMIT_FSIZE, # (settings.MAXTOTALLOGFILESIZE * 1024 * 1024, (settings.MAXTOTALLOGFILESIZE + 1) * 1024 * 1024) # ) # Prepare environment os.environ['FREPPLE_PLANTYPE'] = str(plantype) os.environ['FREPPLE_CONSTRAINT'] = str(constraint) os.environ['FREPPLE_TASKID'] = str(task.id) os.environ['FREPPLE_DATABASE'] = database os.environ['FREPPLE_LOGFILE'] = logfile os.environ['FREPPLE_PROCESSNAME'] = settings.DATABASES[database]['NAME'].replace('demo', '') os.environ['PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ['PATH'] + os.pathsep + settings.FREPPLE_APP 
if os.path.isfile(os.path.join(settings.FREPPLE_HOME, 'libfrepple.so')): os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME if 'DJANGO_SETTINGS_MODULE' not in os.environ: os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings' os.environ['PYTHONPATH'] = os.path.normpath(settings.FREPPLE_APP) libdir = os.path.join(os.path.normpath(settings.FREPPLE_HOME), 'lib') if os.path.isdir(libdir): # Folders used by the Windows version os.environ['PYTHONPATH'] += os.pathsep + libdir if os.path.isfile(os.path.join(libdir, 'library.zip')): os.environ['PYTHONPATH'] += os.pathsep + os.path.join(libdir, 'library.zip') if options['background']: # Execute as background process on Windows if os.name == 'nt': subprocess.Popen(['frepple', cmd], creationflags=0x08000000) else: # Execute as background process on Linux subprocess.Popen(['frepple', cmd], preexec_fn=setlimits) else: if os.name == 'nt': # Execute in foreground on Windows ret = subprocess.call(['frepple', cmd]) else: # Execute in foreground on Linux ret = subprocess.call(['frepple', cmd], preexec_fn=setlimits) if ret != 0 and ret != 2: # Return code 0 is a successful run # Return code is 2 is a run cancelled by a user. That's shown in the status field. raise Exception('Failed with exit code %d' % ret) # Reread the task from the database and update it if not options['background']: task = Task.objects.all().using(database).get(pk=task.id) task.processid = None task.status = 'Done' task.finished = datetime.now() task.save(using=database) except Exception as e: if task: task = Task.objects.all().using(database).get(pk=task.id) task.status = 'Failed' task.message = '%s' % e task.finished = datetime.now() task.processid = None task.save(using=database) raise e
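# The magic number 0x08000000 passed as creationflags in the background branch
# above is the Windows CREATE_NO_WINDOW flag. On Python 3.7+ the named constant
# reads better; a sketch is shown below, with cmd being the commands.py path
# from the code above.
import os
import subprocess

if os.name == "nt":
    subprocess.Popen(["frepple", cmd], creationflags=subprocess.CREATE_NO_WINDOW)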
def handle(self, **options): # Pick up the options now = datetime.now() self.database = options["database"] if self.database not in settings.DATABASES: raise CommandError("No database settings known for '%s'" % self.database) if options["user"]: try: self.user = (User.objects.all().using( self.database).get(username=options["user"])) except Exception: raise CommandError("User '%s' not found" % options["user"]) else: self.user = None timestamp = now.strftime("%Y%m%d%H%M%S") if self.database == DEFAULT_DB_ALIAS: logfile = "importfromfolder-%s.log" % timestamp else: logfile = "importfromfolder_%s-%s.log" % (self.database, timestamp) try: handler = logging.FileHandler(os.path.join(settings.FREPPLE_LOGDIR, logfile), encoding="utf-8") # handler.setFormatter(logging.Formatter(settings.LOGGING['formatters']['simple']['format'])) logger.addHandler(handler) logger.propagate = False except Exception as e: print("%s Failed to open logfile %s: %s" % (datetime.now(), logfile, e)) task = None errors = [0, 0] try: setattr(_thread_locals, "database", self.database) # Initialize the task if options["task"]: try: task = (Task.objects.all().using( self.database).get(pk=options["task"])) except Exception: raise CommandError("Task identifier not found") if (task.started or task.finished or task.status != "Waiting" or task.name not in ("frepple_importfromfolder", "importfromfolder")): raise CommandError("Invalid task identifier") task.status = "0%" task.started = now task.logfile = logfile else: task = Task( name="importfromfolder", submitted=now, started=now, status="0%", user=self.user, logfile=logfile, ) task.processid = os.getpid() task.save(using=self.database) # Choose the right self.delimiter and language self.delimiter = (get_format("DECIMAL_SEPARATOR", settings.LANGUAGE_CODE, True) == "," and ";" or ",") translation.activate(settings.LANGUAGE_CODE) self.SQLrole = settings.DATABASES[self.database].get( "SQL_ROLE", "report_role") # Execute if "FILEUPLOADFOLDER" in settings.DATABASES[ self.database] and os.path.isdir( settings.DATABASES[self.database]["FILEUPLOADFOLDER"]): # Open the logfile logger.info("%s Started importfromfolder\n" % datetime.now().replace(microsecond=0)) all_models = [(ct.model_class(), ct.pk) for ct in ContentType.objects.all() if ct.model_class()] models = [] for ifile in os.listdir( settings.DATABASES[self.database]["FILEUPLOADFOLDER"]): if not ifile.lower().endswith(( ".sql", ".sql.gz", ".csv", ".csv.gz", ".cpy", ".cpy.gz", ".xlsx", )): continue filename0 = ifile.split(".")[0].split(" (")[0] model = None contenttype_id = None for m, ct in all_models: if matchesModelName(filename0, m): model = m contenttype_id = ct break if not model or model in EXCLUDE_FROM_BULK_OPERATIONS: logger.info( "%s Ignoring data in file: %s" % (datetime.now().replace(microsecond=0), ifile)) elif self.user and not self.user.has_perm("%s.%s" % ( model._meta.app_label, get_permission_codename("add", model._meta), )): # Check permissions logger.info( "%s You don't have permissions to add: %s" % (datetime.now().replace(microsecond=0), ifile)) else: deps = set([model]) GridReport.dependent_models(model, deps) models.append((ifile, model, contenttype_id, deps)) # Sort the list of models, based on dependencies between models models = GridReport.sort_models(models) i = 0 cnt = len(models) for ifile, model, contenttype_id, dependencies in models: task.status = str(int(10 + i / cnt * 80)) + "%" task.message = "Processing data file %s" % ifile task.save(using=self.database) i += 1 filetoparse = os.path.join( 
os.path.abspath(settings.DATABASES[self.database] ["FILEUPLOADFOLDER"]), ifile, ) if ifile.lower().endswith((".sql", ".sql.gz")): logger.info( "%s Started executing SQL statements from file: %s" % (datetime.now().replace(microsecond=0), ifile)) errors[0] += self.executeSQLfile(filetoparse) logger.info( "%s Finished executing SQL statements from file: %s" % (datetime.now().replace(microsecond=0), ifile)) elif ifile.lower().endswith((".cpy", ".cpy.gz")): logger.info( "%s Started uploading copy file: %s" % (datetime.now().replace(microsecond=0), ifile)) errors[0] += self.executeCOPYfile(model, filetoparse) logger.info( "%s Finished uploading copy file: %s" % (datetime.now().replace(microsecond=0), ifile)) elif ifile.lower().endswith(".xlsx"): logger.info( "%s Started processing data in Excel file: %s" % (datetime.now().replace(microsecond=0), ifile)) returnederrors = self.loadExcelfile(model, filetoparse) errors[0] += returnederrors[0] errors[1] += returnederrors[1] logger.info( "%s Finished processing data in file: %s" % (datetime.now().replace(microsecond=0), ifile)) else: logger.info( "%s Started processing data in CSV file: %s" % (datetime.now().replace(microsecond=0), ifile)) returnederrors = self.loadCSVfile(model, filetoparse) errors[0] += returnederrors[0] errors[1] += returnederrors[1] logger.info( "%s Finished processing data in CSV file: %s" % (datetime.now().replace(microsecond=0), ifile)) else: errors[0] += 1 cnt = 0 logger.error("%s Failed, folder does not exist" % datetime.now().replace(microsecond=0)) # Task update if errors[0] > 0: task.status = "Failed" if not cnt: task.message = "Destination folder does not exist" else: task.message = ( "Uploaded %s data files with %s errors and %s warnings" % (cnt, errors[0], errors[1])) else: task.status = "Done" task.message = "Uploaded %s data files with %s warnings" % ( cnt, errors[1], ) task.finished = datetime.now() except KeyboardInterrupt: if task: task.status = "Cancelled" task.message = "Cancelled" logger.info("%s Cancelled\n" % datetime.now().replace(microsecond=0)) except Exception as e: logger.error("%s Failed" % datetime.now().replace(microsecond=0)) if task: task.status = "Failed" task.message = "%s" % e raise e finally: setattr(_thread_locals, "database", None) if task: if errors[0] == 0: task.status = "Done" else: task.status = "Failed" task.processid = None task.finished = datetime.now() task.save(using=self.database) logger.info("%s End of importfromfolder\n" % datetime.now().replace(microsecond=0))
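# The folder importer above accepts both plain and gzipped CSV files. A small
# sketch of an extension-aware opener that such a loader could use is shown
# below; open_csv is a hypothetical helper, not part of the existing
# loadCSVfile implementation.
import gzip


def open_csv(path):
    # Return a text-mode file object regardless of compression
    if path.lower().endswith(".gz"):
        return gzip.open(path, "rt", encoding="utf-8")
    return open(path, "rt", encoding="utf-8")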
def handle(self, *args, **options): # Pick up the options now = datetime.now() self.database = options['database'] if self.database not in settings.DATABASES: raise CommandError("No database settings known for '%s'" % self.database ) if options['user']: try: self.user = User.objects.all().using(self.database).get(username=options['user']) except: raise CommandError("User '%s' not found" % options['user'] ) else: self.user = None timestamp = now.strftime("%Y%m%d%H%M%S") if self.database == DEFAULT_DB_ALIAS: logfile = 'exporttofolder-%s.log' % timestamp else: logfile = 'exporttofolder_%s-%s.log' % (self.database, timestamp) try: handler = logging.FileHandler(os.path.join(settings.FREPPLE_LOGDIR, logfile), encoding='utf-8') # handler.setFormatter(logging.Formatter(settings.LOGGING['formatters']['simple']['format'])) logger.addHandler(handler) logger.propagate = False except Exception as e: print("%s Failed to open logfile %s: %s" % (datetime.now(), logfile, e)) task = None errors = 0 try: # Initialize the task if options['task']: try: task = Task.objects.all().using(self.database).get(pk=options['task']) except: raise CommandError("Task identifier not found") if task.started or task.finished or task.status != "Waiting" or task.name not in ('frepple_exporttofolder', 'exporttofolder'): raise CommandError("Invalid task identifier") task.status = '0%' task.started = now task.logfile = logfile else: task = Task(name='exporttofolder', submitted=now, started=now, status='0%', user=self.user, logfile=logfile) task.arguments = ' '.join(['"%s"' % i for i in args]) task.processid = os.getpid() task.save(using=self.database) # Execute if os.path.isdir(settings.DATABASES[self.database]['FILEUPLOADFOLDER']): if not os.path.isdir(os.path.join(settings.DATABASES[self.database]['FILEUPLOADFOLDER'], 'export')): try: os.makedirs(os.path.join(settings.DATABASES[self.database]['FILEUPLOADFOLDER'], 'export')) except OSError as exception: if exception.errno != errno.EEXIST: raise logger.info("%s Started export to folder\n" % datetime.now()) cursor = connections[self.database].cursor() task.status = '0%' task.save(using=self.database) i = 0 cnt = len(self.statements) # Calling all the pre-sql statements for stmt in self.pre_sql_statements: try: logger.info("Executing pre-statement '%s'" % stmt) cursor.execute(stmt) logger.info("%s record(s) modified" % cursor.rowcount) except: errors += 1 logger.error("An error occurred when executing statement '%s'" % stmt) for cfg in self.statements: # Validate filename filename = cfg.get('filename', None) if not filename: raise Exception("Missing filename in export configuration") folder = cfg.get('folder', None) if not folder: raise Exception("Missing folder in export configuration for %s" % filename) logger.info("%s Started export of %s" % (datetime.now(), filename)) # Make sure export folder exists exportFolder = os.path.join(settings.DATABASES[self.database]['FILEUPLOADFOLDER'], folder) if not os.path.isdir(exportFolder): os.makedirs(exportFolder) try: reportclass = cfg.get('report', None) sql = cfg.get('sql', None) if reportclass: # Export from report class # Create a dummy request factory = RequestFactory() request = factory.get("/dummy/", cfg.get('data', {})) if self.user: request.user = self.user else: request.user = User.objects.all().get(username="******") request.database = self.database request.LANGUAGE_CODE = settings.LANGUAGE_CODE request.prefs = cfg.get('prefs', None) # Initialize the report if hasattr(reportclass, "initialize"): reportclass.initialize(request) if 
not reportclass._attributes_added and reportclass.model: reportclass._attributes_added = True for f in reportclass.getAttributeFields(reportclass.model): reportclass.rows += (f,) if reportclass.hasTimeBuckets: reportclass.getBuckets(request) # Write the report file datafile = open(os.path.join(exportFolder, filename), "wb") if filename.endswith(".xlsx"): reportclass._generate_spreadsheet_data(request, datafile, **cfg.get('data', {})) elif filename.endswith(".csv"): for r in reportclass._generate_csv_data(request, **cfg.get('data', {})): datafile.write( r.encode(settings.CSV_CHARSET) if isinstance(r, str) else r ) else: raise Exception("Unknown output format for %s" % filename) elif sql: # Exporting using SQL if filename.lower().endswith(".gz"): datafile = gzip.open(os.path.join(exportFolder, filename), "w") else: datafile = open(os.path.join(exportFolder, filename), "w") cursor.copy_expert(sql, datafile) else: raise Exception("Unknown export type for %s" % filename) datafile.close() i += 1 except Exception as e: errors += 1 logger.error("%s Failed to export to %s" % (datetime.now(), filename)) if task: task.message = 'Failed to export %s' % filename task.status = str(int(i / cnt * 100)) + '%' task.save(using=self.database) logger.info("%s Exported %s file(s)\n" % (datetime.now(), cnt - errors)) for stmt in self.post_sql_statements: try: logger.info("Executing post-statement '%s'" % stmt) cursor.execute(stmt) logger.info("%s record(s) modified" % cursor.rowcount) except: errors += 1 logger.error("An error occured when executing statement '%s'" % stmt) else: errors += 1 logger.error("%s Failed, folder does not exist" % datetime.now()) task.message = "Destination folder does not exist" task.save(using=self.database) except Exception as e: logger.error("%s Failed to export: %s" % (datetime.now(), e)) errors += 1 if task: task.message = 'Failed to export' finally: logger.info('%s End of export to folder\n' % datetime.now()) if task: if not errors: task.status = '100%' task.message = "Exported %s data files" % (cnt) else: task.status = 'Failed' # task.message = "Exported %s data files, %s failed" % (cnt-errors, errors) task.finished = datetime.now() task.processid = None task.save(using=self.database)
def handle(self, **options): # Pick up the options database = options["database"] if database not in settings.DATABASES: raise CommandError("No database settings known for '%s'" % database) if options["user"]: try: user = User.objects.all().using(database).get( username=options["user"]) except Exception: raise CommandError("User '%s' not found" % options["user"]) else: user = None now = datetime.now() task = None try: # Initialize the task if options["task"]: try: task = Task.objects.all().using(database).get( pk=options["task"]) except Exception: raise CommandError("Task identifier not found") if (task.started or task.finished or task.status != "Waiting" or task.name not in ("frepple_loadxml", "loadxml")): raise CommandError("Invalid task identifier") task.status = "0%" task.started = now else: task = Task(name="loadxml", submitted=now, started=now, status="0%", user=user) task.arguments = " ".join(options["file"]) task.processid = os.getpid() task.save(using=database) # Execute # TODO: if frePPLe is available as a module, we don't really need to spawn another process. os.environ["FREPPLE_HOME"] = settings.FREPPLE_HOME.replace( "\\", "\\\\") os.environ["FREPPLE_APP"] = settings.FREPPLE_APP os.environ["FREPPLE_DATABASE"] = database os.environ["PATH"] = (settings.FREPPLE_HOME + os.pathsep + os.environ["PATH"] + os.pathsep + settings.FREPPLE_APP) os.environ["LD_LIBRARY_PATH"] = settings.FREPPLE_HOME if "DJANGO_SETTINGS_MODULE" not in os.environ: os.environ["DJANGO_SETTINGS_MODULE"] = "freppledb.settings" if os.path.exists( os.path.join(os.environ["FREPPLE_HOME"], "python36.zip")): # For the py2exe executable os.environ["PYTHONPATH"] = (os.path.join( os.environ["FREPPLE_HOME"], "python%d%d.zip" % (sys.version_info[0], sys.version_info[1]), ) + os.pathsep + os.path.normpath(os.environ["FREPPLE_APP"])) else: # Other executables os.environ["PYTHONPATH"] = os.path.normpath( os.environ["FREPPLE_APP"]) cmdline = ['"%s"' % i for i in options["file"]] cmdline.insert(0, "frepple") cmdline.append('"%s"' % os.path.join( settings.FREPPLE_APP, "freppledb", "execute", "loadxml.py")) proc = subprocess.run(" ".join(cmdline)) if proc.returncode: raise Exception("Exit code of the batch run is %d" % proc.returncode) # Task update task.status = "Done" task.finished = datetime.now() except Exception as e: if task: task.status = "Failed" task.message = "%s" % e task.finished = datetime.now() raise e finally: if task: task.processid = None task.save(using=database)
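# The loadxml call above quotes each argument by hand and joins everything into
# a single command string. Passing the argument vector directly avoids the
# manual quoting; a sketch is shown below, reusing options, settings, os and
# subprocess from the surrounding code.
cmdline = ["frepple"]
cmdline.extend(options["file"])
cmdline.append(os.path.join(settings.FREPPLE_APP, "freppledb", "execute", "loadxml.py"))
proc = subprocess.run(cmdline)
if proc.returncode:
    raise Exception("Exit code of the batch run is %d" % proc.returncode)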
def handle(self, **options): # Pick up the options now = datetime.now() self.database = options['database'] if self.database not in settings.DATABASES: raise CommandError("No database settings known for '%s'" % self.database ) if options['user']: try: self.user = User.objects.all().using(self.database).get(username=options['user']) except: raise CommandError("User '%s' not found" % options['user'] ) else: self.user = None timestamp = now.strftime("%Y%m%d%H%M%S") if self.database == DEFAULT_DB_ALIAS: logfile = 'importfromfolder-%s.log' % timestamp else: logfile = 'importfromfolder_%s-%s.log' % (self.database, timestamp) try: handler = logging.FileHandler(os.path.join(settings.FREPPLE_LOGDIR, logfile), encoding='utf-8') # handler.setFormatter(logging.Formatter(settings.LOGGING['formatters']['simple']['format'])) logger.addHandler(handler) logger.propagate = False except Exception as e: print("%s Failed to open logfile %s: %s" % (datetime.now(), logfile, e)) task = None errors = [0, 0] returnederrors = [0, 0] try: setattr(_thread_locals, 'database', self.database) # Initialize the task if options['task']: try: task = Task.objects.all().using(self.database).get(pk=options['task']) except: raise CommandError("Task identifier not found") if task.started or task.finished or task.status != "Waiting" or task.name not in ('frepple_importfromfolder', 'importfromfolder'): raise CommandError("Invalid task identifier") task.status = '0%' task.started = now task.logfile = logfile else: task = Task(name='importfromfolder', submitted=now, started=now, status='0%', user=self.user, logfile=logfile) task.processid = os.getpid() task.save(using=self.database) # Choose the right self.delimiter and language self.delimiter = get_format('DECIMAL_SEPARATOR', settings.LANGUAGE_CODE, True) == ',' and ';' or ',' translation.activate(settings.LANGUAGE_CODE) # Execute if 'FILEUPLOADFOLDER' in settings.DATABASES[self.database] \ and os.path.isdir(settings.DATABASES[self.database]['FILEUPLOADFOLDER']): # Open the logfile logger.info("%s Started importfromfolder\n" % datetime.now().replace(microsecond=0)) all_models = [ (ct.model_class(), ct.pk) for ct in ContentType.objects.all() if ct.model_class() ] models = [] for ifile in os.listdir(settings.DATABASES[self.database]['FILEUPLOADFOLDER']): if not ifile.lower().endswith(('.csv', '.csv.gz', '.xlsx')): continue filename0 = ifile.split('.')[0] model = None contenttype_id = None for m, ct in all_models: if matchesModelName(filename0, m): model = m contenttype_id = ct logger.info("%s Matched a model to file: %s" % (datetime.now().replace(microsecond=0), ifile)) break if not model or model in EXCLUDE_FROM_BULK_OPERATIONS: logger.info("%s Ignoring data in file: %s" % (datetime.now().replace(microsecond=0), ifile)) elif self.user and not self.user.has_perm('%s.%s' % (model._meta.app_label, get_permission_codename('add', model._meta))): # Check permissions logger.info("%s You don't have permissions to add: %s" % (datetime.now().replace(microsecond=0), ifile)) else: deps = set([model]) GridReport.dependent_models(model, deps) models.append( (ifile, model, contenttype_id, deps) ) # Sort the list of models, based on dependencies between models models = GridReport.sort_models(models) i = 0 cnt = len(models) for ifile, model, contenttype_id, dependencies in models: task.status = str(int(10 + i / cnt * 80)) + '%' task.message = 'Processing data file %s' % ifile task.save(using=self.database) i += 1 filetoparse = 
os.path.join(os.path.abspath(settings.DATABASES[self.database]['FILEUPLOADFOLDER']), ifile) if ifile.lower().endswith('.xlsx'): logger.info("%s Started processing data in Excel file: %s" % (datetime.now().replace(microsecond=0), ifile)) returnederrors = self.loadExcelfile(model, filetoparse) errors[0] += returnederrors[0] errors[1] += returnederrors[1] logger.info("%s Finished processing data in file: %s" % (datetime.now().replace(microsecond=0), ifile)) else: logger.info("%s Started processing data in CSV file: %s" % (datetime.now().replace(microsecond=0), ifile)) returnederrors = self.loadCSVfile(model, filetoparse) errors[0] += returnederrors[0] errors[1] += returnederrors[1] logger.info("%s Finished processing data in CSV file: %s" % (datetime.now().replace(microsecond=0), ifile)) else: errors[0] += 1 cnt = 0 logger.error("%s Failed, folder does not exist" % datetime.now().replace(microsecond=0)) # Task update if errors[0] > 0: task.status = 'Failed' if not cnt: task.message = "Destination folder does not exist" else: task.message = "Uploaded %s data files with %s errors and %s warnings" % (cnt, errors[0], errors[1]) else: task.status = 'Done' task.message = "Uploaded %s data files with %s warnings" % (cnt, errors[1]) task.finished = datetime.now() except KeyboardInterrupt: if task: task.status = 'Cancelled' task.message = 'Cancelled' logger.info('%s Cancelled\n' % datetime.now().replace(microsecond=0)) except Exception as e: logger.error("%s Failed" % datetime.now().replace(microsecond=0)) if task: task.status = 'Failed' task.message = '%s' % e raise e finally: setattr(_thread_locals, 'database', None) if task: if errors[0] == 0: task.status = 'Done' else: task.status = 'Failed' task.processid = None task.finished = datetime.now() task.save(using=self.database) logger.info('%s End of importfromfolder\n' % datetime.now().replace(microsecond=0))
def handle(self, **options):
    # Pick up the options
    database = options['database']
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if options['user']:
        try:
            user = User.objects.all().using(database).get(username=options['user'])
        except Exception:
            raise CommandError("User '%s' not found" % options['user'])
    else:
        user = None

    now = datetime.now()
    task = None
    try:
        # Initialize the task
        if options['task']:
            try:
                task = Task.objects.all().using(database).get(pk=options['task'])
            except Exception:
                raise CommandError("Task identifier not found")
            if task.started or task.finished or task.status != "Waiting" \
                    or task.name not in ('frepple_loadxml', 'loadxml'):
                raise CommandError("Invalid task identifier")
            task.status = '0%'
            task.started = now
        else:
            task = Task(name='loadxml', submitted=now, started=now, status='0%', user=user)
        task.arguments = ' '.join(options['file'])
        task.processid = os.getpid()
        task.save(using=database)

        # Execute
        # TODO: if frePPLe is available as a module, we don't really need to spawn another process.
        os.environ['FREPPLE_HOME'] = settings.FREPPLE_HOME.replace('\\', '\\\\')
        os.environ['FREPPLE_APP'] = settings.FREPPLE_APP
        os.environ['FREPPLE_DATABASE'] = database
        os.environ['PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ['PATH'] + os.pathsep + settings.FREPPLE_APP
        os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
        if 'DJANGO_SETTINGS_MODULE' not in os.environ:
            os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
        if os.path.exists(os.path.join(os.environ['FREPPLE_HOME'], 'python36.zip')):
            # For the py2exe executable
            os.environ['PYTHONPATH'] = os.path.join(
                os.environ['FREPPLE_HOME'],
                'python%d%d.zip' % (sys.version_info[0], sys.version_info[1])
            ) + os.pathsep + os.path.normpath(os.environ['FREPPLE_APP'])
        else:
            # Other executables
            os.environ['PYTHONPATH'] = os.path.normpath(os.environ['FREPPLE_APP'])
        cmdline = ['"%s"' % i for i in options['file']]
        cmdline.insert(0, 'frepple')
        cmdline.append('"%s"' % os.path.join(settings.FREPPLE_APP, 'freppledb', 'execute', 'loadxml.py'))
        proc = subprocess.run(' '.join(cmdline))
        if proc.returncode:
            raise Exception('Exit code of the batch run is %d' % proc.returncode)

        # Task update
        task.status = 'Done'
        task.finished = datetime.now()

    except Exception as e:
        if task:
            task.status = 'Failed'
            task.message = '%s' % e
            task.finished = datetime.now()
        raise e

    finally:
        if task:
            task.processid = None
            task.save(using=database)
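A usage sketch for the command above, assuming its add_arguments (not shown) registers the XML input as a positional "file" argument accepting one or more values; the file name is purely illustrative.

from django.core.management import call_command

# Equivalent to: python manage.py loadxml mydata.xml
call_command("loadxml", "mydata.xml")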
def handle(self, **options):
    # Pick up the options
    database = options['database']
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if options['user']:
        try:
            user = User.objects.all().using(database).get(username=options['user'])
        except Exception:
            raise CommandError("User '%s' not found" % options['user'])
    else:
        user = None

    now = datetime.now()
    task = None
    try:
        # Initialize the task
        if 'task' in options and options['task']:
            try:
                task = Task.objects.all().using(database).get(pk=options['task'])
            except Exception:
                raise CommandError("Task identifier not found")
            if task.started or task.finished or task.status != "Waiting" \
                    or task.name not in ('frepple_backup', 'backup'):
                raise CommandError("Invalid task identifier")
            task.status = '0%'
            task.started = now
        else:
            task = Task(name='backup', submitted=now, started=now, status='0%', user=user)

        # Choose the backup file name
        backupfile = now.strftime("database.%s.%%Y%%m%%d.%%H%%M%%S.dump" % database)
        task.message = 'Backup to file %s' % backupfile

        # Run the backup command
        # Commenting out the next line is a little more secure, but requires you to
        # create a .pgpass file.
        os.environ['PGPASSWORD'] = settings.DATABASES[database]['PASSWORD']
        args = [
            "pg_dump",
            "-Fc",
            "-w",
            '--username=%s' % settings.DATABASES[database]['USER'],
            '--file=%s' % os.path.abspath(os.path.join(settings.FREPPLE_LOGDIR, backupfile)),
        ]
        if settings.DATABASES[database]['HOST']:
            args.append("--host=%s" % settings.DATABASES[database]['HOST'])
        if settings.DATABASES[database]['PORT']:
            args.append("--port=%s" % settings.DATABASES[database]['PORT'])
        args.append(settings.DATABASES[database]['NAME'])
        with subprocess.Popen(args) as p:
            try:
                task.processid = p.pid
                task.save(using=database)
                p.wait()
            except:
                # Also intercept KeyboardInterrupt, so the child process always gets killed
                p.kill()
                p.wait()
                raise Exception("Run of pg_dump failed")

        # Task update
        task.processid = None
        task.status = '99%'
        task.save(using=database)

        # Delete backups older than a month
        pattern = re.compile("database.*.*.*.dump")
        for f in os.listdir(settings.FREPPLE_LOGDIR):
            if os.path.isfile(os.path.join(settings.FREPPLE_LOGDIR, f)):
                # Note this is NOT 100% correct on UNIX. st_ctime is not always the creation date...
                created = datetime.fromtimestamp(os.stat(os.path.join(settings.FREPPLE_LOGDIR, f)).st_ctime)
                if pattern.match(f) and (now - created).days > 31:
                    try:
                        os.remove(os.path.join(settings.FREPPLE_LOGDIR, f))
                    except OSError:
                        pass

        # Task update
        task.status = 'Done'
        task.finished = datetime.now()
        task.processid = None

    except Exception as e:
        if task:
            task.status = 'Failed'
            task.message = '%s' % e
            task.finished = datetime.now()
            task.processid = None
        raise e

    finally:
        if task:
            task.save(using=database)
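As the comment in the handler notes, exporting PGPASSWORD can be avoided by letting pg_dump read the credentials from a .pgpass file. A sketch of such an entry follows; the host, port, database, user and password values are placeholders and must match the connection settings used above. The file must be readable only by its owner (chmod 600).

# ~/.pgpass - one connection per line, format: hostname:port:database:username:password
localhost:5432:frepple:frepple_user:secret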
def handle(self, **options):
    # Pick up options
    database = options["database"]
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if options["user"]:
        try:
            user = User.objects.all().using(database).get(username=options["user"])
        except Exception:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        user = None
    if options["models"]:
        models = options["models"].split(",")
    else:
        models = None

    now = datetime.now()
    task = None
    try:
        # Initialize the task
        setattr(_thread_locals, "database", database)
        if options["task"]:
            try:
                task = Task.objects.all().using(database).get(pk=options["task"])
            except Exception:
                raise CommandError("Task identifier not found")
            if (
                task.started
                or task.finished
                or task.status != "Waiting"
                or task.name not in ("frepple_flush", "empty")
            ):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
        else:
            task = Task(name="empty", submitted=now, started=now, status="0%", user=user)
        task.arguments = "%s%s" % (
            "--user=%s " % options["user"] if options["user"] else "",
            "--models=%s " % options["models"] if options["models"] else "",
        )
        task.processid = os.getpid()
        task.save(using=database)

        # Create a database connection
        cursor = connections[database].cursor()

        # Get a list of all django tables in the database
        tables = set(connections[database].introspection.django_table_names(only_existing=True))
        ContentTypekeys = set()

        # Validate the user list of tables
        if models:
            hasDemand = "input.demand" in models
            hasOperation = "input.operation" in models
            hasPO = "input.purchaseorder" in models
            hasDO = "input.distributionorder" in models
            hasMO = "input.manufacturingorder" in models
            hasDeO = "input.deliveryorder" in models

            if not hasOperation:
                if hasDemand:
                    models.remove("input.demand")
                    cursor.execute(
                        "update operationplan set demand_id = null where demand_id is not null"
                    )
                    cursor.execute("delete from demand")
                    key = ContentType.objects.get_for_model(
                        inputmodels.Demand, for_concrete_model=False
                    ).pk
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = %s", (key,)
                    )

                if not (hasPO and hasDO and hasMO and hasDeO):
                    if "input.operationplanmaterial" in models:
                        models.remove("input.operationplanmaterial")
                    if "input.operationplanresource" in models:
                        models.remove("input.operationplanresource")

                if hasPO and not (hasDO and hasMO and hasDeO):
                    models.remove("input.purchaseorder")
                    cursor.execute("delete from operationplan where type = 'PO'")
                    key = ContentType.objects.get_for_model(
                        inputmodels.PurchaseOrder, for_concrete_model=False
                    ).pk
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = %s", (key,)
                    )

                if hasDO and not (hasPO and hasMO and hasDeO):
                    models.remove("input.distributionorder")
                    cursor.execute("delete from operationplan where type = 'DO'")
                    key = ContentType.objects.get_for_model(
                        inputmodels.DistributionOrder, for_concrete_model=False
                    ).pk
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = %s", (key,)
                    )

                if hasMO and not (hasPO and hasDO and hasDeO):
                    models.remove("input.manufacturingorder")
                    cursor.execute("delete from operationplan where type = 'MO'")
                    key = ContentType.objects.get_for_model(
                        inputmodels.ManufacturingOrder, for_concrete_model=False
                    ).pk
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = %s", (key,)
                    )

                if hasDeO and not (hasPO and hasDO and hasMO):
                    models.remove("input.deliveryorder")
                    cursor.execute("delete from operationplan where type = 'DLVR'")
                    key = ContentType.objects.get_for_model(
                        inputmodels.DeliveryOrder, for_concrete_model=False
                    ).pk
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = %s", (key,)
                    )

                if (hasPO or hasDO or hasMO or hasDeO) and not (
                    hasPO and hasDO and hasMO and hasDeO
                ):
                    # Keep the database in shape
                    cursor.execute("vacuum analyze")

            models2tables = set()
            admin_log_positive = True
            for m in models:
                try:
                    x = m.split(".", 1)
                    x = apps.get_model(x[0], x[1])
                    if x in EXCLUDE_FROM_BULK_OPERATIONS:
                        continue
                    ContentTypekeys.add(ContentType.objects.get_for_model(x).pk)
                    x = x._meta.db_table
                    if x not in tables:
                        raise Exception("Unknown table")
                    models2tables.add(x)
                except Exception:
                    raise CommandError("Invalid model to erase: %s" % m)
            tables = models2tables
        else:
            admin_log_positive = False
            tables.discard("django_admin_log")
            for i in EXCLUDE_FROM_BULK_OPERATIONS:
                tables.discard(i._meta.db_table)
                ContentTypekeys.add(ContentType.objects.get_for_model(i).pk)

        # Some tables need to be handled a bit special
        if "operationplan" in tables:
            tables.add("operationplanmaterial")
            tables.add("operationplanresource")
            tables.add("out_problem")
        if "resource" in tables and "out_resourceplan" not in tables:
            tables.add("out_resourceplan")
        if "demand" in tables and "out_constraint" not in tables:
            tables.add("out_constraint")
        if "reportmanager_report" in tables and "reportmanager_column" not in tables:
            tables.add("reportmanager_column")
        tables.discard("auth_group_permissions")
        tables.discard("auth_permission")
        tables.discard("auth_group")
        tables.discard("django_session")
        tables.discard("common_user")
        tables.discard("common_user_groups")
        tables.discard("common_user_user_permissions")
        tables.discard("common_preference")
        tables.discard("django_content_type")
        tables.discard("execute_log")
        tables.discard("execute_schedule")
        tables.discard("common_scenario")

        # Delete all records from the tables.
        with transaction.atomic(using=database, savepoint=False):
            if ContentTypekeys:
                if admin_log_positive:
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = any(%s)",
                        (list(ContentTypekeys),),
                    )
                else:
                    cursor.execute(
                        "delete from django_admin_log where content_type_id != any(%s)",
                        (list(ContentTypekeys),),
                    )
            if "common_bucket" in tables:
                cursor.execute("update common_user set horizonbuckets = null")
            for stmt in connections[database].ops.sql_flush(no_style(), tables, []):
                cursor.execute(stmt)

        # Task update
        task.status = "Done"
        task.finished = datetime.now()
        task.processid = None
        task.save(using=database)

    except Exception as e:
        if task:
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
            task.processid = None
            task.save(using=database)
        raise CommandError("%s" % e)

    finally:
        setattr(_thread_locals, "database", None)
def handle(self, **options):
    # Pick up the options
    now = datetime.now()
    if 'database' in options:
        database = options['database'] or DEFAULT_DB_ALIAS
    else:
        database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if 'user' in options and options['user']:
        try:
            user = User.objects.all().using(database).get(username=options['user'])
        except Exception:
            raise CommandError("User '%s' not found" % options['user'])
    else:
        user = None

    timestamp = now.strftime("%Y%m%d%H%M%S")
    if database == DEFAULT_DB_ALIAS:
        logfile = 'frepple-%s.log' % timestamp
    else:
        logfile = 'frepple_%s-%s.log' % (database, timestamp)

    task = None
    try:
        # Initialize the task
        if 'task' in options and options['task']:
            try:
                task = Task.objects.all().using(database).get(pk=options['task'])
            except Exception:
                raise CommandError("Task identifier not found")
            if task.started or task.finished or task.status != "Waiting" \
                    or task.name not in ('runplan', 'frepple_run'):
                raise CommandError("Invalid task identifier")
            task.status = '0%'
            task.started = now
            task.logfile = logfile
        else:
            task = Task(name='runplan', submitted=now, started=now, status='0%',
                        user=user, logfile=logfile)

        # Validate options
        if 'constraint' in options:
            constraint = int(options['constraint'])
            if constraint < 0 or constraint > 15:
                raise ValueError("Invalid constraint: %s" % options['constraint'])
        else:
            constraint = 15
        if 'plantype' in options:
            plantype = int(options['plantype'])
        else:
            plantype = 1

        # Reset environment variables
        # TODO avoid having to delete the environment variables. Use options directly?
        PlanTaskRegistry.autodiscover()
        for i in PlanTaskRegistry.reg:
            if 'env' in options:
                # Options specified
                if i.label and i.label[0] in os.environ:
                    del os.environ[i.label[0]]
            elif i.label:
                # No options specified - default to activate them all
                os.environ[i.label[0]] = '1'

        # Set environment variables
        if options['env']:
            task.arguments = "--constraint=%d --plantype=%d --env=%s" % (constraint, plantype, options['env'])
            for i in options['env'].split(','):
                j = i.split('=')
                if len(j) == 1:
                    os.environ[j[0]] = '1'
                else:
                    os.environ[j[0]] = j[1]
        else:
            task.arguments = "--constraint=%d --plantype=%d" % (constraint, plantype)
        if options['background']:
            task.arguments += " --background"

        # Log task
        # Different from the other tasks, the frepple engine itself will write the processid
        task.save(using=database)

        # Locate commands.py
        import freppledb.common.commands
        cmd = freppledb.common.commands.__file__

        def setlimits():
            import resource
            if settings.MAXMEMORYSIZE:
                resource.setrlimit(
                    resource.RLIMIT_AS,
                    (settings.MAXMEMORYSIZE * 1024 * 1024, (settings.MAXMEMORYSIZE + 10) * 1024 * 1024)
                )
            if settings.MAXCPUTIME:
                resource.setrlimit(
                    resource.RLIMIT_CPU,
                    (settings.MAXCPUTIME, settings.MAXCPUTIME + 5)
                )
            # Limiting the file size is a bit tricky, as this limit not only applies to the log
            # file, but also to temp files during the export
            # if settings.MAXTOTALLOGFILESIZE:
            #     resource.setrlimit(
            #         resource.RLIMIT_FSIZE,
            #         (settings.MAXTOTALLOGFILESIZE * 1024 * 1024, (settings.MAXTOTALLOGFILESIZE + 1) * 1024 * 1024)
            #     )

        # Prepare environment
        os.environ['FREPPLE_PLANTYPE'] = str(plantype)
        os.environ['FREPPLE_CONSTRAINT'] = str(constraint)
        os.environ['FREPPLE_TASKID'] = str(task.id)
        os.environ['FREPPLE_DATABASE'] = database
        os.environ['FREPPLE_LOGFILE'] = logfile
        os.environ['FREPPLE_PROCESSNAME'] = settings.DATABASES[database]['NAME'].replace('demo', '')
        os.environ['PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ['PATH'] + os.pathsep + settings.FREPPLE_APP
        if os.path.isfile(os.path.join(settings.FREPPLE_HOME, 'libfrepple.so')):
            os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
        if 'DJANGO_SETTINGS_MODULE' not in os.environ:
            os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
        os.environ['PYTHONPATH'] = os.path.normpath(settings.FREPPLE_APP)
        libdir = os.path.join(os.path.normpath(settings.FREPPLE_HOME), 'lib')
        if os.path.isdir(libdir):
            # Folders used by the Windows version
            os.environ['PYTHONPATH'] += os.pathsep + libdir
            if os.path.isfile(os.path.join(libdir, 'library.zip')):
                os.environ['PYTHONPATH'] += os.pathsep + os.path.join(libdir, 'library.zip')

        if options['background']:
            # Execute as a background process
            if os.name == 'nt':
                subprocess.Popen(['frepple', cmd], creationflags=0x08000000)
            else:
                subprocess.Popen(['frepple', cmd], preexec_fn=setlimits)
        else:
            # Execute in the foreground
            if os.name == 'nt':
                ret = subprocess.call(['frepple', cmd])
            else:
                ret = subprocess.call(['frepple', cmd], preexec_fn=setlimits)
            if ret != 0 and ret != 2:
                # Return code 0 is a successful run.
                # Return code 2 is a run cancelled by a user. That's shown in the status field.
                raise Exception('Failed with exit code %d' % ret)

        # Reread the task from the database and update it
        task = Task.objects.all().using(database).get(pk=task.id)
        task.processid = None
        task.status = 'Done'
        task.finished = datetime.now()
        task.save(using=database)

    except Exception as e:
        if task:
            task = Task.objects.all().using(database).get(pk=task.id)
            task.status = 'Failed'
            task.message = '%s' % e
            task.finished = datetime.now()
            task.processid = None
            task.save(using=database)
        raise e
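The setlimits helper above relies on the standard resource module together with subprocess's preexec_fn hook. The standalone sketch below shows the same pattern outside the frePPLe context; the limit values and the "sleep" command are illustrative, not taken from the source.

import resource
import subprocess

def limit_child():
    # Cap the child's address space: 1 GiB soft limit, slightly higher hard limit (illustrative values).
    resource.setrlimit(resource.RLIMIT_AS, (1024 * 1024 * 1024, 1034 * 1024 * 1024))
    # Cap CPU time: 600 seconds soft, 605 seconds hard.
    resource.setrlimit(resource.RLIMIT_CPU, (600, 605))

# preexec_fn runs in the child after fork() and before exec(), so the limits
# apply only to the spawned command, not to the calling process (POSIX only).
subprocess.call(["sleep", "1"], preexec_fn=limit_child)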
def handle(self, **options):
    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    # Pick up the options
    start = options["start"]
    end = options["end"]
    weekstart = int(options["weekstart"])
    database = options["database"]
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if options["user"]:
        try:
            user = User.objects.all().using(database).get(username=options["user"])
        except Exception:
            raise CommandError("User '%s' not found" % options["user"])
    else:
        user = None

    now = datetime.now()
    task = None
    try:
        # Initialize the task
        if options["task"]:
            if options["task"] > 0:
                try:
                    task = Task.objects.all().using(database).get(pk=options["task"])
                except Task.DoesNotExist:
                    raise CommandError("Task identifier not found")
                if (
                    task.started
                    or task.finished
                    or task.status != "Waiting"
                    or task.name not in ("frepple_createbuckets", "createbuckets")
                ):
                    raise CommandError("Invalid task identifier")
                task.status = "0%"
                task.started = now
        else:
            task = Task(
                name="createbuckets",
                submitted=now,
                started=now,
                status="0%",
                user=user,
                arguments="--start=%s --end=%s --weekstart=%s" % (start, end, weekstart),
            )
        if task:
            task.processid = os.getpid()
            task.save(using=database)

        # Validate the date arguments
        try:
            curdate = datetime.strptime(start, "%Y-%m-%d")
            enddate = datetime.strptime(end, "%Y-%m-%d")
        except Exception:
            raise CommandError("Date does not match the format YYYY-MM-DD")

        with transaction.atomic(using=database, savepoint=False):
            # Delete previous contents
            with connections[database].cursor() as cursor:
                cursor.execute(
                    "delete from common_bucketdetail where bucket_id in ('year','quarter','month','week','day')"
                )
                cursor.execute(
                    "delete from common_bucket where name in ('year','quarter','month','week','day')"
                )

            # Create buckets
            y = Bucket(name="year", description="Yearly time buckets", level=1)
            q = Bucket(name="quarter", description="Quarterly time buckets", level=2)
            m = Bucket(name="month", description="Monthly time buckets", level=3)
            w = Bucket(name="week", description="Weekly time buckets", level=4)
            d = Bucket(name="day", description="Daily time buckets", level=5)
            y.save(using=database)
            q.save(using=database)
            m.save(using=database)
            w.save(using=database)
            d.save(using=database)

            # Loop over all days in the chosen horizon
            prev_year = None
            prev_quarter = None
            prev_month = None
            prev_week = None
            while curdate < enddate:
                month = int(curdate.strftime("%m"))  # an integer in the range 1 - 12
                quarter = (month - 1) // 3 + 1  # an integer in the range 1 - 4
                year = int(curdate.strftime("%Y"))
                dayofweek = int(curdate.strftime("%w"))  # day of the week, 0 = sunday, 1 = monday, ...
                year_start = datetime(year, 1, 1)
                year_end = datetime(year + 1, 1, 1)
                week_start = curdate - timedelta((dayofweek + 6) % 7 + 1 - weekstart)
                week_end = curdate - timedelta((dayofweek + 6) % 7 - 6 - weekstart)

                # Create buckets
                if year != prev_year:
                    prev_year = year
                    BucketDetail(
                        bucket=y,
                        name=self.formatDate(curdate, options["format_year"]),
                        startdate=year_start,
                        enddate=year_end,
                    ).save(using=database)
                if quarter != prev_quarter:
                    prev_quarter = quarter
                    BucketDetail(
                        bucket=q,
                        name=self.formatDate(curdate, options["format_quarter"]),
                        startdate=date(year, quarter * 3 - 2, 1),
                        enddate=date(
                            year + quarter // 4,
                            quarter * 3 + 1 - 12 * (quarter // 4),
                            1,
                        ),
                    ).save(using=database)
                if month != prev_month:
                    prev_month = month
                    BucketDetail(
                        bucket=m,
                        name=self.formatDate(curdate, options["format_month"]),
                        startdate=date(year, month, 1),
                        enddate=date(year + month // 12, month + 1 - 12 * (month // 12), 1),
                    ).save(using=database)
                if week_start != prev_week:
                    prev_week = week_start
                    # We need to avoid week numbers like 00.
                    # We therefore take the name of the week from the monday included in that week.
                    BucketDetail(
                        bucket=w,
                        name=self.formatDate(
                            week_start + timedelta(days=(7 - week_start.weekday()) % 7),
                            options["format_week"],
                        ),
                        startdate=week_start,
                        enddate=week_end,
                    ).save(using=database)
                BucketDetail(
                    bucket=d,
                    name=self.formatDate(curdate.date(), options["format_day"]),
                    startdate=curdate,
                    enddate=curdate + timedelta(1),
                ).save(using=database)

                # Next date
                curdate = curdate + timedelta(1)

        # Log success
        if task:
            task.status = "Done"
            task.finished = datetime.now()

    except Exception as e:
        if task:
            task.status = "Failed"
            task.message = "%s" % e
            task.finished = datetime.now()
        raise e

    finally:
        if task:
            task.processid = None
            task.save(using=database)
        settings.DEBUG = tmp_debug
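The week boundary arithmetic above is terse; the following worked example, assuming weekstart=1 (weeks starting on Monday) and a Wednesday as the current date, shows what it computes.

from datetime import datetime, timedelta

curdate = datetime(2023, 6, 14)          # a Wednesday
dayofweek = int(curdate.strftime("%w"))  # 3 (0 = Sunday, 1 = Monday, ...)
weekstart = 1                            # weeks start on Monday

week_start = curdate - timedelta((dayofweek + 6) % 7 + 1 - weekstart)
week_end = curdate - timedelta((dayofweek + 6) % 7 - 6 - weekstart)
print(week_start.date(), week_end.date())
# 2023-06-12 2023-06-19: the Monday of the current week and the following Monday,
# which the handler stores as the week bucket's start and end dates.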
def handle(self, **options):
    # Pick up options
    database = options['database']
    if database not in settings.DATABASES:
        raise CommandError("No database settings known for '%s'" % database)
    if options['user']:
        try:
            user = User.objects.all().using(database).get(username=options['user'])
        except Exception:
            raise CommandError("User '%s' not found" % options['user'])
    else:
        user = None
    if options['models']:
        models = options['models'].split(',')
    else:
        models = None

    now = datetime.now()
    task = None
    try:
        # Initialize the task
        if options['task']:
            try:
                task = Task.objects.all().using(database).get(pk=options['task'])
            except Exception:
                raise CommandError("Task identifier not found")
            if task.started or task.finished or task.status != "Waiting" \
                    or task.name not in ('frepple_flush', 'empty'):
                raise CommandError("Invalid task identifier")
            task.status = '0%'
            task.started = now
        else:
            task = Task(name='empty', submitted=now, started=now, status='0%', user=user)
        task.processid = os.getpid()
        task.save(using=database)

        # Create a database connection
        cursor = connections[database].cursor()

        # Get a list of all django tables in the database
        tables = set(connections[database].introspection.django_table_names(only_existing=True))
        ContentTypekeys = set()

        # Validate the user list of tables
        if models:
            models2tables = set()
            admin_log_positive = True
            for m in models:
                try:
                    x = m.split('.', 1)
                    x = apps.get_model(x[0], x[1])
                    if x in EXCLUDE_FROM_BULK_OPERATIONS:
                        continue
                    ContentTypekeys.add(ContentType.objects.get_for_model(x).pk)
                    x = x._meta.db_table
                    if x not in tables:
                        raise Exception("Unknown table")
                    models2tables.add(x)
                except Exception:
                    raise CommandError("Invalid model to erase: %s" % m)
            tables = models2tables
        else:
            admin_log_positive = False
            tables.discard('django_admin_log')
            for i in EXCLUDE_FROM_BULK_OPERATIONS:
                tables.discard(i._meta.db_table)
                ContentTypekeys.add(ContentType.objects.get_for_model(i).pk)

        # Some tables need to be handled a bit special
        if 'operationplan' in tables:
            tables.add('operationplanmaterial')
            tables.add('operationplanresource')
            tables.add('out_problem')
        if 'resource' in tables and 'out_resourceplan' not in tables:
            tables.add('out_resourceplan')
        if 'demand' in tables and 'out_constraint' not in tables:
            tables.add('out_constraint')
        tables.discard('auth_group_permissions')
        tables.discard('auth_permission')
        tables.discard('auth_group')
        tables.discard('django_session')
        tables.discard('common_user')
        tables.discard('common_user_groups')
        tables.discard('common_user_user_permissions')
        tables.discard('common_preference')
        tables.discard('django_content_type')
        tables.discard('execute_log')
        tables.discard('common_scenario')

        # Delete all records from the tables.
        with transaction.atomic(using=database, savepoint=False):
            if ContentTypekeys:
                if admin_log_positive:
                    cursor.execute(
                        "delete from django_admin_log where content_type_id = any(%s)",
                        (list(ContentTypekeys),)
                    )
                else:
                    cursor.execute(
                        "delete from django_admin_log where content_type_id != any(%s)",
                        (list(ContentTypekeys),)
                    )
            if "common_bucket" in tables:
                cursor.execute('update common_user set horizonbuckets = null')
            for stmt in connections[database].ops.sql_flush(no_style(), tables, []):
                cursor.execute(stmt)

        # Erase the operationplans of the requested types, plus their
        # material and resource detail records
        if models:
            if 'input.purchaseorder' in models:
                cursor.execute('''
                    delete from operationplanresource where operationplan_id in
                      (select operationplan.reference from operationplan where type = 'PO')
                    ''')
                cursor.execute('''
                    delete from operationplanmaterial where operationplan_id in
                      (select operationplan.reference from operationplan where type = 'PO')
                    ''')
                cursor.execute("delete from operationplan where type = 'PO'")
                key = ContentType.objects.get_for_model(inputmodels.PurchaseOrder, for_concrete_model=False).pk
                cursor.execute("delete from django_admin_log where content_type_id = %s", (key,))
            if 'input.distributionorder' in models:
                cursor.execute('''
                    delete from operationplanresource where operationplan_id in
                      (select operationplan.reference from operationplan where type = 'DO')
                    ''')
                cursor.execute('''
                    delete from operationplanmaterial where operationplan_id in
                      (select operationplan.reference from operationplan where type = 'DO')
                    ''')
                cursor.execute("delete from operationplan where type = 'DO'")
                key = ContentType.objects.get_for_model(inputmodels.DistributionOrder, for_concrete_model=False).pk
                cursor.execute("delete from django_admin_log where content_type_id = %s", (key,))
            if 'input.manufacturingorder' in models:
                cursor.execute('''
                    delete from operationplanmaterial where operationplan_id in
                      (select operationplan.reference from operationplan where type = 'MO')
                    ''')
                cursor.execute('''
                    delete from operationplanresource where operationplan_id in
                      (select operationplan.reference from operationplan where type = 'MO')
                    ''')
                cursor.execute("delete from operationplan where type = 'MO'")
                key = ContentType.objects.get_for_model(inputmodels.ManufacturingOrder, for_concrete_model=False).pk
                cursor.execute("delete from django_admin_log where content_type_id = %s", (key,))
            if 'input.deliveryorder' in models:
                cursor.execute('''
                    delete from operationplanmaterial where operationplan_id in
                      (select operationplan.reference from operationplan where type = 'DLVR')
                    ''')
                cursor.execute('''
                    delete from operationplanresource where operationplan_id in
                      (select operationplan.reference from operationplan where type = 'DLVR')
                    ''')
                cursor.execute("delete from operationplan where type = 'DLVR'")
                key = ContentType.objects.get_for_model(inputmodels.DeliveryOrder, for_concrete_model=False).pk
                cursor.execute("delete from django_admin_log where content_type_id = %s", (key,))

            # Keep the database in shape.
            # Runs outside the atomic block above, since vacuum cannot run inside a transaction.
            cursor.execute("vacuum analyze")

        # Task update
        task.status = 'Done'
        task.finished = datetime.now()
        task.processid = None
        task.save(using=database)

    except Exception as e:
        if task:
            task.status = 'Failed'
            task.message = '%s' % e
            task.finished = datetime.now()
            task.processid = None
            task.save(using=database)
        raise CommandError('%s' % e)