Example #1
  def handle(self, **options):
    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    # Pick up the options
    if 'verbosity' in options:
      verbosity = int(options['verbosity'])
    else:
      verbosity = 1
    if 'cluster' in options:
      cluster = int(options['cluster'])
    else:
      cluster = 100
    if 'demand' in options:
      demand = int(options['demand'])
    else:
      demand = 30
    if 'forecast_per_item' in options:
      forecast_per_item = int(options['forecast_per_item'])
    else:
      forecast_per_item = 50
    if 'level' in options:
      level = int(options['level'])
    else:
      level = 5
    if 'resource' in options:
      resource = int(options['resource'])
    else:
      resource = 60
    if 'resource_size' in options:
      resource_size = int(options['resource_size'])
    else:
      resource_size = 5
    if 'components' in options:
      components = int(options['components'])
    else:
      components = 200
    if 'components_per' in options:
      components_per = int(options['components_per'])
    else:
      components_per = 5
    if components == 0:
      components_per = 0
    if 'deliver_lt' in options:
      deliver_lt = int(options['deliver_lt'])
    else:
      deliver_lt = 30
    if 'procure_lt' in options:
      procure_lt = int(options['procure_lt'])
    else:
      procure_lt = 40
    if 'currentdate' in options:
      currentdate = options['currentdate'] or datetime.strftime(date.today(), '%Y-%m-%d')
    else:
      currentdate = datetime.strftime(date.today(), '%Y-%m-%d')
    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    random.seed(100)  # Initialize random seed to get reproducible results

    now = datetime.now()
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'generate model':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='generate model', submitted=now, started=now, status='0%', user=user)
      task.arguments = "--cluster=%s --demand=%s --forecast_per_item=%s --level=%s --resource=%s " \
        "--resource_size=%s --components=%s --components_per=%s --deliver_lt=%s --procure_lt=%s" % (
          cluster, demand, forecast_per_item, level, resource,
          resource_size, components, components_per, deliver_lt, procure_lt
        )
      task.save(using=database)
      transaction.commit(using=database)

      # Pick up the startdate
      try:
        startdate = datetime.strptime(currentdate, '%Y-%m-%d')
      except:
        raise CommandError("current date is not matching format YYYY-MM-DD")

      # Check whether the database is empty
      if Buffer.objects.using(database).count() > 0 or Item.objects.using(database).count() > 0:
        raise CommandError("Database must be empty before creating a model")

      # Plan start date
      if verbosity > 0:
        print("Updating current date...")
      param = Parameter.objects.using(database).create(name="currentdate")
      param.value = datetime.strftime(startdate, "%Y-%m-%d %H:%M:%S")
      param.save(using=database)

      # Planning horizon
      # minimum 10 daily buckets, weekly buckets till 40 days after current
      if verbosity > 0:
        print("Updating buckets...")
      management.call_command('frepple_createbuckets', user=user, database=database)
      if verbosity > 0:
        print("Updating horizon telescope...")
      updateTelescope(10, 40, 730, database)
      task.status = '2%'
      task.save(using=database)

      # Weeks calendar
      if verbosity > 0:
        print("Creating weeks calendar...")
      with transaction.atomic(using=database):
        weeks = Calendar.objects.using(database).create(name="Weeks", defaultvalue=0)
        for i in BucketDetail.objects.using(database).filter(bucket="week").all():
          CalendarBucket(
            startdate=i.startdate, enddate=i.enddate, value=1, calendar=weeks
            ).save(using=database)
        task.status = '4%'
        task.save(using=database)

      # Working days calendar
      if verbosity > 0:
        print("Creating working days...")
      with transaction.atomic(using=database):
        workingdays = Calendar.objects.using(database).create(name="Working Days", defaultvalue=0)
        minmax = BucketDetail.objects.using(database).filter(bucket="week").aggregate(Min('startdate'), Max('startdate'))
        CalendarBucket(
          startdate=minmax['startdate__min'], enddate=minmax['startdate__max'],
          value=1, calendar=workingdays, priority=1, saturday=False, sunday=False
          ).save(using=database)
        task.status = '6%'
        task.save(using=database)

      # Create a random list of categories to choose from
      categories = [
        'cat A', 'cat B', 'cat C', 'cat D', 'cat E', 'cat F', 'cat G'
        ]

      # Create customers
      if verbosity > 0:
        print("Creating customers...")
      with transaction.atomic(using=database):
        cust = []
        for i in range(100):
          c = Customer.objects.using(database).create(name='Cust %03d' % i)
          cust.append(c)
        task.status = '8%'
        task.save(using=database)

      # Create resources and their calendars
      if verbosity > 0:
        print("Creating resources and calendars...")
      with transaction.atomic(using=database):
        res = []
        for i in range(resource):
          loc = Location(name='Loc %05d' % int(random.uniform(1, cluster)))
          loc.save(using=database)
          cal = Calendar(name='capacity for res %03d' % i, category='capacity', defaultvalue=0)
          bkt = CalendarBucket(startdate=startdate, value=resource_size, calendar=cal)
          cal.save(using=database)
          bkt.save(using=database)
          r = Resource.objects.using(database).create(
            name='Res %03d' % i, maximum_calendar=cal, location=loc
            )
          res.append(r)
        task.status = '10%'
        task.save(using=database)
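        # Shuffle so the later assignment of res[i] to the first clusters happens
        # in a random order rather than in creation sequence.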
        random.shuffle(res)

      # Create the components
      if verbosity > 0:
        print("Creating raw materials...")
      with transaction.atomic(using=database):
        comps = []
        comploc = Location.objects.using(database).create(name='Procured materials')
        for i in range(components):
          it = Item.objects.using(database).create(
            name='Component %04d' % i,
            category='Procured',
            price=str(round(random.uniform(0, 100)))
            )
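          # Procurement lead time in days, drawn from a normal distribution around procure_lt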
          ld = abs(round(random.normalvariate(procure_lt, procure_lt / 3)))
          c = Buffer.objects.using(database).create(
            name='Component %04d' % i,
            location=comploc,
            category='Procured',
            item=it,
            type='procure',
            min_inventory=20,
            max_inventory=100,
            size_multiple=10,
            leadtime=str(ld * 86400),
            onhand=str(round(forecast_per_item * random.uniform(1, 3) * ld / 30)),
            )
          comps.append(c)
        task.status = '12%'
        task.save(using=database)

      # Loop over all clusters
      durations = [ 86400, 86400 * 2, 86400 * 3, 86400 * 5, 86400 * 6 ]
      progress = 88.0 / cluster
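      # The setup steps above already reported 12% progress; spread the remaining
      # 88% evenly over the clusters (see the status update at the end of each cluster).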
      for i in range(cluster):
        with transaction.atomic(using=database):
          if verbosity > 0:
            print("Creating supply chain for end item %d..." % i)

          # location
          loc = Location.objects.using(database).get_or_create(name='Loc %05d' % i)[0]
          loc.available = workingdays
          loc.save(using=database)

          # Item and delivery operation
          oper = Operation.objects.using(database).create(name='Del %05d' % i, sizemultiple=1, location=loc)
          it = Item.objects.using(database).create(
            name='Itm %05d' % i,
            operation=oper,
            category=random.choice(categories),
            price=str(round(random.uniform(100, 200)))
            )

          # Level 0 buffer
          buf = Buffer.objects.using(database).create(
            name='Buf %05d L00' % i,
            item=it,
            location=loc,
            category='00'
            )
          Flow.objects.using(database).create(operation=oper, thebuffer=buf, quantity=-1)

          # Demand
          for j in range(demand):
            Demand.objects.using(database).create(
              name='Dmd %05d %05d' % (i, j),
              item=it,
              quantity=int(random.uniform(1, 6)),
              # Exponential distribution of due dates, with an average of deliver_lt days.
              due=startdate + timedelta(days=round(random.expovariate(float(1) / deliver_lt / 24)) / 24),
              # Orders have higher priority than forecast
              priority=random.choice([1, 2]),
              customer=random.choice(cust),
              category=random.choice(categories)
              )

          # Create upstream operations and buffers
          ops = []
          for k in range(level):
            if k == 1 and res:
              # Create a resource load for operations on level 1
              oper = Operation.objects.using(database).create(
                name='Oper %05d L%02d' % (i, k),
                type='time_per',
                location=loc,
                duration_per=86400,
                sizemultiple=1,
                )
              if resource < cluster and i < resource:
                # When there are more clusters than resources, try to ensure
                # that each resource is loaded by at least one operation.
                Load.objects.using(database).create(resource=res[i], operation=oper)
              else:
                Load.objects.using(database).create(resource=random.choice(res), operation=oper)
            else:
              oper = Operation.objects.using(database).create(
                name='Oper %05d L%02d' % (i, k),
                duration=random.choice(durations),
                sizemultiple=1,
                location=loc,
                )
            ops.append(oper)
            buf.producing = oper
            # Some inventory in random buffers
            if random.uniform(0, 1) > 0.8:
              buf.onhand = int(random.uniform(5, 20))
            buf.save(using=database)
            Flow(operation=oper, thebuffer=buf, quantity=1, type="end").save(using=database)
            if k != level - 1:
              # Consume from the next level in the bill of material
              buf = Buffer.objects.using(database).create(
                name='Buf %05d L%02d' % (i, k + 1),
                item=it,
                location=loc,
                category='%02d' % (k + 1)
                )
              Flow.objects.using(database).create(operation=oper, thebuffer=buf, quantity=-1)

          # Consume raw materials / components
          c = []
          for j in range(components_per):
            o = random.choice(ops)
            b = random.choice(comps)
            while (o, b) in c:
              # A flow with the same operation and buffer already exists
              o = random.choice(ops)
              b = random.choice(comps)
            c.append( (o, b) )
            Flow.objects.using(database).create(
              operation=o, thebuffer=b,
              quantity=random.choice([-1, -1, -1, -2, -3])
              )

          # Commit the current cluster
          task.status = '%d%%' % (12 + progress * (i + 1))
          task.save(using=database)

      # Task update
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
        task.save(using=database)
      raise e

    finally:
      if task:
        task.save(using=database)
      settings.DEBUG = tmp_debug
Example #2
    def handle(self, *args, **options):
        # Pick up the options
        if "database" in options:
            self.database = options["database"] or DEFAULT_DB_ALIAS
        else:
            self.database = DEFAULT_DB_ALIAS
        if self.database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" % self.database)

        if "user" in options and options["user"]:
            try:
                self.user = User.objects.all().using(self.database).get(username=options["user"])
            except:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            self.user = None

        now = datetime.now()

        task = None
        self.logfile = None
        errors = 0
        try:
            # Initialize the task
            if "task" in options and options["task"]:
                try:
                    task = Task.objects.all().using(self.database).get(pk=options["task"])
                except:
                    raise CommandError("Task identifier not found")
                if task.started or task.finished or task.status != "Waiting" or task.name != "export to folder":
                    raise CommandError("Invalid task identifier")
                task.status = "0%"
                task.started = now
            else:
                task = Task(name="export to folder", submitted=now, started=now, status="0%", user=self.user)
            task.arguments = " ".join(['"%s"' % i for i in args])
            task.save(using=self.database)

            # Execute
            if os.path.isdir(settings.DATABASES[self.database]["FILEUPLOADFOLDER"]):

                # Open the logfile
                self.logfile = open(
                    os.path.join(settings.DATABASES[self.database]["FILEUPLOADFOLDER"], "exporttofolder.log"), "a"
                )
                print("%s Started export to folder\n" % datetime.now(), file=self.logfile)

                # Define our connection string
                conn_string = (
                    "host='localhost' dbname='"
                    + settings.DATABASES[self.database]["NAME"]
                    + "' user='******' password='******'"
                )

                conn = psycopg2.connect(conn_string)

                cursor = conn.cursor()

                task.status = "0%"
                task.save(using=self.database)

                i = 0
                cnt = len(self.statements)
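                # self.statements is expected to be a list of (filename, sqlquery)
                # tuples supplied by the concrete command subclass.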

                for filename, sqlquery in self.statements:
                    print("%s Started export of %s" % (datetime.now(), filename), file=self.logfile)

                    try:
                        csv_datafile = open(
                            os.path.join(settings.DATABASES[self.database]["FILEUPLOADFOLDER"], filename), "w"
                        )

                        cursor.copy_expert(sqlquery, csv_datafile)

                        csv_datafile.close()
                        i += 1

                    except Exception as e:
                        errors += 1
                        print("%s Failed to export to %s" % (datetime.now(), filename), file=self.logfile)
                        if task:
                            task.message = "%s" % e
                        conn = psycopg2.connect(conn_string)
                        cursor = conn.cursor()

                    task.status = str(int(i / cnt * 100)) + "%"
                    task.save(using=self.database)

                conn.close()
                print("%s Exported %s file(s)\n" % (datetime.now(), cnt - errors), file=self.logfile)

            else:
                errors += 1
                print("%s Failed, folder does not exist" % datetime.now(), file=self.logfile)
                task.message = "Destination folder does not exist"
                task.save(using=self.database)

        except Exception as e:
            print("%s Failed" % datetime.now(), file=self.logfile)
            errors += 1
            if task:
                task.message = "%s" % e

        finally:
            if task:
                if not errors:
                    task.status = "100%"
                    task.message = "Exported %s data files" % (cnt)
                else:
                    task.status = "Failed"
                    # task.message = "Exported %s data files, %s failed" % (cnt-errors, errors)
                task.finished = datetime.now()
                task.save(using=self.database)

            if self.logfile:
                print("%s End of export to folder\n" % datetime.now(), file=self.logfile)
                self.logfile.close()
Example #3
    def handle(self, **options):
        # Make sure the debug flag is not set!
        # When it is set, the django database wrapper collects a list of all sql
        # statements executed and their timings. This consumes plenty of memory
        # and cpu time.
        tmp_debug = settings.DEBUG
        settings.DEBUG = False

        # Pick up options
        force = options["force"]
        promote = options["promote"]
        test = "FREPPLE_TEST" in os.environ
        if options["user"]:
            try:
                user = User.objects.all().get(username=options["user"])
            except Exception:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            user = None

        # Synchronize the scenario table with the settings
        Scenario.syncWithSettings()

        # Initialize the task
        source = options["source"]
        try:
            sourcescenario = Scenario.objects.using(DEFAULT_DB_ALIAS).get(
                pk=source)
        except Exception:
            raise CommandError("No source database defined with name '%s'" %
                               source)
        now = datetime.now()
        task = None
        if "task" in options and options["task"]:
            try:
                task = Task.objects.all().using(source).get(pk=options["task"])
            except Exception:
                raise CommandError("Task identifier not found")
            if (task.started or task.finished or task.status != "Waiting"
                    or task.name not in ("frepple_copy", "scenario_copy")):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
        else:
            task = Task(name="scenario_copy",
                        submitted=now,
                        started=now,
                        status="0%",
                        user=user)
        task.processid = os.getpid()
        task.save(using=source)

        # Validate the arguments
        destination = options["destination"]
        destinationscenario = None
        try:
            task.arguments = "%s %s" % (source, destination)
            if options["description"]:
                task.arguments += ' --description="%s"' % options[
                    "description"].replace('"', '\\"')
            if force:
                task.arguments += " --force"
            task.save(using=source)
            try:
                destinationscenario = Scenario.objects.using(
                    DEFAULT_DB_ALIAS).get(pk=destination)
            except Exception:
                raise CommandError(
                    "No destination database defined with name '%s'" %
                    destination)
            if source == destination:
                raise CommandError("Can't copy a schema on itself")
            if sourcescenario.status != "In use":
                raise CommandError("Source scenario is not in use")
            if destinationscenario.status != "Free" and not force and not promote:
                raise CommandError("Destination scenario is not free")
            if promote and (destination != DEFAULT_DB_ALIAS
                            or source == DEFAULT_DB_ALIAS):
                raise CommandError(
                    "Incorrect source or destination database with promote flag"
                )

            # Logging message - always logging in the default database
            destinationscenario.status = "Busy"
            destinationscenario.save(using=DEFAULT_DB_ALIAS)

            # Copying the data
            # Commenting the next line is a little more secure, but requires you to create a .pgpass file.
            if settings.DATABASES[source]["PASSWORD"]:
                os.environ["PGPASSWORD"] = settings.DATABASES[source][
                    "PASSWORD"]
            if os.name == "nt":
                # On windows restoring with pg_restore over a pipe is broken :-(
                cmd = "pg_dump -c -Fp %s%s%s%s%s | psql %s%s%s%s"
            else:
                cmd = "pg_dump -Fc %s%s%s%s%s | pg_restore -n public -Fc -c --if-exists %s%s%s -d %s"
            commandline = cmd % (
                settings.DATABASES[source]["USER"] and
                ("-U %s " % settings.DATABASES[source]["USER"]) or "",
                settings.DATABASES[source]["HOST"] and
                ("-h %s " % settings.DATABASES[source]["HOST"]) or "",
                settings.DATABASES[source]["PORT"] and
                ("-p %s " % settings.DATABASES[source]["PORT"]) or "",
                """
                -T common_user 
                -T common_scenario 
                -T auth_group 
                -T auth_group_permission 
                -T auth_permission 
                -T common_user_groups 
                -T common_user_user_permissions
                -T common_preferences
                -T reportmanager_report
                """ if destination == DEFAULT_DB_ALIAS else "",
                test and settings.DATABASES[source]["TEST"]["NAME"]
                or settings.DATABASES[source]["NAME"],
                settings.DATABASES[destination]["USER"] and
                ("-U %s " % settings.DATABASES[destination]["USER"]) or "",
                settings.DATABASES[destination]["HOST"] and
                ("-h %s " % settings.DATABASES[destination]["HOST"]) or "",
                settings.DATABASES[destination]["PORT"] and
                ("-p %s " % settings.DATABASES[destination]["PORT"]) or "",
                test and settings.DATABASES[destination]["TEST"]["NAME"]
                or settings.DATABASES[destination]["NAME"],
            )
            with subprocess.Popen(
                    commandline,
                    shell=True,
                    stdout=subprocess.DEVNULL,
                    stderr=subprocess.STDOUT,
            ) as p:
                try:
                    task.processid = p.pid
                    task.save(using=source)
                    p.wait()
                except Exception:
                    p.kill()
                    p.wait()
                    # Consider the destination database free again
                    destinationscenario.status = "Free"
                    destinationscenario.lastrefresh = datetime.today()
                    destinationscenario.save(using=DEFAULT_DB_ALIAS)
                    raise Exception("Database copy failed")

            # Update the scenario table
            destinationscenario.status = "In use"
            destinationscenario.lastrefresh = datetime.today()
            if options["description"]:
                destinationscenario.description = options["description"]
            destinationscenario.save(using=DEFAULT_DB_ALIAS)

            # Give access to the destination scenario to:
            #  a) the user doing the copy
            #  b) all superusers from the source schema
            # unless it's a promotion
            if destination != DEFAULT_DB_ALIAS:
                User.objects.using(destination).filter(
                    is_superuser=True).update(is_active=True)
                User.objects.using(destination).filter(
                    is_superuser=False).update(is_active=False)
                if user:
                    User.objects.using(destination).filter(
                        username=user.username).update(is_active=True)

            # Logging message
            task.processid = None
            task.status = "Done"
            task.finished = datetime.now()

            # Update the task in the destination database
            task.message = "Scenario %s from %s" % (
                "promoted" if promote else "copied",
                source,
            )
            task.save(using=destination)
            task.message = "Scenario copied to %s" % destination

            # Delete any waiting tasks in the new copy.
            # This is needed for situations where the same source is copied to
            # multiple destinations at the same moment.
            Task.objects.all().using(destination).filter(
                id__gt=task.id).delete()

        except Exception as e:
            if task:
                task.status = "Failed"
                task.message = "%s" % e
                task.finished = datetime.now()
            if destinationscenario and destinationscenario.status == "Busy":
                destinationscenario.status = "Free"
                destinationscenario.save(using=DEFAULT_DB_ALIAS)
            raise e

        finally:
            if task:
                task.processid = None
                task.save(using=source)
            settings.DEBUG = tmp_debug
Example #4
  def handle(self, **options):
    # Pick up the options
    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    task = None
    param = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'plan simulation':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='plan simulation', submitted=now, started=now, status='0%', user=user)

      # Validate options
      if 'horizon' in options:
        horizon = int(options['horizon'])
        if horizon < 0:
          raise ValueError("Invalid horizon: %s" % options['horizon'])
        task.arguments = "--horizon=%d" % horizon
      else:
        horizon = 60
      if 'step' in options:
        step = int(options['step'])
        if step < 0:
          raise ValueError("Invalid step: %s" % options['step'])
        task.arguments = "--step=%d" % step
      else:
        step = 1
      if 'verbosity' in options:
        verbosity = int(options['verbosity'])
      else:
        verbosity = 0

      # Log task
      task.save(using=database)

      # Load the initial status
      if options.get('initial', None):
        if verbosity > 0:
          print("Erasing simulation database")
        management.call_command('frepple_flush', database=database, verbosity=verbosity)
        if verbosity > 0:
          print("Loading initial data")
        management.call_command('loaddata', options.get('initial'), database=database, verbosity=verbosity)

      # Get current date
      param = Parameter.objects.all().using(database).get_or_create(name='currentdate')[0]
      try:
        curdate = datetime.strptime(param.value, "%Y-%m-%d %H:%M:%S")
      except:
        curdate = datetime.now()
      curdate = curdate.date()

      # Compute how many simulation steps we need
      bckt_list = []
      tmp = 0
      while tmp <= horizon:
        bckt_list.append( curdate + timedelta(days=tmp) )
        tmp += step
      bckt_list_len = len(bckt_list)

      # Create the simulator class
      if options.get('simulator', None):
        cls = load_class(options['simulator'])
        simulator = cls(database=database, verbosity=verbosity)
      else:
        simulator = Simulator(database=database, verbosity=verbosity)
      simulator.buckets = 1

      # Loop over all dates in the simulation horizon
      idx = 0
      strt = None
      nd = None
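      # strt/nd form a sliding window over consecutive bucket dates: the first
      # iteration only initializes nd, each following iteration simulates the
      # interval from strt to nd.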
      for bckt in bckt_list:
        if nd:
          strt = nd
          nd = bckt
        else:
          nd = bckt
          continue

        # Start message
        task.status = "%.0f%%" % (100.0 * idx / bckt_list_len)
        task.message = 'Simulating bucket from %s to %s ' % (strt, nd)
        task.save(using=database)
        idx += 1
        simulator.buckets += 1

        if verbosity > 0:
          print("\nStart simulating bucket from %s to %s (%s out of %s)" % (strt, nd, idx, bckt_list_len))

        # Update currentdate parameter
        param.value = strt.strftime("%Y-%m-%d %H:%M:%S")
        param.save(using=database)

        # Initialization of the bucket
        if verbosity > 1:
          print("  Starting the bucket")
        with transaction.atomic(using=database):
          simulator.start_bucket(strt, nd)

        # Generate new demand records
        if verbosity > 1:
          print("  Receive new orders from customers")
        with transaction.atomic(using=database):
          simulator.generate_customer_demand(strt, nd)

        # Generate the constrained plan
        if verbosity > 1:
          print("  Generating plan...")
        management.call_command('frepple_run', database=database)

        if options['pause']:
          print("\nYou can analyze the plan in the bucket in the user interface now...")
          input("\nPress Enter to continue the simulation...\n")

        # Release new purchase orders
        if verbosity > 1:
          print("  Create new purchase orders")
        with transaction.atomic(using=database):
          simulator.create_purchase_orders(strt, nd)

        # Release new manufacturing orders
        if verbosity > 1:
          print("  Create new manufacturing orders")
        with transaction.atomic(using=database):
          simulator.create_manufacturing_orders(strt, nd)

        # Release new distribution orders
        if verbosity > 1:
          print("  Create new distribution orders")
        with transaction.atomic(using=database):
          simulator.create_distribution_orders(strt, nd)

        # Receive open purchase orders
        if verbosity > 1:
          print("  Receive open purchase orders")
        with transaction.atomic(using=database):
          simulator.receive_purchase_orders(strt, nd)

        # Receive open distribution orders
        if verbosity > 1:
          print("  Receive open distribution orders")
        with transaction.atomic(using=database):
          simulator.receive_distribution_orders(strt, nd)

        # Finish open manufacturing orders
        if verbosity > 1:
          print("  Finish open manufacturing orders")
        with transaction.atomic(using=database):
          simulator.finish_manufacturing_orders(strt, nd)

        # Ship demand to customers
        if verbosity > 1:
          print("  Ship orders to customers")
        with transaction.atomic(using=database):
          simulator.ship_customer_demand(strt, nd)

        # Finish of the bucket
        if verbosity > 1:
          print("  Ending the bucket")
        with transaction.atomic(using=database):
          simulator.end_bucket(strt, nd)

      # Report statistics from the simulation.
      # The simulator class collected these results during its run.
      if verbosity > 1:
        print("Displaying final simulation metrics")
      with transaction.atomic(using=database):
        simulator.show_metrics()

      # Task update
      task.status = 'Done'
      task.message = "Simulated from %s till %s" % (bckt_list[0], bckt_list[-1])
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      # Final task status
      if task:
        task.save(using=database)
Example #5
    def handle(self, *args, **options):

        # Pick up the options
        if 'database' in options:
            database = options['database'] or DEFAULT_DB_ALIAS
        else:
            database = DEFAULT_DB_ALIAS
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               database)
        if 'user' in options and options['user']:
            try:
                user = User.objects.all().using(database).get(
                    username=options['user'])
            except:
                raise CommandError("User '%s' not found" % options['user'])
        else:
            user = None

        now = datetime.now()
        task = None
        try:
            # Initialize the task
            if 'task' in options and options['task']:
                try:
                    task = Task.objects.all().using(database).get(
                        pk=options['task'])
                except:
                    raise CommandError("Task identifier not found")
                if task.started or task.finished or task.status != "Waiting" or task.name != 'restore database':
                    raise CommandError("Invalid task identifier")
                task.status = '0%'
                task.started = now
            else:
                task = Task(name='restore database',
                            submitted=now,
                            started=now,
                            status='0%',
                            user=user)
            task.arguments = args and args[0] or None
            task.save(using=database)

            # Validate options
            if not args:
                raise CommandError("No dump file specified")
            if not os.path.isfile(
                    os.path.join(settings.FREPPLE_LOGDIR, args[0])):
                raise CommandError("Dump file not found")

            # Run the restore command
            # Commenting the next line is a little more secure, but requires you to create a .pgpass file.
            if settings.DATABASES[database]['PASSWORD']:
                os.environ['PGPASSWORD'] = settings.DATABASES[database][
                    'PASSWORD']
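            # Build the psql command line; the dump file is fed in through the
            # shell's input redirection appended below.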
            cmd = [
                "psql",
            ]
            if settings.DATABASES[database]['USER']:
                cmd.append("--username=%s" %
                           settings.DATABASES[database]['USER'])
            if settings.DATABASES[database]['HOST']:
                cmd.append("--host=%s" % settings.DATABASES[database]['HOST'])
            if settings.DATABASES[database]['PORT']:
                cmd.append("--port=%s " % settings.DATABASES[database]['PORT'])
            cmd.append(settings.DATABASES[database]['NAME'])
            cmd.append('<%s' % os.path.abspath(
                os.path.join(settings.FREPPLE_LOGDIR, args[0])))
            # Shell needs to be True in order to interpret the < character.
            # Join the arguments into a single command string so the shell runs
            # the full command rather than just "psql".
            ret = subprocess.call(" ".join(cmd), shell=True)
            if ret:
                raise Exception("Run of psql failed")

            # Task update
            # We need to recreate a new task record, since the previous one is lost during the restoration.
            task = Task(name='restore database',
                        submitted=task.submitted,
                        started=task.started,
                        arguments=task.arguments,
                        status='Done',
                        finished=datetime.now(),
                        user=task.user)

        except Exception as e:
            if task:
                task.status = 'Failed'
                task.message = '%s' % e
                task.finished = datetime.now()
            raise e

        finally:
            # Commit it all, even in case of exceptions
            if task:
                task.save(using=database)
Example #6
    def handle(self, *args, **options):
        # Pick up the options
        now = datetime.now()
        self.database = options["database"]
        if self.database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               self.database)

        if options["user"]:
            try:
                self.user = (User.objects.all().using(
                    self.database).get(username=options["user"]))
            except Exception:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            self.user = None

        timestamp = now.strftime("%Y%m%d%H%M%S")
        if self.database == DEFAULT_DB_ALIAS:
            logfile = "exporttofolder-%s.log" % timestamp
        else:
            logfile = "exporttofolder_%s-%s.log" % (self.database, timestamp)

        try:
            handler = logging.FileHandler(os.path.join(settings.FREPPLE_LOGDIR,
                                                       logfile),
                                          encoding="utf-8")
            # handler.setFormatter(logging.Formatter(settings.LOGGING['formatters']['simple']['format']))
            logger.addHandler(handler)
            logger.propagate = False
        except Exception as e:
            print("%s Failed to open logfile %s: %s" %
                  (datetime.now().replace(microsecond=0), logfile, e))

        task = None
        errors = 0
        try:
            # Initialize the task
            setattr(_thread_locals, "database", self.database)
            if options["task"]:
                try:
                    task = (Task.objects.all().using(
                        self.database).get(pk=options["task"]))
                except Exception:
                    raise CommandError("Task identifier not found")
                if (task.started or task.finished or task.status != "Waiting"
                        or task.name
                        not in ("frepple_exporttofolder", "exporttofolder")):
                    raise CommandError("Invalid task identifier")
                task.status = "0%"
                task.started = now
                task.logfile = logfile
            else:
                task = Task(
                    name="exporttofolder",
                    submitted=now,
                    started=now,
                    status="0%",
                    user=self.user,
                    logfile=logfile,
                )
            task.arguments = " ".join(['"%s"' % i for i in args])
            task.processid = os.getpid()
            task.save(using=self.database)

            # Execute
            if os.path.isdir(
                    settings.DATABASES[self.database]["FILEUPLOADFOLDER"]):
                if not os.path.isdir(
                        os.path.join(
                            settings.DATABASES[self.database]
                            ["FILEUPLOADFOLDER"], "export")):
                    try:
                        os.makedirs(
                            os.path.join(
                                settings.DATABASES[self.database]
                                ["FILEUPLOADFOLDER"],
                                "export",
                            ))
                    except OSError as exception:
                        if exception.errno != errno.EEXIST:
                            raise

                logger.info("%s Started export to folder" %
                            datetime.now().replace(microsecond=0))

                cursor = connections[self.database].cursor()

                task.status = "0%"
                task.save(using=self.database)

                i = 0
                cnt = len(self.statements)
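                # self.statements is expected to hold one export configuration dict per
                # output file, with a "filename", a "folder" and either a "report" class
                # or a raw "sql" statement (see the loop below).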

                # Calling all the pre-sql statements
                idx = 1
                for stmt in self.pre_sql_statements:
                    try:
                        logger.info(
                            "%s Executing pre-statement %s" %
                            (datetime.now().replace(microsecond=0), idx))
                        cursor.execute(stmt)
                        if cursor.rowcount > 0:
                            logger.info("%s %s record(s) modified" % (
                                datetime.now().replace(microsecond=0),
                                cursor.rowcount,
                            ))
                    except Exception:
                        errors += 1
                        logger.error(
                            "%s An error occurred when executing statement %s"
                            % (datetime.now().replace(microsecond=0), idx))
                    idx += 1

                for cfg in self.statements:
                    # Validate filename
                    filename = cfg.get("filename", None)
                    if not filename:
                        raise Exception(
                            "Missing filename in export configuration")
                    folder = cfg.get("folder", None)
                    if not folder:
                        raise Exception(
                            "Missing folder in export configuration for %s" %
                            filename)

                    # Report progress
                    logger.info(
                        "%s Started export of %s" %
                        (datetime.now().replace(microsecond=0), filename))
                    if task:
                        task.message = "Exporting %s" % filename
                        task.save(using=self.database)

                    # Make sure export folder exists
                    exportFolder = os.path.join(
                        settings.DATABASES[self.database]["FILEUPLOADFOLDER"],
                        folder)
                    if not os.path.isdir(exportFolder):
                        os.makedirs(exportFolder)

                    try:
                        reportclass = cfg.get("report", None)
                        sql = cfg.get("sql", None)
                        if reportclass:
                            # Export from report class

                            # Create a dummy request
                            factory = RequestFactory()
                            request = factory.get("/dummy/",
                                                  cfg.get("data", {}))
                            if self.user:
                                request.user = self.user
                            else:
                                request.user = User.objects.all().get(
                                    username="******")
                            request.database = self.database
                            request.LANGUAGE_CODE = settings.LANGUAGE_CODE
                            request.prefs = cfg.get("prefs", None)

                            # Initialize the report
                            if hasattr(reportclass, "initialize"):
                                reportclass.initialize(request)
                            if hasattr(reportclass, "rows"):
                                if callable(reportclass.rows):
                                    request.rows = reportclass.rows(request)
                                else:
                                    request.rows = reportclass.rows
                            if hasattr(reportclass, "crosses"):
                                if callable(reportclass.crosses):
                                    request.crosses = reportclass.crosses(
                                        request)
                                else:
                                    request.crosses = reportclass.crosses
                            if reportclass.hasTimeBuckets:
                                reportclass.getBuckets(request)

                            # Write the report file
                            datafile = open(
                                os.path.join(exportFolder, filename), "wb")
                            if filename.endswith(".xlsx"):
                                reportclass._generate_spreadsheet_data(
                                    request, [request.database], datafile,
                                    **cfg.get("data", {}))
                            elif filename.endswith(".csv"):
                                for r in reportclass._generate_csv_data(
                                        request, [request.database],
                                        **cfg.get("data", {})):
                                    datafile.write(
                                        r.encode(settings.CSV_CHARSET)
                                        if isinstance(r, str) else r)
                            else:
                                raise Exception(
                                    "Unknown output format for %s" % filename)
                        elif sql:
                            # Exporting using SQL
                            if filename.lower().endswith(".gz"):
                                datafile = gzip.open(
                                    os.path.join(exportFolder, filename), "w")
                            else:
                                datafile = open(
                                    os.path.join(exportFolder, filename), "w")
                            cursor.copy_expert(sql, datafile)
                        else:
                            raise Exception("Unknown export type for %s" %
                                            filename)
                        datafile.close()
                        i += 1

                    except Exception as e:
                        errors += 1
                        logger.error("%s Failed to export to %s: %s" %
                                     (datetime.now().replace(microsecond=0),
                                      filename, e))
                        if task:
                            task.message = "Failed to export %s" % filename

                    task.status = str(int(i / cnt * 100)) + "%"
                    task.save(using=self.database)

                logger.info(
                    "%s Exported %s file(s)" %
                    (datetime.now().replace(microsecond=0), cnt - errors))

                idx = 1
                for stmt in self.post_sql_statements:
                    try:
                        logger.info(
                            "%s Executing post-statement %s" %
                            (datetime.now().replace(microsecond=0), idx))
                        cursor.execute(stmt)
                        if cursor.rowcount > 0:
                            logger.info("%s %s record(s) modified" % (
                                datetime.now().replace(microsecond=0),
                                cursor.rowcount,
                            ))
                    except Exception:
                        errors += 1
                        logger.error(
                            "%s An error occured when executing statement %s" %
                            (datetime.now().replace(microsecond=0), idx))
                    idx += 1

            else:
                errors += 1
                logger.error("%s Failed, folder does not exist" %
                             datetime.now().replace(microsecond=0))
                task.message = "Destination folder does not exist"
                task.save(using=self.database)

        except Exception as e:
            logger.error("%s Failed to export: %s" %
                         (datetime.now().replace(microsecond=0), e))
            errors += 1
            if task:
                task.message = "Failed to export"

        finally:
            logger.info("%s End of export to folder\n" %
                        datetime.now().replace(microsecond=0))
            if task:
                if not errors:
                    task.status = "100%"
                    task.message = "Exported %s data files" % (cnt)
                else:
                    task.status = "Failed"
                    #  task.message = "Exported %s data files, %s failed" % (cnt-errors, errors)
                task.finished = datetime.now()
                task.processid = None
                task.save(using=self.database)
            setattr(_thread_locals, "database", None)
Example #7
  def handle(self, **options):

    # Pick up the options
    database = options['database']
    if database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    task = None
    try:
      # Initialize the task
      if options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name not in ('frepple_restore', 'restore'):
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='restore', submitted=now, started=now, status='0%', user=user)
      task.arguments = options['dump']
      task.processid = os.getpid()
      task.save(using=database)

      # Validate options
      dumpfile = os.path.abspath(os.path.join(settings.FREPPLE_LOGDIR, options['dump']))
      if not os.path.isfile(dumpfile):
        raise CommandError("Dump file not found")

      # Run the restore command
      # Commenting the next line is a little more secure, but requires you to create a .pgpass file.
      if settings.DATABASES[database]['PASSWORD']:
        os.environ['PGPASSWORD'] = settings.DATABASES[database]['PASSWORD']
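      # Restore only the "public" schema from a custom-format dump, dropping
      # existing objects first (-c --if-exists).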
      cmd = [ "pg_restore", "-n", "public", "-Fc", "-c", "--if-exists" ]
      if settings.DATABASES[database]['USER']:
        cmd.append("--username=%s" % settings.DATABASES[database]['USER'])
      if settings.DATABASES[database]['HOST']:
        cmd.append("--host=%s" % settings.DATABASES[database]['HOST'])
      if settings.DATABASES[database]['PORT']:
        cmd.append("--port=%s " % settings.DATABASES[database]['PORT'])
      cmd.append("-d")
      cmd.append(settings.DATABASES[database]['NAME'])
      cmd.append('<%s' % dumpfile)
      # Shell needs to be True in order to interpret the < character
      # Join the arguments into a single string so the shell runs the full command
      with subprocess.Popen(" ".join(cmd), shell=True) as p:
        try:
          task.processid = p.pid
          task.save(using=database)
          p.wait()
        except:
          p.kill()
          p.wait()
          raise Exception("Database restoration failed")

      # Task update
      # We need to recreate a new task record, since the previous one is lost during the restoration.
      task = Task(
        name='restore', submitted=task.submitted, started=task.started,
        arguments=task.arguments, status='Done', finished=datetime.now(),
        user=task.user
        )

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      # Commit it all, even in case of exceptions
      if task:
        task.processid = None
        task.save(using=database)
Example #8
    def handle(self, *args, **options):
        # Pick up the options
        if 'database' in options:
            self.database = options['database'] or DEFAULT_DB_ALIAS
        else:
            self.database = DEFAULT_DB_ALIAS
        if self.database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               self.database)

        if 'user' in options and options['user']:
            try:
                self.user = User.objects.all().using(
                    self.database).get(username=options['user'])
            except:
                raise CommandError("User '%s' not found" % options['user'])
        else:
            self.user = None

        now = datetime.now()

        task = None
        self.logfile = None
        errors = 0
        try:
            # Initialize the task
            if 'task' in options and options['task']:
                try:
                    task = Task.objects.all().using(
                        self.database).get(pk=options['task'])
                except:
                    raise CommandError("Task identifier not found")
                if task.started or task.finished or task.status != "Waiting" or task.name != 'export to folder':
                    raise CommandError("Invalid task identifier")
                task.status = '0%'
                task.started = now
            else:
                task = Task(name='export to folder',
                            submitted=now,
                            started=now,
                            status='0%',
                            user=self.user)
            task.arguments = ' '.join(['"%s"' % i for i in args])
            task.save(using=self.database)

            # Execute
            if os.path.isdir(
                    settings.DATABASES[self.database]['FILEUPLOADFOLDER']):

                # Open the logfile
                self.logfile = open(
                    os.path.join(
                        settings.DATABASES[self.database]['FILEUPLOADFOLDER'],
                        'exporttofolder.log'), "a")
                print("%s Started export to folder\n" % datetime.now(),
                      file=self.logfile)

                # Define our connection string
                conn_string = (
                    "host='localhost' dbname='"
                    + settings.DATABASES[self.database]['NAME']
                    + "' user='******' password='******'"
                )

                conn = psycopg2.connect(conn_string)

                cursor = conn.cursor()

                task.status = '0%'
                task.save(using=self.database)

                i = 0
                cnt = len(self.statements)

                for filename, sqlquery in self.statements:
                    print("%s Started export of %s" %
                          (datetime.now(), filename),
                          file=self.logfile)

                    try:
                        csv_datafile = open(
                            os.path.join(
                                settings.DATABASES[self.database]
                                ['FILEUPLOADFOLDER'], filename), "w")

                        cursor.copy_expert(sqlquery, csv_datafile)

                        csv_datafile.close()
                        i += 1

                    except Exception as e:
                        errors += 1
                        print("%s Failed to export to %s" %
                              (datetime.now(), filename),
                              file=self.logfile)
                        if task:
                            task.message = '%s' % e
                        conn = psycopg2.connect(conn_string)
                        cursor = conn.cursor()

                    task.status = str(int(i / cnt * 100)) + '%'
                    task.save(using=self.database)

                conn.close()
                print("%s Exported %s file(s)\n" %
                      (datetime.now(), cnt - errors),
                      file=self.logfile)

            else:
                errors += 1
                print("%s Failed, folder does not exist" % datetime.now(),
                      file=self.logfile)
                task.message = "Destination folder does not exist"
                task.save(using=self.database)

        except Exception as e:
            print("%s Failed" % datetime.now(), file=self.logfile)
            errors += 1
            if task:
                task.message = '%s' % e

        finally:
            if task:
                if not errors:
                    task.status = '100%'
                    task.message = "Exported %s data files" % (cnt)
                else:
                    task.status = 'Failed'
                    #task.message = "Exported %s data files, %s failed" % (cnt-errors, errors)
                task.finished = datetime.now()
                task.save(using=self.database)

            if self.logfile:
                print('%s End of export to folder\n' % datetime.now(),
                      file=self.logfile)
                self.logfile.close()
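
The heart of the export loop above is psycopg2's copy_expert, which streams the result of a COPY ... TO STDOUT statement straight into an open file. A minimal standalone sketch of that pattern; the connection parameters, table and column names are placeholders, not taken from the example:

import psycopg2

# All connection details below are placeholders -- adapt them to your own settings.
conn = psycopg2.connect(host="localhost", dbname="frepple", user="frepple", password="frepple")
try:
    with conn.cursor() as cursor, open("demand.csv", "w") as csv_datafile:
        # COPY ... TO STDOUT WITH CSV HEADER writes the query result as CSV
        # into the file object handed to copy_expert.
        cursor.copy_expert(
            "COPY (SELECT name, quantity, due FROM demand) TO STDOUT WITH CSV HEADER",
            csv_datafile)
finally:
    conn.close()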
Beispiel #9
0
  def handle(self, **options):
    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    # Pick up options
    force = options['force']
    test = 'FREPPLE_TEST' in os.environ
    if options['user']:
      try:
        user = User.objects.all().get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    # Synchronize the scenario table with the settings
    Scenario.syncWithSettings()

    # Initialize the task
    source = options['source']
    try:
      sourcescenario = Scenario.objects.using(DEFAULT_DB_ALIAS).get(pk=source)
    except:
      raise CommandError("No source database defined with name '%s'" % source)
    now = datetime.now()
    task = None
    if 'task' in options and options['task']:
      try:
        task = Task.objects.all().using(source).get(pk=options['task'])
      except:
        raise CommandError("Task identifier not found")
      if task.started or task.finished or task.status != "Waiting" or task.name not in ('frepple_copy', 'scenario_copy'):
        raise CommandError("Invalid task identifier")
      task.status = '0%'
      task.started = now
    else:
      task = Task(name='scenario_copy', submitted=now, started=now, status='0%', user=user)
    task.processid = os.getpid()
    task.save(using=source)

    # Validate the arguments
    destination = options['destination']
    destinationscenario = None
    try:
      task.arguments = "%s %s" % (source, destination)
      if options['description']:
        task.arguments += ' --description="%s"' % options['description'].replace('"', '\\"')
      if force:
        task.arguments += " --force"
      task.save(using=source)
      try:
        destinationscenario = Scenario.objects.using(DEFAULT_DB_ALIAS).get(pk=destination)
      except:
        raise CommandError("No destination database defined with name '%s'" % destination)
      if source == destination:
        raise CommandError("Can't copy a schema on itself")
      if settings.DATABASES[source]['ENGINE'] != settings.DATABASES[destination]['ENGINE']:
        raise CommandError("Source and destination scenarios have a different engine")
      if sourcescenario.status != 'In use':
        raise CommandError("Source scenario is not in use")
      if destinationscenario.status != 'Free' and not force:
        raise CommandError("Destination scenario is not free")

      # Logging message - always logging in the default database
      destinationscenario.status = 'Busy'
      destinationscenario.save(using=DEFAULT_DB_ALIAS)

      # Copying the data
      # Commenting the next line is a little more secure, but requires you to create a .pgpass file.
      if settings.DATABASES[source]['PASSWORD']:
        os.environ['PGPASSWORD'] = settings.DATABASES[source]['PASSWORD']
      if os.name == 'nt':
        # On windows restoring with pg_restore over a pipe is broken :-(
        cmd = "pg_dump -c -Fp %s%s%s%s | psql %s%s%s%s"
      else:
        cmd = "pg_dump -Fc %s%s%s%s | pg_restore -n public -Fc -c --if-exists %s%s%s -d %s"
      commandline = cmd % (
        settings.DATABASES[source]['USER'] and ("-U %s " % settings.DATABASES[source]['USER']) or '',
        settings.DATABASES[source]['HOST'] and ("-h %s " % settings.DATABASES[source]['HOST']) or '',
        settings.DATABASES[source]['PORT'] and ("-p %s " % settings.DATABASES[source]['PORT']) or '',
        test and settings.DATABASES[source]['TEST']['NAME'] or settings.DATABASES[source]['NAME'],
        settings.DATABASES[destination]['USER'] and ("-U %s " % settings.DATABASES[destination]['USER']) or '',
        settings.DATABASES[destination]['HOST'] and ("-h %s " % settings.DATABASES[destination]['HOST']) or '',
        settings.DATABASES[destination]['PORT'] and ("-p %s " % settings.DATABASES[destination]['PORT']) or '',
        test and settings.DATABASES[destination]['TEST']['NAME'] or settings.DATABASES[destination]['NAME'],
        )
      with subprocess.Popen(commandline, shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT) as p:
        try:
          task.processid = p.pid
          task.save(using=source)
          p.wait()
        except:
          p.kill()
          p.wait()
          # Consider the destination database free again
          destinationscenario.status = 'Free'
          destinationscenario.lastrefresh = datetime.today()
          destinationscenario.save(using=DEFAULT_DB_ALIAS)
          raise Exception("Database copy failed")

      # Update the scenario table
      destinationscenario.status = 'In use'
      destinationscenario.lastrefresh = datetime.today()
      if 'description' in options:
        destinationscenario.description = options['description']
      destinationscenario.save(using=DEFAULT_DB_ALIAS)

      # Give access to the destination scenario to:
      #  a) the user doing the copy
      #  b) all superusers from the source schema
      User.objects.using(destination).filter(is_superuser=True).update(is_active=True)
      User.objects.using(destination).filter(is_superuser=False).update(is_active=False)
      if user:
        User.objects.using(destination).filter(username=user.username).update(is_active=True)

      # Logging message
      task.processid = None
      task.status = 'Done'
      task.finished = datetime.now()

      # Update the task in the destination database
      task.message = "Scenario copied from %s" % source
      task.save(using=destination)
      task.message = "Scenario copied to %s" % destination

      # Delete any waiting tasks in the new copy.
      # This is needed for situations where the same source is copied to
      # multiple destinations at the same moment.
      Task.objects.all().using(destination).filter(id__gt=task.id).delete()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      if destinationscenario and destinationscenario.status == 'Busy':
        destinationscenario.status = 'Free'
        destinationscenario.save(using=DEFAULT_DB_ALIAS)
      raise e

    finally:
      if task:
        task.processid = None
        task.save(using=source)
      settings.DEBUG = tmp_debug
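
The scenario copy above shells out to pg_dump piped into pg_restore and passes the password through the PGPASSWORD environment variable. A minimal sketch of that pipeline in isolation; the database names and the password are placeholders:

import os
import subprocess

source_db = "frepple"         # placeholder source database name
destination_db = "scenario1"  # placeholder destination database name

# pg_dump/pg_restore read the password from PGPASSWORD; pass a private copy of the
# environment instead of mutating os.environ.
env = dict(os.environ, PGPASSWORD="secret")  # placeholder password
cmd = "pg_dump -Fc %s | pg_restore -n public -c --if-exists -d %s" % (source_db, destination_db)

# shell=True is required because the command line contains a pipe.
with subprocess.Popen(cmd, shell=True, env=env,
                      stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT) as p:
    p.wait()
if p.returncode:
    raise Exception("Database copy failed")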
Beispiel #10
0
  def handle(self, **options):
    # Pick up the options
    now = datetime.now()
    self.database = options['database']
    if self.database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % self.database )
    if options['user']:
      try:
        self.user = User.objects.all().using(self.database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      self.user = None
    timestamp = now.strftime("%Y%m%d%H%M%S")
    if self.database == DEFAULT_DB_ALIAS:
      logfile = 'importworkbook-%s.log' % timestamp
    else:
      logfile = 'importworkbook_%s-%s.log' % (self.database, timestamp)

    task = None
    try:
      setattr(_thread_locals, 'database', self.database)
      # Initialize the task
      if options['task']:
        try:
          task = Task.objects.all().using(self.database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name not in ('frepple_importworkbook', 'importworkbook'):
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='importworkbook', submitted=now, started=now, status='0%', user=self.user)
      task.arguments = ' '.join(options['file'])
      task.save(using=self.database)

      all_models = [ (ct.model_class(), ct.pk) for ct in ContentType.objects.all() if ct.model_class() ]
      try:
        with transaction.atomic(using=self.database):
          # Find all models in the workbook
          for file in options['file']:
            wb = load_workbook(filename=file, read_only=True, data_only=True)
            models = []
            for ws_name in wb.sheetnames:
              # Find the model
              model = None
              contenttype_id = None
              for m, ct in all_models:
                if matchesModelName(ws_name, m):
                  model = m
                  contenttype_id = ct
                  break
              if not model or model in EXCLUDE_FROM_BULK_OPERATIONS:
                print(force_text(_("Ignoring data in worksheet: %s") % ws_name))
                # yield '<div class="alert alert-warning">' + force_text(_("Ignoring data in worksheet: %s") % ws_name) + '</div>'
              elif not self.user.has_perm('%s.%s' % (model._meta.app_label, get_permission_codename('add', model._meta))):
                # Check permissions
                print(force_text(_("You don't permissions to add: %s") % ws_name))
                # yield '<div class="alert alert-danger">' + force_text(_("You don't permissions to add: %s") % ws_name) + '</div>'
              else:
                deps = set([model])
                GridReport.dependent_models(model, deps)
                models.append( (ws_name, model, contenttype_id, deps) )

            # Sort the list of models, based on dependencies between models
            models = GridReport.sort_models(models)
            print('197----', models)
            # Process all rows in each worksheet
            for ws_name, model, contenttype_id, dependencies in models:
              print(force_text(_("Processing data in worksheet: %s") % ws_name))
              # yield '<strong>' + force_text(_("Processing data in worksheet: %s") % ws_name) + '</strong><br>'
              # yield ('<div class="table-responsive">'
                     # '<table class="table table-condensed" style="white-space: nowrap;"><tbody>')
              numerrors = 0
              numwarnings = 0
              firsterror = True
              ws = wb[ws_name]
              for error in parseExcelWorksheet(model, ws, user=self.user, database=self.database, ping=True):
                if error[0] == DEBUG:
                  # Yield some result so we can detect disconnect clients and interrupt the upload
                  # yield ' '
                  continue
                if firsterror and error[0] in (ERROR, WARNING):
                  print('%s %s %s %s %s%s%s' % (
                    capfirst(_("worksheet")), capfirst(_("row")),
                    capfirst(_("field")), capfirst(_("value")),
                    capfirst(_("error")), " / ", capfirst(_("warning"))
                    ))
                  # yield '<tr><th class="sr-only">%s</th><th>%s</th><th>%s</th><th>%s</th><th>%s%s%s</th></tr>' % (
                  #   capfirst(_("worksheet")), capfirst(_("row")),
                  #   capfirst(_("field")), capfirst(_("value")),
                  #   capfirst(_("error")), " / ", capfirst(_("warning"))
                  #   )
                  firsterror = False
                if error[0] == ERROR:
                  print('%s %s %s %s %s: %s' % (
                    ws_name,
                    error[1] if error[1] else '',
                    error[2] if error[2] else '',
                    error[3] if error[3] else '',
                    capfirst(_('error')),
                    error[4]
                    ))
                  # yield '<tr><td class="sr-only">%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s: %s</td></tr>' % (
                  #   ws_name,
                  #   error[1] if error[1] else '',
                  #   error[2] if error[2] else '',
                  #   error[3] if error[3] else '',
                  #   capfirst(_('error')),
                  #   error[4]
                  #   )
                  numerrors += 1
                elif error[0] == WARNING:
                  print('%s %s %s %s %s: %s' % (
                    ws_name,
                    error[1] if error[1] else '',
                    error[2] if error[2] else '',
                    error[3] if error[3] else '',
                    capfirst(_('warning')),
                    error[4]
                    ))
                  # yield '<tr><td class="sr-only">%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s: %s</td></tr>' % (
                  #   ws_name,
                  #   error[1] if error[1] else '',
                  #   error[2] if error[2] else '',
                  #   error[3] if error[3] else '',
                  #   capfirst(_('warning')),
                  #   error[4]
                  #   )
                  numwarnings += 1
                else:
                  print('%s %s %s %s %s %s' % (
                    "danger" if numerrors > 0 else 'success',
                    ws_name,
                    error[1] if error[1] else '',
                    error[2] if error[2] else '',
                    error[3] if error[3] else '',
                    error[4]
                    ))
              #     yield '<tr class=%s><td class="sr-only">%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td></tr>' % (
              #       "danger" if numerrors > 0 else 'success',
              #       ws_name,
              #       error[1] if error[1] else '',
              #       error[2] if error[2] else '',
              #       error[3] if error[3] else '',
              #       error[4]
              #       )
              # yield '</tbody></table></div>'
            print('%s' % _("Done"))
            # yield '<div><strong>%s</strong></div>' % _("Done")
      except GeneratorExit:
        logger.warning('Connection Aborted')
    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      setattr(_thread_locals, 'database', None)
      if task:
        task.save(using=self.database)

    return _("Done")
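
The workbook import walks every worksheet of an .xlsx file with openpyxl and tries to match each sheet name to a Django model. The openpyxl part of that, stripped of the frePPLe-specific matching; the file name is a placeholder:

from openpyxl import load_workbook

# read_only streams the file instead of loading it into memory;
# data_only returns cached formula results rather than the formulas themselves.
wb = load_workbook(filename="workbook.xlsx", read_only=True, data_only=True)
for ws_name in wb.sheetnames:
    ws = wb[ws_name]
    for row in ws.iter_rows(values_only=True):
        # Each row is a tuple of cell values; the first row normally carries the headers.
        print(ws_name, row)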
Beispiel #11
0
def LaunchTask(request, action):
  # Allow only post
  if request.method != 'POST':
    raise Http404('Only post requests allowed')

  # Parse the posted parameters as arguments for an asynchronous task to add to the queue.    TODO MAKE MODULAR WITH SEPARATE TASK CLASS
  worker_database = request.database
  try:
    now = datetime.now()
    # A
    if action == 'generate plan':
      constraint = 0
      for value in request.POST.getlist('constraint'):
        try: constraint += int(value)
        except: pass
      task = Task(name='generate plan', submitted=now, status='Waiting', user=request.user)
      task.arguments = "--constraint=%s --plantype=%s" % (constraint, request.POST.get('plantype'))
      task.save(using=request.database)
      # Update the session object   TODO REPLACE WITH PREFERENCE INFO
      request.session['plantype'] = request.POST.get('plantype')
      request.session['constraint'] = constraint
    # B
    elif action == 'generate model':
      task = Task(name='generate model', submitted=now, status='Waiting', user=request.user)
      task.arguments = "--cluster=%s --demand=%s --forecast_per_item=%s --level=%s --resource=%s " \
        "--resource_size=%s --components=%s --components_per=%s --deliver_lt=%s --procure_lt=%s" % (
        request.POST['clusters'], request.POST['demands'], request.POST['fcst'], request.POST['levels'],
        request.POST['rsrc_number'], request.POST['rsrc_size'], request.POST['components'],
        request.POST['components_per'], request.POST['deliver_lt'], request.POST['procure_lt']
        )
      task.save(using=request.database)
    # C
    elif action == 'empty database':
      task = Task(name='empty database', submitted=now, status='Waiting', user=request.user)
      task.save(using=request.database)
    # D
    elif action == 'load dataset':
      task = Task(name='load dataset', submitted=now, status='Waiting', user=request.user, arguments=request.POST['datafile'])
      task.save(using=request.database)
    # E
    elif action == 'manage scenarios':
      worker_database = DEFAULT_DB_ALIAS
      if 'copy' in request.POST:
        source = request.POST.get('source', DEFAULT_DB_ALIAS)
        for sc in Scenario.objects.all():
          if request.POST.get(sc.name,'off') == 'on' and sc.status == u'Free':
            task = Task(name='copy scenario', submitted=now, status='Waiting', user=request.user, arguments="%s %s" % (source, sc.name))
            task.save()
      elif 'release' in request.POST:
        # Note: release is immediate and synchronous.
        for sc in Scenario.objects.all():
          if request.POST.get(sc.name,'off') == u'on' and sc.status != u'Free':
            sc.status = u'Free'
            sc.lastrefresh = now
            sc.save()
            if request.database == sc.name:
              # Erasing the database that is currently selected.
              request.prefix = ''
      elif 'update' in request.POST:
        # Note: update is immediate and synchronous.
        for sc in Scenario.objects.all():
          if request.POST.get(sc.name, 'off') == 'on':
            sc.description = request.POST.get('description',None)
            sc.save()
      else:
        raise Http404('Invalid scenario task')
    # F
    elif action == 'backup database':
      task = Task(name='backup database', submitted=now, status='Waiting', user=request.user)
      task.save(using=request.database)
    # G
    elif action == 'generate buckets':
      task = Task(name='generate buckets', submitted=now, status='Waiting', user=request.user)
      task.arguments = "--start=%s --end=%s --weekstart=%s" % (
        request.POST['start'], request.POST['end'], request.POST['weekstart']
        )
      task.save(using=request.database)
    # H
    elif action == 'exportworkbook':
      return exportWorkbook(request)
    # I
    elif action == 'importworkbook':
      return importWorkbook(request)
    # J
    elif action == 'openbravo_import' and 'freppledb.openbravo' in settings.INSTALLED_APPS:
      task = Task(name='Openbravo import', submitted=now, status='Waiting', user=request.user)
      task.arguments = "--delta=%s" % request.POST['delta']
      task.save(using=request.database)
    # K
    elif action == 'openbravo_export' and 'freppledb.openbravo' in settings.INSTALLED_APPS:
      task = Task(name='Openbravo export', submitted=now, status='Waiting', user=request.user)
      task.save(using=request.database)
    # L
    elif action == 'openerp_import' and 'freppledb.openerp' in settings.INSTALLED_APPS:
      task = Task(name='OpenERP import', submitted=now, status='Waiting', user=request.user)
      task.arguments = "--delta=%s" % request.POST['delta']
      task.save(using=request.database)
    # M
    elif action == 'openerp_export' and 'freppledb.openerp' in settings.INSTALLED_APPS:
      task = Task(name='OpenERP export', submitted=now, status='Waiting', user=request.user)
      task.save(using=request.database)
    else:
      # Task not recognized
      raise Http404('Invalid launching task')

    # Launch a worker process
    if not checkActive(worker_database):
      if os.path.isfile(os.path.join(settings.FREPPLE_APP,"frepplectl.py")):
        # Development layout
        Popen([
          sys.executable, # Python executable
          os.path.join(settings.FREPPLE_APP,"frepplectl.py"),
          "frepple_runworker",
          "--database=%s" % worker_database
          ])
      elif sys.executable.find('freppleserver.exe') >= 0:
        # Py2exe executable
        Popen([
          sys.executable.replace('freppleserver.exe','frepplectl.exe'), # frepplectl executable
          "frepple_runworker",
          "--database=%s" % worker_database
          ], creationflags=0x08000000) # Do not create a console window
      else:
        # Linux standard installation
        Popen([
          "frepplectl",
          "frepple_runworker",
          "--database=%s" % worker_database
          ])

    # Task created successfully
    return HttpResponseRedirect('%s/execute/' % request.prefix)
  except Exception as e:
    messages.add_message(request, messages.ERROR,
        force_unicode(_('Failure launching action: %(msg)s') % {'msg':e}))
    return HttpResponseRedirect('%s/execute/' % request.prefix)
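
In the 'generate plan' branch the view sums the posted constraint checkboxes into a single bit mask before storing it as a command argument. The same accumulation as a tiny standalone function; the sample values are made up:

def constraint_mask(values):
    # Each checkbox posts one bit of the mask (e.g. 1, 2, 4, 8);
    # non-numeric values are silently ignored, mirroring the view above.
    constraint = 0
    for value in values:
        try:
            constraint += int(value)
        except (TypeError, ValueError):
            pass
    return constraint

print(constraint_mask(["1", "4", "8", "oops"]))  # prints 13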
Beispiel #12
0
    def handle(self, *args, **options):

        # Pick up the options
        if 'database' in options:
            database = options['database'] or DEFAULT_DB_ALIAS
        else:
            database = DEFAULT_DB_ALIAS
        if not database in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               database)
        if 'user' in options and options['user']:
            try:
                user = User.objects.all().using(database).get(
                    username=options['user'])
            except:
                raise CommandError("User '%s' not found" % options['user'])
        else:
            user = None

        now = datetime.now()
        transaction.enter_transaction_management(using=database)
        task = None
        try:
            # Initialize the task
            if 'task' in options and options['task']:
                try:
                    task = Task.objects.all().using(database).get(
                        pk=options['task'])
                except:
                    raise CommandError("Task identifier not found")
                if task.started or task.finished or task.status != "Waiting" or task.name != 'restore database':
                    raise CommandError("Invalid task identifier")
                task.status = '0%'
                task.started = now
            else:
                task = Task(name='restore database',
                            submitted=now,
                            started=now,
                            status='0%',
                            user=user)
            task.arguments = args and args[0] or None
            task.save(using=database)
            transaction.commit(using=database)

            # Validate options
            if not args:
                raise CommandError("No dump file specified")
            if not os.path.isfile(
                    os.path.join(settings.FREPPLE_LOGDIR, args[0])):
                raise CommandError("Dump file not found")

            # Run the restore command
            if settings.DATABASES[database][
                    'ENGINE'] == 'django.db.backends.sqlite3':
                # SQLITE
                shutil.copy2(
                    os.path.abspath(
                        os.path.join(settings.FREPPLE_LOGDIR, args[0])),
                    settings.DATABASES[database]['NAME'])
            elif settings.DATABASES[database][
                    'ENGINE'] == 'django.db.backends.mysql':
                # MYSQL
                cmd = [
                    'mysql',
                    '--password=%s' % settings.DATABASES[database]['PASSWORD'],
                    '--user=%s' % settings.DATABASES[database]['USER']
                ]
                if settings.DATABASES[database]['HOST']:
                    cmd.append("--host=%s " %
                               settings.DATABASES[database]['HOST'])
                if settings.DATABASES[database]['PORT']:
                    cmd.append("--port=%s " %
                               settings.DATABASES[database]['PORT'])
                cmd.append(settings.DATABASES[database]['NAME'])
                cmd.append('<%s' % os.path.abspath(
                    os.path.join(settings.FREPPLE_LOGDIR, args[0])))
                ret = subprocess.call(
                    cmd, shell=True
                )  # Shell needs to be True in order to interpret the < character
                if ret: raise Exception("Run of mysql failed")
            elif settings.DATABASES[database][
                    'ENGINE'] == 'django.db.backends.oracle':
                # ORACLE
                if settings.DATABASES[database]['HOST'] and settings.DATABASES[
                        database]['PORT']:
                    # The setting 'NAME' contains the SID name
                    dsn = "%s/%s@//%s:%s/%s" % (
                        settings.DATABASES[database]['USER'],
                        settings.DATABASES[database]['PASSWORD'],
                        settings.DATABASES[database]['HOST'],
                        settings.DATABASES[database]['PORT'],
                        settings.DATABASES[database]['NAME'])
                else:
                    # The setting 'NAME' contains the TNS name
                    dsn = "%s/%s@%s" % (
                        settings.DATABASES[database]['USER'],
                        settings.DATABASES[database]['PASSWORD'],
                        settings.DATABASES[database]['NAME'])
                cmd = [
                    "impdp", dsn, "table_exists_action=replace", "nologfile=Y",
                    "directory=frepple_logdir",
                    "dumpfile=%s" % args[0]
                ]
                ret = subprocess.call(cmd)
                if ret: raise Exception("Run of impdp failed")
            elif settings.DATABASES[database][
                    'ENGINE'] == 'django.db.backends.postgresql_psycopg2':
                # POSTGRESQL
                cmd = [
                    "psql",
                    '--username=%s' % settings.DATABASES[database]['USER']
                ]
                if settings.DATABASES[database]['HOST']:
                    cmd.append("--host=%s" %
                               settings.DATABASES[database]['HOST'])
                if settings.DATABASES[database]['PORT']:
                    cmd.append("--port=%s " %
                               settings.DATABASES[database]['PORT'])
                cmd.append(settings.DATABASES[database]['NAME'])
                cmd.append('<%s' % os.path.abspath(
                    os.path.join(settings.FREPPLE_LOGDIR, args[0])))
                ret = subprocess.call(
                    cmd, shell=True
                )  # Shell needs to be True in order to interpret the < character
                if ret: raise Exception("Run of run psql failed")
            else:
                raise Exception(
                    'Database backup command not supported for engine %s' %
                    settings.DATABASES[database]['ENGINE'])

            # Task update
            # We need to recreate a new task record, since the previous one is lost during the restoration.
            task = Task(name='restore database',
                        submitted=task.submitted,
                        started=task.started,
                        arguments=task.arguments,
                        status='Done',
                        finished=datetime.now(),
                        user=task.user)

        except Exception as e:
            if task:
                task.status = 'Failed'
                task.message = '%s' % e
                task.finished = datetime.now()
            raise e

        finally:
            # Commit it all, even in case of exceptions
            if task: task.save(using=database)
            try:
                transaction.commit(using=database)
            except:
                pass
            transaction.leave_transaction_management(using=database)
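
The restore command above relies on the shell to interpret the '<' redirection, which is why it passes shell=True. An alternative sketch for the PostgreSQL branch that feeds the dump file to psql through stdin instead, avoiding the shell entirely; the path and connection details are placeholders, not taken from the example:

import subprocess

dumpfile = "/var/log/frepple/backup.dump"                            # placeholder path
cmd = ["psql", "--username=frepple", "--host=localhost", "frepple"]  # placeholder connection details

with open(dumpfile, "rb") as f:
    # Handing the file object to stdin replaces the '<dumpfile' shell redirection.
    ret = subprocess.call(cmd, stdin=f)
if ret:
    raise Exception("Run of psql failed")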
Beispiel #13
0
  def handle(self, *args, **options):
    # Pick up the options
    if 'database' in options:
      self.database = options['database'] or DEFAULT_DB_ALIAS
    else:
      self.database = DEFAULT_DB_ALIAS
    if self.database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % self.database )
    if 'user' in options and options['user']:
      try:
        self.user = User.objects.all().using(self.database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      self.user = None

    now = datetime.now()

    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(self.database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'load from folder':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='load from folder', submitted=now, started=now, status='0%', user=self.user)
      task.arguments = ' '.join(['"%s"' % i for i in args])
      task.save(using=self.database)
      
      # Choose the right self.delimiter and language
      self.delimiter = get_format('DECIMAL_SEPARATOR', settings.LANGUAGE_CODE, True) == ',' and ';' or ','
      translation.activate(settings.LANGUAGE_CODE)
      
      # Execute        
      filestoupload = list()
      if os.path.isdir(settings.DATABASES[self.database]['FILEUPLOADFOLDER']):
        thisfolder = settings.DATABASES[self.database]['FILEUPLOADFOLDER']
        for fileindir in os.listdir(settings.DATABASES[self.database]['FILEUPLOADFOLDER']):
          if fileindir.endswith('.csv'):
            filestoupload.append(fileindir)
            #filestoupload.append([file,strftime("%Y-%m-%d %H:%M:%S",localtime(os.stat(os.path.join(thisfolder, file)).st_mtime)),sizeof_fmt(os.stat(os.path.join(thisfolder, file)).st_size, 'B')])
        
        all_models = [ (ct.model_class(), ct.pk) for ct in ContentType.objects.all() if ct.model_class() ]
        models = []
        for ifile in filestoupload:
          
          filename0 = ifile.split('.')[0]
          
          model = None
          contenttype_id = None
          for m, ct in all_models:
            if filename0.lower() in (m._meta.model_name.lower(), m._meta.verbose_name.lower(), m._meta.verbose_name_plural.lower()):
              model = m
              contenttype_id = ct
              break
            
          if not model or model in EXCLUDE_FROM_BULK_OPERATIONS:
            print("Ignoring data in file: %s" % ifile)
          elif self.user is not None and not self.user.has_perm('%s.%s' % (model._meta.app_label, get_permission_codename('add', model._meta))):
            # Check permissions
            print("You don't have permissions to add: %s" % ifile)
          else:
            deps = set([model])
            GridReport.dependent_models(model, deps)
            models.append( (ifile, model, contenttype_id, deps) )

        # Sort the list of models, based on dependencies between models
        cnt = len(models)
        ok = False
        while not ok:
          ok = True
          for i in range(cnt):
            for j in range(i + 1, cnt):
              if models[i][1] in models[j][3]:
                # Model i depends on the later model j, so the current ordering
                # is not correct yet. We move element i to the end of the list.
                models.append(models.pop(i))
                ok = False

        for ifile, model, contenttype_id, dependencies in models:
          
          print("Processing data in file: %s" % ifile)
          rownum = 0
          has_pk_field = False
          headers = []
          uploadform = None
          changed = 0
          added = 0
          numerrors = 0
          
          #Will the permissions have to be checked table by table?
          permname = get_permission_codename('add', model._meta)
          if self.user is not None and not self.user.has_perm('%s.%s' % (model._meta.app_label, permname)):
            print('Permission denied')
            return
          

          filetoparse=os.path.join(os.path.abspath(thisfolder), ifile)
          self.parseCSVloadfromfolder(model, filetoparse)
            
      # Task update
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      if task:
        task.save(using=self.database)
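
The upload order matters because some tables reference others, so the loop above keeps moving a model towards the back of the list as long as a later entry still lists it as a dependent. A self-contained sketch of that reordering, using plain names instead of Django model classes:

def sort_by_dependencies(models):
    # models is a list of (name, dependents) tuples, where dependents holds the
    # names that must be loaded after this entry.
    models = list(models)
    cnt = len(models)
    ok = False
    while not ok:
        ok = True
        for i in range(cnt):
            for j in range(i + 1, cnt):
                if models[i][0] in models[j][1]:
                    # Entry i belongs after entry j: push it to the end and retry.
                    models.append(models.pop(i))
                    ok = False
    return models

# "operationmaterial" is listed as a dependent of "item", so "item" ends up first.
print(sort_by_dependencies([("operationmaterial", set()), ("item", {"operationmaterial"})]))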
Beispiel #14
0
  def handle(self, **options):
    # Pick up the options
    database = options['database']
    if database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    task = None
    try:
      # Initialize the task
      if options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name not in ('frepple_loadxml', 'loadxml'):
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='loadxml', submitted=now, started=now, status='0%', user=user)
      task.arguments = ' '.join(options['file'])
      task.processid = os.getpid()
      task.save(using=database)

      # Execute
      # TODO: if frePPLe is available as a module, we don't really need to spawn another process.
      os.environ['FREPPLE_HOME'] = settings.FREPPLE_HOME.replace('\\', '\\\\')
      os.environ['FREPPLE_APP'] = settings.FREPPLE_APP
      os.environ['FREPPLE_DATABASE'] = database
      os.environ['PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ['PATH'] + os.pathsep + settings.FREPPLE_APP
      os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
      if 'DJANGO_SETTINGS_MODULE' not in os.environ:
        os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
      if os.path.exists(os.path.join(os.environ['FREPPLE_HOME'], 'python36.zip')):
        # For the py2exe executable
        os.environ['PYTHONPATH'] = os.path.join(
          os.environ['FREPPLE_HOME'],
          'python%d%d.zip' % (sys.version_info[0], sys.version_info[1])
          ) + os.pathsep + os.path.normpath(os.environ['FREPPLE_APP'])
      else:
        # Other executables
        os.environ['PYTHONPATH'] = os.path.normpath(os.environ['FREPPLE_APP'])
      cmdline = [ '"%s"' % i for i in options['file'] ]
      cmdline.insert(0, 'frepple')
      cmdline.append( '"%s"' % os.path.join(settings.FREPPLE_APP, 'freppledb', 'execute', 'loadxml.py') )
      proc = subprocess.run(' '.join(cmdline), shell=True)
      if proc.returncode:
        raise Exception('Exit code of the batch run is %d' % proc.returncode)

      # Task update
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      if task:
        task.processid = None
        task.save(using=database)
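
Before spawning the frepple executable the command extends PATH and PYTHONPATH so the engine can find both its binaries and the Django application. A condensed sketch of that environment preparation which builds a private copy instead of mutating os.environ; all paths are placeholders, and the frepple executable and script name are only illustrative:

import os
import subprocess

FREPPLE_HOME = "/usr/lib/frepple"    # placeholder engine install folder
FREPPLE_APP = "/usr/share/frepple"   # placeholder Django application folder

env = dict(
    os.environ,
    FREPPLE_HOME=FREPPLE_HOME,
    FREPPLE_APP=FREPPLE_APP,
    FREPPLE_DATABASE="default",
    PATH=FREPPLE_HOME + os.pathsep + os.environ["PATH"] + os.pathsep + FREPPLE_APP,
    LD_LIBRARY_PATH=FREPPLE_HOME,
    PYTHONPATH=os.path.normpath(FREPPLE_APP),
)
env.setdefault("DJANGO_SETTINGS_MODULE", "freppledb.settings")

# Run the engine with the prepared environment.
ret = subprocess.call(["frepple", "loadxml.py"], env=env)
if ret:
    raise Exception("Exit code of the batch run is %d" % ret)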
Beispiel #15
0
    def handle(self, **options):
        # Pick up the options
        now = datetime.now()

        if 'database' in options:
            database = options['database'] or DEFAULT_DB_ALIAS
        else:
            database = DEFAULT_DB_ALIAS
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               database)
        if 'user' in options and options['user']:
            try:
                user = User.objects.all().using(database).get(
                    username=options['user'])
            except:
                raise CommandError("User '%s' not found" % options['user'])
        else:
            user = None

        timestamp = now.strftime("%Y%m%d%H%M%S")
        if database == DEFAULT_DB_ALIAS:
            logfile = 'frepple-%s.log' % timestamp
        else:
            logfile = 'frepple_%s-%s.log' % (database, timestamp)

        task = None
        try:
            # Initialize the task
            if 'task' in options and options['task']:
                try:
                    task = Task.objects.all().using(database).get(
                        pk=options['task'])
                except:
                    raise CommandError("Task identifier not found")
                if task.started or task.finished or task.status != "Waiting" or task.name != 'frepple_run':
                    raise CommandError("Invalid task identifier")
                task.status = '0%'
                task.started = now
                task.logfile = logfile
            else:
                task = Task(name='frepple_run',
                            submitted=now,
                            started=now,
                            status='0%',
                            user=user,
                            logfile=logfile)

            # Validate options
            if 'constraint' in options:
                constraint = int(options['constraint'])
                if constraint < 0 or constraint > 15:
                    raise ValueError("Invalid constraint: %s" %
                                     options['constraint'])
            else:
                constraint = 15
            if 'plantype' in options:
                plantype = int(options['plantype'])
            else:
                plantype = 1

            # Reset environment variables
            # TODO avoid having to delete the environment variables. Use options directly?
            PlanTaskRegistry.autodiscover()
            for i in PlanTaskRegistry.reg:
                if 'env' in options:
                    # Options specified
                    if i.label and i.label[0] in os.environ:
                        del os.environ[i.label[0]]
                elif i.label:
                    # No options specified - default to activate them all
                    os.environ[i.label[0]] = '1'

            # Set environment variables
            if options['env']:
                task.arguments = "--constraint=%d --plantype=%d --env=%s" % (
                    constraint, plantype, options['env'])
                for i in options['env'].split(','):
                    j = i.split('=')
                    if len(j) == 1:
                        os.environ[j[0]] = '1'
                    else:
                        os.environ[j[0]] = j[1]
            else:
                task.arguments = "--constraint=%d --plantype=%d" % (constraint,
                                                                    plantype)
            if options['background']:
                task.arguments += " --background"

            # Log task
            task.save(using=database)

            # Locate commands.py
            import freppledb.common.commands
            cmd = freppledb.common.commands.__file__

            # Prepare environment
            os.environ['FREPPLE_PLANTYPE'] = str(plantype)
            os.environ['FREPPLE_CONSTRAINT'] = str(constraint)
            os.environ['FREPPLE_TASKID'] = str(task.id)
            os.environ['FREPPLE_DATABASE'] = database
            os.environ['FREPPLE_LOGFILE'] = logfile
            os.environ[
                'PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ[
                    'PATH'] + os.pathsep + settings.FREPPLE_APP
            if os.path.isfile(
                    os.path.join(settings.FREPPLE_HOME, 'libfrepple.so')):
                os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
            if 'DJANGO_SETTINGS_MODULE' not in os.environ:
                os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
            os.environ['PYTHONPATH'] = os.path.normpath(settings.FREPPLE_APP)

            if options['background']:
                # Execute as background process on Windows
                if os.name == 'nt':
                    subprocess.Popen(['frepple', cmd],
                                     creationflags=0x08000000)
                else:
                    # Execute as background process on Linux
                    subprocess.Popen(['frepple', cmd])
            else:
                # Execute in foreground
                ret = subprocess.call(['frepple', cmd])
                if ret != 0 and ret != 2:
                    # Return code 0 is a successful run
                    # Return code is 2 is a run cancelled by a user. That's shown in the status field.
                    raise Exception('Failed with exit code %d' % ret)

                # Task update
                task.status = 'Done'
                task.finished = datetime.now()

        except Exception as e:
            if task:
                task.status = 'Failed'
                task.message = '%s' % e
                task.finished = datetime.now()
            raise e

        finally:
            if task:
                task.save(using=database)
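
The --env option above accepts a comma-separated list of name or name=value pairs and turns each entry into an environment variable (a bare name defaults to '1'). That parsing step on its own, applied to a throwaway dict for demonstration:

import os

def apply_env_option(env_option, environ=os.environ):
    # "supply,fcst=3" -> supply=1 and fcst=3 in the target environment.
    for i in env_option.split(','):
        j = i.split('=')
        if len(j) == 1:
            environ[j[0]] = '1'
        else:
            environ[j[0]] = j[1]

demo = {}
apply_env_option("supply,fcst=3", demo)
print(demo)  # {'supply': '1', 'fcst': '3'}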
Beispiel #16
0
    def handle(self, *args, **options):
        # Pick up the options
        if 'database' in options:
            self.database = options['database'] or DEFAULT_DB_ALIAS
        else:
            self.database = DEFAULT_DB_ALIAS
        if self.database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               self.database)
        if 'user' in options and options['user']:
            try:
                self.user = User.objects.all().using(
                    self.database).get(username=options['user'])
            except:
                raise CommandError("User '%s' not found" % options['user'])
        else:
            self.user = None

        now = datetime.now()

        task = None
        self.logfile = None
        try:
            # Initialize the task
            if 'task' in options and options['task']:
                try:
                    task = Task.objects.all().using(
                        self.database).get(pk=options['task'])
                except:
                    raise CommandError("Task identifier not found")
                if task.started or task.finished or task.status != "Waiting" or task.name != 'import from folder':
                    raise CommandError("Invalid task identifier")
                task.status = '0%'
                task.started = now
            else:
                task = Task(name='import from folder',
                            submitted=now,
                            started=now,
                            status='0%',
                            user=self.user)
            task.arguments = ' '.join(['"%s"' % i for i in args])
            task.save(using=self.database)

            # Choose the right self.delimiter and language
            self.delimiter = get_format('DECIMAL_SEPARATOR',
                                        settings.LANGUAGE_CODE,
                                        True) == ',' and ';' or ','
            translation.activate(settings.LANGUAGE_CODE)

            # Execute
            errors = 0
            if os.path.isdir(
                    settings.DATABASES[self.database]['FILEUPLOADFOLDER']):

                # Open the logfile
                self.logfile = open(
                    os.path.join(
                        settings.DATABASES[self.database]['FILEUPLOADFOLDER'],
                        'importfromfolder.log'), "a")
                print("%s Started import from folder\n" % datetime.now(),
                      file=self.logfile)

                all_models = [(ct.model_class(), ct.pk)
                              for ct in ContentType.objects.all()
                              if ct.model_class()]
                models = []
                for ifile in os.listdir(
                        settings.DATABASES[self.database]['FILEUPLOADFOLDER']):
                    if not ifile.endswith('.csv'):
                        continue
                    filename0 = ifile.split('.')[0]

                    model = None
                    contenttype_id = None
                    for m, ct in all_models:
                        if filename0.lower() in (
                                m._meta.model_name.lower(),
                                m._meta.verbose_name.lower(),
                                m._meta.verbose_name_plural.lower()):
                            model = m
                            contenttype_id = ct
                            print("%s Matched a model to file: %s" %
                                  (datetime.now(), ifile),
                                  file=self.logfile)
                            break

                    if not model or model in EXCLUDE_FROM_BULK_OPERATIONS:
                        print("%s Ignoring data in file: %s" %
                              (datetime.now(), ifile),
                              file=self.logfile)
                    elif self.user and not self.user.has_perm(
                            '%s.%s' %
                        (model._meta.app_label,
                         get_permission_codename('add', model._meta))):
                        # Check permissions
                        print("%s You don't have permissions to add: %s" %
                              (datetime.now(), ifile),
                              file=self.logfile)
                    else:
                        deps = set([model])
                        GridReport.dependent_models(model, deps)

                        models.append((ifile, model, contenttype_id, deps))

                # Sort the list of models, based on dependencies between models
                models = GridReport.sort_models(models)

                i = 0
                cnt = len(models)
                for ifile, model, contenttype_id, dependencies in models:
                    i += 1
                    print("%s Started processing data in file: %s" %
                          (datetime.now(), ifile),
                          file=self.logfile)
                    filetoparse = os.path.join(
                        os.path.abspath(settings.DATABASES[self.database]
                                        ['FILEUPLOADFOLDER']), ifile)
                    errors += self.parseCSVloadfromfolder(model, filetoparse)
                    print("%s Finished processing data in file: %s\n" %
                          (datetime.now(), ifile),
                          file=self.logfile)
                    task.status = str(int(10 + i / cnt * 80)) + '%'
                    task.save(using=self.database)

            else:
                errors += 1
                cnt = 0
                print("%s Failed, folder does not exist" % datetime.now(),
                      file=self.logfile)

            # Task update
            if errors:
                task.status = 'Failed'
                if not cnt:
                    task.message = "Destination folder does not exist"
                else:
                    task.message = "Uploaded %s data files with %s errors" % (
                        cnt, errors)
            else:
                task.status = 'Done'
                task.message = "Uploaded %s data files" % cnt
            task.finished = datetime.now()

        except Exception as e:
            print("%s Failed" % datetime.now(), file=self.logfile)
            if task:
                task.status = 'Failed'
                task.message = '%s' % e
            raise e

        finally:
            if task:
                if not errors:
                    task.status = '100%'
                else:
                    task.status = 'Failed'
            task.finished = datetime.now()
            task.save(using=self.database)
            if self.logfile:
                print('%s End of import from folder\n' % datetime.now(),
                      file=self.logfile)
                self.logfile.close()
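
Each CSV file is matched to a Django model by comparing the base file name against the model's internal name and its verbose names. The matching step in isolation, assuming a configured Django project; the example file name is made up:

from django.contrib.contenttypes.models import ContentType

def match_model(filename):
    base = filename.split('.')[0].lower()
    for ct in ContentType.objects.all():
        model = ct.model_class()
        if not model:
            continue
        if base in (model._meta.model_name.lower(),
                    model._meta.verbose_name.lower(),
                    model._meta.verbose_name_plural.lower()):
            return model, ct.pk
    return None, None

# match_model("items.csv") returns the matching model class and its content type id,
# provided some installed app defines a model whose (plural) verbose name is "items".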
Beispiel #17
0
  def handle(self, **options):
    # Pick up the options
    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if not database in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try: user = User.objects.all().using(database).get(username=options['user'])
      except: raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    transaction.enter_transaction_management(managed=False, using=database)
    transaction.managed(False, using=database)
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try: task = Task.objects.all().using(database).get(pk=options['task'])
        except: raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'generate plan':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='generate plan', submitted=now, started=now, status='0%', user=user)

      # Validate options
      if 'constraint' in options:
        constraint = int(options['constraint'])
        if constraint < 0 or constraint > 15:
          raise ValueError("Invalid constraint: %s" % options['constraint'])
      else: constraint = 15
      if 'plantype' in options:
        plantype = int(options['plantype'])
        if plantype < 1 or plantype > 2:
          raise ValueError("Invalid plan type: %s" % options['plantype'])
      else: plantype = 1

      # Log task
      task.arguments = "--constraint=%d --plantype=%d" % (constraint, plantype)
      task.save(using=database)
      transaction.commit(using=database)

      # Locate commands.py
      cmd = None
      for app in settings.INSTALLED_APPS:
        mod = import_module(app)
        if os.path.exists(os.path.join(os.path.dirname(mod.__file__),'commands.py')):
          cmd = os.path.join(os.path.dirname(mod.__file__),'commands.py')
          break
      if not cmd: raise Exception("Can't locate commands.py")

      # Execute
      os.environ['FREPPLE_PLANTYPE'] = str(plantype)
      os.environ['FREPPLE_CONSTRAINT'] = str(constraint)
      os.environ['FREPPLE_TASKID'] = str(task.id)
      os.environ['FREPPLE_DATABASE'] = database
      os.environ['PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ['PATH'] + os.pathsep + settings.FREPPLE_APP
      if os.path.isfile(os.path.join(settings.FREPPLE_HOME,'libfrepple.so')):
        os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
      if 'DJANGO_SETTINGS_MODULE' not in os.environ:
        os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
      if os.path.exists(os.path.join(settings.FREPPLE_HOME,'python27.zip')):
        # For the py2exe executable
        os.environ['PYTHONPATH'] = os.path.join(settings.FREPPLE_HOME,'python27.zip') + os.pathsep + os.path.normpath(settings.FREPPLE_APP)
      else:
        # Other executables
        os.environ['PYTHONPATH'] = os.path.normpath(settings.FREPPLE_APP)
      ret = os.system('frepple "%s"' % cmd.replace('\\','\\\\'))
      if ret != 0 and ret != 2:
        # Return code 0 is a successful run
        # Return code is 2 is a run cancelled by a user. That's shown in the status field.
        raise Exception('Failed with exit code %d' % ret)

      # Task update
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      if task: task.save(using=database)
      try: transaction.commit(using=database)
      except: pass
      transaction.leave_transaction_management(using=database)
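
The plan command locates commands.py by importing every entry of INSTALLED_APPS and checking whether the application directory ships that file. The lookup on its own, assuming a configured Django settings module:

import os
from importlib import import_module

from django.conf import settings

def locate_commands():
    for app in settings.INSTALLED_APPS:
        mod = import_module(app)
        candidate = os.path.join(os.path.dirname(mod.__file__), 'commands.py')
        if os.path.exists(candidate):
            return candidate
    raise Exception("Can't locate commands.py")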
Beispiel #18
0
  def handle(self, **options):
    # Pick up the options
    now = datetime.now()

    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    timestamp = now.strftime("%Y%m%d%H%M%S")
    if database == DEFAULT_DB_ALIAS:
      logfile = 'frepple-%s.log' % timestamp
    else:
      logfile = 'frepple_%s-%s.log' % (database, timestamp)

    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name not in ('runplan', 'frepple_run'):
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
        task.logfile = logfile
      else:
        task = Task(name='runplan', submitted=now, started=now, status='0%', user=user, logfile=logfile)

      # Validate options
      if 'constraint' in options:
        constraint = int(options['constraint'])
        if constraint < 0 or constraint > 15:
          raise ValueError("Invalid constraint: %s" % options['constraint'])
      else:
        constraint = 15
      if 'plantype' in options:
        plantype = int(options['plantype'])
      else:
        plantype = 1

      # Reset environment variables
      # TODO avoid having to delete the environment variables. Use options directly?
      PlanTaskRegistry.autodiscover()
      for i in PlanTaskRegistry.reg:
        if 'env' in options:
          # Options specified
          if i.label and i.label[0] in os.environ:
            del os.environ[i.label[0]]
        elif i.label:
          # No options specified - default to activate them all
          os.environ[i.label[0]] = '1'

      # Set environment variables
      if options['env']:
        task.arguments = "--constraint=%d --plantype=%d --env=%s" % (constraint, plantype, options['env'])
        for i in options['env'].split(','):
          j = i.split('=')
          if len(j) == 1:
            os.environ[j[0]] = '1'
          else:
            os.environ[j[0]] = j[1]
      else:
        task.arguments = "--constraint=%d --plantype=%d" % (constraint, plantype)
      if options['background']:
        task.arguments += " --background"

      # Log task
      # Different from the other tasks the frepple engine will write the processid
      task.save(using=database)

      # Locate commands.py
      import freppledb.common.commands
      cmd = freppledb.common.commands.__file__

      def setlimits():
        import resource
        if settings.MAXMEMORYSIZE:
          resource.setrlimit(
            resource.RLIMIT_AS,
            (settings.MAXMEMORYSIZE * 1024 * 1024, (settings.MAXMEMORYSIZE + 10) * 1024 * 1024)
            )
        if settings.MAXCPUTIME:
          resource.setrlimit(
            resource.RLIMIT_CPU,
            (settings.MAXCPUTIME, settings.MAXCPUTIME + 5)
            )
        # Limiting the file size is a bit tricky as this limit not only applies to the log
        # file, but also to temp files during the export
        # if settings.MAXTOTALLOGFILESIZE:
        #  resource.setrlimit(
        #    resource.RLIMIT_FSIZE,
        #   (settings.MAXTOTALLOGFILESIZE * 1024 * 1024, (settings.MAXTOTALLOGFILESIZE + 1) * 1024 * 1024)
        #   )

      # Prepare environment
      os.environ['FREPPLE_PLANTYPE'] = str(plantype)
      os.environ['FREPPLE_CONSTRAINT'] = str(constraint)
      os.environ['FREPPLE_TASKID'] = str(task.id)
      os.environ['FREPPLE_DATABASE'] = database
      os.environ['FREPPLE_LOGFILE'] = logfile
      os.environ['FREPPLE_PROCESSNAME'] = settings.DATABASES[database]['NAME'].replace('demo', '')
      os.environ['PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ['PATH'] + os.pathsep + settings.FREPPLE_APP
      if os.path.isfile(os.path.join(settings.FREPPLE_HOME, 'libfrepple.so')):
        os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
      if 'DJANGO_SETTINGS_MODULE' not in os.environ:
        os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
      os.environ['PYTHONPATH'] = os.path.normpath(settings.FREPPLE_APP)
      libdir = os.path.join(os.path.normpath(settings.FREPPLE_HOME), 'lib')
      if os.path.isdir(libdir):
        # Folders used by the Windows version
        os.environ['PYTHONPATH'] += os.pathsep + libdir
        if os.path.isfile(os.path.join(libdir, 'library.zip')):
          os.environ['PYTHONPATH'] += os.pathsep + os.path.join(libdir, 'library.zip')

      if options['background']:
        # Execute as background process on Windows
        if os.name == 'nt':
          subprocess.Popen(['frepple', cmd], creationflags=0x08000000)
        else:
          # Execute as background process on Linux
          subprocess.Popen(['frepple', cmd], preexec_fn=setlimits)
      else:
        if os.name == 'nt':
          # Execute in foreground on Windows
          ret = subprocess.call(['frepple', cmd])
        else:
          # Execute in foreground on Linux
          ret = subprocess.call(['frepple', cmd], preexec_fn=setlimits)
        if ret != 0 and ret != 2:
          # Return code 0 is a successful run
          # Return code 2 is a run cancelled by the user; that's shown in the status field.
          raise Exception('Failed with exit code %d' % ret)

      # Reread the task from the database and update it
      if not options['background']:
        task = Task.objects.all().using(database).get(pk=task.id)
        task.processid = None
        task.status = 'Done'
        task.finished = datetime.now()
        task.save(using=database)

    except Exception as e:
      if task:
        task = Task.objects.all().using(database).get(pk=task.id)
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
        task.processid = None
        task.save(using=database)
      raise e
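
For reference, the sketch below illustrates how a script launched as "frepple <commands.py>" could read the parameters that the command above hands over purely through environment variables. It is a minimal, hypothetical example based only on the variables set in this snippet; it is not frePPLe's actual freppledb.common.commands module.

import os

def read_plan_parameters():
  # Values exported by the parent command before spawning the planning engine
  plantype = int(os.environ.get('FREPPLE_PLANTYPE', '1'))       # default used by the command above
  constraint = int(os.environ.get('FREPPLE_CONSTRAINT', '15'))  # default used by the command above
  task_id = os.environ.get('FREPPLE_TASKID')                    # primary key of the Task row to update
  database = os.environ.get('FREPPLE_DATABASE', 'default')      # Django database alias to plan against
  logfile = os.environ.get('FREPPLE_LOGFILE', '')               # log file name chosen by the parent
  return plantype, constraint, task_id, database, logfile

if __name__ == '__main__':
  print(read_plan_parameters())
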
Example #19
0
def wrapTask(request, action):
    # Allow only post
    if request.method != "POST":
        raise Exception("Only post requests allowed")
    # Parse the posted parameters as arguments for an asynchronous task to add to the queue.  TODO MAKE MODULAR WITH SEPARATE TASK CLASS
    worker_database = request.database

    now = datetime.now()
    task = None
    args = request.POST or request.GET

    # A
    if action in ("frepple_run", "runplan"):
        if not request.user.has_perm("auth.generate_plan"):
            raise Exception("Missing execution privileges")
        constraint = 0
        for value in args.getlist("constraint"):
            try:
                constraint += int(value)
            except Exception:
                pass
        task = Task(name="runplan",
                    submitted=now,
                    status="Waiting",
                    user=request.user)
        task.arguments = "--constraint=%s --plantype=%s" % (
            constraint,
            args.get("plantype", 1),
        )
        env = []
        for value in args.getlist("env"):
            env.append(value)
        if env:
            task.arguments = "%s --env=%s" % (task.arguments, ",".join(env))
        task.save(using=request.database)
    # C
    elif action in ("frepple_flush", "empty"):
        if not request.user.has_perm("auth.run_db"):
            raise Exception("Missing execution privileges")
        task = Task(name="empty",
                    submitted=now,
                    status="Waiting",
                    user=request.user)
        models = ",".join(args.getlist("models"))
        if models:
            task.arguments = "--models=%s" % (models)
        task.save(using=request.database)
    # D
    elif action == "loaddata":
        if not request.user.has_perm("auth.run_db"):
            raise Exception("Missing execution privileges")
        task = Task(
            name="loaddata",
            submitted=now,
            status="Waiting",
            user=request.user,
            arguments=args["fixture"],
        )
        task.save(using=request.database)
        # Also run the workflow upon loading of manufacturing_demo or distribution_demo
        if args.get("regenerateplan", False) == "true":
            active_modules = "supply"
            task = Task(name="runplan",
                        submitted=now,
                        status="Waiting",
                        user=request.user)
            task.arguments = "--constraint=15 --plantype=1 --env=%s --background" % (
                active_modules, )
            task.save(using=request.database)
    # E
    elif action in ("frepple_copy", "scenario_copy"):
        worker_database = DEFAULT_DB_ALIAS
        if "copy" in args:
            if not request.user.has_perm("auth.copy_scenario"):
                raise Exception("Missing execution privileges")
            source = args.get("source", request.database)
            worker_database = source
            destination = args.get("destination", False)
            if destination and destination != DEFAULT_DB_ALIAS:
                force = args.get("force", False)
                arguments = "%s %s" % (source, destination)
                if force:
                    arguments += " --force"
                task = Task(
                    name="scenario_copy",
                    submitted=now,
                    status="Waiting",
                    user=request.user,
                    arguments=arguments,
                )
                task.save(using=source)
        elif "release" in args:
            # Note: release is immediate and synchronous.
            if not request.user.has_perm("auth.release_scenario"):
                raise Exception("Missing execution privileges")
            sc = Scenario.objects.using(DEFAULT_DB_ALIAS).get(
                name=request.database)
            if sc.status != "Free" and sc.name != DEFAULT_DB_ALIAS:
                sc.status = "Free"
                sc.lastrefresh = now
                sc.save(using=DEFAULT_DB_ALIAS)
        elif "promote" in args:
            if not request.user.has_perm("auth.promote_scenario"):
                raise Exception("Missing execution privileges")
            source = args.get("source", request.database)
            worker_database = source
            destination = args.get("destination", False)
            if destination and destination == DEFAULT_DB_ALIAS:
                arguments = "--promote %s %s" % (source, destination)
                task = Task(
                    name="scenario_copy",
                    submitted=now,
                    status="Waiting",
                    user=request.user,
                    arguments=arguments,
                )
                task.save(using=source)
        elif "update" in args:
            # Note: update is immediate and synchronous.
            if not request.user.has_perm("auth.release_scenario"):
                raise Exception("Missing execution privileges")
            sc = Scenario.objects.using(DEFAULT_DB_ALIAS).get(
                name=request.database)
            sc.description = args.get("description", None)
            sc.save(using=DEFAULT_DB_ALIAS)
        else:
            raise Exception("Invalid scenario task")
    # G
    elif action in ("frepple_createbuckets", "createbuckets"):
        if not request.user.has_perm("auth.run_db"):
            raise Exception("Missing execution privileges")
        task = Task(name="createbuckets",
                    submitted=now,
                    status="Waiting",
                    user=request.user)
        arguments = []
        start = args.get("start", None)
        if start:
            arguments.append("--start=%s" % start)
        end = args.get("end", None)
        if end:
            arguments.append("--end=%s" % end)
        weekstart = args.get("weekstart", None)
        if weekstart:
            arguments.append("--weekstart=%s" % weekstart)
        format_day = args.get("format-day", None)
        if format_day:
            arguments.append('--format-day="%s"' % format_day)
        format_week = args.get("format-week", None)
        if format_week:
            arguments.append('--format-week="%s"' % format_week)
        format_month = args.get("format-month", None)
        if format_month:
            arguments.append('--format-month="%s"' % format_month)
        format_quarter = args.get("format-quarter", None)
        if format_quarter:
            arguments.append('--format-quarter="%s"' % format_quarter)
        format_year = args.get("format-year", None)
        if format_year:
            arguments.append('--format-year="%s"' % format_year)
        if arguments:
            task.arguments = " ".join(arguments)
        task.save(using=request.database)
    else:
        # Generic task wrapper

        # Find the command and verify we have permissions to run it
        command = None
        for commandname, appname in get_commands().items():
            if commandname == action:
                try:
                    c = getattr(
                        import_module("%s.management.commands.%s" %
                                      (appname, commandname)),
                        "Command",
                    )
                    if c.index >= 0:
                        if getattr(c, "getHTML", None) and c.getHTML(request):
                            # Command class has getHTML method
                            command = c
                            break
                        else:
                            for p in c.__bases__:
                                # Parent command class has getHTML method
                                if getattr(p, "getHTML",
                                           None) and p.getHTML(request):
                                    command = c
                                    break
                            if command:
                                break
                except Exception:
                    pass  # Silently ignore failures
        if not command:
            raise Exception("Invalid task name '%s'" % action)
        # Create a task
        arguments = []
        for arg, val in args.lists():
            if arg != "csrfmiddlewaretoken":
                arguments.append("--%s=%s" % (arg, ",".join(val)))
        task = Task(name=action,
                    submitted=now,
                    status="Waiting",
                    user=request.user)
        if arguments:
            task.arguments = " ".join(arguments)
        task.save(using=request.database)

    # Launch a worker process, making sure it inherits the right
    # environment variables from this parent
    os.environ["FREPPLE_CONFIGDIR"] = settings.FREPPLE_CONFIGDIR
    if task and not checkActive(worker_database):
        if os.path.isfile(os.path.join(settings.FREPPLE_APP, "frepplectl.py")):
            if "python" in sys.executable:
                # Development layout
                Popen([
                    sys.executable,  # Python executable
                    os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
                    "runworker",
                    "--database=%s" % worker_database,
                ])
            else:
                # Deployment on Apache web server
                Popen(
                    [
                        "python",
                        os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
                        "runworker",
                        "--database=%s" % worker_database,
                    ],
                    creationflags=0x08000000,
                )
        elif sys.executable.find("freppleserver.exe") >= 0:
            # Py2exe executable
            Popen(
                [
                    sys.executable.replace(
                        "freppleserver.exe",
                        "frepplectl.exe"),  # frepplectl executable
                    "runworker",
                    "--database=%s" % worker_database,
                ],
                creationflags=0x08000000,
            )  # Do not create a console window
        else:
            # Linux standard installation
            Popen(
                ["frepplectl", "runworker",
                 "--database=%s" % worker_database])
    return task
Example #20
0
def wrapTask(request, action):
  # Allow only post
  if request.method != 'POST':
    raise Exception('Only post requests allowed')
  # Parse the posted parameters as arguments for an asynchronous task to add to the queue.  TODO MAKE MODULAR WITH SEPARATE TASK CLASS
  worker_database = request.database

  now = datetime.now()
  task = None
  args = request.POST or request.GET

  # A
  if action in ('frepple_run', 'runplan'):
    if not request.user.has_perm('auth.generate_plan'):
      raise Exception('Missing execution privileges')
    constraint = 0
    for value in args.getlist('constraint'):
      try:
        constraint += int(value)
      except:
        pass
    task = Task(name='runplan', submitted=now, status='Waiting', user=request.user)
    task.arguments = "--constraint=%s --plantype=%s" % (constraint, args.get('plantype', 1))
    env = []
    for value in args.getlist('env'):
      env.append(value)
    if env:
      task.arguments = "%s --env=%s" % (task.arguments, ','.join(env))
    task.save(using=request.database)
  # C
  elif action in ('frepple_flush', 'empty'):
    if not request.user.has_perm('auth.run_db'):
      raise Exception('Missing execution privileges')
    task = Task(name='empty', submitted=now, status='Waiting', user=request.user)
    models = ','.join(args.getlist('models'))
    if models:
      task.arguments = "--models=%s" % (models)
    task.save(using=request.database)
  # D
  elif action == 'loaddata':
    if not request.user.has_perm('auth.run_db'):
      raise Exception('Missing execution privileges')
    task = Task(name='loaddata', submitted=now, status='Waiting', user=request.user, arguments=args['fixture'])
    task.save(using=request.database)
    # Also run the workflow upon loading of manufacturing_demo or distribution_demo
    if args.get('regenerateplan', False) == 'true':
      active_modules = 'supply'
      task = Task(name='runplan', submitted=now, status='Waiting', user=request.user)
      task.arguments = "--constraint=15 --plantype=1 --env=%s --background" % (active_modules,)
      task.save(using=request.database)
  # E
  elif action in ('frepple_copy', 'scenario_copy'):
    worker_database = DEFAULT_DB_ALIAS
    if 'copy' in args:
      if not request.user.has_perm('auth.copy_scenario'):
        raise Exception('Missing execution privileges')
      source = args.get('source', DEFAULT_DB_ALIAS)
      worker_database = source
      destination = args.getlist('destination')
      force = args.get('force', False)
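      # Queue a scenario_copy task for every scenario checked in the form or listed as a destination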
      for sc in Scenario.objects.using(DEFAULT_DB_ALIAS):
        arguments = "%s %s" % (source, sc.name)
        if force:
          arguments += ' --force'
        if args.get(sc.name, 'off') == 'on' or sc.name in destination:
          task = Task(name='scenario_copy', submitted=now, status='Waiting', user=request.user, arguments=arguments)
          task.save(using=source)
    elif 'release' in args:
      # Note: release is immediate and synchronous.
      if not request.user.has_perm('auth.release_scenario'):
        raise Exception('Missing execution privileges')
      for sc in Scenario.objects.using(DEFAULT_DB_ALIAS):
        if args.get(sc.name, 'off') == 'on' and sc.status != 'Free':
          sc.status = 'Free'
          sc.lastrefresh = now
          sc.save(using=DEFAULT_DB_ALIAS)
          if request.database == sc.name:
            # Erasing the database that is currently selected.
            request.prefix = ''
    elif 'update' in args:
      # Note: update is immediate and synchronous.
      if not request.user.has_perm('auth.release_scenario'):
        raise Exception('Missing execution privileges')
      for sc in Scenario.objects.using(DEFAULT_DB_ALIAS):
        if args.get(sc.name, 'off') == 'on':
          sc.description = args.get('description', None)
          sc.save(using=DEFAULT_DB_ALIAS)
    else:
      raise Exception('Invalid scenario task')
  # G
  elif action in ('frepple_createbuckets', 'createbuckets'):
    if not request.user.has_perm('auth.run_db'):
      raise Exception('Missing execution privileges')
    task = Task(name='createbuckets', submitted=now, status='Waiting', user=request.user)
    arguments = []
    start = args.get('start', None)
    if start:
      arguments.append("--start=%s" % start)
    end = args.get('end', None)
    if end:
      arguments.append("--end=%s" % end)
    weekstart = args.get('weekstart', None)
    if weekstart:
      arguments.append("--weekstart=%s" % weekstart)
    format_day = args.get('format-day', None)
    if format_day:
      arguments.append('--format-day="%s"' % format_day)
    format_week = args.get('format-week', None)
    if format_week:
      arguments.append('--format-week="%s"' % format_week)
    format_month = args.get('format-month', None)
    if format_month:
      arguments.append('--format-month="%s"' % format_month)
    format_quarter = args.get('format-quarter', None)
    if format_quarter:
      arguments.append('--format-quarter="%s"' % format_quarter)
    format_year = args.get('format-year', None)
    if format_year:
      arguments.append('--format-year="%s"' % format_year)
    if arguments:
      task.arguments = " ".join(arguments)
    task.save(using=request.database)
  else:
    # Generic task wrapper

    # Find the command and verify we have permissions to run it
    command = None
    for commandname, appname in get_commands().items():
      if commandname == action:
        try:
          c = getattr(import_module('%s.management.commands.%s' % (appname, commandname)), 'Command')
          if c.index >= 0:
            if getattr(c, 'getHTML', None) and c.getHTML(request):
              # Command class has getHTML method
              command = c
              break
            else:
              for p in c.__bases__:
                # Parent command class has getHTML method
                if getattr(p, 'getHTML', None) and p.getHTML(request):
                  command = c
                  break
              if command:
                break
        except Exception:
          pass  # Silently ignore failures
    if not command:
      raise Exception("Invalid task name '%s'" % action)
    # Create a task
    arguments = []
    for arg, val in args.lists():
      if arg != 'csrfmiddlewaretoken':
        arguments.append('--%s=%s' % (arg, ','.join(val)))
    task = Task(name=action, submitted=now, status='Waiting', user=request.user)
    if arguments:
      task.arguments = " ".join(arguments)
    task.save(using=request.database)

  # Launch a worker process, making sure it inherits the right
  # environment variables from this parent
  os.environ['FREPPLE_CONFIGDIR'] = settings.FREPPLE_CONFIGDIR
  if task and not checkActive(worker_database):
    if os.path.isfile(os.path.join(settings.FREPPLE_APP, "frepplectl.py")):
      if "python" in sys.executable:
        # Development layout
        Popen([
          sys.executable,  # Python executable
          os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
          "runworker",
          "--database=%s" % worker_database
          ])
      else:
        # Deployment on Apache web server
        Popen([
          "python",
          os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
          "runworker",
          "--database=%s" % worker_database
          ], creationflags=0x08000000)
    elif sys.executable.find('freppleserver.exe') >= 0:
      # Py2exe executable
      Popen([
        sys.executable.replace('freppleserver.exe', 'frepplectl.exe'),  # frepplectl executable
        "runworker",
        "--database=%s" % worker_database
        ], creationflags=0x08000000)  # Do not create a console window
    else:
      # Linux standard installation
      Popen([
        "frepplectl",
        "runworker",
        "--database=%s" % worker_database
        ])
  return task
Example #21
0
    def handle(self, **options):
        now = datetime.now()
        database = options["database"]
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               database)

        # Pick up options
        if options["user"]:
            try:
                user = User.objects.all().get(username=options["user"])
            except Exception:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            user = None

        task = None
        try:
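            # Expose the database alias to thread-local aware code for the duration of this command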
            setattr(_thread_locals, "database", database)
            if "task" in options and options["task"]:
                try:
                    task = Task.objects.all().using(database).get(
                        pk=options["task"])
                except Exception:
                    raise CommandError("Task identifier not found")
                if (task.started or task.finished or task.status != "Waiting"
                        or task.name not in ("emailreport",)):
                    raise CommandError("Invalid task identifier")
                task.status = "0%"
                task.started = now
            else:
                task = Task(
                    name="emailreport",
                    submitted=now,
                    started=now,
                    status="0%",
                    user=user,
                )
            task.processid = os.getpid()
            task.save(using=database)

            if not settings.EMAIL_HOST:
                raise CommandError(
                    "No SMTP mail server is configured in your djangosettings.py file"
                )

            sender = options["sender"]
            recipient = options["recipient"]
            report = options["report"]

            if not sender:
                raise CommandError("No sender has been defined")

            if not recipient:
                raise CommandError("No recipient has been defined")

            if not report:
                raise CommandError("No report to email has been defined")

            # Make sure the files exist in the export folder
            reports = report.split(",")
            correctedReports = []
            missingFiles = []
            for r in reports:
                if len(r.strip()) == 0:
                    continue
                path = os.path.join(
                    settings.DATABASES[database]["FILEUPLOADFOLDER"],
                    "export",
                    r.strip(),
                )
                if not os.path.isfile(path):
                    missingFiles.append(r.strip())
                else:
                    correctedReports.append(path)

            if len(missingFiles) > 0:
                raise CommandError(
                    "Following files are missing in export folder: %s" %
                    (",".join(str(x) for x in missingFiles)))

            if len(correctedReports) == 0:
                raise CommandError("No report defined in options")

            # Validate email addresses
            recipients = recipient.split(",")
            correctedRecipients = []
            invalidEmails = []
            for r in recipients:
                if len(r.strip()) == 0:
                    continue
                if not re.fullmatch(r"[^@]+@[^@]+\.[^@]+", r.strip()):
                    invalidEmails.append(r.strip())
                else:
                    correctedRecipients.append(r.strip())

            if len(invalidEmails) > 0:
                raise CommandError(
                    "Invalid email formatting for following addresses: %s" %
                    (",".join(str(x) for x in invalidEmails)))
            if len(correctedRecipients) == 0:
                raise CommandError("No recipient defined in options")

            task.arguments = "--recipient=%s --report=%s" % (recipient, report)
            task.save(using=database)

            # create message
            message = EmailMessage(
                subject="Exported reports",
                body="",
                from_email=sender,
                to=correctedRecipients,
            )

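            # Compress all requested report files into a single in-memory zip archive for the email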
            b = BytesIO()
            with ZipFile(file=b, mode="w", compression=ZIP_DEFLATED) as zf:
                processedFiles = 0
                for f in correctedReports:
                    task.message = "Compressing file %s" % basename(f)
                    task.status = (str(
                        int(processedFiles / len(correctedReports) * 90.0)) +
                                   "%")
                    task.save(using=database)
                    zf.write(filename=f, arcname=basename(f))
                    processedFiles = processedFiles + 1
                zf.close()

                # attach zip file
                task.status = "90%"
                task.message = "Sending email"
                task.save(using=database)
                message.attach("reports.zip", b.getvalue(), "application/zip")
                # send email
                message.send()

            b.close()

            # Logging message
            task.processid = None
            task.message = ""
            task.status = "Done"
            task.finished = datetime.now()

        except Exception as e:
            if task:
                task.status = "Failed"
                task.message = "%s" % e
                task.finished = datetime.now()
            raise e

        finally:
            setattr(_thread_locals, "database", None)
            if task:
                task.processid = None
                task.save(using=database)
Example #22
0
def wrapTask(request, action):
    # Allow only post
    if request.method != 'POST':
        raise Exception('Only post requests allowed')

    # Parse the posted parameters as arguments for an asynchronous task to add to the queue.  TODO MAKE MODULAR WITH SEPARATE TASK CLASS
    worker_database = request.database

    now = datetime.now()
    task = None
    # A
    if action == 'frepple_run':
        if not request.user.has_perm('auth.generate_plan'):
            raise Exception('Missing execution privileges')
        constraint = 0
        for value in request.POST.getlist('constraint'):
            try:
                constraint += int(value)
            except:
                pass
        task = Task(name='generate plan',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        task.arguments = "--constraint=%s --plantype=%s" % (
            constraint, request.POST.get('plantype', 1))
        env = []
        for value in request.POST.getlist('env'):
            env.append(value)
        if env:
            task.arguments = "%s --env=%s" % (task.arguments, ','.join(env))
        request.session['env'] = env
        task.save(using=request.database)
        # Update the session object
        request.session['plantype'] = request.POST.get('plantype')
        request.session['constraint'] = constraint
    # C
    elif action == 'frepple_flush':
        if not request.user.has_perm('auth.run_db'):
            raise Exception('Missing execution privileges')
        task = Task(name='empty database',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        models = ','.join(request.POST.getlist('models'))
        if models:
            task.arguments = "--models=%s" % models
        task.save(using=request.database)
    # D
    elif action == 'loaddata':
        if not request.user.has_perm('auth.run_db'):
            raise Exception('Missing execution privileges')
        task = Task(name='load dataset',
                    submitted=now,
                    status='Waiting',
                    user=request.user,
                    arguments=request.POST['fixture'])
        task.save(using=request.database)
    # E
    elif action == 'frepple_copy':
        worker_database = DEFAULT_DB_ALIAS
        if 'copy' in request.POST:
            if not request.user.has_perm('auth.copy_scenario'):
                raise Exception('Missing execution privileges')
            source = request.POST.get('source', DEFAULT_DB_ALIAS)
            destination = request.POST.getlist('destination')
            force = request.POST.get('force', False)
            for sc in Scenario.objects.all():
                arguments = "%s %s" % (source, sc.name)
                if force:
                    arguments += ' --force'
                if request.POST.get(sc.name,
                                    'off') == 'on' or sc.name in destination:
                    task = Task(name='copy scenario',
                                submitted=now,
                                status='Waiting',
                                user=request.user,
                                arguments=arguments)
                    task.save()
        elif 'release' in request.POST:
            # Note: release is immediate and synchronous.
            if not request.user.has_perm('auth.release_scenario'):
                raise Exception('Missing execution privileges')
            for sc in Scenario.objects.all():
                if request.POST.get(sc.name,
                                    'off') == 'on' and sc.status != 'Free':
                    sc.status = 'Free'
                    sc.lastrefresh = now
                    sc.save()
                    if request.database == sc.name:
                        # Erasing the database that is currently selected.
                        request.prefix = ''
        elif 'update' in request.POST:
            # Note: update is immediate and synchronous.
            if not request.user.has_perm('auth.release_scenario'):
                raise Exception('Missing execution privileges')
            for sc in Scenario.objects.all():
                if request.POST.get(sc.name, 'off') == 'on':
                    sc.description = request.POST.get('description', None)
                    sc.save()
        else:
            raise Exception('Invalid scenario task')
    # F
    elif action == 'frepple_backup':
        if not request.user.has_perm('auth.run_db'):
            raise Exception('Missing execution privileges')
        task = Task(name='backup database',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        task.save(using=request.database)
    # G
    elif action == 'frepple_createbuckets':
        if not request.user.has_perm('auth.run_db'):
            raise Exception('Missing execution privileges')
        task = Task(name='generate buckets',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        arguments = []
        start = request.POST.get('start', None)
        if start:
            arguments.append("--start=%s" % start)
        end = request.POST.get('end', None)
        if end:
            arguments.append("--end=%s" % end)
        weekstart = request.POST.get('weekstart', None)
        if weekstart:
            arguments.append("--weekstart=%s" % weekstart)
        if arguments:
            task.arguments = " ".join(arguments)
        task.save(using=request.database)
    # J
    elif action == 'odoo_import' and 'freppledb.odoo' in settings.INSTALLED_APPS:
        task = Task(name='Odoo import',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        task.save(using=request.database)
    # M
    elif action == 'frepple_importfromfolder':
        if not request.user.has_perm('auth.run_db'):
            raise Exception('Missing execution privileges')
        task = Task(name='import from folder',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        task.save(using=request.database)
    # N
    elif action == 'frepple_exporttofolder':
        if not request.user.has_perm('auth.run_db'):
            raise Exception('Missing execution privileges')
        task = Task(name='export to folder',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        task.save(using=request.database)
    else:
        # Task not recognized
        raise Exception("Invalid task name '%s'" % action)

    # Launch a worker process, making sure it inherits the right
    # environment variables from this parent
    os.environ['FREPPLE_CONFIGDIR'] = settings.FREPPLE_CONFIGDIR
    if task and not checkActive(worker_database):
        if os.path.isfile(os.path.join(settings.FREPPLE_APP, "frepplectl.py")):
            if "python" in sys.executable:
                # Development layout
                Popen([
                    sys.executable,  # Python executable
                    os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
                    "frepple_runworker",
                    "--database=%s" % worker_database
                ])
            else:
                # Deployment on Apache web server
                Popen([
                    "python",
                    os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
                    "frepple_runworker",
                    "--database=%s" % worker_database
                ],
                      creationflags=0x08000000)
        elif sys.executable.find('freppleserver.exe') >= 0:
            # Py2exe executable
            Popen(
                [
                    sys.executable.replace(
                        'freppleserver.exe',
                        'frepplectl.exe'),  # frepplectl executable
                    "frepple_runworker",
                    "--database=%s" % worker_database
                ],
                creationflags=0x08000000)  # Do not create a console window
        else:
            # Linux standard installation
            Popen([
                "frepplectl", "frepple_runworker",
                "--database=%s" % worker_database
            ])
    return task
Example #23
0
  def handle(self, **options):
    # Pick up the options
    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database)
    if 'user' in options and options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'generate plan':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='generate plan', submitted=now, started=now, status='0%', user=user)

      # Validate options
      if 'constraint' in options:
        constraint = int(options['constraint'])
        if constraint < 0 or constraint > 15:
          raise ValueError("Invalid constraint: %s" % options['constraint'])
      else:
        constraint = 15
      if 'plantype' in options:
        plantype = int(options['plantype'])
        if plantype < 1 or plantype > 2:
          raise ValueError("Invalid plan type: %s" % options['plantype'])
      else:
        plantype = 1
      if options['env']:
        task.arguments = "--constraint=%d --plantype=%d --env=%s" % (constraint, plantype, options['env'])
        for i in options['env'].split(','):
          j = i.split('=')
          if len(j) == 1:
            os.environ[j[0]] = '1'
          else:
            os.environ[j[0]] = j[1]
      else:
        task.arguments = "--constraint=%d --plantype=%d" % (constraint, plantype)
      if options['background']:
        task.arguments += " --background"

      # Log task
      task.save(using=database)

      # Locate commands.py
      cmd = None
      for app in settings.INSTALLED_APPS:
        mod = import_module(app)
        if os.path.exists(os.path.join(os.path.dirname(mod.__file__), 'commands.py')):
          cmd = os.path.join(os.path.dirname(mod.__file__), 'commands.py')
          break
      if not cmd:
        raise Exception("Can't locate commands.py")

      # Prepare environment
      os.environ['FREPPLE_PLANTYPE'] = str(plantype)
      os.environ['FREPPLE_CONSTRAINT'] = str(constraint)
      os.environ['FREPPLE_TASKID'] = str(task.id)
      os.environ['FREPPLE_DATABASE'] = database
      os.environ['PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ['PATH'] + os.pathsep + settings.FREPPLE_APP
      if os.path.isfile(os.path.join(settings.FREPPLE_HOME, 'libfrepple.so')):
        os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
      if 'DJANGO_SETTINGS_MODULE' not in os.environ:
        os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
      if os.path.exists(os.path.join(settings.FREPPLE_HOME, 'python27.zip')):
        # For the py2exe executable
        os.environ['PYTHONPATH'] = os.path.join(
          settings.FREPPLE_HOME,
          'python%d%d.zip' %(sys.version_info[0], sys.version_info[1])
          ) + os.pathsep + os.path.normpath(settings.FREPPLE_APP)
      else:
        # Other executables
        os.environ['PYTHONPATH'] = os.path.normpath(settings.FREPPLE_APP)

      if options['background']:
        # Execute as background process on Windows
        if os.name == 'nt':
          subprocess.Popen(['frepple', cmd], creationflags=0x08000000)
        else:
          # Execute as background process on Linux
          subprocess.Popen(['frepple', cmd]).pid
      else:
        # Execute in foreground
        ret = subprocess.call(['frepple', cmd])
        if ret != 0 and ret != 2:
          # Return code 0 is a successful run
          # Return code 2 is a run cancelled by the user; that's shown in the status field.
          raise Exception('Failed with exit code %d' % ret)

        # Task update
        task.status = 'Done'
        task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      if task:
        task.save(using=database)
Example #24
0
    def handle(self, **options):

        if options["user"]:
            try:
                user = User.objects.all().get(username=options["user"])
            except Exception:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            user = None

        # Synchronize the scenario table with the settings
        Scenario.syncWithSettings()

        now = datetime.now()
        task = None
        if "task" in options and options["task"]:
            try:
                task = (
                    Task.objects.all().using(DEFAULT_DB_ALIAS).get(pk=options["task"])
                )
            except Exception:
                raise CommandError("Task identifier not found")
            if (
                task.started
                or task.finished
                or task.status != "Waiting"
                or task.name != "scenario_release"
            ):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
        else:
            task = Task(
                name="scenario_release",
                submitted=now,
                started=now,
                status="0%",
                user=user,
            )
        task.processid = os.getpid()
        task.save(using=DEFAULT_DB_ALIAS)

        # Validate the arguments
        destination = options["destination"]
        destinationscenario = None
        try:
            task.arguments = "%s" % (destination,)
            task.save(using=DEFAULT_DB_ALIAS)
            try:
                destinationscenario = Scenario.objects.using(DEFAULT_DB_ALIAS).get(
                    pk=destination
                )
            except Exception:
                raise CommandError(
                    "No destination database defined with name '%s'" % destination
                )
            if destinationscenario.status != "In use":
                raise CommandError("Scenario to release is not in use")

            if destination == DEFAULT_DB_ALIAS:
                raise CommandError("Production scenario cannot be released")

            # Update the scenario table, set it free in the production database
            destinationscenario.status = "Free"
            destinationscenario.lastrefresh = datetime.today()
            destinationscenario.save(using=DEFAULT_DB_ALIAS)

            # Killing webservice
            if "freppledb.webservice" in settings.INSTALLED_APPS:
                management.call_command(
                    "stopwebservice", force=True, database=destination
                )

            # Logging message
            task.processid = None
            task.status = "Done"
            task.finished = datetime.now()

            # Update the task in the destination database
            task.message = "Scenario %s released" % (destination,)
            task.save(using=DEFAULT_DB_ALIAS)

        except Exception as e:
            if task:
                task.status = "Failed"
                task.message = "%s" % e
                task.finished = datetime.now()
            if destinationscenario and destinationscenario.status == "Busy":
                if destination == DEFAULT_DB_ALIAS:
                    destinationscenario.status = "In use"
                else:
                    destinationscenario.status = "Free"
                destinationscenario.save(using=DEFAULT_DB_ALIAS)
            raise e

        finally:
            if task:
                task.processid = None
                task.save(using=DEFAULT_DB_ALIAS)
Example #25
0
def wrapTask(request, action):
  # Allow only post
  if request.method != 'POST':
    raise Exception('Only post requests allowed')

  # Check user permissions
  if not request.user.has_perm('execute'):
    raise Exception('Missing execution privileges')

  # Parse the posted parameters as arguments for an asynchronous task to add to the queue.  TODO MAKE MODULAR WITH SEPARATE TASK CLASS
  worker_database = request.database

  now = datetime.now()
  task = None
  # A
  if action == 'frepple_run':
    if not request.user.has_perm('execute.generate_plan'):
      raise Exception('Missing execution privileges')
    constraint = 0
    for value in request.POST.getlist('constraint'):
      try:
        constraint += int(value)
      except:
        pass
    task = Task(name='generate plan', submitted=now, status='Waiting', user=request.user)
    task.arguments = "--constraint=%s --plantype=%s" % (constraint, request.POST.get('plantype'))
    env = []
    if request.POST.get('odoo_read', None) == '1':
      env.append("odoo_read")
      request.session['odoo_read'] = True
    else:
      request.session['odoo_read'] = False
    if request.POST.get('odoo_write', None) == '1':
      env.append("odoo_write")
      request.session['odoo_write'] = True
    else:
      request.session['odoo_write'] = False
    if env:
      task.arguments = "%s --env=%s" % (task.arguments, ','.join(env))
    task.save(using=request.database)
    # Update the session object
    request.session['plantype'] = request.POST.get('plantype')
    request.session['constraint'] = constraint
  # B
  elif action == 'frepple_createmodel':
    task = Task(name='generate model', submitted=now, status='Waiting', user=request.user)
    task.arguments = "--cluster=%s --demand=%s --forecast_per_item=%s --level=%s --resource=%s " \
      "--resource_size=%s --components=%s --components_per=%s --deliver_lt=%s --procure_lt=%s" % (
        request.POST['clusters'], request.POST['demands'], request.POST['fcst'], request.POST['levels'],
        request.POST['rsrc_number'], request.POST['rsrc_size'], request.POST['components'],
        request.POST['components_per'], request.POST['deliver_lt'], request.POST['procure_lt']
        )
    task.save(using=request.database)
  # C
  elif action == 'frepple_flush':
    task = Task(name='empty database', submitted=now, status='Waiting', user=request.user)
    if not request.POST.get('all'):
      task.arguments = "--models=%s" % ','.join(request.POST.getlist('entities'))
    task.save(using=request.database)
  # D
  elif action == 'loaddata':
    task = Task(name='load dataset', submitted=now, status='Waiting', user=request.user, arguments=request.POST['datafile'])
    task.save(using=request.database)
  # E
  elif action == 'frepple_copy':
    worker_database = DEFAULT_DB_ALIAS
    if 'copy' in request.POST:
      if not request.user.has_perm('execute.copy_scenario'):
        raise Exception('Missing execution privileges')
      source = request.POST.get('source', DEFAULT_DB_ALIAS)
      for sc in Scenario.objects.all():
        if request.POST.get(sc.name, 'off') == 'on' and sc.status == 'Free':
          task = Task(name='copy scenario', submitted=now, status='Waiting', user=request.user, arguments="%s %s" % (source, sc.name))
          task.save()
    elif 'release' in request.POST:
      # Note: release is immediate and synchronous.
      if not request.user.has_perm('execute.release_scenario'):
        raise Exception('Missing execution privileges')
      for sc in Scenario.objects.all():
        if request.POST.get(sc.name, 'off') == 'on' and sc.status != 'Free':
          sc.status = 'Free'
          sc.lastrefresh = now
          sc.save()
          if request.database == sc.name:
            # Erasing the database that is currently selected.
            request.prefix = ''
    elif 'update' in request.POST:
      # Note: update is immediate and synchronous.
      for sc in Scenario.objects.all():
        if request.POST.get(sc.name, 'off') == 'on':
          sc.description = request.POST.get('description', None)
          sc.save()
    else:
      raise Exception('Invalid scenario task')
  # F
  elif action == 'frepple_backup':
    task = Task(name='backup database', submitted=now, status='Waiting', user=request.user)
    task.save(using=request.database)
  # G
  elif action == 'frepple_createbuckets':
    task = Task(name='generate buckets', submitted=now, status='Waiting', user=request.user)
    task.arguments = "--start=%s --end=%s --weekstart=%s" % (
      request.POST['start'], request.POST['end'], request.POST['weekstart']
      )
    task.save(using=request.database)
  # H
  elif action == 'openbravo_import' and 'freppledb.openbravo' in settings.INSTALLED_APPS:
    task = Task(name='Openbravo import', submitted=now, status='Waiting', user=request.user)
    task.arguments = "--delta=%s" % request.POST['delta']
    task.save(using=request.database)
  # I
  elif action == 'openbravo_export' and 'freppledb.openbravo' in settings.INSTALLED_APPS:
    task = Task(name='Openbravo export', submitted=now, status='Waiting', user=request.user)
    task.save(using=request.database)
  else:
    # Task not recognized
    raise Exception('Invalid launching task')

  # Launch a worker process
  if task and not checkActive(worker_database):
    if os.path.isfile(os.path.join(settings.FREPPLE_APP, "frepplectl.py")):
      if "python" in sys.executable:
        # Development layout
        Popen([
          sys.executable,  # Python executable
          os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
          "frepple_runworker",
          "--database=%s" % worker_database
          ])
      else:
        # Deployment on Apache web server
        Popen([
          "python",
          os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
          "frepple_runworker",
          "--database=%s" % worker_database
          ], creationflags=0x08000000)
    elif sys.executable.find('freppleserver.exe') >= 0:
      # Py2exe executable
      Popen([
        sys.executable.replace('freppleserver.exe', 'frepplectl.exe'),  # frepplectl executable
        "frepple_runworker",
        "--database=%s" % worker_database
        ], creationflags=0x08000000)  # Do not create a console window
    else:
      # Linux standard installation
      Popen([
        "frepplectl",
        "frepple_runworker",
        "--database=%s" % worker_database
        ])
  return task
Example #26
0
  def handle(self, **options):
    # Pick up the options
    now = datetime.now()
    self.database = options['database']
    if self.database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % self.database )
    if options['user']:
      try:
        self.user = User.objects.all().using(self.database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      self.user = None
    timestamp = now.strftime("%Y%m%d%H%M%S")
    if self.database == DEFAULT_DB_ALIAS:
      logfile = 'importworkbook-%s.log' % timestamp
    else:
      logfile = 'importworkbook_%s-%s.log' % (self.database, timestamp)

    task = None
    try:
      setattr(_thread_locals, 'database', self.database)
      # Initialize the task
      if options['task']:
        try:
          task = Task.objects.all().using(self.database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'frepple_importworkbook':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='frepple_importworkbook', submitted=now, started=now, status='0%', user=self.user)
      task.arguments = ' '.join(options['file'])
      task.save(using=self.database)

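      # Collect all registered models and their content type ids; worksheet names are matched against these below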
      all_models = [ (ct.model_class(), ct.pk) for ct in ContentType.objects.all() if ct.model_class() ]
      try:
        with transaction.atomic(using=self.database):
          # Find all models in the workbook
          for file in options['file']:
            wb = load_workbook(filename=file, read_only=True, data_only=True)
            models = []
            for ws_name in wb.get_sheet_names():
              # Find the model
              model = None
              contenttype_id = None
              for m, ct in all_models:
                # Try with translated model names
                if ws_name.lower() in (m._meta.model_name.lower(), m._meta.verbose_name.lower(), m._meta.verbose_name_plural.lower()):
                  model = m
                  contenttype_id = ct
                  break
                # Try with English model names
                with translation.override('en'):
                  if ws_name.lower() in (m._meta.model_name.lower(), m._meta.verbose_name.lower(), m._meta.verbose_name_plural.lower()):
                    model = m
                    contenttype_id = ct
                    break
              if not model or model in EXCLUDE_FROM_BULK_OPERATIONS:
                print(force_text(_("Ignoring data in worksheet: %s") % ws_name))
                # yield '<div class="alert alert-warning">' + force_text(_("Ignoring data in worksheet: %s") % ws_name) + '</div>'
              elif not self.user.has_perm('%s.%s' % (model._meta.app_label, get_permission_codename('add', model._meta))):
                # Check permissions
                print(force_text(_("You don't permissions to add: %s") % ws_name))
                # yield '<div class="alert alert-danger">' + force_text(_("You don't permissions to add: %s") % ws_name) + '</div>'
              else:
                deps = set([model])
                GridReport.dependent_models(model, deps)
                models.append( (ws_name, model, contenttype_id, deps) )

            # Sort the list of models, based on dependencies between models
            models = GridReport.sort_models(models)
            print('Sorted model list:', models)
            # Process all rows in each worksheet
            for ws_name, model, contenttype_id, dependencies in models:
              print(force_text(_("Processing data in worksheet: %s") % ws_name))
              # yield '<strong>' + force_text(_("Processing data in worksheet: %s") % ws_name) + '</strong></br>'
              # yield ('<div class="table-responsive">'
                     # '<table class="table table-condensed" style="white-space: nowrap;"><tbody>')
              numerrors = 0
              numwarnings = 0
              firsterror = True
              ws = wb.get_sheet_by_name(name=ws_name)
              for error in parseExcelWorksheet(model, ws, user=self.user, database=self.database, ping=True):
                if error[0] == DEBUG:
                  # Yield some result so we can detect disconnected clients and interrupt the upload
                  # yield ' '
                  continue
                if firsterror and error[0] in (ERROR, WARNING):
                  print('%s %s %s %s %s%s%s' % (
                    capfirst(_("worksheet")), capfirst(_("row")),
                    capfirst(_("field")), capfirst(_("value")),
                    capfirst(_("error")), " / ", capfirst(_("warning"))
                    ))
                  # yield '<tr><th class="sr-only">%s</th><th>%s</th><th>%s</th><th>%s</th><th>%s%s%s</th></tr>' % (
                  #   capfirst(_("worksheet")), capfirst(_("row")),
                  #   capfirst(_("field")), capfirst(_("value")),
                  #   capfirst(_("error")), " / ", capfirst(_("warning"))
                  #   )
                  firsterror = False
                if error[0] == ERROR:
                  print('%s %s %s %s %s: %s' % (
                    ws_name,
                    error[1] if error[1] else '',
                    error[2] if error[2] else '',
                    error[3] if error[3] else '',
                    capfirst(_('error')),
                    error[4]
                    ))
                  # yield '<tr><td class="sr-only">%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s: %s</td></tr>' % (
                  #   ws_name,
                  #   error[1] if error[1] else '',
                  #   error[2] if error[2] else '',
                  #   error[3] if error[3] else '',
                  #   capfirst(_('error')),
                  #   error[4]
                  #   )
                  numerrors += 1
                elif error[0] == WARNING:
                  print('%s %s %s %s %s: %s' % (
                    ws_name,
                    error[1] if error[1] else '',
                    error[2] if error[2] else '',
                    error[3] if error[3] else '',
                    capfirst(_('warning')),
                    error[4]
                    ))
                  # yield '<tr><td class="sr-only">%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s: %s</td></tr>' % (
                  #   ws_name,
                  #   error[1] if error[1] else '',
                  #   error[2] if error[2] else '',
                  #   error[3] if error[3] else '',
                  #   capfirst(_('warning')),
                  #   error[4]
                  #   )
                  numwarnings += 1
                else:
                  print('%s %s %s %s %s %s' % (
                    "danger" if numerrors > 0 else 'success',
                    ws_name,
                    error[1] if error[1] else '',
                    error[2] if error[2] else '',
                    error[3] if error[3] else '',
                    error[4]
                    ))
              #     yield '<tr class=%s><td class="sr-only">%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td></tr>' % (
              #       "danger" if numerrors > 0 else 'success',
              #       ws_name,
              #       error[1] if error[1] else '',
              #       error[2] if error[2] else '',
              #       error[3] if error[3] else '',
              #       error[4]
              #       )
              # yield '</tbody></table></div>'
            print('%s' % _("Done"))
            # yield '<div><strong>%s</strong></div>' % _("Done")
      except GeneratorExit:
        logger.warning('Connection Aborted')
    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      setattr(_thread_locals, 'database', None)
      if task:
        task.save(using=self.database)

    return _("Done")
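
A minimal, standalone sketch of the contract the reporting loop above relies on: the worksheet parser yields (severity, row, field, value, message) tuples and the caller counts errors and warnings. The severity constants and the fake parser below are illustrative stand-ins, not the frePPLe implementation, so the counting logic can be exercised without openpyxl or a database.

# Stand-in severity constants and parser; only the tuple shape mirrors the
# real parseExcelWorksheet generator used above.
DEBUG, WARNING, ERROR = 0, 1, 2

def fake_parse_worksheet(rows):
  # Yield (severity, row, field, value, message) tuples like the real parser.
  for rownum, (field, value) in enumerate(rows, start=1):
    if value is None:
      yield (ERROR, rownum, field, value, "missing value")
    elif isinstance(value, str) and not value.strip():
      yield (WARNING, rownum, field, value, "blank string")
    else:
      yield (DEBUG, rownum, field, value, "ok")

def count_issues(rows):
  numerrors = numwarnings = 0
  for severity, row, field, value, message in fake_parse_worksheet(rows):
    if severity == ERROR:
      numerrors += 1
    elif severity == WARNING:
      numwarnings += 1
  return numerrors, numwarnings

print(count_issues([("name", "chair"), ("quantity", None), ("owner", " ")]))
# prints (1, 1): one error and one warning
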
Example #27
0
  def handle(self, *args, **options):
    # Pick up the options
    if 'database' in options:
      self.database = options['database'] or DEFAULT_DB_ALIAS
    else:
      self.database = DEFAULT_DB_ALIAS
    if self.database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % self.database )
    if 'user' in options and options['user']:
      try:
        self.user = User.objects.all().using(self.database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      self.user = None

    now = datetime.now()

    task = None
    self.logfile = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(self.database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'load from folder':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='load from folder', submitted=now, started=now, status='0%', user=self.user)
      task.arguments = ' '.join(['"%s"' % i for i in args])
      task.save(using=self.database)

      # Choose the right self.delimiter and language
      self.delimiter = get_format('DECIMAL_SEPARATOR', settings.LANGUAGE_CODE, True) == ',' and ';' or ','
      translation.activate(settings.LANGUAGE_CODE)

      # Execute
      # Initialize the counters here so the task update below also works
      # when the upload folder does not exist.
      cnt = 0
      errors = 0
      if os.path.isdir(settings.DATABASES[self.database]['FILEUPLOADFOLDER']):

        # Open the logfile
        self.logfile = open(os.path.join(settings.DATABASES[self.database]['FILEUPLOADFOLDER'], 'loadfromfolder.log'), "a")
        print("%s Started upload from folder\n" % datetime.now(), file=self.logfile)

        all_models = [ (ct.model_class(), ct.pk) for ct in ContentType.objects.all() if ct.model_class() ]
        models = []
        for ifile in os.listdir(settings.DATABASES[self.database]['FILEUPLOADFOLDER']):
          if not ifile.endswith('.csv'):
            continue
          filename0 = ifile.split('.')[0]

          model = None
          contenttype_id = None
          for m, ct in all_models:
            if filename0.lower() in (m._meta.model_name.lower(), m._meta.verbose_name.lower(), m._meta.verbose_name_plural.lower()):
              model = m
              contenttype_id = ct
              print("%s Matched a model to file: %s" % (datetime.now(),ifile), file=self.logfile)
              break

          if not model or model in EXCLUDE_FROM_BULK_OPERATIONS:
            print("%s Ignoring data in file: %s" % (datetime.now(),ifile), file=self.logfile)
          elif self.user and not self.user.has_perm('%s.%s' % (model._meta.app_label, get_permission_codename('add', model._meta))):
            # Check permissions
            print("%s You don't have permissions to add: %s" % (datetime.now(),ifile), file=self.logfile)
          else:
            deps = set([model])
            GridReport.dependent_models(model, deps)

            models.append( (ifile, model, contenttype_id, deps) )

        # Sort the list of models, based on dependencies between models
        cnt = len(models)
        ok = False
        while not ok:
          ok = True
          for i in range(cnt):
            for j in range(i + 1, cnt):
              if models[i][1] != models[j][1] and models[i][1] in models[j][3]:
                # Model i depends on a model j that appears later in the list.
                # The ordering is not correct yet, so move model i to the end
                # of the list and scan again.
                models.append(models.pop(i))
                ok = False
        task.status = '10%'
        task.save(using=self.database)

        i = 0
        errors = 0
        for ifile, model, contenttype_id, dependencies in models:
          i += 1
          print("%s Started processing data in file: %s" % (datetime.now(),ifile), file=self.logfile)
          filetoparse = os.path.join(os.path.abspath(settings.DATABASES[self.database]['FILEUPLOADFOLDER']), ifile)
          errors += self.parseCSVloadfromfolder(model, filetoparse)
          print("%s Finished processing data in file: %s\n" % (datetime.now(),ifile), file=self.logfile)
          task.status = str(int(10+i/cnt*80))+'%'
          task.save(using=self.database)

      # Task update
      if errors:
        task.status = 'Failed'
        task.message = "Uploaded %s data files with %s errors" % (cnt, errors)
      else:
        task.status = 'Done'
        task.message = "Uploaded %s data file" % cnt
      task.finished = datetime.now()

    except Exception as e:
      print("%s Failed" % datetime.now(), file=self.logfile)
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      if task:
        task.status = '100%'
        task.save(using=self.database)
      if self.logfile:
        print('%s End of upload from folder\n' % datetime.now(), file=self.logfile)
        self.logfile.close()
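
The reordering pass above is a simple dependency sort: whenever an earlier entry turns out to depend on a later one, it is pushed to the back of the list, and the scan repeats until a full pass makes no more moves. Below is a standalone sketch of the same idea on plain (name, dependents) pairs; it assumes, as the comparison above suggests, that the dependency set holds the models that depend on the entry's model, and that the graph has no cycles (a cycle would never terminate, just like in the command above).

# Standalone sketch of the reorder loop, where `dependents` is the set of
# names that depend on a given entry.
def order_by_dependency(entries):
  entries = list(entries)
  cnt = len(entries)
  ok = False
  while not ok:
    ok = True
    for i in range(cnt):
      for j in range(i + 1, cnt):
        # Entry i depends on the later entry j: move it to the end of the list.
        if entries[i][0] != entries[j][0] and entries[i][0] in entries[j][1]:
          entries.append(entries.pop(i))
          ok = False
  return [name for name, _ in entries]

print(order_by_dependency([
  ('operation', set()),          # nothing depends on operation
  ('item', {'operation'}),       # operation depends on item
  ('location', {'operation'}),   # operation depends on location
]))
# prints ['item', 'location', 'operation']
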
Example #28
0
  def handle(self, **options):
    # Pick up the options
    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if not database in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try: user = User.objects.all().using(database).get(username=options['user'])
      except: raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    transaction.enter_transaction_management(using=database)
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try: task = Task.objects.all().using(database).get(pk=options['task'])
        except: raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'generate plan':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='generate plan', submitted=now, started=now, status='0%', user=user)

      # Validate options
      if 'constraint' in options:
        constraint = int(options['constraint'])
        if constraint < 0 or constraint > 15:
          raise ValueError("Invalid constraint: %s" % options['constraint'])
      else: constraint = 15
      if 'plantype' in options:
        plantype = int(options['plantype'])
        if plantype < 1 or plantype > 2:
          raise ValueError("Invalid plan type: %s" % options['plantype'])
      else: plantype = 1
      if options['env']:
        task.arguments = "--constraint=%d --plantype=%d --env=%s" % (constraint, plantype, options['env'])
        for i in options['env'].split(','):
          j = i.split('=')
          if len(j) == 1:
            os.environ[j[0]] = '1'
          else:
            os.environ[j[0]] = j[1]
      else:
        task.arguments = "--constraint=%d --plantype=%d" % (constraint, plantype)

      # Log task
      task.save(using=database)
      transaction.commit(using=database)

      # Locate commands.py
      cmd = None
      for app in settings.INSTALLED_APPS:
        mod = import_module(app)
        if os.path.exists(os.path.join(os.path.dirname(mod.__file__),'commands.py')):
          cmd = os.path.join(os.path.dirname(mod.__file__),'commands.py')
          break
      if not cmd: raise Exception("Can't locate commands.py")

      # Execute
      os.environ['FREPPLE_PLANTYPE'] = str(plantype)
      os.environ['FREPPLE_CONSTRAINT'] = str(constraint)
      os.environ['FREPPLE_TASKID'] = str(task.id)
      os.environ['FREPPLE_DATABASE'] = database
      os.environ['PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ['PATH'] + os.pathsep + settings.FREPPLE_APP
      if os.path.isfile(os.path.join(settings.FREPPLE_HOME,'libfrepple.so')):
        os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
      if 'DJANGO_SETTINGS_MODULE' not in os.environ:
        os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
      if os.path.exists(os.path.join(settings.FREPPLE_HOME,'python27.zip')):
        # For the py2exe executable
        os.environ['PYTHONPATH'] = os.path.join(settings.FREPPLE_HOME,'python27.zip') + os.pathsep + os.path.normpath(settings.FREPPLE_APP)
      else:
        # Other executables
        os.environ['PYTHONPATH'] = os.path.normpath(settings.FREPPLE_APP)
      ret = os.system('frepple "%s"' % cmd.replace('\\','\\\\'))
      if ret != 0 and ret != 2:
        # Return code 0 is a successful run.
        # Return code 2 is a run cancelled by a user; that outcome is shown in the status field.
        raise Exception('Failed with exit code %d' % ret)

      # Task update
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      if task: task.save(using=database)
      try: transaction.commit(using=database)
      except: pass
      transaction.leave_transaction_management(using=database)
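
The --env handling above splits a comma-separated list of key or key=value tokens and exports each one as an environment variable, with a bare key defaulting to '1'. A small sketch of that parsing in isolation, returning a dict instead of touching os.environ (the token names in the demo call are just placeholders):

# Sketch of the --env parsing used above: bare keys become '1'.
def parse_env(env_option):
  result = {}
  for token in env_option.split(','):
    key, sep, value = token.partition('=')
    result[key] = value if sep else '1'
  return result

print(parse_env("supply,loglevel=3"))
# prints {'supply': '1', 'loglevel': '3'}
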
Example #29
0
def wrapTask(request, action):
    # Allow only post
    if request.method != 'POST':
        raise Exception('Only post requests allowed')
    # Parse the posted parameters as arguments for an asynchronous task to add to the queue.    TODO MAKE MODULAR WITH SEPARATE TASK CLASS
    worker_database = request.database

    now = datetime.now()
    task = None
    args = request.POST or request.GET

    # A
    if action in ('frepple_run', 'runplan'):
        if not request.user.has_perm('auth.generate_plan'):
            raise Exception('Missing execution privileges')
        constraint = 0
        for value in args.getlist('constraint'):
            try:
                constraint += int(value)
            except:
                pass
        task = Task(name='runplan',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        task.arguments = "--constraint=%s --plantype=%s" % (
            constraint, args.get('plantype', 1))
        env = []
        for value in args.getlist('env'):
            env.append(value)
        if env:
            task.arguments = "%s --env=%s" % (task.arguments, ','.join(env))
        request.session['env'] = env
        task.save(using=request.database)
        # Update the session object
        request.session['plantype'] = args.get('plantype')
        request.session['constraint'] = constraint
    # C
    elif action in ('frepple_flush', 'empty'):
        if not request.user.has_perm('auth.run_db'):
            raise Exception('Missing execution privileges')
        task = Task(name='empty',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        models = ','.join(args.getlist('models'))
        if models:
            task.arguments = "--models=%s" % (models)
        task.save(using=request.database)
    # D
    elif action == 'loaddata':
        if not request.user.has_perm('auth.run_db'):
            raise Exception('Missing execution privileges')
        task = Task(name='loaddata',
                    submitted=now,
                    status='Waiting',
                    user=request.user,
                    arguments=args['fixture'])
        task.save(using=request.database)
    # E
    elif action in ('frepple_copy', 'scenario_copy'):
        worker_database = DEFAULT_DB_ALIAS
        if 'copy' in args:
            if not request.user.has_perm('auth.copy_scenario'):
                raise Exception('Missing execution privileges')
            source = args.get('source', DEFAULT_DB_ALIAS)
            worker_database = source
            destination = args.getlist('destination')
            force = args.get('force', False)
            for sc in Scenario.objects.all():
                arguments = "%s %s" % (source, sc.name)
                if force:
                    arguments += ' --force'
                if args.get(sc.name, 'off') == 'on' or sc.name in destination:
                    task = Task(name='scenario_copy',
                                submitted=now,
                                status='Waiting',
                                user=request.user,
                                arguments=arguments)
                    task.save(using=source)
        elif 'release' in args:
            # Note: release is immediate and synchronous.
            if not request.user.has_perm('auth.release_scenario'):
                raise Exception('Missing execution privileges')
            for sc in Scenario.objects.all().using(DEFAULT_DB_ALIAS):
                if args.get(sc.name, 'off') == 'on' and sc.status != 'Free':
                    sc.status = 'Free'
                    sc.lastrefresh = now
                    sc.save(using=DEFAULT_DB_ALIAS)
                    if request.database == sc.name:
                        # Erasing the database that is currently selected.
                        request.prefix = ''
        elif 'update' in args:
            # Note: update is immediate and synchronous.
            if not request.user.has_perm('auth.release_scenario'):
                raise Exception('Missing execution privileges')
            for sc in Scenario.objects.all().using(DEFAULT_DB_ALIAS):
                if args.get(sc.name, 'off') == 'on':
                    sc.description = args.get('description', None)
                    sc.save(using=DEFAULT_DB_ALIAS)
        else:
            raise Exception('Invalid scenario task')
    # G
    elif action in ('frepple_createbuckets', 'createbuckets'):
        if not request.user.has_perm('auth.run_db'):
            raise Exception('Missing execution privileges')
        task = Task(name='createbuckets',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        arguments = []
        start = args.get('start', None)
        if start:
            arguments.append("--start=%s" % start)
        end = args.get('end', None)
        if end:
            arguments.append("--end=%s" % end)
        weekstart = args.get('weekstart', None)
        if weekstart:
            arguments.append("--weekstart=%s" % weekstart)
        if arguments:
            task.arguments = " ".join(arguments)
        task.save(using=request.database)
    else:
        # Generic task wrapper

        # Find the command and verify we have permissions to run it
        command = None
        for commandname, appname in get_commands().items():
            if commandname == action:
                try:
                    c = getattr(
                        import_module('%s.management.commands.%s' %
                                      (appname, commandname)), 'Command')
                    if c.index >= 0:
                        if getattr(c, 'getHTML', None) and c.getHTML(request):
                            # Command class has getHTML method
                            command = c
                            break
                        else:
                            for p in c.__bases__:
                                # Parent command class has getHTML method
                                if getattr(p, 'getHTML',
                                           None) and p.getHTML(request):
                                    command = c
                                    break
                            if command:
                                break
                except Exception:
                    pass  # Silently ignore failures
        if not command:
            raise Exception("Invalid task name '%s'" % action)
        # Create a task
        arguments = []
        for arg, val in args.lists():
            if arg != 'csrfmiddlewaretoken':
                arguments.append('--%s=%s' % (arg, ','.join(val)))
        task = Task(name=action,
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        if arguments:
            task.arguments = " ".join(arguments)
        task.save(using=request.database)

    # Launch a worker process, making sure it inherits the right
    # environment variables from this parent
    os.environ['FREPPLE_CONFIGDIR'] = settings.FREPPLE_CONFIGDIR
    if task and not checkActive(worker_database):
        if os.path.isfile(os.path.join(settings.FREPPLE_APP, "frepplectl.py")):
            if "python" in sys.executable:
                # Development layout
                Popen([
                    sys.executable,  # Python executable
                    os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
                    "runworker",
                    "--database=%s" % worker_database
                ])
            else:
                # Deployment on Apache web server
                Popen([
                    "python",
                    os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
                    "runworker",
                    "--database=%s" % worker_database
                ],
                      creationflags=0x08000000)
        elif sys.executable.find('freppleserver.exe') >= 0:
            # Py2exe executable
            Popen(
                [
                    sys.executable.replace(
                        'freppleserver.exe',
                        'frepplectl.exe'),  # frepplectl executable
                    "runworker",
                    "--database=%s" % worker_database
                ],
                creationflags=0x08000000)  # Do not create a console window
        else:
            # Linux standard installation
            Popen(
                ["frepplectl", "runworker",
                 "--database=%s" % worker_database])
    return task
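
The runplan branch above simply sums whatever integer values were posted for 'constraint'; the form presumably posts power-of-two values per checkbox, so the sum behaves as a bitmask, which the plan command shown earlier validates to lie between 0 and 15. A standalone sketch of that accumulation (the demo values are arbitrary):

# Sketch of the constraint value built from posted checkbox values.
def combine_constraints(posted_values):
    constraint = 0
    for value in posted_values:
        try:
            constraint += int(value)
        except (TypeError, ValueError):
            pass  # Non-numeric values are silently ignored, as in the view above.
    return constraint

print(combine_constraints(["1", "2", "8", "oops"]))   # prints 11
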
Example #30
0
    def handle(self, **options):
        # Pick up the options
        if 'database' in options:
            database = options['database'] or DEFAULT_DB_ALIAS
        else:
            database = DEFAULT_DB_ALIAS
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               database)
        if 'user' in options and options['user']:
            try:
                user = User.objects.all().using(database).get(
                    username=options['user'])
            except:
                raise CommandError("User '%s' not found" % options['user'])
        else:
            user = None

        now = datetime.now()
        task = None
        param = None
        try:
            # Initialize the task
            if 'task' in options and options['task']:
                try:
                    task = Task.objects.all().using(database).get(
                        pk=options['task'])
                except:
                    raise CommandError("Task identifier not found")
                if task.started or task.finished or task.status != "Waiting" or task.name != 'plan simulation':
                    raise CommandError("Invalid task identifier")
                task.status = '0%'
                task.started = now
            else:
                task = Task(name='plan simulation',
                            submitted=now,
                            started=now,
                            status='0%',
                            user=user)

            # Validate options
            task.arguments = ""
            if 'horizon' in options:
                horizon = int(options['horizon'])
                if horizon < 0:
                    raise ValueError("Invalid horizon: %s" %
                                     options['horizon'])
                task.arguments += "--horizon=%d" % horizon
            else:
                horizon = 60
            if 'step' in options:
                step = int(options['step'])
                if step < 0:
                    raise ValueError("Invalid step: %s" % options['step'])
                task.arguments += " --step=%d" % step
            else:
                step = 1
            if 'verbosity' in options:
                verbosity = int(options['verbosity'])
            else:
                verbosity = 0

            # Log task
            task.save(using=database)

            # Load the initial status
            if options.get('initial', None):
                if verbosity > 0:
                    print("Erasing simulation database")
                management.call_command('frepple_flush',
                                        database=database,
                                        verbosity=verbosity)
                if verbosity > 0:
                    print("Loading initial data")
                management.call_command('loaddata',
                                        options.get('initial'),
                                        database=database,
                                        verbosity=verbosity)

            # Get current date
            param = Parameter.objects.all().using(database).get_or_create(
                name='currentdate')[0]
            try:
                curdate = datetime.strptime(param.value, "%Y-%m-%d %H:%M:%S")
            except:
                curdate = datetime.now()
            curdate = curdate.date()

            # Compute how many simulation steps we need
            bckt_list = []
            tmp = 0
            while tmp <= horizon:
                bckt_list.append(curdate + timedelta(days=tmp))
                tmp += step
            bckt_list_len = len(bckt_list)

            # Create the simulator class
            if options.get('simulator', None):
                cls = load_class(options['simulator'])
                simulator = cls(database=database, verbosity=verbosity)
            else:
                simulator = Simulator(database=database, verbosity=verbosity)
            simulator.buckets = 1

            # The simulation only support complete shipments for the full quantity.
            # We enforce that the generated plan respects this as well.
            Demand.objects.all().using(database).update(
                minshipment=F('quantity'))

            # Loop over all dates in the simulation horizon
            idx = 0
            strt = None
            nd = None
            for bckt in bckt_list:
                if nd:
                    strt = nd
                    nd = bckt
                else:
                    nd = bckt
                    continue

                # Start message
                task.status = "%.0f%%" % (100.0 * idx / bckt_list_len)
                task.message = 'Simulating bucket from %s to %s ' % (strt, nd)
                task.save(using=database)
                idx += 1
                simulator.buckets += 1

                if verbosity > 0:
                    print(
                        "\nStart simulating bucket from %s to %s (%s out of %s)"
                        % (strt, nd, idx, bckt_list_len))

                # Update currentdate parameter
                param.value = strt.strftime("%Y-%m-%d %H:%M:%S")
                param.save(using=database)

                # Initialization of the bucket
                if verbosity > 1:
                    print("  Starting the bucket")
                with transaction.atomic(using=database):
                    simulator.start_bucket(strt, nd)

                # Generate new demand records
                if verbosity > 1:
                    print("  Receive new orders from customers")
                with transaction.atomic(using=database):
                    simulator.generate_customer_demand(strt, nd)

                # Generate the constrained plan
                if verbosity > 1:
                    print("  Generating plan...")
                management.call_command('frepple_run', database=database)

                if options['pause']:
                    print(
                        "\nYou can analyze the plan in the bucket in the user interface now..."
                    )
                    input("\nPress Enter to continue the simulation...\n")

                # Release new purchase orders
                if verbosity > 1:
                    print("  Create new purchase orders")
                with transaction.atomic(using=database):
                    simulator.create_purchase_orders(strt, nd)

                # Release new manufacturing orders
                if verbosity > 1:
                    print("  Create new manufacturing orders")
                with transaction.atomic(using=database):
                    simulator.create_manufacturing_orders(strt, nd)

                # Release new distribution orders
                if verbosity > 1:
                    print("  Create new distribution orders")
                with transaction.atomic(using=database):
                    simulator.create_distribution_orders(strt, nd)

                # Receive open purchase orders
                if verbosity > 1:
                    print("  Receive open purchase orders")
                with transaction.atomic(using=database):
                    simulator.receive_purchase_orders(strt, nd)

                # Receive open distribution orders
                if verbosity > 1:
                    print("  Receive open distribution orders")
                with transaction.atomic(using=database):
                    simulator.receive_distribution_orders(strt, nd)

                # Finish open manufacturing orders
                if verbosity > 1:
                    print("  Finish open manufacturing orders")
                with transaction.atomic(using=database):
                    simulator.finish_manufacturing_orders(strt, nd)

                # Ship demand to customers
                if verbosity > 1:
                    print("  Ship orders to customers")
                with transaction.atomic(using=database):
                    simulator.ship_customer_demand(strt, nd)

                # Finish of the bucket
                if verbosity > 1:
                    print("  Ending the bucket")
                with transaction.atomic(using=database):
                    simulator.end_bucket(strt, nd)

            # Report statistics from the simulation.
            # The simulator class collected these results during its run.
            if verbosity > 1:
                print("Displaying final simulation metrics")
            with transaction.atomic(using=database):
                simulator.show_metrics()

            # Task update
            task.status = 'Done'
            task.message = "Simulated from %s till %s" % (bckt_list[0],
                                                          bckt_list[-1])
            task.finished = datetime.now()

        except Exception as e:
            if task:
                task.status = 'Failed'
                task.message = '%s' % e
                task.finished = datetime.now()
            raise e

        finally:
            # Final task status
            if task:
                task.save(using=database)
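
The simulation buckets above are just the dates from the current date up to the horizon, one entry every `step` days; the loop then advances the currentdate parameter bucket by bucket. A standalone sketch of that computation:

from datetime import date, timedelta

# Sketch of the bucket list built above: one date every `step` days,
# from the current date up to and including the horizon.
def build_buckets(curdate, horizon, step):
    buckets = []
    offset = 0
    while offset <= horizon:
        buckets.append(curdate + timedelta(days=offset))
        offset += step
    return buckets

print(build_buckets(date(2024, 1, 1), horizon=7, step=3))
# prints the buckets for January 1, 4 and 7
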
Example #31
0
    def handle(self, **options):
        # Pick up the options
        database = options["database"]
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               database)
        if options["user"]:
            try:
                user = User.objects.all().using(database).get(
                    username=options["user"])
            except:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            user = None

        now = datetime.now()
        task = None
        param = None
        try:
            # Initialize the task
            if options["task"]:
                try:
                    task = Task.objects.all().using(database).get(
                        pk=options["task"])
                except:
                    raise CommandError("Task identifier not found")
                if (task.started or task.finished or task.status != "Waiting"
                        or task.name
                        not in ("frepple_simulation", "simulation")):
                    raise CommandError("Invalid task identifier")
                task.status = "0%"
                task.started = now
            else:
                task = Task(
                    name="simulation",
                    submitted=now,
                    started=now,
                    status="0%",
                    user=user,
                )

            # Validate options
            task.arguments = ""
            horizon = int(options["horizon"])
            if horizon < 0:
                raise ValueError("Invalid horizon: %s" % options["horizon"])
            task.arguments += "--horizon=%d" % horizon
            step = int(options["step"])
            if step < 0:
                raise ValueError("Invalid step: %s" % options["step"])
            task.arguments += " --step=%d" % step
            verbosity = int(options["verbosity"])

            # Log task
            task.save(using=database)

            # Load the initial status
            if options.get("initial", None):
                if verbosity > 0:
                    print("Erasing simulation database")
                management.call_command("empty",
                                        database=database,
                                        verbosity=verbosity)
                if verbosity > 0:
                    print("Loading initial data")
                management.call_command(
                    "loaddata",
                    options.get("initial"),
                    database=database,
                    verbosity=verbosity,
                )

            # Get current date
            param = (Parameter.objects.all().using(database).get_or_create(
                name="currentdate")[0])
            try:
                curdate = datetime.strptime(param.value, "%Y-%m-%d %H:%M:%S")
            except:
                curdate = datetime.now()
            curdate = curdate.date()

            # Compute how many simulation steps we need
            bckt_list = []
            tmp = 0
            while tmp <= horizon:
                bckt_list.append(curdate + timedelta(days=tmp))
                tmp += step
            bckt_list_len = len(bckt_list)

            # Create the simulator class
            if options.get("simulator", None):
                cls = load_class(options["simulator"])
                simulator = cls(database=database, verbosity=verbosity)
            else:
                simulator = Simulator(database=database, verbosity=verbosity)
            simulator.buckets = 1

            # Loop over all dates in the simulation horizon
            idx = 0
            strt = None
            nd = None
            for bckt in bckt_list:
                if nd:
                    strt = nd
                    nd = bckt
                else:
                    nd = bckt
                    continue

                # Start message
                task.status = "%.0f%%" % (100.0 * idx / bckt_list_len)
                task.message = "Simulating bucket from %s to %s " % (strt, nd)
                task.save(using=database)
                idx += 1
                simulator.buckets += 1

                if verbosity > 0:
                    print(
                        "\nStart simulating bucket from %s to %s (%s out of %s)"
                        % (strt, nd, idx, bckt_list_len))

                # Update currentdate parameter
                param.value = strt.strftime("%Y-%m-%d %H:%M:%S")
                param.save(using=database)

                # Initialization of the bucket
                if verbosity > 1:
                    print("  Starting the bucket")
                with transaction.atomic(using=database):
                    simulator.start_bucket(strt, nd)

                # Generate new demand records
                if verbosity > 1:
                    print("  Receive new orders from customers")
                with transaction.atomic(using=database):
                    simulator.generate_customer_demand(strt, nd)

                # Generate the constrained plan
                if verbosity > 1:
                    print("  Generating plan...")
                management.call_command("runplan",
                                        database=database,
                                        env="supply")

                if options["pause"]:
                    print(
                        "\nYou can analyze the plan in the bucket in the user interface now..."
                    )
                    input("\nPress Enter to continue the simulation...\n")

                # Release new purchase orders
                if verbosity > 1:
                    print("  Create new purchase orders")
                with transaction.atomic(using=database):
                    simulator.create_purchase_orders(strt, nd)

                # Release new manufacturing orders
                if verbosity > 1:
                    print("  Create new manufacturing orders")
                with transaction.atomic(using=database):
                    simulator.create_manufacturing_orders(strt, nd)

                # Release new distribution orders
                if verbosity > 1:
                    print("  Create new distribution orders")
                with transaction.atomic(using=database):
                    simulator.create_distribution_orders(strt, nd)

                # Receive open purchase orders
                if verbosity > 1:
                    print("  Receive open purchase orders")
                with transaction.atomic(using=database):
                    simulator.receive_purchase_orders(strt, nd)

                # Receive open distribution orders
                if verbosity > 1:
                    print("  Receive open distribution orders")
                with transaction.atomic(using=database):
                    simulator.receive_distribution_orders(strt, nd)

                # Finish open manufacturing orders
                if verbosity > 1:
                    print("  Finish open manufacturing orders")
                with transaction.atomic(using=database):
                    simulator.finish_manufacturing_orders(strt, nd)

                # Ship demand to customers
                if verbosity > 1:
                    print("  Ship orders to customers")
                with transaction.atomic(using=database):
                    simulator.ship_customer_demand(strt, nd)

                # Finish of the bucket
                if verbosity > 1:
                    print("  Ending the bucket")
                with transaction.atomic(using=database):
                    simulator.end_bucket(strt, nd)

            # Report statistics from the simulation.
            # The simulator class collected these results during its run.
            if verbosity > 1:
                print("Displaying final simulation metrics")
            with transaction.atomic(using=database):
                simulator.show_metrics()

            # Task update
            task.status = "Done"
            task.message = "Simulated from %s till %s" % (bckt_list[0],
                                                          bckt_list[-1])
            task.finished = datetime.now()

        except Exception as e:
            if task:
                task.status = "Failed"
                task.message = "%s" % e
                task.finished = datetime.now()
            raise e

        finally:
            # Final task status
            if task:
                task.save(using=database)
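
The strt/nd bookkeeping in the loop above walks consecutive pairs of bucket dates: the first date only primes nd, and every later iteration simulates the interval between the previous and the current date. The same pattern in isolation:

# Sketch of the (strt, nd) pairing used in the simulation loop above.
def consecutive_pairs(items):
    previous = None
    for current in items:
        if previous is not None:
            yield previous, current
        previous = current

for strt, nd in consecutive_pairs(["Jan 01", "Jan 04", "Jan 07"]):
    print("simulate bucket from %s to %s" % (strt, nd))
# two buckets: Jan 01 -> Jan 04 and Jan 04 -> Jan 07
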
Example #32
0
  def handle(self, *args, **options):
    # Pick up the options
    now = datetime.now()
    self.database = options['database']
    if self.database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % self.database )

    if options['user']:
      try:
        self.user = User.objects.all().using(self.database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      self.user = None

    timestamp = now.strftime("%Y%m%d%H%M%S")
    if self.database == DEFAULT_DB_ALIAS:
      logfile = 'exporttofolder-%s.log' % timestamp
    else:
      logfile = 'exporttofolder_%s-%s.log' % (self.database, timestamp)

    try:
      handler = logging.FileHandler(os.path.join(settings.FREPPLE_LOGDIR, logfile), encoding='utf-8')
      # handler.setFormatter(logging.Formatter(settings.LOGGING['formatters']['simple']['format']))
      logger.addHandler(handler)
      logger.propagate = False
    except Exception as e:
      print("%s Failed to open logfile %s: %s" % (datetime.now(), logfile, e))

    task = None
    errors = 0
    try:
      # Initialize the task
      if options['task']:
        try:
          task = Task.objects.all().using(self.database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name not in ('frepple_exporttofolder', 'exporttofolder'):
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
        task.logfile = logfile
      else:
        task = Task(name='exporttofolder', submitted=now, started=now, status='0%', user=self.user, logfile=logfile)
      task.arguments = ' '.join(['"%s"' % i for i in args])
      task.processid = os.getpid()
      task.save(using=self.database)

      # Execute
      if os.path.isdir(settings.DATABASES[self.database]['FILEUPLOADFOLDER']):
        if not os.path.isdir(os.path.join(settings.DATABASES[self.database]['FILEUPLOADFOLDER'], 'export')):
          try:
            os.makedirs(os.path.join(settings.DATABASES[self.database]['FILEUPLOADFOLDER'], 'export'))
          except OSError as exception:
            if exception.errno != errno.EEXIST:
              raise

        logger.info("%s Started export to folder\n" % datetime.now())

        cursor = connections[self.database].cursor()

        task.status = '0%'
        task.save(using=self.database)

        i = 0
        cnt = len(self.statements)

        # Calling all the pre-sql statements
        for stmt in self.pre_sql_statements:
          try:
            logger.info("Executing pre-statement '%s'" % stmt)
            cursor.execute(stmt)
            logger.info("%s record(s) modified" % cursor.rowcount)
          except:
            errors += 1
            logger.error("An error occurred when executing statement '%s'" % stmt)

        for cfg in self.statements:
          # Validate filename
          filename = cfg.get('filename', None)
          if not filename:
            raise Exception("Missing filename in export configuration")
          folder = cfg.get('folder', None)
          if not folder:
            raise Exception("Missing folder in export configuration for %s" % filename)
          logger.info("%s Started export of %s" % (datetime.now(), filename))

          # Make sure export folder exists
          exportFolder = os.path.join(settings.DATABASES[self.database]['FILEUPLOADFOLDER'], folder)
          if not os.path.isdir(exportFolder):
            os.makedirs(exportFolder)

          try:
            reportclass = cfg.get('report', None)
            sql = cfg.get('sql', None)
            if reportclass:
              # Export from report class

              # Create a dummy request
              factory = RequestFactory()
              request = factory.get("/dummy/", cfg.get('data', {}))
              if self.user:
                request.user = self.user
              else:
                request.user = User.objects.all().get(username="******")
              request.database = self.database
              request.LANGUAGE_CODE = settings.LANGUAGE_CODE
              request.prefs = cfg.get('prefs', None)

              # Initialize the report
              if hasattr(reportclass, "initialize"):
                reportclass.initialize(request)
              if not reportclass._attributes_added and reportclass.model:
                reportclass._attributes_added = True
                for f in reportclass.getAttributeFields(reportclass.model):
                  reportclass.rows += (f,)
              if reportclass.hasTimeBuckets:
                reportclass.getBuckets(request)

              # Write the report file
              datafile = open(os.path.join(exportFolder, filename), "wb")
              if filename.endswith(".xlsx"):
                reportclass._generate_spreadsheet_data(request, datafile, **cfg.get('data', {}))
              elif filename.endswith(".csv"):
                for r in reportclass._generate_csv_data(request, **cfg.get('data', {})):
                  datafile.write(
                    r.encode(settings.CSV_CHARSET)
                    if isinstance(r, str) else r
                    )
              else:
                raise Exception("Unknown output format for %s" % filename)
            elif sql:
              # Exporting using SQL
              if filename.lower().endswith(".gz"):
                datafile = gzip.open(os.path.join(exportFolder, filename), "w")
              else:
                datafile = open(os.path.join(exportFolder, filename), "w")
              cursor.copy_expert(sql, datafile)
            else:
              raise Exception("Unknown export type for %s" % filename)
            datafile.close()
            i += 1

          except Exception as e:
            errors += 1
            logger.error("%s Failed to export to %s" % (datetime.now(), filename))
            if task:
              task.message = 'Failed to export %s' % filename

          task.status = str(int(i / cnt * 100)) + '%'
          task.save(using=self.database)

        logger.info("%s Exported %s file(s)\n" % (datetime.now(), cnt - errors))

        for stmt in self.post_sql_statements:
          try:
            logger.info("Executing post-statement '%s'" % stmt)
            cursor.execute(stmt)
            logger.info("%s record(s) modified" % cursor.rowcount)
          except:
            errors += 1
            logger.error("An error occured when executing statement '%s'" % stmt)

      else:
        errors += 1
        logger.error("%s Failed, folder does not exist" % datetime.now())
        task.message = "Destination folder does not exist"
        task.save(using=self.database)

    except Exception as e:
      logger.error("%s Failed to export: %s" % (datetime.now(), e))
      errors += 1
      if task:
        task.message = 'Failed to export'

    finally:
      logger.info('%s End of export to folder\n' % datetime.now())
      if task:
        if not errors:
          task.status = '100%'
          task.message = "Exported %s data files" % (cnt)
        else:
          task.status = 'Failed'
          #  task.message = "Exported %s data files, %s failed" % (cnt-errors, errors)
        task.finished = datetime.now()
        task.processid = None
        task.save(using=self.database)
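
Each entry in self.statements above needs at least a filename and a folder, plus either a sql string (run through cursor.copy_expert) or a report class with optional data and prefs. A purely illustrative entry, with an invented query, just to show the shape the command expects:

# Illustrative export configuration; only the keys mirror what the command
# above reads, the table and columns in the SQL are made up.
statements = [
  {
    'filename': 'demand.csv.gz',
    'folder': 'export',
    'sql': "COPY (SELECT name, quantity, due FROM demand) TO STDOUT WITH CSV HEADER",
  },
  # A report-based entry would carry 'report', 'data' and optionally 'prefs'
  # instead of 'sql'.
]
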
Example #33
0
    def handle(self, **options):

        # Pick up the options
        database = options["database"]
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               database)
        if options["user"]:
            try:
                user = User.objects.all().using(database).get(
                    username=options["user"])
            except:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            user = None

        now = datetime.now()
        task = None
        try:
            # Initialize the task
            if options["task"]:
                try:
                    task = Task.objects.all().using(database).get(
                        pk=options["task"])
                except:
                    raise CommandError("Task identifier not found")
                if (task.started or task.finished or task.status != "Waiting"
                        or task.name not in ("frepple_restore", "restore")):
                    raise CommandError("Invalid task identifier")
                task.status = "0%"
                task.started = now
            else:
                task = Task(name="restore",
                            submitted=now,
                            started=now,
                            status="0%",
                            user=user)
            task.arguments = options["dump"]
            task.processid = os.getpid()
            task.save(using=database)

            # Validate options
            dumpfile = os.path.abspath(
                os.path.join(settings.FREPPLE_LOGDIR, options["dump"]))
            if not os.path.isfile(dumpfile):
                raise CommandError("Dump file not found")

            # Run the restore command
            # Commenting the next line is a little more secure, but requires you to create a .pgpass file.
            if settings.DATABASES[database]["PASSWORD"]:
                os.environ["PGPASSWORD"] = settings.DATABASES[database][
                    "PASSWORD"]
            cmd = ["pg_restore", "-n", "public", "-Fc", "-c", "--if-exists"]
            if settings.DATABASES[database]["USER"]:
                cmd.append("--username=%s" %
                           settings.DATABASES[database]["USER"])
            if settings.DATABASES[database]["HOST"]:
                cmd.append("--host=%s" % settings.DATABASES[database]["HOST"])
            if settings.DATABASES[database]["PORT"]:
                cmd.append("--port=%s " % settings.DATABASES[database]["PORT"])
            cmd.append("-d")
            cmd.append(settings.DATABASES[database]["NAME"])
            cmd.append("<%s" % dumpfile)
            # Shell needs to be True in order to interpret the < character
            with subprocess.Popen(cmd, shell=True) as p:
                try:
                    task.processid = p.pid
                    task.save(using=database)
                    p.wait()
                except:
                    p.kill()
                    p.wait()
                    raise Exception("Database restoration failed")

            # Task update
            # We need to recreate a new task record, since the previous one is lost during the restoration.
            task = Task(
                name="restore",
                submitted=task.submitted,
                started=task.started,
                arguments=task.arguments,
                status="Done",
                finished=datetime.now(),
                user=task.user,
            )

        except Exception as e:
            if task:
                task.status = "Failed"
                task.message = "%s" % e
                task.finished = datetime.now()
            raise e

        finally:
            # Commit it all, even in case of exceptions
            if task:
                task.processid = None
                task.save(using=database)
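
The restore above relies on the shell to interpret the '<' redirection, which is why shell=True is set; note that combining an argument list with shell=True is handled differently across platforms. As a sketch only, one shell-free variation streams the dump into pg_restore through stdin instead (paths and connection details are placeholders):

import subprocess

# Sketch of a shell-free alternative: feed the dump to pg_restore via stdin.
def run_restore(dumpfile, dbname, username=None, host=None, port=None):
    cmd = ["pg_restore", "-n", "public", "-Fc", "-c", "--if-exists"]
    if username:
        cmd.append("--username=%s" % username)
    if host:
        cmd.append("--host=%s" % host)
    if port:
        cmd.append("--port=%s" % port)
    cmd += ["-d", dbname]
    with open(dumpfile, "rb") as dump:
        return subprocess.call(cmd, stdin=dump)

# run_restore("/var/log/frepple/backup.dump", "frepple")  # needs pg_restore on the PATH
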
Example #34
0
def wrapTask(request, action):
    # Allow only post
    if request.method != "POST":
        raise Exception("Only post requests allowed")

    # Check user permissions
    if not request.user.has_perm("execute"):
        raise Exception("Missing execution privileges")

    # Parse the posted parameters as arguments for an asynchronous task to add to the queue.    TODO MAKE MODULAR WITH SEPARATE TASK CLASS
    worker_database = request.database

    now = datetime.now()
    task = None
    # A
    if action == "frepple_run":
        if not request.user.has_perm("execute.generate_plan"):
            raise Exception("Missing execution privileges")
        constraint = 0
        for value in request.POST.getlist("constraint"):
            try:
                constraint += int(value)
            except:
                pass
        task = Task(name="generate plan", submitted=now, status="Waiting", user=request.user)
        task.arguments = "--constraint=%s --plantype=%s" % (constraint, request.POST.get("plantype"))
        env = []
        if request.POST.get("odoo_read", None) == "1":
            env.append("odoo_read_1")
            request.session["odoo_read"] = True
        else:
            request.session["odoo_read"] = False
        if request.POST.get("odoo_write", None) == "1":
            env.append("odoo_write")
            request.session["odoo_write"] = True
        else:
            request.session["odoo_write"] = False
        if env:
            task.arguments = "%s --env=%s" % (task.arguments, ",".join(env))
        task.save(using=request.database)
        # Update the session object
        request.session["plantype"] = request.POST.get("plantype")
        request.session["constraint"] = constraint
    # B
    elif action == "frepple_createmodel":
        task = Task(name="generate model", submitted=now, status="Waiting", user=request.user)
        task.arguments = (
            "--cluster=%s --demand=%s --forecast_per_item=%s --level=%s --resource=%s "
            "--resource_size=%s --components=%s --components_per=%s --deliver_lt=%s --procure_lt=%s"
            % (
                request.POST["clusters"],
                request.POST["demands"],
                request.POST["fcst"],
                request.POST["levels"],
                request.POST["rsrc_number"],
                request.POST["rsrc_size"],
                request.POST["components"],
                request.POST["components_per"],
                request.POST["deliver_lt"],
                request.POST["procure_lt"],
            )
        )
        task.save(using=request.database)
    # C
    elif action == "frepple_flush":
        task = Task(name="empty database", submitted=now, status="Waiting", user=request.user)
        if not request.POST.get("all"):
            task.arguments = "--models=%s" % ",".join(request.POST.getlist("entities"))
        task.save(using=request.database)
    # D
    elif action == "loaddata":
        task = Task(
            name="load dataset", submitted=now, status="Waiting", user=request.user, arguments=request.POST["datafile"]
        )
        task.save(using=request.database)
    # E
    elif action == "frepple_copy":
        worker_database = DEFAULT_DB_ALIAS
        if "copy" in request.POST:
            if not request.user.has_perm("execute.copy_scenario"):
                raise Exception("Missing execution privileges")
            source = request.POST.get("source", DEFAULT_DB_ALIAS)
            for sc in Scenario.objects.all():
                if request.POST.get(sc.name, "off") == "on" and sc.status == "Free":
                    task = Task(
                        name="copy scenario",
                        submitted=now,
                        status="Waiting",
                        user=request.user,
                        arguments="%s %s" % (source, sc.name),
                    )
                    task.save()
        elif "release" in request.POST:
            # Note: release is immediate and synchronous.
            if not request.user.has_perm("execute.release_scenario"):
                raise Exception("Missing execution privileges")
            for sc in Scenario.objects.all():
                if request.POST.get(sc.name, "off") == "on" and sc.status != "Free":
                    sc.status = "Free"
                    sc.lastrefresh = now
                    sc.save()
                    if request.database == sc.name:
                        # Erasing the database that is currently selected.
                        request.prefix = ""
        elif "update" in request.POST:
            # Note: update is immediate and synchronous.
            for sc in Scenario.objects.all():
                if request.POST.get(sc.name, "off") == "on":
                    sc.description = request.POST.get("description", None)
                    sc.save()
        else:
            raise Exception("Invalid scenario task")
    # F
    elif action == "frepple_backup":
        task = Task(name="backup database", submitted=now, status="Waiting", user=request.user)
        task.save(using=request.database)
    # G
    elif action == "frepple_createbuckets":
        task = Task(name="generate buckets", submitted=now, status="Waiting", user=request.user)
        task.arguments = "--start=%s --end=%s --weekstart=%s" % (
            request.POST["start"],
            request.POST["end"],
            request.POST["weekstart"],
        )
        task.save(using=request.database)
    # H
    elif action == "openbravo_import" and "freppledb.openbravo" in settings.INSTALLED_APPS:
        task = Task(name="Openbravo import", submitted=now, status="Waiting", user=request.user)
        task.arguments = "--delta=%s" % request.POST["delta"]
        task.save(using=request.database)
    # I
    elif action == "openbravo_export" and "freppledb.openbravo" in settings.INSTALLED_APPS:
        task = Task(name="Openbravo export", submitted=now, status="Waiting", user=request.user)
        if "filter_export" in request.POST:
            task.arguments = "--filter"
        task.save(using=request.database)
    elif action == "odoo_import" and "freppledb.odoo" in settings.INSTALLED_APPS:
        task = Task(name="Odoo import", submitted=now, status="Waiting", user=request.user)
        # task.arguments = "--filter"
        task.save(using=request.database)
    # J
    elif action == "odoo_export" and "freppledb.odoo" in settings.INSTALLED_APPS:
        task = Task(name="Odoo export", submitted=now, status="Waiting", user=request.user)
        if "filter_export" in request.POST:
            task.arguments = "--filter"
        task.save(using=request.database)
    # K
    elif action == "frepple_loadfromfolder":
        task = Task(name="load from folder", submitted=now, status="Waiting", user=request.user)
        task.save(using=request.database)
    else:
        # Task not recognized
        raise Exception("Invalid launching task")

    # Launch a worker process, making sure it inherits the right
    # environment variables from this parent
    os.environ["FREPPLE_CONFIGDIR"] = settings.FREPPLE_CONFIGDIR
    if task and not checkActive(worker_database):
        if os.path.isfile(os.path.join(settings.FREPPLE_APP, "frepplectl.py")):
            if "python" in sys.executable:
                # Development layout
                Popen(
                    [
                        sys.executable,  # Python executable
                        os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
                        "frepple_runworker",
                        "--database=%s" % worker_database,
                    ]
                )
            else:
                # Deployment on Apache web server
                Popen(
                    [
                        "python",
                        os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
                        "frepple_runworker",
                        "--database=%s" % worker_database,
                    ],
                    creationflags=0x08000000,
                )
        elif sys.executable.find("freppleserver.exe") >= 0:
            # Py2exe executable
            Popen(
                [
                    sys.executable.replace("freppleserver.exe", "frepplectl.exe"),  # frepplectl executable
                    "frepple_runworker",
                    "--database=%s" % worker_database,
                ],
                creationflags=0x08000000,
            )  # Do not create a console window
        else:
            # Linux standard installation
            Popen(["frepplectl", "frepple_runworker", "--database=%s" % worker_database])
    return task
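
The 0x08000000 passed as creationflags above is Windows' CREATE_NO_WINDOW flag, which keeps the spawned worker from opening a console window. A small sketch that names the constant and only applies it on Windows; the command line mirrors the Linux branch above:

import os
import subprocess

# 0x08000000 == CREATE_NO_WINDOW; the flag is only accepted on Windows,
# so fall back to a plain Popen elsewhere.
CREATE_NO_WINDOW = getattr(subprocess, "CREATE_NO_WINDOW", 0x08000000)

def launch_worker(database):
    cmd = ["frepplectl", "frepple_runworker", "--database=%s" % database]
    if os.name == "nt":
        return subprocess.Popen(cmd, creationflags=CREATE_NO_WINDOW)
    return subprocess.Popen(cmd)

# launch_worker("default")  # needs frepplectl on the PATH
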
Example #35
0
    def handle(self, **options):
        # Make sure the debug flag is not set!
        # When it is set, the django database wrapper collects a list of all sql
        # statements executed and their timings. This consumes plenty of memory
        # and cpu time.
        tmp_debug = settings.DEBUG
        settings.DEBUG = False

        # Pick up options
        force = options['force']
        test = 'FREPPLE_TEST' in os.environ
        if options['user']:
            try:
                user = User.objects.all().get(username=options['user'])
            except:
                raise CommandError("User '%s' not found" % options['user'])
        else:
            user = None

        # Synchronize the scenario table with the settings
        Scenario.syncWithSettings()

        # Initialize the task
        source = options['source']
        try:
            sourcescenario = Scenario.objects.using(DEFAULT_DB_ALIAS).get(
                pk=source)
        except:
            raise CommandError("No source database defined with name '%s'" %
                               source)
        now = datetime.now()
        task = None
        if 'task' in options and options['task']:
            try:
                task = Task.objects.all().using(source).get(pk=options['task'])
            except:
                raise CommandError("Task identifier not found")
            if task.started or task.finished or task.status != "Waiting" or task.name not in (
                    'frepple_copy', 'scenario_copy'):
                raise CommandError("Invalid task identifier")
            task.status = '0%'
            task.started = now
        else:
            task = Task(name='scenario_copy',
                        submitted=now,
                        started=now,
                        status='0%',
                        user=user)
        task.save(using=source)

        # Validate the arguments
        destination = options['destination']
        destinationscenario = None
        try:
            task.arguments = "%s %s" % (source, destination)
            if options['description']:
                task.arguments += ' --description="%s"' % options[
                    'description'].replace('"', '\\"')
            if force:
                task.arguments += " --force"
            task.save(using=source)
            try:
                destinationscenario = Scenario.objects.using(
                    DEFAULT_DB_ALIAS).get(pk=destination)
            except:
                raise CommandError(
                    "No destination database defined with name '%s'" %
                    destination)
            if source == destination:
                raise CommandError("Can't copy a schema on itself")
            if settings.DATABASES[source]['ENGINE'] != settings.DATABASES[
                    destination]['ENGINE']:
                raise CommandError(
                    "Source and destination scenarios have a different engine")
            if sourcescenario.status != 'In use':
                raise CommandError("Source scenario is not in use")
            if destinationscenario.status != 'Free' and not force:
                raise CommandError("Destination scenario is not free")

            # Logging message - always logging in the default database
            destinationscenario.status = 'Busy'
            destinationscenario.save(using=DEFAULT_DB_ALIAS)

            # Copying the data
            # Commenting the next line is a little more secure, but requires you to create a .pgpass file.
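            # A .pgpass entry has the form hostname:port:database:username:password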
            if settings.DATABASES[source]['PASSWORD']:
                os.environ['PGPASSWORD'] = settings.DATABASES[source][
                    'PASSWORD']
            if os.name == 'nt':
                # On windows restoring with pg_restore over a pipe is broken :-(
                cmd = "pg_dump -c -Fp %s%s%s%s | psql %s%s%s%s"
            else:
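                # On other platforms, dump in custom format and pipe into pg_restore, which first drops the existing objects in the public schema (-c --if-exists) and then recreates them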
                cmd = "pg_dump -Fc %s%s%s%s | pg_restore -n public -Fc -c --if-exists %s%s%s -d %s"
            commandline = cmd % (
                settings.DATABASES[source]['USER'] and
                ("-U %s " % settings.DATABASES[source]['USER']) or '',
                settings.DATABASES[source]['HOST'] and
                ("-h %s " % settings.DATABASES[source]['HOST']) or '',
                settings.DATABASES[source]['PORT'] and
                ("-p %s " % settings.DATABASES[source]['PORT']) or '',
                test and settings.DATABASES[source]['TEST']['NAME']
                or settings.DATABASES[source]['NAME'],
                settings.DATABASES[destination]['USER'] and
                ("-U %s " % settings.DATABASES[destination]['USER']) or '',
                settings.DATABASES[destination]['HOST'] and
                ("-h %s " % settings.DATABASES[destination]['HOST']) or '',
                settings.DATABASES[destination]['PORT'] and
                ("-p %s " % settings.DATABASES[destination]['PORT']) or '',
                test and settings.DATABASES[destination]['TEST']['NAME']
                or settings.DATABASES[destination]['NAME'],
            )

            ret = subprocess.call(commandline,
                                  shell=True,
                                  stdout=subprocess.DEVNULL,
                                  stderr=subprocess.STDOUT)

            if ret:
                raise Exception(
                    'Exit code of the database copy command is %d' % ret)

            # Update the scenario table
            destinationscenario.status = 'In use'
            destinationscenario.lastrefresh = datetime.today()
            if 'description' in options:
                destinationscenario.description = options['description']
            destinationscenario.save(using=DEFAULT_DB_ALIAS)

            # Give access to the destination scenario to:
            #  a) the user doing the copy
            #  b) all superusers from the source schema
            User.objects.using(destination).filter(is_superuser=True).update(
                is_active=True)
            User.objects.using(destination).filter(is_superuser=False).update(
                is_active=False)
            if user:
                User.objects.using(destination).filter(
                    username=user.username).update(is_active=True)

            # Logging message
            task.status = 'Done'
            task.finished = datetime.now()

            # Update the task in the destination database
            task.message = "Scenario copied from %s" % source
            task.save(using=destination)
            task.message = "Scenario copied to %s" % destination

            # Delete any waiting tasks in the new copy.
            # This is needed for situations where the same source is copied to
            # multiple destinations at the same moment.
            Task.objects.all().using(destination).filter(
                id__gt=task.id).delete()

        except Exception as e:
            if task:
                task.status = 'Failed'
                task.message = '%s' % e
                task.finished = datetime.now()
            if destinationscenario and destinationscenario.status == 'Busy':
                destinationscenario.status = 'Free'
                destinationscenario.save(using=DEFAULT_DB_ALIAS)
            raise e

        finally:
            if task:
                task.save(using=source)
            settings.DEBUG = tmp_debug
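
A minimal, hypothetical sketch of how the scenario copy above could be started programmatically with Django's call_command. The command name 'scenario_copy' is inferred from the task name the snippet registers, and the 'force' and 'user' options from the options it reads, so they may differ in the installed frePPLe release:

from django.core.management import call_command

# Copy the 'default' scenario into 'scenario1', overwriting the destination even if it is
# not free, and grant access to the (hypothetical) user launching the copy.
call_command('scenario_copy', 'default', 'scenario1', force=True, user='admin')
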
Example #36
0
  def handle(self, **options):
    # Pick up the options
    now = datetime.now()

    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    timestamp = now.strftime("%Y%m%d%H%M%S")
    if database == DEFAULT_DB_ALIAS:
      logfile = 'frepple-%s.log' % timestamp
    else:
      logfile = 'frepple_%s-%s.log' % (database, timestamp)

    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name not in ('runplan', 'frepple_run'):
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
        task.logfile = logfile
      else:
        task = Task(name='runplan', submitted=now, started=now, status='0%', user=user, logfile=logfile)

      # Validate options
      if 'constraint' in options:
        constraint = int(options['constraint'])
        if constraint < 0 or constraint > 15:
          raise ValueError("Invalid constraint: %s" % options['constraint'])
      else:
        constraint = 15
      if 'plantype' in options:
        plantype = int(options['plantype'])
      else:
        plantype = 1

      # Reset environment variables
      # TODO avoid having to delete the environment variables. Use options directly?
      PlanTaskRegistry.autodiscover()
      for i in PlanTaskRegistry.reg:
        if 'env' in options:
          # Options specified
          if i.label and i.label[0] in os.environ:
            del os.environ[i.label[0]]
        elif i.label:
          # No options specified - default to activate them all
          os.environ[i.label[0]] = '1'

      # Set environment variables
      if options['env']:
        task.arguments = "--constraint=%d --plantype=%d --env=%s" % (constraint, plantype, options['env'])
        for i in options['env'].split(','):
          j = i.split('=')
          if len(j) == 1:
            os.environ[j[0]] = '1'
          else:
            os.environ[j[0]] = j[1]
      else:
        task.arguments = "--constraint=%d --plantype=%d" % (constraint, plantype)
      if options['background']:
        task.arguments += " --background"

      # Log task
      # Different from the other tasks the frepple engine will write the processid
      task.save(using=database)

      # Locate commands.py
      import freppledb.common.commands
      cmd = freppledb.common.commands.__file__

      def setlimits():
        import resource
        if settings.MAXMEMORYSIZE:
          resource.setrlimit(
            resource.RLIMIT_AS,
            (settings.MAXMEMORYSIZE * 1024 * 1024, (settings.MAXMEMORYSIZE + 10) * 1024 * 1024)
            )
        if settings.MAXCPUTIME:
          resource.setrlimit(
            resource.RLIMIT_CPU,
            (settings.MAXCPUTIME, settings.MAXCPUTIME + 5)
            )
        # Limiting the file size is a bit tricky as this limit not only applies to the log
        # file, but also to temp files during the export
        # if settings.MAXTOTALLOGFILESIZE:
        #  resource.setrlimit(
        #    resource.RLIMIT_FSIZE,
        #   (settings.MAXTOTALLOGFILESIZE * 1024 * 1024, (settings.MAXTOTALLOGFILESIZE + 1) * 1024 * 1024)
        #   )

      # Prepare environment
      os.environ['FREPPLE_PLANTYPE'] = str(plantype)
      os.environ['FREPPLE_CONSTRAINT'] = str(constraint)
      os.environ['FREPPLE_TASKID'] = str(task.id)
      os.environ['FREPPLE_DATABASE'] = database
      os.environ['FREPPLE_LOGFILE'] = logfile
      os.environ['FREPPLE_PROCESSNAME'] = settings.DATABASES[database]['NAME'].replace('demo', '')
      os.environ['PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ['PATH'] + os.pathsep + settings.FREPPLE_APP
      if os.path.isfile(os.path.join(settings.FREPPLE_HOME, 'libfrepple.so')):
        os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
      if 'DJANGO_SETTINGS_MODULE' not in os.environ:
        os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
      os.environ['PYTHONPATH'] = os.path.normpath(settings.FREPPLE_APP)
      libdir = os.path.join(os.path.normpath(settings.FREPPLE_HOME), 'lib')
      if os.path.isdir(libdir):
        # Folders used by the Windows version
        os.environ['PYTHONPATH'] += os.pathsep + libdir
        if os.path.isfile(os.path.join(libdir, 'library.zip')):
          os.environ['PYTHONPATH'] += os.pathsep + os.path.join(libdir, 'library.zip')

      if options['background']:
        # Execute as background process on Windows
        if os.name == 'nt':
          subprocess.Popen(['frepple', cmd], creationflags=0x08000000)
        else:
          # Execute as background process on Linux
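          # preexec_fn runs setlimits() in the child before the engine starts, so the memory and CPU limits apply only to the planning process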
          subprocess.Popen(['frepple', cmd], preexec_fn=setlimits)
      else:
        if os.name == 'nt':
          # Execute in foreground on Windows
          ret = subprocess.call(['frepple', cmd])
        else:
          # Execute in foreground on Linux
          ret = subprocess.call(['frepple', cmd], preexec_fn=setlimits)
        if ret != 0 and ret != 2:
          # Return code 0 is a successful run
          # Return code is 2 is a run cancelled by a user. That's shown in the status field.
          raise Exception('Failed with exit code %d' % ret)

      # Reread the task from the database and update it
      task = Task.objects.all().using(database).get(pk=task.id)
      task.processid = None
      task.status = 'Done'
      task.finished = datetime.now()
      task.save(using=database)

    except Exception as e:
      if task:
        task = Task.objects.all().using(database).get(pk=task.id)
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
        task.processid = None
        task.save(using=database)
      raise e
Example #37
0
    def handle(self, **options):
        # Make sure the debug flag is not set!
        # When it is set, the django database wrapper collects a list of all sql
        # statements executed and their timings. This consumes plenty of memory
        # and cpu time.
        tmp_debug = settings.DEBUG
        settings.DEBUG = False

        # Pick up the options
        if 'verbosity' in options:
            verbosity = int(options['verbosity'])
        else:
            verbosity = 1
        if 'cluster' in options:
            cluster = int(options['cluster'])
        else:
            cluster = 100
        if 'demand' in options:
            demand = int(options['demand'])
        else:
            demand = 30
        if 'forecast_per_item' in options:
            forecast_per_item = int(options['forecast_per_item'])
        else:
            forecast_per_item = 50
        if 'level' in options:
            level = int(options['level'])
        else:
            level = 5
        if 'resource' in options:
            resource = int(options['resource'])
        else:
            resource = 60
        if 'resource_size' in options:
            resource_size = int(options['resource_size'])
        else:
            resource_size = 5
        if 'components' in options:
            components = int(options['components'])
        else:
            components = 200
        if 'components_per' in options:
            components_per = int(options['components_per'])
        else:
            components_per = 5
        if components == 0:
            components_per = 0
        if 'deliver_lt' in options:
            deliver_lt = int(options['deliver_lt'])
        else:
            deliver_lt = 30
        if 'procure_lt' in options:
            procure_lt = int(options['procure_lt'])
        else:
            procure_lt = 40
        if 'currentdate' in options:
            currentdate = options['currentdate'] or datetime.strftime(
                date.today(), '%Y-%m-%d')
        else:
            currentdate = datetime.strftime(date.today(), '%Y-%m-%d')
        if 'database' in options:
            database = options['database'] or DEFAULT_DB_ALIAS
        else:
            database = DEFAULT_DB_ALIAS
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               database)
        if 'user' in options and options['user']:
            try:
                user = User.objects.all().using(database).get(
                    username=options['user'])
            except:
                raise CommandError("User '%s' not found" % options['user'])
        else:
            user = None

        random.seed(100)  # Initialize random seed to get reproducible results

        now = datetime.now()
        task = None
        try:
            # Initialize the task
            if 'task' in options and options['task']:
                try:
                    task = Task.objects.all().using(database).get(
                        pk=options['task'])
                except:
                    raise CommandError("Task identifier not found")
                if task.started or task.finished or task.status != "Waiting" or task.name != 'generate model':
                    raise CommandError("Invalid task identifier")
                task.status = '0%'
                task.started = now
            else:
                task = Task(name='generate model',
                            submitted=now,
                            started=now,
                            status='0%',
                            user=user)
            task.arguments = "--cluster=%s --demand=%s --forecast_per_item=%s --level=%s --resource=%s " \
              "--resource_size=%s --components=%s --components_per=%s --deliver_lt=%s --procure_lt=%s" % (
                cluster, demand, forecast_per_item, level, resource,
                resource_size, components, components_per, deliver_lt, procure_lt
              )
            task.save(using=database)

            # Pick up the startdate
            try:
                startdate = datetime.strptime(currentdate, '%Y-%m-%d')
            except:
                raise CommandError(
                    "current date is not matching format YYYY-MM-DD")

            # Check whether the database is empty
            if (Buffer.objects.using(database).count() > 0
                    or Item.objects.using(database).count() > 0):
                raise CommandError(
                    "Database must be empty before creating a model")

            # Plan start date
            if verbosity > 0:
                print("Updating current date...")
            Parameter.objects.using(database).create(name="currentdate",
                                                     value=datetime.strftime(
                                                         startdate,
                                                         "%Y-%m-%d %H:%M:%S"))
            Parameter.objects.using(database).create(name="plan.loglevel",
                                                     value="3")

            # Planning horizon
            # minimum 10 daily buckets, weekly buckets till 40 days after current
            if verbosity > 0:
                print("Updating buckets...")
            management.call_command('frepple_createbuckets',
                                    user=user,
                                    database=database)
            task.status = '2%'
            task.save(using=database)

            # Weeks calendar
            if verbosity > 0:
                print("Creating weeks calendar...")
            with transaction.atomic(using=database):
                weeks = Calendar.objects.using(database).create(name="Weeks",
                                                                defaultvalue=0)
                for i in BucketDetail.objects.using(database).filter(
                        bucket="week").all():
                    CalendarBucket(startdate=i.startdate,
                                   enddate=i.enddate,
                                   value=1,
                                   calendar=weeks).save(using=database)
                task.status = '4%'
                task.save(using=database)

            # Working days calendar
            if verbosity > 0:
                print("Creating working days...")
            with transaction.atomic(using=database):
                workingdays = Calendar.objects.using(database).create(
                    name="Working Days", defaultvalue=0)
                minmax = BucketDetail.objects.using(database).filter(
                    bucket="week").aggregate(Min('startdate'),
                                             Max('startdate'))
                CalendarBucket(startdate=minmax['startdate__min'],
                               enddate=minmax['startdate__max'],
                               value=1,
                               calendar=workingdays,
                               priority=1,
                               saturday=False,
                               sunday=False).save(using=database)
                task.status = '6%'
                task.save(using=database)

            # Parent location
            loc = Location.objects.using(database).create(
                name="Factory", available=workingdays)

            # Create a random list of categories to choose from
            categories = [
                'cat A', 'cat B', 'cat C', 'cat D', 'cat E', 'cat F', 'cat G'
            ]

            # Create customers
            if verbosity > 0:
                print("Creating customers...")
            with transaction.atomic(using=database):
                cust = []
                for i in range(100):
                    c = Customer.objects.using(database).create(
                        name='Cust %03d' % i)
                    cust.append(c)
                task.status = '8%'
                task.save(using=database)

            # Create resources and their calendars
            if verbosity > 0:
                print("Creating resources and calendars...")
            with transaction.atomic(using=database):
                res = []
                for i in range(resource):
                    cal = Calendar.objects.using(database).create(
                        name='capacity for res %03d' % i,
                        category='capacity',
                        defaultvalue=0)
                    CalendarBucket.objects.using(database).create(
                        startdate=startdate, value=resource_size, calendar=cal)
                    r = Resource.objects.using(database).create(
                        name='Res %03d' % i,
                        maximum_calendar=cal,
                        location=loc)
                    res.append(r)
                task.status = '10%'
                task.save(using=database)
                random.shuffle(res)

            # Create the components
            if verbosity > 0:
                print("Creating raw materials...")
            with transaction.atomic(using=database):
                comps = []
                compsupplier = Supplier.objects.using(database).create(
                    name='component supplier')
                for i in range(components):
                    it = Item.objects.using(database).create(
                        name='Component %04d' % i,
                        category='Procured',
                        price=str(round(random.uniform(0, 100))))
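                    # Procurement lead time in days, drawn from a normal distribution centered on procure_lt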
                    ld = abs(
                        round(random.normalvariate(procure_lt,
                                                   procure_lt / 3)))
                    Buffer.objects.using(database).create(
                        name='%s @ %s' % (it.name, loc.name),
                        location=loc,
                        category='Procured',
                        item=it,
                        minimum=20,
                        onhand=str(
                            round(forecast_per_item * random.uniform(1, 3) *
                                  ld / 30)),
                    )
                    ItemSupplier.objects.using(database).create(
                        item=it,
                        location=loc,
                        supplier=compsupplier,
                        leadtime=timedelta(days=ld),
                        sizeminimum=80,
                        sizemultiple=10,
                        priority=1,
                        cost=it.price)
                    comps.append(it)
                task.status = '12%'
                task.save(using=database)

            # Loop over all clusters
            durations = [timedelta(days=i) for i in range(1, 6)]
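            # The remaining 88% of the task progress (after the 12% used by the setup above) is spread evenly over the clusters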
            progress = 88.0 / cluster
            for i in range(cluster):
                with transaction.atomic(using=database):
                    if verbosity > 0:
                        print("Creating supply chain for end item %d..." % i)

                    # Item
                    it = Item.objects.using(database).create(
                        name='Itm %05d' % i,
                        category=random.choice(categories),
                        price=str(round(random.uniform(100, 200))))

                    # Level 0 buffer
                    buf = Buffer.objects.using(database).create(
                        name='%s @ %s' % (it.name, loc.name),
                        item=it,
                        location=loc,
                        category='00')

                    # Demand
                    for j in range(demand):
                        Demand.objects.using(database).create(
                            name='Dmd %05d %05d' % (i, j),
                            item=it,
                            location=loc,
                            quantity=int(random.uniform(1, 6)),
                            # Exponential distribution of due dates, with an average of deliver_lt days.
                            due=startdate + timedelta(days=round(
                                random.expovariate(float(1) / deliver_lt /
                                                   24)) / 24),
                            # Orders have higher priority than forecast
                            priority=random.choice([1, 2]),
                            customer=random.choice(cust),
                            category=random.choice(categories))

                    # Create upstream operations and buffers
                    ops = []
                    previtem = it
                    for k in range(level):
                        if k == 1 and res:
                            # Create a resource load for operations on level 1
                            oper = Operation.objects.using(database).create(
                                name='Oper %05d L%02d' % (i, k),
                                type='time_per',
                                location=loc,
                                duration_per=timedelta(days=1),
                                sizemultiple=1,
                                item=previtem)
                            if resource < cluster and i < resource:
                                # When there are more clusters than resources, we try to ensure
                                # that each resource is loaded by at least 1 operation.
                                OperationResource.objects.using(
                                    database).create(resource=res[i],
                                                     operation=oper)
                            else:
                                OperationResource.objects.using(
                                    database).create(
                                        resource=random.choice(res),
                                        operation=oper)
                        else:
                            oper = Operation.objects.using(database).create(
                                name='Oper %05d L%02d' % (i, k),
                                duration=random.choice(durations),
                                sizemultiple=1,
                                location=loc,
                                item=previtem)
                        ops.append(oper)
                        # Some inventory in random buffers
                        if random.uniform(0, 1) > 0.8:
                            buf.onhand = int(random.uniform(5, 20))
                        buf.save(using=database)
                        OperationMaterial.objects.using(database).create(
                            operation=oper,
                            item=previtem,
                            quantity=1,
                            type="end")
                        if k != level - 1:
                            # Consume from the next level in the bill of material
                            it_tmp = Item.objects.using(database).create(
                                name='Itm %05d L%02d' % (i, k + 1),
                                category=random.choice(categories),
                                price=str(round(random.uniform(100, 200))))
                            buf = Buffer.objects.using(database).create(
                                name='%s @ %s' % (it_tmp.name, loc.name),
                                item=it_tmp,
                                location=loc,
                                category='%02d' % (k + 1))
                            OperationMaterial.objects.using(database).create(
                                operation=oper, item=it_tmp, quantity=-1)
                        previtem = it_tmp

                    # Consume raw materials / components
                    c = []
                    for j in range(components_per):
                        o = random.choice(ops)
                        b = random.choice(comps)
                        while (o, b) in c:
                            # A flow with the same operation and buffer already exists
                            o = random.choice(ops)
                            b = random.choice(comps)
                        c.append((o, b))
                        OperationMaterial.objects.using(database).create(
                            operation=o,
                            item=b,
                            quantity=random.choice([-1, -1, -1, -2, -3]))

                    # Commit the current cluster
                    task.status = '%d%%' % (12 + progress * (i + 1))
                    task.save(using=database)

            # Task update
            task.status = 'Done'
            task.finished = datetime.now()

        except Exception as e:
            if task:
                task.status = 'Failed'
                task.message = '%s' % e
                task.finished = datetime.now()
                task.save(using=database)
            raise e

        finally:
            if task:
                task.save(using=database)
            settings.DEBUG = tmp_debug
Example #38
0
  def handle(self, *args, **options):
    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    # Pick up options
    if 'force' in options:
      force = options['force']
    else:
      force = False
    test = 'FREPPLE_TEST' in os.environ
    if 'user' in options and options['user']:
      try:
        user = User.objects.all().get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    # Initialize the task
    now = datetime.now()
    task = None
    if 'task' in options and options['task']:
      try:
        task = Task.objects.all().get(pk=options['task'])
      except:
        raise CommandError("Task identifier not found")
      if task.started or task.finished or task.status != "Waiting" or task.name != 'copy scenario':
        raise CommandError("Invalid task identifier")
      task.status = '0%'
      task.started = now
    else:
      task = Task(name='copy scenario', submitted=now, started=now, status='0%', user=user)
    task.save()

    # Synchronize the scenario table with the settings
    Scenario.syncWithSettings()

    # Validate the arguments
    destinationscenario = None
    try:
      if len(args) != 2:
        raise CommandError("Command takes exactly 2 arguments.")
      task.arguments = "%s %s" % (args[0], args[1])
      task.save()
      source = args[0]
      try:
        sourcescenario = Scenario.objects.get(pk=source)
      except:
        raise CommandError("No source database defined with name '%s'" % source)
      destination = args[1]
      try:
        destinationscenario = Scenario.objects.get(pk=destination)
      except:
        raise CommandError("No destination database defined with name '%s'" % destination)
      if source == destination:
        raise CommandError("Can't copy a schema on itself")
      if settings.DATABASES[source]['ENGINE'] != settings.DATABASES[destination]['ENGINE']:
        raise CommandError("Source and destination scenarios have a different engine")
      if sourcescenario.status != 'In use':
        raise CommandError("Source scenario is not in use")
      if destinationscenario.status != 'Free' and not force:
        raise CommandError("Destination scenario is not free")

      # Logging message - always logging in the default database
      destinationscenario.status = 'Busy'
      destinationscenario.save()

      # Copying the data
      if settings.DATABASES[source]['ENGINE'] == 'django.db.backends.postgresql_psycopg2':
        # Commenting the next line is a little more secure, but requires you to create a .pgpass file.
        os.environ['PGPASSWORD'] = settings.DATABASES[source]['PASSWORD']
        ret = os.system("pg_dump -c -U%s -Fp %s%s%s | psql -U%s %s%s%s" % (
          settings.DATABASES[source]['USER'],
          settings.DATABASES[source]['HOST'] and ("-h %s " % settings.DATABASES[source]['HOST']) or '',
          settings.DATABASES[source]['PORT'] and ("-p %s " % settings.DATABASES[source]['PORT']) or '',
          test and settings.DATABASES[source]['TEST']['NAME'] or settings.DATABASES[source]['NAME'],
          settings.DATABASES[destination]['USER'],
          settings.DATABASES[destination]['HOST'] and ("-h %s " % settings.DATABASES[destination]['HOST']) or '',
          settings.DATABASES[destination]['PORT'] and ("-p %s " % settings.DATABASES[destination]['PORT']) or '',
          test and settings.DATABASES[destination]['TEST']['NAME'] or settings.DATABASES[destination]['NAME'],
          ))
        if ret:
          raise Exception('Exit code of the database copy command is %d' % ret)
      elif settings.DATABASES[source]['ENGINE'] == 'django.db.backends.sqlite3':
        # A plain copy of the database file
        if test:
          shutil.copy2(settings.DATABASES[source]['TEST']['NAME'], settings.DATABASES[destination]['TEST']['NAME'])
        else:
          shutil.copy2(settings.DATABASES[source]['NAME'], settings.DATABASES[destination]['NAME'])
      else:
        raise Exception('Copy command not supported for database engine %s' % settings.DATABASES[source]['ENGINE'])

      # Update the scenario table
      destinationscenario.status = 'In use'
      destinationscenario.lastrefresh = datetime.today()
      if 'description' in options:
        destinationscenario.description = options['description']
      else:
        destinationscenario.description = "Copied from scenario '%s'" % source
      destinationscenario.save()

      # Logging message
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      if destinationscenario and destinationscenario.status == 'Busy':
        destinationscenario.status = 'Free'
        destinationscenario.save()
      raise e

    finally:
      if task:
        task.save()
      settings.DEBUG = tmp_debug
Example #39
0
    def handle(self, **options):
        # Pick up the options
        database = options["database"]
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               database)
        if options["user"]:
            try:
                user = User.objects.all().using(database).get(
                    username=options["user"])
            except Exception:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            user = None

        now = datetime.now()
        task = None
        try:
            # Initialize the task
            if options["task"]:
                try:
                    task = Task.objects.all().using(database).get(
                        pk=options["task"])
                except Exception:
                    raise CommandError("Task identifier not found")
                if (task.started or task.finished or task.status != "Waiting"
                        or task.name not in ("frepple_loadxml", "loadxml")):
                    raise CommandError("Invalid task identifier")
                task.status = "0%"
                task.started = now
            else:
                task = Task(name="loadxml",
                            submitted=now,
                            started=now,
                            status="0%",
                            user=user)
            task.arguments = " ".join(options["file"])
            task.processid = os.getpid()
            task.save(using=database)

            # Execute
            # TODO: if frePPLe is available as a module, we don't really need to spawn another process.
            os.environ["FREPPLE_HOME"] = settings.FREPPLE_HOME.replace(
                "\\", "\\\\")
            os.environ["FREPPLE_APP"] = settings.FREPPLE_APP
            os.environ["FREPPLE_DATABASE"] = database
            os.environ["PATH"] = (settings.FREPPLE_HOME + os.pathsep +
                                  os.environ["PATH"] + os.pathsep +
                                  settings.FREPPLE_APP)
            os.environ["LD_LIBRARY_PATH"] = settings.FREPPLE_HOME
            if "DJANGO_SETTINGS_MODULE" not in os.environ:
                os.environ["DJANGO_SETTINGS_MODULE"] = "freppledb.settings"
            if os.path.exists(
                    os.path.join(os.environ["FREPPLE_HOME"], "python36.zip")):
                # For the py2exe executable
                os.environ["PYTHONPATH"] = (os.path.join(
                    os.environ["FREPPLE_HOME"],
                    "python%d%d.zip" %
                    (sys.version_info[0], sys.version_info[1]),
                ) + os.pathsep + os.path.normpath(os.environ["FREPPLE_APP"]))
            else:
                # Other executables
                os.environ["PYTHONPATH"] = os.path.normpath(
                    os.environ["FREPPLE_APP"])
            cmdline = ['"%s"' % i for i in options["file"]]
            cmdline.insert(0, "frepple")
            cmdline.append('"%s"' % os.path.join(
                settings.FREPPLE_APP, "freppledb", "execute", "loadxml.py"))
            proc = subprocess.run(" ".join(cmdline))
            if proc.returncode:
                raise Exception("Exit code of the batch run is %d" %
                                proc.returncode)

            # Task update
            task.status = "Done"
            task.finished = datetime.now()

        except Exception as e:
            if task:
                task.status = "Failed"
                task.message = "%s" % e
                task.finished = datetime.now()
            raise e

        finally:
            if task:
                task.processid = None
                task.save(using=database)
Example #40
0
  def handle(self, *args, **options):

    # Pick up the options
    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    transaction.enter_transaction_management(using=database)
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'restore database':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='restore database', submitted=now, started=now, status='0%', user=user)
      task.arguments = args and args[0] or None
      task.save(using=database)
      transaction.commit(using=database)

      # Validate options
      if not args:
        raise CommandError("No dump file specified")
      if not os.path.isfile(os.path.join(settings.FREPPLE_LOGDIR, args[0])):
        raise CommandError("Dump file not found")

      # Run the restore command
      if settings.DATABASES[database]['ENGINE'] == 'django.db.backends.sqlite3':
        # SQLITE
        shutil.copy2(os.path.abspath(os.path.join(settings.FREPPLE_LOGDIR, args[0])), settings.DATABASES[database]['NAME'])
      elif settings.DATABASES[database]['ENGINE'] == 'django.db.backends.mysql':
        # MYSQL
        cmd = [
          'mysql',
          '--password=%s' % settings.DATABASES[database]['PASSWORD'],
          '--user=%s' % settings.DATABASES[database]['USER']
          ]
        if settings.DATABASES[database]['HOST']:
          cmd.append("--host=%s " % settings.DATABASES[database]['HOST'])
        if settings.DATABASES[database]['PORT']:
          cmd.append("--port=%s " % settings.DATABASES[database]['PORT'])
        cmd.append(settings.DATABASES[database]['NAME'])
        cmd.append('<%s' % os.path.abspath(os.path.join(settings.FREPPLE_LOGDIR, args[0])))
        ret = subprocess.call(' '.join(cmd), shell=True)  # Shell needs to be True (and a single command string) in order to interpret the < character
        if ret:
          raise Exception("Run of mysql failed")
      elif settings.DATABASES[database]['ENGINE'] == 'django.db.backends.oracle':
        # ORACLE
        if settings.DATABASES[database]['HOST'] and settings.DATABASES[database]['PORT']:
          # The setting 'NAME' contains the SID name
          dsn = "%s/%s@//%s:%s/%s" % (
            settings.DATABASES[database]['USER'],
            settings.DATABASES[database]['PASSWORD'],
            settings.DATABASES[database]['HOST'],
            settings.DATABASES[database]['PORT'],
            settings.DATABASES[database]['NAME']
            )
        else:
          # The setting 'NAME' contains the TNS name
          dsn = "%s/%s@%s" % (
            settings.DATABASES[database]['USER'],
            settings.DATABASES[database]['PASSWORD'],
            settings.DATABASES[database]['NAME']
            )
        cmd = [
          "impdp",
          dsn,
          "table_exists_action=replace",
          "nologfile=Y",
          "directory=frepple_logdir",
          "dumpfile=%s" % args[0]
          ]
        ret = subprocess.call(cmd)
        if ret:
          raise Exception("Run of impdp failed")
      elif settings.DATABASES[database]['ENGINE'] == 'django.db.backends.postgresql_psycopg2':
        # POSTGRESQL
        # Commenting the next line is a little more secure, but requires you to create a .pgpass file.
        os.environ['PGPASSWORD'] = settings.DATABASES[database]['PASSWORD']
        cmd = [ "psql", '--username=%s' % settings.DATABASES[database]['USER'] ]
        if settings.DATABASES[database]['HOST']:
          cmd.append("--host=%s" % settings.DATABASES[database]['HOST'])
        if settings.DATABASES[database]['PORT']:
          cmd.append("--port=%s " % settings.DATABASES[database]['PORT'])
        cmd.append(settings.DATABASES[database]['NAME'])
        cmd.append('<%s' % os.path.abspath(os.path.join(settings.FREPPLE_LOGDIR, args[0])))
        ret = subprocess.call(' '.join(cmd), shell=True)  # Shell needs to be True (and a single command string) in order to interpret the < character
        if ret:
          raise Exception("Run of run psql failed")
      else:
        raise Exception('Database backup command not supported for engine %s' % settings.DATABASES[database]['ENGINE'])

      # Task update
      # We need to recreate a new task record, since the previous one is lost during the restoration.
      task = Task(
        name='restore database', submitted=task.submitted, started=task.started,
        arguments=task.arguments, status='Done', finished=datetime.now(),
        user=task.user
        )

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      # Commit it all, even in case of exceptions
      if task:
        task.save(using=database)
      try:
        transaction.commit(using=database)
      except:
        pass
      transaction.leave_transaction_management(using=database)
Example #41
0
    def handle(self, **options):
        # Pick up the options
        database = options['database']
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               database)
        if options['user']:
            try:
                user = User.objects.all().using(database).get(
                    username=options['user'])
            except:
                raise CommandError("User '%s' not found" % options['user'])
        else:
            user = None

        now = datetime.now()
        task = None
        try:
            # Initialize the task
            if options['task']:
                try:
                    task = Task.objects.all().using(database).get(
                        pk=options['task'])
                except:
                    raise CommandError("Task identifier not found")
                if task.started or task.finished or task.status != "Waiting" or task.name not in (
                        'frepple_loadxml', 'loadxml'):
                    raise CommandError("Invalid task identifier")
                task.status = '0%'
                task.started = now
            else:
                task = Task(name='loadxml',
                            submitted=now,
                            started=now,
                            status='0%',
                            user=user)
            task.arguments = ' '.join(options['file'])
            task.processid = os.getpid()
            task.save(using=database)

            # Execute
            # TODO: if frePPLe is available as a module, we don't really need to spawn another process.
            os.environ['FREPPLE_HOME'] = settings.FREPPLE_HOME.replace(
                '\\', '\\\\')
            os.environ['FREPPLE_APP'] = settings.FREPPLE_APP
            os.environ['FREPPLE_DATABASE'] = database
            os.environ['PATH'] = (settings.FREPPLE_HOME + os.pathsep +
                                  os.environ['PATH'] + os.pathsep +
                                  settings.FREPPLE_APP)
            os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
            if 'DJANGO_SETTINGS_MODULE' not in os.environ:
                os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
            if os.path.exists(
                    os.path.join(os.environ['FREPPLE_HOME'], 'python36.zip')):
                # For the py2exe executable
                os.environ['PYTHONPATH'] = os.path.join(
                    os.environ['FREPPLE_HOME'], 'python%d%d.zip' %
                    (sys.version_info[0], sys.version_info[1])
                ) + os.pathsep + os.path.normpath(os.environ['FREPPLE_APP'])
            else:
                # Other executables
                os.environ['PYTHONPATH'] = os.path.normpath(
                    os.environ['FREPPLE_APP'])
            cmdline = ['"%s"' % i for i in options['file']]
            cmdline.insert(0, 'frepple')
            cmdline.append('"%s"' % os.path.join(
                settings.FREPPLE_APP, 'freppledb', 'execute', 'loadxml.py'))
            proc = subprocess.run(' '.join(cmdline))
            if proc.returncode:
                raise Exception('Exit code of the batch run is %d' %
                                proc.returncode)

            # Task update
            task.status = 'Done'
            task.finished = datetime.now()

        except Exception as e:
            if task:
                task.status = 'Failed'
                task.message = '%s' % e
                task.finished = datetime.now()
            raise e

        finally:
            if task:
                task.processid = None
                task.save(using=database)
Example #42
0
    def handle(self, **options):
        # Pick up the options
        now = datetime.now()
        self.database = options["database"]
        if self.database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               self.database)
        if options["user"]:
            try:
                self.user = (User.objects.all().using(
                    self.database).get(username=options["user"]))
            except Exception:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            self.user = None
        timestamp = now.strftime("%Y%m%d%H%M%S")
        if self.database == DEFAULT_DB_ALIAS:
            logfile = "importworkbook-%s.log" % timestamp
        else:
            logfile = "importworkbook_%s-%s.log" % (self.database, timestamp)

        task = None
        try:
            setattr(_thread_locals, "database", self.database)
            # Initialize the task
            if options["task"]:
                try:
                    task = (Task.objects.all().using(
                        self.database).get(pk=options["task"]))
                except Exception:
                    raise CommandError("Task identifier not found")
                if (task.started or task.finished or task.status != "Waiting"
                        or task.name
                        not in ("frepple_importworkbook", "importworkbook")):
                    raise CommandError("Invalid task identifier")
                task.status = "0%"
                task.started = now
            else:
                task = Task(
                    name="importworkbook",
                    submitted=now,
                    started=now,
                    status="0%",
                    user=self.user,
                )
            task.arguments = " ".join(options["file"])
            task.save(using=self.database)

            all_models = [(ct.model_class(), ct.pk)
                          for ct in ContentType.objects.all()
                          if ct.model_class()]
            try:
                with transaction.atomic(using=self.database):
                    # Find all models in the workbook
                    if "filename" not in locals():
                        filename = options["file"]
                    for file in filename:
                        wb = load_workbook(filename=file,
                                           read_only=True,
                                           data_only=True)
                        models = []
                        for ws_name in wb.sheetnames:
                            # Find the model
                            model = None
                            contenttype_id = None
                            for m, ct in all_models:
                                if matchesModelName(ws_name, m):
                                    model = m
                                    contenttype_id = ct
                                    break
                            if not model or model in EXCLUDE_FROM_BULK_OPERATIONS:
                                print(
                                    force_text(
                                        _("Ignoring data in worksheet: %s") %
                                        ws_name))
                                # yield '<div class="alert alert-warning">' + force_text(_("Ignoring data in worksheet: %s") % ws_name) + '</div>'
                            elif not self.user.has_perm("%s.%s" % (
                                    model._meta.app_label,
                                    get_permission_codename(
                                        "add", model._meta),
                            )):
                                # Check permissions
                                print(
                                    force_text(
                                        _("You don't permissions to add: %s") %
                                        ws_name))
                                # yield '<div class="alert alert-danger">' + force_text(_("You don't permissions to add: %s") % ws_name) + '</div>'
                            else:
                                deps = set([model])
                                GridReport.dependent_models(model, deps)
                                models.append(
                                    (ws_name, model, contenttype_id, deps))

                        # Sort the list of models, based on dependencies between models
                        models = GridReport.sort_models(models)

                        # Process all rows in each worksheet
                        for ws_name, model, contenttype_id, dependencies in models:
                            print(
                                force_text(
                                    _("Processing data in worksheet: %s") %
                                    ws_name))
                            # yield '<strong>' + force_text(_("Processing data in worksheet: %s") % ws_name) + '</strong><br>'
                            # yield ('<div class="table-responsive">'
                            # '<table class="table table-condensed" style="white-space: nowrap;"><tbody>')
                            numerrors = 0
                            numwarnings = 0
                            firsterror = True
                            ws = wb[ws_name]
                            for error in parseExcelWorksheet(
                                    model,
                                    ws,
                                    user=self.user,
                                    database=self.database,
                                    ping=True,
                            ):
                                if error[0] == logging.DEBUG:
                                    # Yield some result so we can detect disconnect clients and interrupt the upload
                                    # yield ' '
                                    continue
                                if firsterror and error[0] in (
                                        logging.ERROR,
                                        logging.WARNING,
                                ):
                                    print("%s %s %s %s %s%s%s" % (
                                        capfirst(_("worksheet")),
                                        capfirst(_("row")),
                                        capfirst(_("field")),
                                        capfirst(_("value")),
                                        capfirst(_("error")),
                                        " / ",
                                        capfirst(_("warning")),
                                    ))
                                    # yield '<tr><th class="sr-only">%s</th><th>%s</th><th>%s</th><th>%s</th><th>%s%s%s</th></tr>' % (
                                    #   capfirst(_("worksheet")), capfirst(_("row")),
                                    #   capfirst(_("field")), capfirst(_("value")),
                                    #   capfirst(_("error")), " / ", capfirst(_("warning"))
                                    #   )
                                    firsterror = False
                                if error[0] == logging.ERROR:
                                    print("%s %s %s %s %s: %s" % (
                                        ws_name,
                                        error[1] if error[1] else "",
                                        error[2] if error[2] else "",
                                        error[3] if error[3] else "",
                                        capfirst(_("error")),
                                        error[4],
                                    ))
                                    # yield '<tr><td class="sr-only">%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s: %s</td></tr>' % (
                                    #   ws_name,
                                    #   error[1] if error[1] else '',
                                    #   error[2] if error[2] else '',
                                    #   error[3] if error[3] else '',
                                    #   capfirst(_('error')),
                                    #   error[4]
                                    #   )
                                    numerrors += 1
                                elif error[0] == logging.WARNING:
                                    print("%s %s %s %s %s: %s" % (
                                        ws_name,
                                        error[1] if error[1] else "",
                                        error[2] if error[2] else "",
                                        error[3] if error[3] else "",
                                        capfirst(_("warning")),
                                        error[4],
                                    ))
                                    # yield '<tr><td class="sr-only">%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s: %s</td></tr>' % (
                                    #   ws_name,
                                    #   error[1] if error[1] else '',
                                    #   error[2] if error[2] else '',
                                    #   error[3] if error[3] else '',
                                    #   capfirst(_('warning')),
                                    #   error[4]
                                    #   )
                                    numwarnings += 1
                                else:
                                    print("%s %s %s %s %s %s" % (
                                        "danger"
                                        if numerrors > 0 else "success",
                                        ws_name,
                                        error[1] if error[1] else "",
                                        error[2] if error[2] else "",
                                        error[3] if error[3] else "",
                                        error[4],
                                    ))
                            #     yield '<tr class=%s><td class="sr-only">%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td></tr>' % (
                            #       "danger" if numerrors > 0 else 'success',
                            #       ws_name,
                            #       error[1] if error[1] else '',
                            #       error[2] if error[2] else '',
                            #       error[3] if error[3] else '',
                            #       error[4]
                            #       )
                            # yield '</tbody></table></div>'
                        print("%s" % _("Done"))
                        # yield '<div><strong>%s</strong></div>' % _("Done")
            except GeneratorExit:
                logger.warning("Connection Aborted")
        except Exception as e:
            if task:
                task.status = "Failed"
                task.message = "%s" % e
                task.finished = datetime.now()
            raise e

        finally:
            setattr(_thread_locals, "database", None)
            if task:
                task.save(using=self.database)

        # Task update
        task.status = "Done"
        task.finished = datetime.now()
        task.processid = None
        task.save(using=self.database, update_fields=["status", "finished", "processid"])

        return _("Done")
Beispiel #43
0
    def handle(self, **options):
        # Pick up options
        database = options["database"]
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" % database)
        if options["user"]:
            try:
                user = User.objects.all().using(database).get(username=options["user"])
            except Exception:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            user = None
        if options["models"]:
            models = options["models"].split(",")
        else:
            models = None

        now = datetime.now()
        task = None
        try:
            # Initialize the task
            setattr(_thread_locals, "database", database)
            if options["task"]:
                try:
                    task = Task.objects.all().using(database).get(pk=options["task"])
                except Exception:
                    raise CommandError("Task identifier not found")
                if (
                    task.started
                    or task.finished
                    or task.status != "Waiting"
                    or task.name not in ("frepple_flush", "empty")
                ):
                    raise CommandError("Invalid task identifier")
                task.status = "0%"
                task.started = now
            else:
                task = Task(
                    name="empty", submitted=now, started=now, status="0%", user=user
                )
                task.arguments = "%s%s" % (
                    "--user=%s " % options["user"] if options["user"] else "",
                    "--models=%s " % options["models"] if options["models"] else "",
                )
            task.processid = os.getpid()
            task.save(using=database)

            # Create a database connection
            cursor = connections[database].cursor()

            # Get a list of all django tables in the database
            tables = set(
                connections[database].introspection.django_table_names(
                    only_existing=True
                )
            )
            ContentTypekeys = set()
            # Validate the user list of tables
            if models:
                hasDemand = "input.demand" in models
                hasOperation = "input.operation" in models
                hasPO = "input.purchaseorder" in models
                hasDO = "input.distributionorder" in models
                hasMO = "input.manufacturingorder" in models
                hasDeO = "input.deliveryorder" in models

                if not hasOperation:
                    if hasDemand:
                        models.remove("input.demand")
                        cursor.execute(
                            "update operationplan set demand_id = null where demand_id is not null"
                        )
                        cursor.execute("delete from demand")
                        key = ContentType.objects.get_for_model(
                            inputmodels.Demand, for_concrete_model=False
                        ).pk
                        cursor.execute(
                            "delete from django_admin_log where content_type_id = %s",
                            (key,),
                        )

                    if not (hasPO and hasDO and hasMO and hasDeO):
                        if "input.operationplanmaterial" in models:
                            models.remove("input.operationplanmaterial")
                        if "input.operationplanresource" in models:
                            models.remove("input.operationplanresource")

                    if hasPO and not (hasDO and hasMO and hasDeO):
                        models.remove("input.purchaseorder")
                        cursor.execute("delete from operationplan where type = 'PO'")
                        key = ContentType.objects.get_for_model(
                            inputmodels.PurchaseOrder, for_concrete_model=False
                        ).pk
                        cursor.execute(
                            "delete from django_admin_log where content_type_id = %s",
                            (key,),
                        )

                    if hasDO and not (hasPO and hasMO and hasDeO):
                        models.remove("input.distributionorder")
                        cursor.execute("delete from operationplan where type = 'DO'")
                        key = ContentType.objects.get_for_model(
                            inputmodels.DistributionOrder, for_concrete_model=False
                        ).pk
                        cursor.execute(
                            "delete from django_admin_log where content_type_id = %s",
                            (key,),
                        )

                    if hasMO and not (hasPO and hasDO and hasDeO):
                        models.remove("input.manufacturingorder")
                        cursor.execute("delete from operationplan where type = 'MO'")
                        key = ContentType.objects.get_for_model(
                            inputmodels.ManufacturingOrder, for_concrete_model=False
                        ).pk
                        cursor.execute(
                            "delete from django_admin_log where content_type_id = %s",
                            (key,),
                        )

                    if hasDeO and not (hasPO and hasDO and hasMO):
                        models.remove("input.deliveryorder")
                        cursor.execute("delete from operationplan where type = 'DLVR'")
                        key = ContentType.objects.get_for_model(
                            inputmodels.DeliveryOrder, for_concrete_model=False
                        ).pk
                        cursor.execute(
                            "delete from django_admin_log where content_type_id = %s",
                            (key,),
                        )

                    if (hasPO or hasDO or hasMO or hasDeO) and not (
                        hasPO and hasDO and hasMO and hasDeO
                    ):
                        # Keep the database in shape
                        cursor.execute("vacuum analyze")

                models2tables = set()
                admin_log_positive = True
                for m in models:
                    try:
                        x = m.split(".", 1)
                        x = apps.get_model(x[0], x[1])
                        if x in EXCLUDE_FROM_BULK_OPERATIONS:
                            continue

                        ContentTypekeys.add(ContentType.objects.get_for_model(x).pk)

                        x = x._meta.db_table
                        x = x._meta.db_table
                        if x not in tables:
                            # Unknown table: handled by the generic error below
                            raise LookupError(x)
                        models2tables.add(x)
                    except Exception:
                        raise CommandError("Invalid model to erase: %s" % m)
                tables = models2tables
            else:
                admin_log_positive = False
                tables.discard("django_admin_log")
                for i in EXCLUDE_FROM_BULK_OPERATIONS:
                    tables.discard(i._meta.db_table)
                    ContentTypekeys.add(ContentType.objects.get_for_model(i).pk)
            # Some tables need to be handled a bit special
            if "operationplan" in tables:
                tables.add("operationplanmaterial")
                tables.add("operationplanresource")
                tables.add("out_problem")
            if "resource" in tables and "out_resourceplan" not in tables:
                tables.add("out_resourceplan")
            if "demand" in tables and "out_constraint" not in tables:
                tables.add("out_constraint")
            if (
                "reportmanager_report" in tables
                and "reportmanager_column" not in tables
            ):
                tables.add("reportmanager_column")
            tables.discard("auth_group_permissions")
            tables.discard("auth_permission")
            tables.discard("auth_group")
            tables.discard("django_session")
            tables.discard("common_user")
            tables.discard("common_user_groups")
            tables.discard("common_user_user_permissions")
            tables.discard("common_preference")
            tables.discard("django_content_type")
            tables.discard("execute_log")
            tables.discard("execute_schedule")
            tables.discard("common_scenario")

            # Delete all records from the tables.
            with transaction.atomic(using=database, savepoint=False):
                if ContentTypekeys:
                    if admin_log_positive:
                        cursor.execute(
                            "delete from django_admin_log where content_type_id = any(%s)",
                            (list(ContentTypekeys),),
                        )
                    else:
                        cursor.execute(
                            "delete from django_admin_log where content_type_id != any(%s)",
                            (list(ContentTypekeys),),
                        )
                if "common_bucket" in tables:
                    cursor.execute("update common_user set horizonbuckets = null")
                for stmt in connections[database].ops.sql_flush(no_style(), tables, []):
                    cursor.execute(stmt)

            # Task update
            task.status = "Done"
            task.finished = datetime.now()
            task.processid = None
            task.save(using=database)

        except Exception as e:
            if task:
                task.status = "Failed"
                task.message = "%s" % e
                task.finished = datetime.now()
                task.processid = None
                task.save(using=database)
            raise CommandError("%s" % e)

        finally:
            setattr(_thread_locals, "database", None)
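
Assuming this command is registered under the name used in its task record ("empty"), it can also be invoked programmatically through Django's management API. A sketch under that assumption, with illustrative model and database names; the database and models option names match the options read above:

from django.core.management import call_command

# Erase only two models from the "default" database (model names are illustrative)
call_command("empty", database="default", models="input.demand,input.calendar")

# Erase everything except the excluded tables
call_command("empty", database="default")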
Beispiel #44
0
  def handle(self, *args, **options):

    # Pick up the options
    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'restore database':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='restore database', submitted=now, started=now, status='0%', user=user)
      task.arguments = args and args[0] or None
      task.save(using=database)

      # Validate options
      if not args:
        raise CommandError("No dump file specified")
      if not os.path.isfile(os.path.join(settings.FREPPLE_LOGDIR, args[0])):
        raise CommandError("Dump file not found")

      # Run the restore command
      # Commenting out the next lines is a little more secure, but requires you to create a .pgpass file.
      if settings.DATABASES[database]['PASSWORD']:
        os.environ['PGPASSWORD'] = settings.DATABASES[database]['PASSWORD']
      cmd = [ "psql", ]
      if settings.DATABASES[database]['USER']:
        cmd.append("--username=%s" % settings.DATABASES[database]['USER'])
      if settings.DATABASES[database]['HOST']:
        cmd.append("--host=%s" % settings.DATABASES[database]['HOST'])
      if settings.DATABASES[database]['PORT']:
        cmd.append("--port=%s " % settings.DATABASES[database]['PORT'])
      cmd.append(settings.DATABASES[database]['NAME'])
      cmd.append('<%s' % os.path.abspath(os.path.join(settings.FREPPLE_LOGDIR, args[0])))
      ret = subprocess.call(" ".join(cmd), shell=True)  # Shell needs to be True in order to interpret the < character
      if ret:
        raise Exception("Run of psql failed")

      # Task update
      # We need to create a new task record, since the previous one is lost during the restoration.
      task = Task(
        name='restore database', submitted=task.submitted, started=task.started,
        arguments=task.arguments, status='Done', finished=datetime.now(),
        user=task.user
        )

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      # Commit it all, even in case of exceptions
      if task:
        task.save(using=database)
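
The '<' redirection is the only reason the call above goes through a shell. An alternative, sketched below, is to hand the dump file to psql on stdin, which avoids shell interpretation altogether (the function name, file name and password handling are illustrative, not part of the original command):

import os
import subprocess

def run_psql_restore(cmd, dumpfile, password=None):
    """Run psql with the dump file connected to stdin instead of a shell '<'."""
    env = os.environ.copy()
    if password:
        env["PGPASSWORD"] = password
    with open(dumpfile, "rb") as f:
        return subprocess.call(cmd, stdin=f, env=env)

# e.g. run_psql_restore(["psql", "--username=frepple", "mydb"], "/var/log/frepple/backup.dump")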
Beispiel #45
0
    def handle(self, **options):
        # Pick up the options
        now = datetime.now()

        if "database" in options:
            database = options["database"] or DEFAULT_DB_ALIAS
        else:
            database = DEFAULT_DB_ALIAS
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               database)
        if "user" in options and options["user"]:
            try:
                user = User.objects.all().using(database).get(
                    username=options["user"])
            except Exception:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            user = None

        timestamp = now.strftime("%Y%m%d%H%M%S")
        if database == DEFAULT_DB_ALIAS:
            logfile = "frepple-%s.log" % timestamp
        else:
            logfile = "frepple_%s-%s.log" % (database, timestamp)

        task = None
        try:
            # Initialize the task
            setattr(_thread_locals, "database", database)
            if "task" in options and options["task"]:
                try:
                    task = Task.objects.all().using(database).get(
                        pk=options["task"])
                except Exception:
                    raise CommandError("Task identifier not found")
                if (task.started or task.finished or task.status != "Waiting"
                        or task.name
                        not in ("runplan", "odoo_import", "odoo_export")):
                    raise CommandError("Invalid task identifier")
                task.status = "0%"
                task.started = now
                task.logfile = logfile
            else:
                task = Task(
                    name="runplan",
                    submitted=now,
                    started=now,
                    status="0%",
                    user=user,
                    logfile=logfile,
                )

            # Validate options
            if "constraint" in options:
                constraint = int(options["constraint"])
                if constraint < 0 or constraint > 15:
                    raise ValueError("Invalid constraint: %s" %
                                     options["constraint"])
            else:
                constraint = 15
            if "plantype" in options:
                plantype = int(options["plantype"])
            else:
                plantype = 1

            # Reset environment variables
            # TODO avoid having to delete the environment variables. Use options directly?
            for label in freppledb.common.commands.PlanTaskRegistry.getLabels():
                if "env" in options:
                    # Options specified
                    if label[0] in os.environ:
                        del os.environ[label[0]]
                else:
                    # No options specified - default to activate them all
                    os.environ[label[0]] = "1"

            # Set environment variables
            if options["env"]:
                task.arguments = "--constraint=%d --plantype=%d --env=%s" % (
                    constraint,
                    plantype,
                    options["env"],
                )
                for i in options["env"].split(","):
                    j = i.split("=")
                    if len(j) == 1:
                        os.environ[j[0]] = "1"
                    else:
                        os.environ[j[0]] = j[1]
            else:
                task.arguments = "--constraint=%d --plantype=%d" % (
                    constraint,
                    plantype,
                )
            if options["background"]:
                task.arguments += " --background"

            # Log task
            # Unlike the other tasks, the frepple engine itself will write the processid
            task.save(using=database)

            # Locate commands.py
            cmd = freppledb.common.commands.__file__

            def setlimits():
                import resource

                if settings.MAXMEMORYSIZE:
                    resource.setrlimit(
                        resource.RLIMIT_AS,
                        (
                            settings.MAXMEMORYSIZE * 1024 * 1024,
                            (settings.MAXMEMORYSIZE + 10) * 1024 * 1024,
                        ),
                    )
                if settings.MAXCPUTIME:
                    resource.setrlimit(
                        resource.RLIMIT_CPU,
                        (settings.MAXCPUTIME, settings.MAXCPUTIME + 5),
                    )
                # Limiting the file size is a bit tricky as this limit not only applies to the log
                # file, but also to temp files during the export
                # if settings.MAXTOTALLOGFILESIZE:
                #  resource.setrlimit(
                #    resource.RLIMIT_FSIZE,
                #   (settings.MAXTOTALLOGFILESIZE * 1024 * 1024, (settings.MAXTOTALLOGFILESIZE + 1) * 1024 * 1024)
                #   )

            # Make sure the forecast engine uses the same timezone
            os.environ["PGTZ"] = settings.TIME_ZONE

            # Prepare environment
            os.environ["FREPPLE_PLANTYPE"] = str(plantype)
            os.environ["FREPPLE_CONSTRAINT"] = str(constraint)
            os.environ["FREPPLE_TASKID"] = str(task.id)
            os.environ["FREPPLE_DATABASE"] = database
            os.environ["FREPPLE_LOGFILE"] = logfile
            os.environ["FREPPLE_PROCESSNAME"] = settings.DATABASES[database][
                "NAME"].replace("demo", "")
            os.environ["PATH"] = (settings.FREPPLE_HOME + os.pathsep +
                                  os.environ["PATH"] + os.pathsep +
                                  settings.FREPPLE_APP)
            if os.path.isfile(
                    os.path.join(settings.FREPPLE_HOME, "libfrepple.so")):
                os.environ["LD_LIBRARY_PATH"] = settings.FREPPLE_HOME
            if "DJANGO_SETTINGS_MODULE" not in os.environ:
                os.environ["DJANGO_SETTINGS_MODULE"] = "freppledb.settings"
            os.environ["PYTHONPATH"] = os.path.normpath(settings.FREPPLE_APP)
            libdir = os.path.join(os.path.normpath(settings.FREPPLE_HOME),
                                  "lib")
            if os.path.isdir(libdir):
                # Folders used by the Windows version
                os.environ["PYTHONPATH"] += os.pathsep + libdir
                if os.path.isfile(os.path.join(libdir, "library.zip")):
                    os.environ["PYTHONPATH"] += os.pathsep + os.path.join(
                        libdir, "library.zip")

            if options["background"]:
                # Execute as background process on Windows
                if os.name == "nt":
                    startupinfo = subprocess.STARTUPINFO()
                    startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
                    subprocess.Popen(
                        ["frepple", cmd],
                        creationflags=0x08000000,
                        startupinfo=startupinfo,
                    )
                else:
                    # Execute as background process on Linux
                    subprocess.Popen(["frepple", cmd], preexec_fn=setlimits)
            else:
                if os.name == "nt":
                    # Execute in foreground on Windows
                    startupinfo = subprocess.STARTUPINFO()
                    startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
                    ret = subprocess.call(["frepple", cmd],
                                          startupinfo=startupinfo)
                else:
                    # Execute in foreground on Linux
                    ret = subprocess.call(["frepple", cmd],
                                          preexec_fn=setlimits)
                if ret != 0 and ret != 2:
                    # Return code 0 is a successful run
                    # Return code 2 is a run cancelled by a user. That's shown in the status field.
                    raise Exception("Failed with exit code %d" % ret)

            if options["background"]:
                # Wait for the background task to be ready
                while True:
                    sleep(5)
                    t = Task.objects.using(database).get(pk=task.id)
                    if t.status in ["100%", "Canceled", "Failed", "Done"]:
                        break
                    if not self.process_exists(t.processid):
                        t.status = "Failed"
                        t.processid = None
                        t.save(update_fields=["processid", "status"],
                               using=database)
                        break
            else:
                # Reread the task from the database and update it
                task = Task.objects.all().using(database).get(pk=task.id)
                task.processid = None
                task.status = "Done"
                task.finished = datetime.now()
                task.save(using=database)

        except Exception as e:
            if task:
                task = Task.objects.all().using(database).get(pk=task.id)
                task.status = "Failed"
                task.message = "%s" % e
                task.finished = datetime.now()
                task.processid = None
                task.save(using=database)
            raise e

        finally:
            setattr(_thread_locals, "database", None)
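
setlimits is passed as preexec_fn, so it runs in the child process just before the planning engine is exec'ed. The same pattern can be tried in isolation with the sketch below (the limits and the command being launched are illustrative; resource and preexec_fn are Unix only):

import resource
import subprocess

def setlimits():
    # Cap the child's address space at roughly 1 GB (soft) / 1.1 GB (hard)
    resource.setrlimit(resource.RLIMIT_AS, (1024 * 1024 * 1024, 1126 * 1024 * 1024))
    # Cap CPU time at 600 seconds (soft) / 605 seconds (hard)
    resource.setrlimit(resource.RLIMIT_CPU, (600, 605))

# The limits apply only to the spawned process, not to the parent
subprocess.call(["python", "-c", "print('hello from the limited child')"],
                preexec_fn=setlimits)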
Beispiel #46
0
    def handle(self, *args, **options):
        # Make sure the debug flag is not set!
        # When it is set, the django database wrapper collects a list of all sql
        # statements executed and their timings. This consumes plenty of memory
        # and cpu time.
        tmp_debug = settings.DEBUG
        settings.DEBUG = False

        # Pick up options
        if 'force' in options:
            force = options['force']
        else:
            force = False
        test = 'FREPPLE_TEST' in os.environ
        if 'user' in options and options['user']:
            try:
                user = User.objects.all().get(username=options['user'])
            except:
                raise CommandError("User '%s' not found" % options['user'])
        else:
            user = None

        # Initialize the task
        now = datetime.now()
        task = None
        if 'task' in options and options['task']:
            try:
                task = Task.objects.all().get(pk=options['task'])
            except:
                raise CommandError("Task identifier not found")
            if task.started or task.finished or task.status != "Waiting" or task.name != 'copy scenario':
                raise CommandError("Invalid task identifier")
            task.status = '0%'
            task.started = now
        else:
            task = Task(name='copy scenario',
                        submitted=now,
                        started=now,
                        status='0%',
                        user=user)
        task.save()

        # Synchronize the scenario table with the settings
        Scenario.syncWithSettings()

        # Validate the arguments
        destinationscenario = None
        try:
            if len(args) != 2:
                raise CommandError("Command takes exactly 2 arguments.")
            task.arguments = "%s %s" % (args[0], args[1])
            task.save()
            source = args[0]
            try:
                sourcescenario = Scenario.objects.get(pk=source)
            except:
                raise CommandError(
                    "No source database defined with name '%s'" % source)
            destination = args[1]
            try:
                destinationscenario = Scenario.objects.get(pk=destination)
            except:
                raise CommandError(
                    "No destination database defined with name '%s'" %
                    destination)
            if source == destination:
                raise CommandError("Can't copy a schema on itself")
            if settings.DATABASES[source]['ENGINE'] != settings.DATABASES[
                    destination]['ENGINE']:
                raise CommandError(
                    "Source and destination scenarios have a different engine")
            if sourcescenario.status != 'In use':
                raise CommandError("Source scenario is not in use")
            if destinationscenario.status != 'Free' and not force:
                raise CommandError("Destination scenario is not free")

            # Logging message - always logging in the default database
            destinationscenario.status = 'Busy'
            destinationscenario.save()

            # Copying the data
            # Commenting out the next lines is a little more secure, but requires you to create a .pgpass file.
            if settings.DATABASES[source]['PASSWORD']:
                os.environ['PGPASSWORD'] = settings.DATABASES[source][
                    'PASSWORD']
            commandline = "pg_dump -c -Fp %s%s%s%s | psql %s%s%s%s" % (
                settings.DATABASES[source]['USER'] and
                ("-U %s " % settings.DATABASES[source]['USER']) or '',
                settings.DATABASES[source]['HOST'] and
                ("-h %s " % settings.DATABASES[source]['HOST']) or '',
                settings.DATABASES[source]['PORT'] and
                ("-p %s " % settings.DATABASES[source]['PORT']) or '',
                test and settings.DATABASES[source]['TEST']['NAME']
                or settings.DATABASES[source]['NAME'],
                settings.DATABASES[destination]['USER'] and
                ("-U %s " % settings.DATABASES[destination]['USER']) or '',
                settings.DATABASES[destination]['HOST'] and
                ("-h %s " % settings.DATABASES[destination]['HOST']) or '',
                settings.DATABASES[destination]['PORT'] and
                ("-p %s " % settings.DATABASES[destination]['PORT']) or '',
                test and settings.DATABASES[destination]['TEST']['NAME']
                or settings.DATABASES[destination]['NAME'],
            )

            ret = subprocess.call(commandline,
                                  shell=True,
                                  stdout=subprocess.DEVNULL,
                                  stderr=subprocess.STDOUT)

            if ret:
                raise Exception(
                    'Exit code of the database copy command is %d' % ret)

            # Update the scenario table
            destinationscenario.status = 'In use'
            destinationscenario.lastrefresh = datetime.today()
            if 'description' in options:
                destinationscenario.description = options['description']
            else:
                destinationscenario.description = "Copied from scenario '%s'" % source
            destinationscenario.save()

            # Give access to the destination scenario to:
            #  a) the user doing the copy
            #  b) all superusers from the source schema
            User.objects.using(destination).filter(is_superuser=True).update(
                is_active=True)
            User.objects.using(destination).filter(is_superuser=False).update(
                is_active=False)
            if user:
                User.objects.using(destination).filter(
                    username=user.username).update(is_active=True)

            # Logging message
            task.status = 'Done'
            task.finished = datetime.now()

        except Exception as e:
            if task:
                task.status = 'Failed'
                task.message = '%s' % e
                task.finished = datetime.now()
            if destinationscenario and destinationscenario.status == 'Busy':
                destinationscenario.status = 'Free'
                destinationscenario.save()
            raise e

        finally:
            if task:
                task.save()
            settings.DEBUG = tmp_debug
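
The copy above pipes pg_dump into psql through a shell. The same pipeline can be built without shell=True by connecting two Popen objects, which sidesteps quoting of user names, hosts and database names; a sketch with illustrative connection arguments:

import subprocess

def copy_database(source_args, destination_args):
    """Pipe 'pg_dump -c -Fp <source>' into 'psql <destination>' without a shell."""
    dump = subprocess.Popen(["pg_dump", "-c", "-Fp"] + source_args,
                            stdout=subprocess.PIPE)
    load = subprocess.Popen(["psql"] + destination_args,
                            stdin=dump.stdout,
                            stdout=subprocess.DEVNULL,
                            stderr=subprocess.STDOUT)
    dump.stdout.close()  # let pg_dump receive SIGPIPE if psql exits early
    ret = load.wait()
    dump.wait()
    return ret

# e.g. copy_database(["-U", "frepple", "scenario1"], ["-U", "frepple", "scenario2"])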
Beispiel #47
0
def wrapTask(request, action):
    # Allow only post
    if request.method != 'POST':
        raise Exception('Only post requests allowed')

    # Check user permissions
    if not request.user.has_perm('execute'):
        raise Exception('Missing execution privileges')

    # Parse the posted parameters as arguments for an asynchronous task to add to the queue.  TODO MAKE MODULAR WITH SEPARATE TASK CLASS
    worker_database = request.database

    now = datetime.now()
    task = None
    # A
    if action == 'frepple_run':
        if not request.user.has_perm('execute.generate_plan'):
            raise Exception('Missing execution privileges')
        constraint = 0
        for value in request.POST.getlist('constraint'):
            try:
                constraint += int(value)
            except:
                pass
        task = Task(name='generate plan',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        task.arguments = "--constraint=%s --plantype=%s" % (
            constraint, request.POST.get('plantype'))
        env = []
        if request.POST.get('odoo_read', None) == '1':
            env.append("odoo_read")
            request.session['odoo_read'] = True
        else:
            request.session['odoo_read'] = False
        if request.POST.get('odoo_write', None) == '1':
            env.append("odoo_write")
            request.session['odoo_write'] = True
        else:
            request.session['odoo_write'] = False
        if env:
            task.arguments = "%s --env=%s" % (task.arguments, ','.join(env))
        task.save(using=request.database)
        # Update the session object
        request.session['plantype'] = request.POST.get('plantype')
        request.session['constraint'] = constraint
    # B
    elif action == 'frepple_createmodel':
        task = Task(name='generate model',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        task.arguments = "--cluster=%s --demand=%s --forecast_per_item=%s --level=%s --resource=%s " \
          "--resource_size=%s --components=%s --components_per=%s --deliver_lt=%s --procure_lt=%s" % (
            request.POST['clusters'], request.POST['demands'], request.POST['fcst'], request.POST['levels'],
            request.POST['rsrc_number'], request.POST['rsrc_size'], request.POST['components'],
            request.POST['components_per'], request.POST['deliver_lt'], request.POST['procure_lt']
            )
        task.save(using=request.database)
    # C
    elif action == 'frepple_flush':
        task = Task(name='empty database',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        if not request.POST.get('all'):
            task.arguments = "--models=%s" % ','.join(
                request.POST.getlist('entities'))
        task.save(using=request.database)
    # D
    elif action == 'loaddata':
        task = Task(name='load dataset',
                    submitted=now,
                    status='Waiting',
                    user=request.user,
                    arguments=request.POST['datafile'])
        task.save(using=request.database)
    # E
    elif action == 'frepple_copy':
        worker_database = DEFAULT_DB_ALIAS
        if 'copy' in request.POST:
            if not request.user.has_perm('execute.copy_scenario'):
                raise Exception('Missing execution privileges')
            source = request.POST.get('source', DEFAULT_DB_ALIAS)
            for sc in Scenario.objects.all():
                if request.POST.get(sc.name,
                                    'off') == 'on' and sc.status == 'Free':
                    task = Task(name='copy scenario',
                                submitted=now,
                                status='Waiting',
                                user=request.user,
                                arguments="%s %s" % (source, sc.name))
                    task.save()
        elif 'release' in request.POST:
            # Note: release is immediate and synchronous.
            if not request.user.has_perm('execute.release_scenario'):
                raise Exception('Missing execution privileges')
            for sc in Scenario.objects.all():
                if request.POST.get(sc.name,
                                    'off') == 'on' and sc.status != 'Free':
                    sc.status = 'Free'
                    sc.lastrefresh = now
                    sc.save()
                    if request.database == sc.name:
                        # Erasing the database that is currently selected.
                        request.prefix = ''
        elif 'update' in request.POST:
            # Note: update is immediate and synchronous.
            for sc in Scenario.objects.all():
                if request.POST.get(sc.name, 'off') == 'on':
                    sc.description = request.POST.get('description', None)
                    sc.save()
        else:
            raise Exception('Invalid scenario task')
    # F
    elif action == 'frepple_backup':
        task = Task(name='backup database',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        task.save(using=request.database)
    # G
    elif action == 'frepple_createbuckets':
        task = Task(name='generate buckets',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        task.arguments = "--start=%s --end=%s --weekstart=%s" % (
            request.POST['start'], request.POST['end'],
            request.POST['weekstart'])
        task.save(using=request.database)
    # H
    elif action == 'openbravo_import' and 'freppledb.openbravo' in settings.INSTALLED_APPS:
        task = Task(name='Openbravo import',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        task.arguments = "--delta=%s" % request.POST['delta']
        task.save(using=request.database)
    # I
    elif action == 'openbravo_export' and 'freppledb.openbravo' in settings.INSTALLED_APPS:
        task = Task(name='Openbravo export',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        task.save(using=request.database)
    else:
        # Task not recognized
        raise Exception('Invalid launching task')

    # Launch a worker process
    if task and not checkActive(worker_database):
        if os.path.isfile(os.path.join(settings.FREPPLE_APP, "frepplectl.py")):
            if "python" in sys.executable:
                # Development layout
                Popen([
                    sys.executable,  # Python executable
                    os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
                    "frepple_runworker",
                    "--database=%s" % worker_database
                ])
            else:
                # Deployment on Apache web server
                Popen([
                    "python",
                    os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
                    "frepple_runworker",
                    "--database=%s" % worker_database
                ],
                      creationflags=0x08000000)
        elif sys.executable.find('freppleserver.exe') >= 0:
            # Py2exe executable
            Popen(
                [
                    sys.executable.replace(
                        'freppleserver.exe',
                        'frepplectl.exe'),  # frepplectl executable
                    "frepple_runworker",
                    "--database=%s" % worker_database
                ],
                creationflags=0x08000000)  # Do not create a console window
        else:
            # Linux standard installation
            Popen([
                "frepplectl", "frepple_runworker",
                "--database=%s" % worker_database
            ])
    return task
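
The magic constant 0x08000000 used in the Popen calls above is the Windows CREATE_NO_WINDOW flag; on Python 3.7 and later it is available by name, which reads better than the raw number. A sketch of the same worker launch with the named flag (the frepplectl path is illustrative):

import os
import subprocess

def launch_worker(worker_database, frepplectl="frepplectl"):
    """Start the worker process without opening a console window on Windows."""
    args = [frepplectl, "frepple_runworker", "--database=%s" % worker_database]
    if os.name == "nt":
        subprocess.Popen(args, creationflags=subprocess.CREATE_NO_WINDOW)
    else:
        subprocess.Popen(args)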
Beispiel #48
0
    def handle(self, *args, **options):
        # Pick up the options
        now = datetime.now()
        self.database = options['database']
        if self.database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               self.database)

        if options['user']:
            try:
                self.user = User.objects.all().using(
                    self.database).get(username=options['user'])
            except:
                raise CommandError("User '%s' not found" % options['user'])
        else:
            self.user = None

        timestamp = now.strftime("%Y%m%d%H%M%S")
        if self.database == DEFAULT_DB_ALIAS:
            logfile = 'exporttofolder-%s.log' % timestamp
        else:
            logfile = 'exporttofolder_%s-%s.log' % (self.database, timestamp)

        task = None
        self.logfile = None
        errors = 0
        try:
            # Initialize the task
            if options['task']:
                try:
                    task = Task.objects.all().using(
                        self.database).get(pk=options['task'])
                except:
                    raise CommandError("Task identifier not found")
                if task.started or task.finished or task.status != "Waiting" or task.name != 'export to folder':
                    raise CommandError("Invalid task identifier")
                task.status = '0%'
                task.started = now
                task.logfile = logfile
            else:
                task = Task(name='export to folder',
                            submitted=now,
                            started=now,
                            status='0%',
                            user=self.user,
                            logfile=logfile)
            task.arguments = ' '.join(['"%s"' % i for i in args])
            task.save(using=self.database)

            # Execute
            if os.path.isdir(
                    settings.DATABASES[self.database]['FILEUPLOADFOLDER']):

                # Open the logfile
                # The log file remains in the upload folder as different folders can be specified
                # We do not want to create one log file per folder
                if not os.path.isdir(
                        os.path.join(
                            settings.DATABASES[self.database]
                            ['FILEUPLOADFOLDER'], 'export')):
                    try:
                        os.makedirs(
                            os.path.join(
                                settings.DATABASES[self.database]
                                ['FILEUPLOADFOLDER'], 'export'))
                    except OSError as exception:
                        if exception.errno != errno.EEXIST:
                            raise

                self.logfile = open(
                    os.path.join(settings.FREPPLE_LOGDIR, logfile), "a")
                print("%s Started export to folder\n" % datetime.now(),
                      file=self.logfile)

                cursor = connections[self.database].cursor()

                task.status = '0%'
                task.save(using=self.database)

                i = 0
                cnt = len(self.statements)

                for filename, export, sqlquery in self.statements:
                    print("%s Started export of %s" %
                          (datetime.now(), filename),
                          file=self.logfile)

                    # Make sure the export folder exists
                    exportFolder = os.path.join(
                        settings.DATABASES[self.database]['FILEUPLOADFOLDER'],
                        export)
                    if not os.path.isdir(exportFolder):
                        os.makedirs(exportFolder)

                    try:
                        if filename.lower().endswith(".gz"):
                            csv_datafile = gzip.open(
                                os.path.join(exportFolder, filename), "w")
                        else:
                            csv_datafile = open(
                                os.path.join(exportFolder, filename), "w")

                        cursor.copy_expert(sqlquery, csv_datafile)

                        csv_datafile.close()
                        i += 1

                    except Exception as e:
                        errors += 1
                        print("%s Failed to export to %s" %
                              (datetime.now(), filename),
                              file=self.logfile)
                        if task:
                            task.message = 'Failed to export %s' % filename

                    task.status = str(int(i / cnt * 100)) + '%'
                    task.save(using=self.database)

                print("%s Exported %s file(s)\n" %
                      (datetime.now(), cnt - errors),
                      file=self.logfile)

            else:
                errors += 1
                print("%s Failed, folder does not exist" % datetime.now(),
                      file=self.logfile)
                task.message = "Destination folder does not exist"
                task.save(using=self.database)

        except Exception as e:
            if self.logfile:
                print("%s Failed" % datetime.now(), file=self.logfile)
            errors += 1
            if task:
                task.message = 'Failed to export'
            logger.error("Failed to export: %s" % e)

        finally:
            if task:
                if not errors:
                    task.status = '100%'
                    task.message = "Exported %s data files" % (cnt)
                else:
                    task.status = 'Failed'
                    #  task.message = "Exported %s data files, %s failed" % (cnt-errors, errors)
                task.finished = datetime.now()
                task.save(using=self.database)

            if self.logfile:
                print('%s End of export to folder\n' % datetime.now(),
                      file=self.logfile)
                self.logfile.close()
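
The heavy lifting above is done by psycopg2's copy_expert, which streams a COPY statement straight into a file object. A stripped-down sketch of the same pattern outside the management command (connection parameters, query and file name are illustrative):

import psycopg2

# Illustrative connection settings
conn = psycopg2.connect(dbname="frepple", user="frepple")
with conn.cursor() as cursor, open("demand.csv", "w") as datafile:
    cursor.copy_expert(
        "COPY (SELECT name, quantity, due FROM demand) TO STDOUT WITH CSV HEADER",
        datafile,
    )
conn.close()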
Beispiel #49
0
  def handle(self, *args, **options):
    # Pick up the options
    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if not database in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    transaction.enter_transaction_management(using=database)
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'load XML file':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='load XML file', submitted=now, started=now, status='0%', user=user)
      task.arguments = ' '.join(['"%s"' % i for i in args])
      task.save(using=database)
      transaction.commit(using=database)

      if not args:
        raise CommandError("No XML input file given")

      # Execute
      # TODO: if frePPLe is available as a module, we don't really need to spawn another process.
      os.environ['FREPPLE_HOME'] = settings.FREPPLE_HOME.replace('\\', '\\\\')
      os.environ['FREPPLE_APP'] = settings.FREPPLE_APP
      os.environ['FREPPLE_DATABASE'] = database
      os.environ['PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ['PATH'] + os.pathsep + settings.FREPPLE_APP
      os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
      if 'DJANGO_SETTINGS_MODULE' not in os.environ.keys():
        os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
      if os.path.exists(os.path.join(os.environ['FREPPLE_HOME'], 'python27.zip')):
        # For the py2exe executable
        os.environ['PYTHONPATH'] = os.path.join(os.environ['FREPPLE_HOME'], 'python27.zip') + ';' + os.path.normpath(os.environ['FREPPLE_APP'])
      else:
        # Other executables
        os.environ['PYTHONPATH'] = os.path.normpath(os.environ['FREPPLE_APP'])
      cmdline = [ '"%s"' % i for i in args ]
      cmdline.insert(0, 'frepple')
      cmdline.append( '"%s"' % os.path.join(settings.FREPPLE_APP, 'freppledb', 'execute', 'loadxml.py') )
      ret = os.system(' '.join(cmdline))
      if ret:
        raise Exception('Exit code of the batch run is %d' % ret)

      # Task update
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      if task:
        task.save(using=database)
      try:
        transaction.commit(using=database)
      except:
        pass
      transaction.leave_transaction_management(using=database)
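
This older example still shells out with os.system and hand-quoted arguments; the more recent examples above use subprocess with an argument list, which avoids the quoting entirely. A sketch of an equivalent invocation under that approach (the function name, script location and input files are illustrative):

import os
import subprocess

def run_loadxml(frepple_app, xml_files):
    """Equivalent of the os.system call above, without shell quoting."""
    cmdline = ["frepple"] + list(xml_files) + [
        os.path.join(frepple_app, "freppledb", "execute", "loadxml.py")
    ]
    ret = subprocess.call(cmdline)
    if ret:
        raise Exception("Exit code of the batch run is %d" % ret)

# e.g. run_loadxml("/usr/share/frepple", ["plan.xml"])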
Beispiel #50
0
  def handle(self, *args, **options):
    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    # Pick up options
    if 'force' in options: force = options['force']
    else: force = False
    test = 'FREPPLE_TEST' in os.environ
    if 'user' in options and options['user']:
      try: user = User.objects.all().get(username=options['user'])
      except: raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    # Initialize the task
    now = datetime.now()
    task = None
    if 'task' in options and options['task']:
      try: task = Task.objects.all().get(pk=options['task'])
      except: raise CommandError("Task identifier not found")
      if task.started or task.finished or task.status != "Waiting" or task.name != 'copy scenario':
        raise CommandError("Invalid task identifier")
      task.status = '0%'
      task.started = now
    else:
      task = Task(name='copy scenario', submitted=now, started=now, status='0%', user=user)
    task.save()

    # Synchronize the scenario table with the settings
    Scenario.syncWithSettings()

    # Validate the arguments
    destinationscenario = None
    try:
      if len(args) != 2:
        raise CommandError("Command takes exactly 2 arguments.")
      task.arguments = "%s %s" % (args[0], args[1])
      task.save()
      source = args[0]
      try:
        sourcescenario = Scenario.objects.get(pk=source)
      except:
        raise CommandError("No source database defined with name '%s'" % source)
      destination = args[1]
      try:
        destinationscenario = Scenario.objects.get(pk=destination)
      except:
        raise CommandError("No destination database defined with name '%s'" % destination)
      if source == destination:
        raise CommandError("Can't copy a schema on itself")
      if settings.DATABASES[source]['ENGINE'] != settings.DATABASES[destination]['ENGINE']:
        raise CommandError("Source and destination scenarios have a different engine")
      if sourcescenario.status != u'In use':
        raise CommandError("Source scenario is not in use")
      if destinationscenario.status != u'Free' and not force:
        raise CommandError("Destination scenario is not free")

      # Logging message - always logging in the default database
      destinationscenario.status = u'Busy'
      destinationscenario.save()

      # Copying the data
      if settings.DATABASES[source]['ENGINE'] == 'django.db.backends.postgresql_psycopg2':
        ret = os.system("pg_dump -c -U%s -Fp %s%s%s | psql -U%s %s%s%s" % (
          settings.DATABASES[source]['USER'],
          settings.DATABASES[source]['HOST'] and ("-h %s " % settings.DATABASES[source]['HOST']) or '',
          settings.DATABASES[source]['PORT'] and ("-p %s " % settings.DATABASES[source]['PORT']) or '',
          test and settings.DATABASES[source]['TEST_NAME'] or settings.DATABASES[source]['NAME'],
          settings.DATABASES[destination]['USER'],
          settings.DATABASES[destination]['HOST'] and ("-h %s " % settings.DATABASES[destination]['HOST']) or '',
          settings.DATABASES[destination]['PORT'] and ("-p %s " % settings.DATABASES[destination]['PORT']) or '',
          test and settings.DATABASES[destination]['TEST_NAME'] or settings.DATABASES[destination]['NAME'],
          ))
        if ret: raise Exception('Exit code of the database copy command is %d' % ret)
      elif settings.DATABASES[source]['ENGINE'] == 'django.db.backends.sqlite3':
        # A plain copy of the database file
        if test:
          shutil.copy2(settings.DATABASES[source]['TEST_NAME'], settings.DATABASES[destination]['TEST_NAME'])
        else:
          shutil.copy2(settings.DATABASES[source]['NAME'], settings.DATABASES[destination]['NAME'])
      elif settings.DATABASES[source]['ENGINE'] == 'django.db.backends.mysql':
        ret = os.system("mysqldump %s --password=%s --user=%s %s%s--quick --compress --extended-insert --add-drop-table | mysql %s --password=%s --user=%s %s%s" % (
          test and settings.DATABASES[source]['TEST_NAME'] or settings.DATABASES[source]['NAME'],
          settings.DATABASES[source]['PASSWORD'],
          settings.DATABASES[source]['USER'],
          settings.DATABASES[source]['HOST'] and ("--host=%s " % settings.DATABASES[source]['HOST']) or '',
          settings.DATABASES[source]['PORT'] and ("--port=%s " % settings.DATABASES[source]['PORT']) or '',
          test and settings.DATABASES[destination]['TEST_NAME'] or settings.DATABASES[destination]['NAME'],
          settings.DATABASES[destination]['PASSWORD'],
          settings.DATABASES[destination]['USER'],
          settings.DATABASES[destination]['HOST'] and ("--host=%s " % settings.DATABASES[destination]['HOST']) or '',
          settings.DATABASES[destination]['PORT'] and ("--port=%s " % settings.DATABASES[destination]['PORT']) or '',
          ))
        if ret: raise Exception('Exit code of the database copy command is %d' % ret)
      elif settings.DATABASES[source]['ENGINE'] == 'django.db.backends.oracle':
        try:
          try: os.unlink(os.path.join(settings.FREPPLE_LOGDIR,'frepple.dmp'))
          except: pass
          ret = os.system("expdp %s/%s@//%s:%s/%s schemas=%s directory=frepple_logdir nologfile=Y dumpfile=frepple.dmp" % (
            test and settings.DATABASES[source]['TEST_USER'] or settings.DATABASES[source]['USER'],
            settings.DATABASES[source]['PASSWORD'],
            settings.DATABASES[source]['HOST'] or 'localhost',
            settings.DATABASES[source]['PORT'] or '1521',
            test and settings.DATABASES[source]['TEST_NAME'] or settings.DATABASES[source]['NAME'],
            test and settings.DATABASES[source]['TEST_USER'] or settings.DATABASES[source]['USER'],
            ))
          if ret: raise Exception('Exit code of the database export command is %d' % ret)
          ret = os.system("impdp %s/%s@//%s:%s/%s remap_schema=%s:%s table_exists_action=replace directory=frepple_logdir nologfile=Y dumpfile=frepple.dmp" % (
            test and settings.DATABASES[destination]['TEST_USER'] or settings.DATABASES[destination]['USER'],
            settings.DATABASES[destination]['PASSWORD'],
            settings.DATABASES[destination]['HOST'],
            settings.DATABASES[destination]['PORT'],
            test and settings.DATABASES[destination]['TEST_NAME'] or settings.DATABASES[destination]['NAME'],
            test and settings.DATABASES[source]['TEST_USER'] or settings.DATABASES[source]['USER'],
            test and settings.DATABASES[destination]['TEST_USER'] or settings.DATABASES[destination]['USER'],
            ))
          if ret: raise Exception('Exit code of the database import command is %d' % ret)
        finally:
          try: os.unlink(os.path.join(settings.FREPPLE_LOGDIR,'frepple.dmp'))
          except OSError: pass  # the dump file may already be gone
      else:
        raise Exception('Copy command not supported for database engine %s' % settings.DATABASES[source]['ENGINE'])

      # Update the scenario table
      destinationscenario.status = 'In use'
      destinationscenario.lastrefresh = datetime.today()
      if 'description' in options:
        destinationscenario.description = options['description']
      else:
        destinationscenario.description = "Copied from scenario '%s'" % source
      destinationscenario.save()

      # Logging message
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      if destinationscenario and destinationscenario.status == 'Busy':
        destinationscenario.status = 'Free'
        destinationscenario.save()
      raise e

    finally:
      if task: task.save()
      settings.DEBUG = tmp_debug
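For orientation, a minimal sketch (not part of the original listing) of how a scenario copy command like the one above can be launched from Python code. The registration name 'scenario_copy' and the positional source/destination arguments are assumptions and may differ between frePPLe releases (older versions registered it as 'frepple_copy'):

# Hypothetical invocation -- the command name and positional arguments are assumptions.
from django.core.management import call_command

call_command(
  'scenario_copy',          # assumed registration name
  'default', 'scenario1',   # assumed source and destination database aliases
  description='What-if copy for testing',
)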
Example #51
0
    def handle(self, **options):
        # Make sure the debug flag is not set!
        # When it is set, the django database wrapper collects a list of all sql
        # statements executed and their timings. This consumes plenty of memory
        # and cpu time.
        tmp_debug = settings.DEBUG
        settings.DEBUG = False

        # Pick up the options
        verbosity = int(options["verbosity"])
        cluster = int(options["cluster"])
        demand = int(options["demand"])
        forecast_per_item = int(options["forecast_per_item"])
        level = int(options["level"])
        resource = int(options["resource"])
        resource_size = int(options["resource_size"])
        components = int(options["components"])
        components_per = int(options["components_per"])
        if components <= 0:
            components_per = 0
        deliver_lt = int(options["deliver_lt"])
        procure_lt = int(options["procure_lt"])
        if options["currentdate"]:
            currentdate = options["currentdate"]
        else:
            currentdate = datetime.strftime(date.today(), "%Y-%m-%d")
        database = options["database"]
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               database)
        if options["user"]:
            try:
                user = User.objects.all().using(database).get(
                    username=options["user"])
            except Exception:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            user = None

        random.seed(100)  # Initialize random seed to get reproducible results

        now = datetime.now()
        task = None
        try:
            # Initialize the task
            if options["task"]:
                try:
                    task = Task.objects.all().using(database).get(
                        pk=options["task"])
                except Exception:
                    raise CommandError("Task identifier not found")
                if (task.started or task.finished or task.status != "Waiting"
                        or task.name
                        not in ("frepple_createmodel", "createmodel")):
                    raise CommandError("Invalid task identifier")
                task.status = "0%"
                task.started = now
            else:
                task = Task(
                    name="createmodel",
                    submitted=now,
                    started=now,
                    status="0%",
                    user=user,
                )
            task.arguments = (
                "--cluster=%s --demand=%s --forecast_per_item=%s --level=%s --resource=%s "
                "--resource_size=%s --components=%s --components_per=%s --deliver_lt=%s --procure_lt=%s"
                % (
                    cluster,
                    demand,
                    forecast_per_item,
                    level,
                    resource,
                    resource_size,
                    components,
                    components_per,
                    deliver_lt,
                    procure_lt,
                ))
            task.save(using=database)

            # Pick up the startdate
            try:
                startdate = datetime.strptime(currentdate, "%Y-%m-%d")
            except Exception:
                raise CommandError(
                    "current date is not matching format YYYY-MM-DD")

            # Check whether the database is empty
            if (Buffer.objects.using(database).count() > 0
                    or Item.objects.using(database).count() > 0):
                raise CommandError(
                    "Database must be empty before creating a model")

            # Plan start date
            if verbosity > 0:
                print("Updating current date...")
            param = Parameter.objects.using(database).get_or_create(
                name="currentdate")[0]
            param.value = datetime.strftime(startdate, "%Y-%m-%d %H:%M:%S")
            param.save(using=database)

            # Planning horizon
            # minimum 10 daily buckets, weekly buckets till 40 days after current
            if verbosity > 0:
                print("Updating buckets...")
            management.call_command("createbuckets",
                                    user=user,
                                    database=database)
            task.status = "2%"
            task.save(using=database)

            # Weeks calendar
            if verbosity > 0:
                print("Creating weeks calendar...")
            with transaction.atomic(using=database):
                weeks = Calendar.objects.using(database).create(name="Weeks",
                                                                defaultvalue=0)
                for i in (BucketDetail.objects.using(database).filter(
                        bucket="week").all()):
                    CalendarBucket(
                        startdate=i.startdate,
                        enddate=i.enddate,
                        value=1,
                        calendar=weeks,
                    ).save(using=database)
                task.status = "4%"
                task.save(using=database)

            # Working days calendar
            if verbosity > 0:
                print("Creating working days...")
            with transaction.atomic(using=database):
                workingdays = Calendar.objects.using(database).create(
                    name="Working Days", defaultvalue=0)
                minmax = (BucketDetail.objects.using(database).filter(
                    bucket="week").aggregate(Min("startdate"),
                                             Max("startdate")))
                CalendarBucket(
                    startdate=minmax["startdate__min"],
                    enddate=minmax["startdate__max"],
                    value=1,
                    calendar=workingdays,
                    priority=1,
                    saturday=False,
                    sunday=False,
                ).save(using=database)
                task.status = "6%"
                task.save(using=database)

            # Parent location
            loc = Location.objects.using(database).create(
                name="Factory", available=workingdays)

            # Create a random list of categories to choose from
            categories = [
                "cat A", "cat B", "cat C", "cat D", "cat E", "cat F", "cat G"
            ]

            # Create customers
            if verbosity > 0:
                print("Creating customers...")
            with transaction.atomic(using=database):
                cust = []
                for i in range(100):
                    c = Customer.objects.using(database).create(
                        name="Cust %03d" % i)
                    cust.append(c)
                task.status = "8%"
                task.save(using=database)

            # Create resources and their calendars
            if verbosity > 0:
                print("Creating resources and calendars...")
            with transaction.atomic(using=database):
                res = []
                for i in range(resource):
                    cal = Calendar.objects.using(database).create(
                        name="capacity for res %03d" % i,
                        category="capacity",
                        defaultvalue=0,
                    )
                    CalendarBucket.objects.using(database).create(
                        startdate=startdate, value=resource_size, calendar=cal)
                    r = Resource.objects.using(database).create(
                        name="Res %03d" % i,
                        maximum_calendar=cal,
                        location=loc)
                    res.append(r)
                task.status = "10%"
                task.save(using=database)
                random.shuffle(res)

            # Create the components
            if verbosity > 0:
                print("Creating raw materials...")
            with transaction.atomic(using=database):
                comps = []
                compsupplier = Supplier.objects.using(database).create(
                    name="component supplier")
                for i in range(components):
                    it = Item.objects.using(database).create(
                        name="Component %04d" % i,
                        category="Procured",
                        cost=str(round(random.uniform(0, 100))),
                    )
                    ld = abs(
                        round(random.normalvariate(procure_lt,
                                                   procure_lt / 3)))
                    Buffer.objects.using(database).create(
                        location=loc,
                        category="Procured",
                        item=it,
                        minimum=20,
                        onhand=str(
                            round(forecast_per_item * random.uniform(1, 3) *
                                  ld / 30)),
                    )
                    ItemSupplier.objects.using(database).create(
                        item=it,
                        location=loc,
                        supplier=compsupplier,
                        leadtime=timedelta(days=ld),
                        sizeminimum=80,
                        sizemultiple=10,
                        priority=1,
                        cost=it.cost,
                    )
                    comps.append(it)
                task.status = "12%"
                task.save(using=database)

            # Loop over all clusters
            durations = [timedelta(days=i) for i in range(1, 6)]
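            # The setup steps above already advanced the task to 12%; the remaining 88% is spread evenly over the clusters.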
            progress = 88.0 / cluster
            for i in range(cluster):
                with transaction.atomic(using=database):
                    if verbosity > 0:
                        print("Creating supply chain for end item %d..." % i)

                    # Item
                    it = Item.objects.using(database).create(
                        name="Itm %05d" % i,
                        category=random.choice(categories),
                        cost=str(round(random.uniform(100, 200))),
                    )

                    # Level 0 buffer
                    buf = Buffer.objects.using(database).create(item=it,
                                                                location=loc,
                                                                category="00")

                    # Demand
                    for j in range(demand):
                        Demand.objects.using(database).create(
                            name="Dmd %05d %05d" % (i, j),
                            item=it,
                            location=loc,
                            quantity=int(random.uniform(1, 6)),
                            # Exponential distribution of due dates, with an average of deliver_lt days.
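                            # (mean of expovariate = deliver_lt * 24 hours; rounding and dividing by 24 gives days with hour-level resolution)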
                            due=startdate + timedelta(days=round(
                                random.expovariate(float(1) / deliver_lt /
                                                   24)) / 24),
                            # Orders have higher priority than forecast
                            priority=random.choice([1, 2]),
                            customer=random.choice(cust),
                            category=random.choice(categories),
                        )

                    # Create upstream operations and buffers
                    ops = []
                    previtem = it
                    for k in range(level):
                        if k == 1 and res:
                            # Create a resource load for operations on level 1
                            oper = Operation.objects.using(database).create(
                                name="Oper %05d L%02d" % (i, k),
                                type="time_per",
                                location=loc,
                                duration_per=timedelta(days=1),
                                sizemultiple=1,
                                item=previtem,
                            )
                            if resource < cluster and i < resource:
                                # When there are more clusters than resources, we try to ensure
                                # that each resource is loaded by at least 1 operation.
                                OperationResource.objects.using(
                                    database).create(resource=res[i],
                                                     operation=oper)
                            else:
                                OperationResource.objects.using(
                                    database).create(
                                        resource=random.choice(res),
                                        operation=oper)
                        else:
                            oper = Operation.objects.using(database).create(
                                name="Oper %05d L%02d" % (i, k),
                                duration=random.choice(durations),
                                sizemultiple=1,
                                location=loc,
                                item=previtem,
                            )
                        ops.append(oper)
                        # Some inventory in random buffers
                        if random.uniform(0, 1) > 0.8:
                            buf.onhand = int(random.uniform(5, 20))
                        buf.save(using=database)
                        OperationMaterial.objects.using(database).create(
                            operation=oper,
                            item=previtem,
                            quantity=1,
                            type="end")
                        if k != level - 1:
                            # Consume from the next level in the bill of material
                            it_tmp = Item.objects.using(database).create(
                                name="Itm %05d L%02d" % (i, k + 1),
                                category=random.choice(categories),
                                cost=str(round(random.uniform(100, 200))),
                            )
                            buf = Buffer.objects.using(database).create(
                                item=it_tmp,
                                location=loc,
                                category="%02d" % (k + 1))
                            OperationMaterial.objects.using(database).create(
                                operation=oper, item=it_tmp, quantity=-1)
                            previtem = it_tmp

                    # Consume raw materials / components
                    c = []
                    for j in range(components_per):
                        o = random.choice(ops)
                        b = random.choice(comps)
                        while (o, b) in c:
                            # A flow with the same operation and component already exists
                            o = random.choice(ops)
                            b = random.choice(comps)
                        c.append((o, b))
                        OperationMaterial.objects.using(database).create(
                            operation=o,
                            item=b,
                            quantity=random.choice([-1, -1, -1, -2, -3]),
                        )

                    # Commit the current cluster
                    task.status = "%d%%" % (12 + progress * (i + 1))
                    task.save(using=database)

            # Task update
            task.status = "Done"
            task.finished = datetime.now()

        except Exception as e:
            if task:
                task.status = "Failed"
                task.message = "%s" % e
                task.finished = datetime.now()
                task.save(using=database)
            raise e

        finally:
            if task:
                task.save(using=database)
            settings.DEBUG = tmp_debug
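The generator above is driven entirely by the options read at the top of handle(). A hedged sketch of invoking it through Django's call_command; the name "createmodel" matches the task name accepted above (older releases used "frepple_createmodel"), and the option values are purely illustrative:

# Illustrative invocation -- the option values below are arbitrary examples.
from django.core.management import call_command

call_command(
    "createmodel",          # or "frepple_createmodel" in older releases
    cluster=10,             # number of end items / supply chains to generate
    demand=5,               # demand records per end item
    level=3,                # depth of the bill of material
    resource=4,
    resource_size=2,
    components=20,
    components_per=4,
    forecast_per_item=50,
    deliver_lt=30,          # delivery lead time in days
    procure_lt=40,          # procurement lead time in days
    currentdate="2024-01-01",
    database="default",
    verbosity=1,
)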
Example #52
0
  def handle(self, **options):
    # Pick up the options
    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except Exception:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except Exception:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'generate plan':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='generate plan', submitted=now, started=now, status='0%', user=user)

      # Validate options
      if 'constraint' in options:
        constraint = int(options['constraint'])
        if constraint < 0 or constraint > 15:
          raise ValueError("Invalid constraint: %s" % options['constraint'])
      else:
        constraint = 15
      if 'plantype' in options:
        plantype = int(options['plantype'])
        if plantype < 1 or plantype > 2:
          raise ValueError("Invalid plan type: %s" % options['plantype'])
      else:
        plantype = 1

      # Reset environment variables
      # TODO avoid having to delete the environment variables. Use options directly?
      PlanTaskRegistry.autodiscover()
      for i in PlanTaskRegistry.reg:
        if options['env']:
          # Options specified
          if i.label and i.label[0] in os.environ:
            del os.environ[i.label[0]]
        elif i.label:
          # No options specified - default to activate them all
          os.environ[i.label[0]] = '1'

      # Set environment variables
      if options['env']:
        task.arguments = "--constraint=%d --plantype=%d --env=%s" % (constraint, plantype, options['env'])
        for i in options['env'].split(','):
          j = i.split('=')
          if len(j) == 1:
            os.environ[j[0]] = '1'
          else:
            os.environ[j[0]] = j[1]
      else:
        task.arguments = "--constraint=%d --plantype=%d" % (constraint, plantype)
      if options['background']:
        task.arguments += " --background"

      # Log task
      task.save(using=database)

      # Locate commands.py
      import freppledb.common.commands
      cmd = freppledb.common.commands.__file__
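      # The path of commands.py is handed to the 'frepple' executable below, which runs it with its embedded Python interpreter.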

      # Prepare environment
      os.environ['FREPPLE_PLANTYPE'] = str(plantype)
      os.environ['FREPPLE_CONSTRAINT'] = str(constraint)
      os.environ['FREPPLE_TASKID'] = str(task.id)
      os.environ['FREPPLE_DATABASE'] = database
      os.environ['PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ['PATH'] + os.pathsep + settings.FREPPLE_APP
      if os.path.isfile(os.path.join(settings.FREPPLE_HOME, 'libfrepple.so')):
        os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
      if 'DJANGO_SETTINGS_MODULE' not in os.environ:
        os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
      os.environ['PYTHONPATH'] = os.path.normpath(settings.FREPPLE_APP)

      if options['background']:
        # Execute as background process on Windows
        if os.name == 'nt':
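          # creationflags=0x08000000 is CREATE_NO_WINDOW: start the process without opening a console window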
          subprocess.Popen(['frepple', cmd], creationflags=0x08000000)
        else:
          # Execute as background process on Linux
          subprocess.Popen(['frepple', cmd])
      else:
        # Execute in foreground
        ret = subprocess.call(['frepple', cmd])
        if ret != 0 and ret != 2:
          # Return code 0 is a successful run
          # Return code 2 is a run cancelled by the user. That's shown in the status field.
          raise Exception('Failed with exit code %d' % ret)

        # Task update
        task.status = 'Done'
        task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      if task:
        task.save(using=database)
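To close, a hedged sketch of launching the planning run shown above from Python. The registration name 'runplan' is an assumption (older releases used 'frepple_run'); the constraint and plantype values follow the validation in handle(), and the env string is split on commas and exported as environment variables exactly as above:

# Hypothetical invocation -- the command name and the 'supply' label are assumptions.
from django.core.management import call_command

call_command(
  'runplan',                # assumed registration name
  constraint=15,            # validated above to lie in the range 0..15
  plantype=1,               # validated above to be 1 or 2
  env='supply',             # comma-separated keys exported as environment variables
  background=False,         # run the engine in the foreground and wait for it
  database='default',
)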