Example #1
class Command(BaseCommand):
  help = '''
  Update the ERP system with frePPLe planning information.
  '''
  # For the display in the execution screen
  title = _('Export data to %(erp)s') % {'erp': 'erp'}

  # For the display in the execution screen
  index = 1500

  requires_system_checks = False

  def get_version(self):
    return VERSION


  def add_arguments(self, parser):
    parser.add_argument(
      '--user', help='User running the command'
      )
    parser.add_argument(
      '--database', default=DEFAULT_DB_ALIAS,
      help='Nominates the frePPLe database to load'
      )
    parser.add_argument(
      '--task', type=int,
      help='Task identifier (generated automatically if not provided)'
      )


  @staticmethod
  def getHTML(request):
    if 'freppledb.erpconnection' in settings.INSTALLED_APPS:
      context = RequestContext(request)

      template = Template('''
        {% load i18n %}
        <form role="form" method="post" action="{{request.prefix}}/execute/launch/erp2frepple/">{% csrf_token %}
        <table>
          <tr>
            <td style="vertical-align:top; padding: 15px">
               <button  class="btn btn-primary"  type="submit" value="{% trans "launch"|capfirst %}">{% trans "launch"|capfirst %}</button>
            </td>
            <td  style="padding: 0px 15px;">{% trans "Export erp data to frePPLe." %}
            </td>
          </tr>
        </table>
        </form>
      ''')
      return template.render(context)
    else:
      return None


  def handle(self, **options):
    '''
    Uploads approved operationplans to the ERP system.
    '''

    # Select the correct frePPLe scenario database
    self.database = options['database']
    if self.database not in settings.DATABASES.keys():
      raise CommandError("No database settings known for '%s'" % self.database)
    self.cursor_frepple = connections[self.database].cursor()

    # FrePPLe user running this task
    if options['user']:
      try:
        self.user = User.objects.all().using(self.database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      self.user = None

    # FrePPLe task identifier
    if options['task']:
      try:
        self.task = Task.objects.all().using(self.database).get(pk=options['task'])
      except:
        raise CommandError("Task identifier not found")
      if self.task.started or self.task.finished or self.task.status != "Waiting" or self.task.name != 'frepple2erp':
        raise CommandError("Invalid task identifier")
    else:
      now = datetime.now()
      self.task = Task(name='frepple2erp', submitted=now, started=now, status='0%', user=self.user)
    self.task.processid = os.getpid()
    self.task.save(using=self.database)

    try:
      # Open database connection
      print("Connecting to the ERP database")
      with getERPconnection() as erp_connection:
        self.cursor_erp = erp_connection.cursor(self.database)
        try:
          self.extractPurchaseOrders()
          self.task.status = '33%'
          self.task.save(using=self.database)

          self.extractDistributionOrders()
          self.task.status = '66%'
          self.task.save(using=self.database)

          self.extractManufacturingOrders()
          self.task.status = '100%'
          self.task.save(using=self.database)

          # Optional extra planning output the ERP might be interested in:
          #  - planned delivery date of sales orders
          #  - safety stock (Enterprise Edition only)
          #  - reorder quantities (Enterprise Edition only)
          #  - forecast (Enterprise Edition only)
          self.task.status = 'Done'
        finally:
          self.cursor_erp.close()
    except Exception as e:
      self.task.status = 'Failed'
      self.task.message = 'Failed: %s' % e
    self.task.finished = datetime.now()
    self.task.processid = None
    self.task.save(using=self.database)
    self.cursor_frepple.close()


  def extractPurchaseOrders(self):
    '''
    Export purchase orders from frePPLe.
    We export:
      - approved purchase orders.
      - proposed purchase orders that start within the next day and have a total cost below $500.
    '''
    print("Start exporting purchase orders")
    self.cursor_frepple.execute('''
      select
        item_id, location_id, supplier_id, quantity, startdate, enddate
      from operationplan
      inner join item on item_id = item.name
      where type = 'PO'
        and (
          status = 'approved'
          or (status = 'proposed' and quantity * cost < 500 and startdate < now() + interval '1 day')
          )
      order by supplier_id
      ''')
    output = self.cursor_frepple.fetchall()
    self.cursor_erp.executemany('''
      insert into test
      (item, location, location2, quantity, startdate, enddate)
      values (?, ?, ?, ?, ?, ?)
      ''', output)


  def extractDistributionOrders(self):
    '''
    Export distribution orders from frePPLe.
    We export:
      - approved distribution orders.
      - proposed distribution orders that start within the next day and have a total cost below $500.
    '''
    print("Start exporting distribution orders")
    self.cursor_frepple.execute('''
      select
        item_id, destination_id, origin_id, quantity, startdate, enddate
      from operationplan
      inner join item on item_id = item.name
      where type = 'DO'
        and (
          status = 'approved' 
          or (status = 'proposed' and quantity * cost < 500 and startdate < now() + interval '1 day')
          )
      order by origin_id, destination_id
      ''')
    output = self.cursor_frepple.fetchall()
    self.cursor_erp.executemany('''
      insert into test
      (item, location, location2, quantity, startdate, enddate)
      values (?, ?, ?, ?, ?, ?)
      ''', output)


  def extractManufacturingOrders(self):
    '''
    Export manufacturing orders from frePPLe.
    We export:
      - approved manufacturing orders.
      - proposed manufacturing orders that start within the next day.
    '''
    print("Start exporting manufacturing orders")
    self.cursor_frepple.execute('''
      select
        item_id, location_id, operation_id, quantity, startdate, enddate
      from operationplan
      where type = 'MO'
        and (
          status = 'approved'
          or (status = 'proposed' and startdate < now() + interval '1 day')
          )
      order by operation_id
      ''')
    output = self.cursor_frepple.fetchall()
    self.cursor_erp.executemany('''
      insert into test
      (item, location, location2, quantity, startdate, enddate)
      values (?, ?, ?, ?, ?, ?)
      ''', output)
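
A minimal usage sketch for launching the command above from Python code instead of the execution screen. The command name 'frepple2erp' is an assumption taken from the task name used in handle(), and the module would need to live in an app's management/commands/ folder for Django to register it.

# Hedged usage sketch: assumes the class above is saved as
# <someapp>/management/commands/frepple2erp.py so Django picks it up.
from django.core import management

management.call_command(
  'frepple2erp',          # assumed command name (matches the task name used above)
  database='default',     # frePPLe scenario database to read from
  user='admin',           # hypothetical user name
)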
Example #2
  def handle(self, **options):
    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    # Pick up the options
    if 'start' in options:
      start = options['start'] or '2011-1-1'
    else:
      start = '2011-1-1'
    if 'end' in options:
      end = options['end'] or '2019-1-1'
    else:
      end = '2019-1-1'
    if 'weekstart' in options:
      weekstart = int(options['weekstart'])
      if weekstart < 0 or weekstart > 6:
        raise CommandError("Invalid weekstart %s" % weekstart)
    else:
      weekstart = 1
    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'generate buckets':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='generate buckets', submitted=now, started=now, status='0%', user=user, arguments="--start=%s --end=%s --weekstart=%s" % (start, end, weekstart))
      task.save(using=database)

      # Validate the date arguments
      try:
        curdate = datetime.strptime(start, '%Y-%m-%d')
        enddate = datetime.strptime(end, '%Y-%m-%d')
      except Exception as e:
        raise CommandError("Date is not matching format YYYY-MM-DD")

      with transaction.atomic(using=database, savepoint=False):
        # Delete previous contents
        connections[database].cursor().execute(
          "delete from common_bucketdetail where bucket_id in ('year','quarter','month','week','day')"
          )
        connections[database].cursor().execute(
          "delete from common_bucket where name in ('year','quarter','month','week','day')"
          )

        # Create buckets
        y = Bucket(name='year', description='Yearly time buckets', level=1)
        q = Bucket(name='quarter', description='Quarterly time buckets', level=2)
        m = Bucket(name='month', description='Monthly time buckets', level=3)
        w = Bucket(name='week', description='Weekly time buckets', level=4)
        d = Bucket(name='day', description='Daily time buckets', level=5)
        y.save(using=database)
        q.save(using=database)
        m.save(using=database)
        w.save(using=database)
        d.save(using=database)

        # Loop over all days in the chosen horizon
        prev_year = None
        prev_quarter = None
        prev_month = None
        prev_week = None
        while curdate < enddate:
          month = int(curdate.strftime("%m"))  # an integer in the range 1 - 12
          quarter = (month - 1) // 3 + 1       # an integer in the range 1 - 4
          year = int(curdate.strftime("%Y"))
          dayofweek = int(curdate.strftime("%w"))  # day of the week, 0 = sunday, 1 = monday, ...
          year_start = datetime(year, 1, 1)
          year_end = datetime(year + 1, 1, 1)
          week_start = curdate - timedelta((dayofweek + 6) % 7 + 1 - weekstart)
          week_end = curdate - timedelta((dayofweek + 6) % 7 - 6 - weekstart)
          if week_start < year_start:
            week_start = year_start
          if week_end > year_end:
            week_end = year_end

          # Create buckets
          if year != prev_year:
            prev_year = year
            BucketDetail(
              bucket=y,
              name=str(year),
              startdate=year_start,
              enddate=year_end
              ).save(using=database)
          if quarter != prev_quarter:
            prev_quarter = quarter
            BucketDetail(
              bucket=q,
              name="%02d Q%s" % (year - 2000, quarter),
              startdate=date(year, quarter * 3 - 2, 1),
              enddate=date(year + quarter // 4, quarter * 3 + 1 - 12 * (quarter // 4), 1)
              ).save(using=database)
          if month != prev_month:
            prev_month = month
            BucketDetail(
              bucket=m,
              name=curdate.strftime("%b %y"),
              startdate=date(year, month, 1),
              enddate=date(year + month // 12, month + 1 - 12 * (month // 12), 1),
              ).save(using=database)
          if week_start != prev_week:
            prev_week = week_start
            BucketDetail(
              bucket=w,
              name=curdate.strftime("%y W%W"),
              startdate=week_start,
              enddate=week_end,
              ).save(using=database)
          BucketDetail(
            bucket=d,
            name=str(curdate.date()),
            startdate=curdate,
            enddate=curdate + timedelta(1),
            ).save(using=database)

          # Next date
          curdate = curdate + timedelta(1)

      # Log success
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      if task:
        task.save(using=database)
      settings.DEBUG = tmp_debug
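
A quick standalone check of the week-boundary arithmetic used in the loop above, with weekstart=1 (Monday) and an arbitrary sample date; it simply re-runs the same formulas.

# Re-runs the week_start / week_end formulas from the command for one date.
from datetime import datetime, timedelta

weekstart = 1                                     # Monday
curdate = datetime(2018, 5, 16)                   # a Wednesday
dayofweek = int(curdate.strftime("%w"))           # 3 (0 = Sunday)
week_start = curdate - timedelta((dayofweek + 6) % 7 + 1 - weekstart)
week_end = curdate - timedelta((dayofweek + 6) % 7 - 6 - weekstart)
print(week_start.date(), week_end.date())         # 2018-05-14 and 2018-05-21, i.e. Monday to the next Monday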
Example #3
  def handle(self, **options):
    # Pick up the options
    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    task = None
    param = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'plan simulation':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='plan simulation', submitted=now, started=now, status='0%', user=user)

      # Validate options
      if 'horizon' in options:
        horizon = int(options['horizon'])
        if horizon < 0:
          raise ValueError("Invalid horizon: %s" % options['horizon'])
        task.arguments = "--horizon=%d" % horizon
      else:
        horizon = 60
      if 'step' in options:
        step = int(options['step'])
        if step < 0:
          raise ValueError("Invalid step: %s" % options['step'])
        task.arguments = "--step=%d" % step
      else:
        step = 1
      if 'verbosity' in options:
        verbosity = int(options['verbosity'])
      else:
        verbosity = 0

      # Log task
      task.save(using=database)

      # Load the initial status
      if options.get('initial', None):
        if verbosity > 0:
          print("Erasing simulation database")
        management.call_command('frepple_flush', database=database, verbosity=verbosity)
        if verbosity > 0:
          print("Loading initial data")
        management.call_command('loaddata', options.get('initial'), database=database, verbosity=verbosity)

      # Get current date
      param = Parameter.objects.all().using(database).get_or_create(name='currentdate')[0]
      try:
        curdate = datetime.strptime(param.value, "%Y-%m-%d %H:%M:%S")
      except:
        curdate = datetime.now()
      curdate = curdate.date()

      # Compute how many simulation steps we need
      bckt_list = []
      tmp = 0
      while tmp <= horizon:
        bckt_list.append( curdate + timedelta(days=tmp) )
        tmp += step
      bckt_list_len = len(bckt_list)

      # Create the simulator class
      if options.get('simulator', None):
        cls = load_class(options['simulator'])
        simulator = cls(database=database, verbosity=verbosity)
      else:
        simulator = Simulator(database=database, verbosity=verbosity)
      simulator.buckets = 1

      # Loop over all dates in the simulation horizon
      idx = 0
      strt = None
      nd = None
      for bckt in bckt_list:
        if nd:
          strt = nd
          nd = bckt
        else:
          nd = bckt
          continue

        # Start message
        task.status = "%.0f%%" % (100.0 * idx / bckt_list_len)
        task.message = 'Simulating bucket from %s to %s ' % (strt, nd)
        task.save(using=database)
        idx += 1
        simulator.buckets += 1

        if verbosity > 0:
          print("\nStart simulating bucket from %s to %s (%s out of %s)" % (strt, nd, idx, bckt_list_len))

        # Update currentdate parameter
        param.value = strt.strftime("%Y-%m-%d %H:%M:%S")
        param.save(using=database)

        # Initialization of the bucket
        if verbosity > 1:
          print("  Starting the bucket")
        with transaction.atomic(using=database):
          simulator.start_bucket(strt, nd)

        # Generate new demand records
        if verbosity > 1:
          print("  Receive new orders from customers")
        with transaction.atomic(using=database):
          simulator.generate_customer_demand(strt, nd)

        # Generate the constrained plan
        if verbosity > 1:
          print("  Generating plan...")
        management.call_command('frepple_run', database=database)

        if options['pause']:
          print("\nYou can analyze the plan in the bucket in the user interface now...")
          input("\nPress Enter to continue the simulation...\n")

        # Release new purchase orders
        if verbosity > 1:
          print("  Create new purchase orders")
        with transaction.atomic(using=database):
          simulator.create_purchase_orders(strt, nd)

        # Release new manufacturing orders
        if verbosity > 1:
          print("  Create new manufacturing orders")
        with transaction.atomic(using=database):
          simulator.create_manufacturing_orders(strt, nd)

        # Release new distribution orders
        if verbosity > 1:
          print("  Create new distribution orders")
        with transaction.atomic(using=database):
          simulator.create_distribution_orders(strt, nd)

        # Receive open purchase orders
        if verbosity > 1:
          print("  Receive open purchase orders")
        with transaction.atomic(using=database):
          simulator.receive_purchase_orders(strt, nd)

        # Receive open distribution orders
        if verbosity > 1:
          print("  Receive open distribution orders")
        with transaction.atomic(using=database):
          simulator.receive_distribution_orders(strt, nd)

        # Finish open manufacturing orders
        if verbosity > 1:
          print("  Finish open manufacturing orders")
        with transaction.atomic(using=database):
          simulator.finish_manufacturing_orders(strt, nd)

        # Ship demand to customers
        if verbosity > 1:
          print("  Ship orders to customers")
        with transaction.atomic(using=database):
          simulator.ship_customer_demand(strt, nd)

        # Finish of the bucket
        if verbosity > 1:
          print("  Ending the bucket")
        with transaction.atomic(using=database):
          simulator.end_bucket(strt, nd)

      # Report statistics from the simulation.
      # The simulator class collected these results during its run.
      if verbosity > 1:
        print("Displaying final simulation metrics")
      with transaction.atomic(using=database):
        simulator.show_metrics()

      # Task update
      task.status = 'Done'
      task.message = "Simulated from %s till %s" % (bckt_list[0], bckt_list[-1])
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      # Final task status
      if task:
        task.save(using=database)
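
For reference, the bucket list computed at the start of the loop above is just a range of dates spaced --step days apart; isolated with a 14-day horizon and a 7-day step it looks like this.

# Same loop as in the command, isolated: builds the simulation bucket boundaries.
from datetime import date, timedelta

curdate, horizon, step = date(2019, 1, 1), 14, 7
bckt_list = []
tmp = 0
while tmp <= horizon:
  bckt_list.append(curdate + timedelta(days=tmp))
  tmp += step
print(bckt_list)   # [2019-01-01, 2019-01-08, 2019-01-15]; the main loop then simulates the (strt, nd) pairs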
Example #4
  def handle(self, *args, **options):

    # Pick up the options
    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    transaction.enter_transaction_management(using=database)
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'restore database':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='restore database', submitted=now, started=now, status='0%', user=user)
      task.arguments = args and args[0] or None
      task.save(using=database)
      transaction.commit(using=database)

      # Validate options
      if not args:
        raise CommandError("No dump file specified")
      if not os.path.isfile(os.path.join(settings.FREPPLE_LOGDIR, args[0])):
        raise CommandError("Dump file not found")

      # Run the restore command
      if settings.DATABASES[database]['ENGINE'] == 'django.db.backends.sqlite3':
        # SQLITE
        shutil.copy2(os.path.abspath(os.path.join(settings.FREPPLE_LOGDIR, args[0])), settings.DATABASES[database]['NAME'])
      elif settings.DATABASES[database]['ENGINE'] == 'django.db.backends.mysql':
        # MYSQL
        cmd = [
          'mysql',
          '--password=%s' % settings.DATABASES[database]['PASSWORD'],
          '--user=%s' % settings.DATABASES[database]['USER']
          ]
        if settings.DATABASES[database]['HOST']:
          cmd.append("--host=%s " % settings.DATABASES[database]['HOST'])
        if settings.DATABASES[database]['PORT']:
          cmd.append("--port=%s " % settings.DATABASES[database]['PORT'])
        cmd.append(settings.DATABASES[database]['NAME'])
        cmd.append('<%s' % os.path.abspath(os.path.join(settings.FREPPLE_LOGDIR, args[0])))
        ret = subprocess.call(" ".join(cmd), shell=True)  # Pass a single string so the shell interprets the < redirection
        if ret:
          raise Exception("Run of mysql failed")
      elif settings.DATABASES[database]['ENGINE'] == 'django.db.backends.oracle':
        # ORACLE
        if settings.DATABASES[database]['HOST'] and settings.DATABASES[database]['PORT']:
          # The setting 'NAME' contains the SID name
          dsn = "%s/%s@//%s:%s/%s" % (
            settings.DATABASES[database]['USER'],
            settings.DATABASES[database]['PASSWORD'],
            settings.DATABASES[database]['HOST'],
            settings.DATABASES[database]['PORT'],
            settings.DATABASES[database]['NAME']
            )
        else:
          # The setting 'NAME' contains the TNS name
          dsn = "%s/%s@%s" % (
            settings.DATABASES[database]['USER'],
            settings.DATABASES[database]['PASSWORD'],
            settings.DATABASES[database]['NAME']
            )
        cmd = [
          "impdp",
          dsn,
          "table_exists_action=replace",
          "nologfile=Y",
          "directory=frepple_logdir",
          "dumpfile=%s" % args[0]
          ]
        ret = subprocess.call(cmd)
        if ret:
          raise Exception("Run of impdp failed")
      elif settings.DATABASES[database]['ENGINE'] == 'django.db.backends.postgresql_psycopg2':
        # POSTGRESQL
        # Commenting out the next line is a little more secure, but requires you to create a .pgpass file.
        os.environ['PGPASSWORD'] = settings.DATABASES[database]['PASSWORD']
        cmd = [ "psql", '--username=%s' % settings.DATABASES[database]['USER'] ]
        if settings.DATABASES[database]['HOST']:
          cmd.append("--host=%s" % settings.DATABASES[database]['HOST'])
        if settings.DATABASES[database]['PORT']:
          cmd.append("--port=%s " % settings.DATABASES[database]['PORT'])
        cmd.append(settings.DATABASES[database]['NAME'])
        cmd.append('<%s' % os.path.abspath(os.path.join(settings.FREPPLE_LOGDIR, args[0])))
        ret = subprocess.call(" ".join(cmd), shell=True)  # Pass a single string so the shell interprets the < redirection
        if ret:
          raise Exception("Run of run psql failed")
      else:
        raise Exception('Database backup command not supported for engine %s' % settings.DATABASES[database]['ENGINE'])

      # Task update
      # We need to recreate a new task record, since the previous one is lost during the restoration.
      task = Task(
        name='restore database', submitted=task.submitted, started=task.started,
        arguments=task.arguments, status='Done', finished=datetime.now(),
        user=task.user
        )

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      # Commit it all, even in case of exceptions
      if task:
        task.save(using=database)
      try:
        transaction.commit(using=database)
      except:
        pass
      transaction.leave_transaction_management(using=database)
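
As a side note, the shell redirection used above can also be avoided by feeding the dump file to the client's stdin; a hedged alternative sketch for the PostgreSQL branch (not the project's code, just one way to call psql with subprocess).

# Alternative sketch: restore a PostgreSQL dump without shell=True by
# streaming the dump file to psql's stdin.
import subprocess

def run_psql_restore(user, host, port, dbname, dumpfile):
  cmd = ['psql', '--username=%s' % user]
  if host:
    cmd.append('--host=%s' % host)
  if port:
    cmd.append('--port=%s' % port)
  cmd.append(dbname)
  with open(dumpfile, 'rb') as f:
    return subprocess.call(cmd, stdin=f)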
Example #5
  def handle(self, *args, **options):
    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    # Pick up options
    if 'force' in options: force = options['force']
    else: force = False
    test = 'FREPPLE_TEST' in os.environ
    if 'user' in options and options['user']:
      try: user = User.objects.all().get(username=options['user'])
      except: raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    # Initialize the task
    now = datetime.now()
    task = None
    if 'task' in options and options['task']:
      try: task = Task.objects.all().get(pk=options['task'])
      except: raise CommandError("Task identifier not found")
      if task.started or task.finished or task.status != "Waiting" or task.name != 'copy scenario':
        raise CommandError("Invalid task identifier")
      task.status = '0%'
      task.started = now
    else:
      task = Task(name='copy scenario', submitted=now, started=now, status='0%', user=user)
    task.save()

    # Synchronize the scenario table with the settings
    Scenario.syncWithSettings()

    # Validate the arguments
    destinationscenario = None
    try:
      if len(args) != 2:
        raise CommandError("Command takes exactly 2 arguments.")
      task.arguments = "%s %s" % (args[0], args[1])
      task.save()
      source = args[0]
      try:
        sourcescenario = Scenario.objects.get(pk=source)
      except:
        raise CommandError("No source database defined with name '%s'" % source)
      destination = args[1]
      try:
        destinationscenario = Scenario.objects.get(pk=destination)
      except:
        raise CommandError("No destination database defined with name '%s'" % destination)
      if source == destination:
        raise CommandError("Can't copy a schema on itself")
      if settings.DATABASES[source]['ENGINE'] != settings.DATABASES[destination]['ENGINE']:
        raise CommandError("Source and destination scenarios have a different engine")
      if sourcescenario.status != u'In use':
        raise CommandError("Source scenario is not in use")
      if destinationscenario.status != u'Free' and not force:
        raise CommandError("Destination scenario is not free")

      # Logging message - always logging in the default database
      destinationscenario.status = u'Busy'
      destinationscenario.save()

      # Copying the data
      if settings.DATABASES[source]['ENGINE'] == 'django.db.backends.postgresql_psycopg2':
        ret = os.system("pg_dump -c -U%s -Fp %s%s%s | psql -U%s %s%s%s" % (
          settings.DATABASES[source]['USER'],
          settings.DATABASES[source]['HOST'] and ("-h %s " % settings.DATABASES[source]['HOST']) or '',
          settings.DATABASES[source]['PORT'] and ("-p %s " % settings.DATABASES[source]['PORT']) or '',
          test and settings.DATABASES[source]['TEST_NAME'] or settings.DATABASES[source]['NAME'],
          settings.DATABASES[destination]['USER'],
          settings.DATABASES[destination]['HOST'] and ("-h %s " % settings.DATABASES[destination]['HOST']) or '',
          settings.DATABASES[destination]['PORT'] and ("-p %s " % settings.DATABASES[destination]['PORT']) or '',
          test and settings.DATABASES[destination]['TEST_NAME'] or settings.DATABASES[destination]['NAME'],
          ))
        if ret: raise Exception('Exit code of the database copy command is %d' % ret)
      elif settings.DATABASES[source]['ENGINE'] == 'django.db.backends.sqlite3':
        # A plain copy of the database file
        if test:
          shutil.copy2(settings.DATABASES[source]['TEST_NAME'], settings.DATABASES[destination]['TEST_NAME'])
        else:
          shutil.copy2(settings.DATABASES[source]['NAME'], settings.DATABASES[destination]['NAME'])
      elif settings.DATABASES[source]['ENGINE'] == 'django.db.backends.mysql':
        ret = os.system("mysqldump %s --password=%s --user=%s %s%s--quick --compress --extended-insert --add-drop-table | mysql %s --password=%s --user=%s %s%s" % (
          test and settings.DATABASES[source]['TEST_NAME'] or settings.DATABASES[source]['NAME'],
          settings.DATABASES[source]['PASSWORD'],
          settings.DATABASES[source]['USER'],
          settings.DATABASES[source]['HOST'] and ("--host=%s " % settings.DATABASES[source]['HOST']) or '',
          settings.DATABASES[source]['PORT'] and ("--port=%s " % settings.DATABASES[source]['PORT']) or '',
          test and settings.DATABASES[destination]['TEST_NAME'] or settings.DATABASES[destination]['NAME'],
          settings.DATABASES[destination]['PASSWORD'],
          settings.DATABASES[destination]['USER'],
          settings.DATABASES[destination]['HOST'] and ("--host=%s " % settings.DATABASES[destination]['HOST']) or '',
          settings.DATABASES[destination]['PORT'] and ("--port=%s " % settings.DATABASES[destination]['PORT']) or '',
          ))
        if ret: raise Exception('Exit code of the database copy command is %d' % ret)
      elif settings.DATABASES[source]['ENGINE'] == 'django.db.backends.oracle':
        try:
          try: os.unlink(os.path.join(settings.FREPPLE_LOGDIR,'frepple.dmp'))
          except: pass
          ret = os.system("expdp %s/%s@//%s:%s/%s schemas=%s directory=frepple_logdir nologfile=Y dumpfile=frepple.dmp" % (
            test and settings.DATABASES[source]['TEST_USER'] or settings.DATABASES[source]['USER'],
            settings.DATABASES[source]['PASSWORD'],
            settings.DATABASES[source]['HOST'] or 'localhost',
            settings.DATABASES[source]['PORT'] or '1521',
            test and settings.DATABASES[source]['TEST_NAME'] or settings.DATABASES[source]['NAME'],
            test and settings.DATABASES[source]['TEST_USER'] or settings.DATABASES[source]['USER'],
            ))
          if ret: raise Exception('Exit code of the database export command is %d' % ret)
          ret = os.system("impdp %s/%s@//%s:%s/%s remap_schema=%s:%s table_exists_action=replace directory=frepple_logdir nologfile=Y dumpfile=frepple.dmp" % (
            test and settings.DATABASES[destination]['TEST_USER'] or settings.DATABASES[destination]['USER'],
            settings.DATABASES[destination]['PASSWORD'],
            settings.DATABASES[destination]['HOST'],
            settings.DATABASES[destination]['PORT'],
            test and settings.DATABASES[destination]['TEST_NAME'] or settings.DATABASES[destination]['NAME'],
            test and settings.DATABASES[source]['TEST_USER'] or settings.DATABASES[source]['USER'],
            test and settings.DATABASES[destination]['TEST_USER'] or settings.DATABASES[destination]['USER'],
            ))
          if ret: raise Exception('Exit code of the database import command is %d' % ret)
        finally:
          try: os.unlink(os.path.join(settings.FREPPLE_LOGDIR,'frepple.dmp'))
          except: pass
      else:
        raise Exception('Copy command not supported for database engine %s' % settings.DATABASES[source]['ENGINE'])

      # Update the scenario table
      destinationscenario.status = 'In use'
      destinationscenario.lastrefresh = datetime.today()
      if 'description' in options:
        destinationscenario.description = options['description']
      else:
        destinationscenario.description = "Copied from scenario '%s'" % source
      destinationscenario.save()

      # Logging message
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      if destinationscenario and destinationscenario.status == u'Busy':
        destinationscenario.status = u'Free'
        destinationscenario.save()
      raise e

    finally:
      if task: task.save()
      settings.DEBUG = tmp_debug
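
To make the PostgreSQL branch above concrete, this snippet expands the same pg_dump | psql format string with made-up connection settings and just prints the shell command that os.system() would run.

# Illustration only: fake settings dicts standing in for settings.DATABASES entries.
src = {'USER': 'frepple', 'HOST': 'db1', 'PORT': '', 'NAME': 'scenario_src'}
dst = {'USER': 'frepple', 'HOST': 'db1', 'PORT': '', 'NAME': 'scenario_dst'}
cmd = "pg_dump -c -U%s -Fp %s%s%s | psql -U%s %s%s%s" % (
  src['USER'],
  src['HOST'] and ("-h %s " % src['HOST']) or '',
  src['PORT'] and ("-p %s " % src['PORT']) or '',
  src['NAME'],
  dst['USER'],
  dst['HOST'] and ("-h %s " % dst['HOST']) or '',
  dst['PORT'] and ("-p %s " % dst['PORT']) or '',
  dst['NAME'],
  )
print(cmd)   # pg_dump -c -Ufrepple -Fp -h db1 scenario_src | psql -Ufrepple -h db1 scenario_dst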
Example #6
    def handle(self, **options):
        """
    Uploads approved operationplans to the ERP system.
    """

        # Select the correct frePPLe scenario database
        self.database = options["database"]
        if self.database not in settings.DATABASES.keys():
            raise CommandError("No database settings known for '%s'" %
                               self.database)
        self.cursor_frepple = connections[self.database].cursor()

        # FrePPLe user running this task
        if options["user"]:
            try:
                self.user = (User.objects.all().using(
                    self.database).get(username=options["user"]))
            except Exception:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            self.user = None

        # FrePPLe task identifier
        if options["task"]:
            try:
                self.task = (Task.objects.all().using(
                    self.database).get(pk=options["task"]))
            except Exception:
                raise CommandError("Task identifier not found")
            if (self.task.started or self.task.finished
                    or self.task.status != "Waiting"
                    or self.task.name != "frepple2erp"):
                raise CommandError("Invalid task identifier")
        else:
            now = datetime.now()
            self.task = Task(
                name="frepple2erp",
                submitted=now,
                started=now,
                status="0%",
                user=self.user,
            )
        self.task.processid = os.getpid()
        self.task.save(using=self.database)

        try:
            # Open database connection
            print("Connecting to the ERP database")
            with getERPconnection() as erp_connection:
                self.cursor_erp = erp_connection.cursor(self.database)
                try:
                    self.extractPurchaseOrders()
                    self.task.status = "33%"
                    self.task.save(using=self.database)

                    self.extractDistributionOrders()
                    self.task.status = "66%"
                    self.task.save(using=self.database)

                    self.extractManufacturingOrders()
                    self.task.status = "100%"
                    self.task.save(using=self.database)

                    # Optional extra planning output the ERP might be interested in:
                    #  - planned delivery date of sales orders
                    #  - safety stock (Enterprise Edition only)
                    #  - reorder quantities (Enterprise Edition only)
                    #  - forecast (Enterprise Edition only)
                    self.task.status = "Done"
                finally:
                    self.cursor_erp.close()
        except Exception as e:
            self.task.status = "Failed"
            self.task.message = "Failed: %s" % e
        self.task.finished = datetime.now()
        self.task.processid = None
        self.task.save(using=self.database)
        self.cursor_frepple.close()
Example #7
  def handle(self, **options):
    # Pick up the options
    now = datetime.now()
    self.database = options['database']
    if self.database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % self.database )
    if options['user']:
      try:
        self.user = User.objects.all().using(self.database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      self.user = None
    timestamp = now.strftime("%Y%m%d%H%M%S")
    if self.database == DEFAULT_DB_ALIAS:
      logfile = 'importfromfolder-%s.log' % timestamp
    else:
      logfile = 'importfromfolder_%s-%s.log' % (self.database, timestamp)

    try:
      handler = logging.FileHandler(os.path.join(settings.FREPPLE_LOGDIR, logfile), encoding='utf-8')
      # handler.setFormatter(logging.Formatter(settings.LOGGING['formatters']['simple']['format']))
      logger.addHandler(handler)
      logger.propagate = False
    except Exception as e:
      print("%s Failed to open logfile %s: %s" % (datetime.now(), logfile, e))

    task = None
    errors = [0, 0]
    returnederrors = [0, 0]
    try:
      setattr(_thread_locals, 'database', self.database)
      # Initialize the task
      if options['task']:
        try:
          task = Task.objects.all().using(self.database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name not in ('frepple_importfromfolder', 'importfromfolder'):
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
        task.logfile = logfile
      else:
        task = Task(name='importfromfolder', submitted=now, started=now, status='0%', user=self.user, logfile=logfile)
      task.processid = os.getpid()
      task.save(using=self.database)

      # Choose the right self.delimiter and language
      self.delimiter = ';' if get_format('DECIMAL_SEPARATOR', settings.LANGUAGE_CODE, True) == ',' else ','
      translation.activate(settings.LANGUAGE_CODE)

      # Execute
      if 'FILEUPLOADFOLDER' in settings.DATABASES[self.database] \
        and os.path.isdir(settings.DATABASES[self.database]['FILEUPLOADFOLDER']):

        # Open the logfile
        logger.info("%s Started importfromfolder\n" % datetime.now().replace(microsecond=0))

        all_models = [ (ct.model_class(), ct.pk) for ct in ContentType.objects.all() if ct.model_class() ]
        models = []
        for ifile in os.listdir(settings.DATABASES[self.database]['FILEUPLOADFOLDER']):
          if not ifile.lower().endswith(('.csv', '.csv.gz', '.xlsx')):
            continue
          filename0 = ifile.split('.')[0]

          model = None
          contenttype_id = None
          for m, ct in all_models:
            if matchesModelName(filename0, m):
              model = m
              contenttype_id = ct
              logger.info("%s Matched a model to file: %s" % (datetime.now().replace(microsecond=0), ifile))
              break

          if not model or model in EXCLUDE_FROM_BULK_OPERATIONS:
            logger.info("%s Ignoring data in file: %s" % (datetime.now().replace(microsecond=0), ifile))
          elif self.user and not self.user.has_perm('%s.%s' % (model._meta.app_label, get_permission_codename('add', model._meta))):
            # Check permissions
            logger.info("%s You don't have permissions to add: %s" % (datetime.now().replace(microsecond=0), ifile))
          else:
            deps = set([model])
            GridReport.dependent_models(model, deps)

            models.append( (ifile, model, contenttype_id, deps) )

        # Sort the list of models, based on dependencies between models
        models = GridReport.sort_models(models)

        i = 0
        cnt = len(models)
        for ifile, model, contenttype_id, dependencies in models:
          task.status = str(int(10 + i / cnt * 80)) + '%'
          task.message = 'Processing data file %s' % ifile
          task.save(using=self.database)
          i += 1
          filetoparse = os.path.join(os.path.abspath(settings.DATABASES[self.database]['FILEUPLOADFOLDER']), ifile)
          if ifile.lower().endswith('.xlsx'):
            logger.info("%s Started processing data in Excel file: %s" % (datetime.now().replace(microsecond=0), ifile))
            returnederrors = self.loadExcelfile(model, filetoparse)
            errors[0] += returnederrors[0]
            errors[1] += returnederrors[1]
            logger.info("%s Finished processing data in file: %s" % (datetime.now().replace(microsecond=0), ifile))
          else:
            logger.info("%s Started processing data in CSV file: %s" % (datetime.now().replace(microsecond=0), ifile))
            returnederrors = self.loadCSVfile(model, filetoparse)
            errors[0] += returnederrors[0]
            errors[1] += returnederrors[1]
            logger.info("%s Finished processing data in CSV file: %s" % (datetime.now().replace(microsecond=0), ifile))
      else:
        errors[0] += 1
        cnt = 0
        logger.error("%s Failed, folder does not exist" % datetime.now().replace(microsecond=0))

      # Task update
      if errors[0] > 0:
        task.status = 'Failed'
        if not cnt:
          task.message = "Destination folder does not exist"
        else:
          task.message = "Uploaded %s data files with %s errors and %s warnings" % (cnt, errors[0], errors[1])
      else:
        task.status = 'Done'
        task.message = "Uploaded %s data files with %s warnings" % (cnt, errors[1])
      task.finished = datetime.now()

    except KeyboardInterrupt:
      if task:
        task.status = 'Cancelled'
        task.message = 'Cancelled'
      logger.info('%s Cancelled\n' % datetime.now().replace(microsecond=0))

    except Exception as e:
      logger.error("%s Failed" % datetime.now().replace(microsecond=0))
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
      raise e

    finally:
      setattr(_thread_locals, 'database', None)
      if task:
        if errors[0] == 0:
          task.status = 'Done'
        else:
          task.status = 'Failed'
        task.processid = None
        task.finished = datetime.now()
        task.save(using=self.database)
      logger.info('%s End of importfromfolder\n' % datetime.now().replace(microsecond=0))
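
A small isolated illustration of the file selection step above: only .csv, .csv.gz and .xlsx files are considered, and the part of the name before the first dot is what gets matched against model names (the file names here are made up).

# Isolated version of the extension filter and base-name extraction used above.
files = ['item.csv', 'demand.csv.gz', 'buffer.xlsx', 'notes.txt']
for ifile in files:
  if not ifile.lower().endswith(('.csv', '.csv.gz', '.xlsx')):
    continue                        # notes.txt is skipped
  filename0 = ifile.split('.')[0]   # 'item', 'demand', 'buffer'
  print(ifile, '->', filename0)     # this base name is what matchesModelName() compares to each model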
Example #8
  def handle(self, *args, **options):
    # Pick up the options
    now = datetime.now()
    self.database = options['database']
    if self.database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % self.database )

    if options['user']:
      try:
        self.user = User.objects.all().using(self.database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      self.user = None

    timestamp = now.strftime("%Y%m%d%H%M%S")
    if self.database == DEFAULT_DB_ALIAS:
      logfile = 'exporttofolder-%s.log' % timestamp
    else:
      logfile = 'exporttofolder_%s-%s.log' % (self.database, timestamp)

    try:
      handler = logging.FileHandler(os.path.join(settings.FREPPLE_LOGDIR, logfile), encoding='utf-8')
      # handler.setFormatter(logging.Formatter(settings.LOGGING['formatters']['simple']['format']))
      logger.addHandler(handler)
      logger.propagate = False
    except Exception as e:
      print("%s Failed to open logfile %s: %s" % (datetime.now(), logfile, e))

    task = None
    errors = 0
    try:
      # Initialize the task
      if options['task']:
        try:
          task = Task.objects.all().using(self.database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name not in ('frepple_exporttofolder', 'exporttofolder'):
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
        task.logfile = logfile
      else:
        task = Task(name='exporttofolder', submitted=now, started=now, status='0%', user=self.user, logfile=logfile)
      task.arguments = ' '.join(['"%s"' % i for i in args])
      task.save(using=self.database)

      # Execute
      if os.path.isdir(settings.DATABASES[self.database]['FILEUPLOADFOLDER']):
        if not os.path.isdir(os.path.join(settings.DATABASES[self.database]['FILEUPLOADFOLDER'], 'export')):
          try:
            os.makedirs(os.path.join(settings.DATABASES[self.database]['FILEUPLOADFOLDER'], 'export'))
          except OSError as exception:
            if exception.errno != errno.EEXIST:
              raise

        logger.info("%s Started export to folder\n" % datetime.now())

        cursor = connections[self.database].cursor()

        task.status = '0%'
        task.save(using=self.database)

        i = 0
        cnt = len(self.statements)

        # Calling all the pre-sql statements
        for stmt in self.pre_sql_statements:
          try:
            logger.info("Executing pre-statement '%s'" % stmt)
            cursor.execute(stmt)
            logger.info("%s record(s) modified" % cursor.rowcount)
          except:
            logger.error("An error occurred when executing statement '%s'" % stmt)

        for cfg in self.statements:
          # Validate filename
          filename = cfg.get('filename', None)
          if not filename:
            raise Exception("Missing filename in export configuration")
          folder = cfg.get('folder', None)
          if not folder:
            raise Exception("Missing folder in export configuration for %s" % filename)
          logger.info("%s Started export of %s" % (datetime.now(), filename))

          # Make sure export folder exists
          exportFolder = os.path.join(settings.DATABASES[self.database]['FILEUPLOADFOLDER'], folder)
          if not os.path.isdir(exportFolder):
            os.makedirs(exportFolder)

          try:
            reportclass = cfg.get('report', None)
            sql = cfg.get('sql', None)
            if reportclass:
              # Export from report class

              # Create a dummy request
              factory = RequestFactory()
              request = factory.get("/dummy/", cfg.get('data', {}))
              if self.user:
                request.user = self.user
              else:
                request.user = User.objects.all().get(username="******")
              request.database = self.database
              request.LANGUAGE_CODE = settings.LANGUAGE_CODE
              request.prefs = cfg.get('prefs', None)

              # Initialize the report
              if hasattr(reportclass, "initialize"):
                reportclass.initialize(request)
              if not reportclass._attributes_added and reportclass.model:
                reportclass._attributes_added = True
                for f in reportclass.getAttributeFields(reportclass.model):
                  reportclass.rows += (f,)
              if reportclass.hasTimeBuckets:
                reportclass.getBuckets(request)

              # Write the report file
              datafile = open(os.path.join(exportFolder, filename), "wb")
              if filename.endswith(".xlsx"):
                reportclass._generate_spreadsheet_data(request, datafile, **cfg.get('data', {}))
              elif filename.endswith(".csv"):
                for r in reportclass._generate_csv_data(request, **cfg.get('data', {})):
                  datafile.write(
                    r.encode(settings.CSV_CHARSET)
                    if isinstance(r, str) else r
                    )
              else:
                raise Exception("Unknown output format for %s" % filename)
            elif sql:
              # Exporting using SQL
              if filename.lower().endswith(".gz"):
                datafile = gzip.open(os.path.join(exportFolder, filename), "w")
              else:
                datafile = open(os.path.join(exportFolder, filename), "w")
              cursor.copy_expert(sql, datafile)
            else:
              raise Exception("Unknown export type for %s" % filename)
            datafile.close()
            i += 1

          except Exception as e:
            errors += 1
            logger.error("%s Failed to export to %s" % (datetime.now(), filename))
            if task:
              task.message = 'Failed to export %s' % filename

          task.status = str(int(i / cnt * 100)) + '%'
          task.save(using=self.database)

        logger.info("%s Exported %s file(s)\n" % (datetime.now(), cnt - errors))

        for stmt in self.post_sql_statements:
          try:
            logger.info("Executing post-statement '%s'" % stmt)
            cursor.execute(stmt)
            logger.info("%s record(s) modified" % cursor.rowcount)
          except:
            logger.error("An error occured when executing statement '%s'" % stmt)

      else:
        errors += 1
        logger.error("%s Failed, folder does not exist" % datetime.now())
        task.message = "Destination folder does not exist"
        task.save(using=self.database)

    except Exception as e:
      logger.error("%s Failed to export: %s" % (datetime.now(), e))
      errors += 1
      if task:
        task.message = 'Failed to export'

    finally:
      logger.info('%s End of export to folder\n' % datetime.now())
      if task:
        if not errors:
          task.status = '100%'
          task.message = "Exported %s data files" % (cnt)
        else:
          task.status = 'Failed'
          #  task.message = "Exported %s data files, %s failed" % (cnt-errors, errors)
        task.finished = datetime.now()
        task.save(using=self.database)
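
For context, each entry of self.statements read in the export loop above is a small dict; a hedged example of what such an entry could look like (the file name and SQL are made up, and the COPY ... TO STDOUT form is what cursor.copy_expert expects on PostgreSQL).

# Hypothetical export configuration entry, using only the keys the loop reads:
# 'filename', 'folder', and either 'sql' or 'report' (plus optional 'data' / 'prefs').
statements = [
  {
    'filename': 'purchaseorders.csv.gz',
    'folder': 'export',
    'sql': "COPY (select * from operationplan where type = 'PO') TO STDOUT WITH CSV HEADER",
  },
]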
Example #9
    def handle(self, **options):
        # Pick up the options
        now = datetime.now()
        self.database = options["database"]
        if self.database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               self.database)
        if options["user"]:
            try:
                self.user = (User.objects.all().using(
                    self.database).get(username=options["user"]))
            except Exception:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            self.user = None
        timestamp = now.strftime("%Y%m%d%H%M%S")
        if self.database == DEFAULT_DB_ALIAS:
            logfile = "importworkbook-%s.log" % timestamp
        else:
            logfile = "importworkbook_%s-%s.log" % (self.database, timestamp)

        task = None
        try:
            setattr(_thread_locals, "database", self.database)
            # Initialize the task
            if options["task"]:
                try:
                    task = (Task.objects.all().using(
                        self.database).get(pk=options["task"]))
                except Exception:
                    raise CommandError("Task identifier not found")
                if (task.started or task.finished or task.status != "Waiting"
                        or task.name
                        not in ("frepple_importworkbook", "importworkbook")):
                    raise CommandError("Invalid task identifier")
                task.status = "0%"
                task.started = now
            else:
                task = Task(
                    name="importworkbook",
                    submitted=now,
                    started=now,
                    status="0%",
                    user=self.user,
                )
            task.arguments = " ".join(options["file"])
            task.save(using=self.database)

            all_models = [(ct.model_class(), ct.pk)
                          for ct in ContentType.objects.all()
                          if ct.model_class()]
            try:
                with transaction.atomic(using=self.database):
                    # Find all models in the workbook
                    if "filename" not in locals():
                        filename = options["file"]
                    for file in filename:
                        wb = load_workbook(filename=file,
                                           read_only=True,
                                           data_only=True)
                        models = []
                        for ws_name in wb.sheetnames:
                            # Find the model
                            model = None
                            contenttype_id = None
                            for m, ct in all_models:
                                if matchesModelName(ws_name, m):
                                    model = m
                                    contenttype_id = ct
                                    break
                            if not model or model in EXCLUDE_FROM_BULK_OPERATIONS:
                                print(
                                    force_text(
                                        _("Ignoring data in worksheet: %s") %
                                        ws_name))
                                # yield '<div class="alert alert-warning">' + force_text(_("Ignoring data in worksheet: %s") % ws_name) + '</div>'
                            elif not self.user.has_perm("%s.%s" % (
                                    model._meta.app_label,
                                    get_permission_codename(
                                        "add", model._meta),
                            )):
                                # Check permissions
                                print(
                                    force_text(
                                        _("You don't permissions to add: %s") %
                                        ws_name))
                                # yield '<div class="alert alert-danger">' + force_text(_("You don't permissions to add: %s") % ws_name) + '</div>'
                            else:
                                deps = set([model])
                                GridReport.dependent_models(model, deps)
                                models.append(
                                    (ws_name, model, contenttype_id, deps))

                        # Sort the list of models, based on dependencies between models
                        models = GridReport.sort_models(models)

                        # Process all rows in each worksheet
                        for ws_name, model, contenttype_id, dependencies in models:
                            print(
                                force_text(
                                    _("Processing data in worksheet: %s") %
                                    ws_name))
                            # yield '<strong>' + force_text(_("Processing data in worksheet: %s") % ws_name) + '</strong><br>'
                            # yield ('<div class="table-responsive">'
                            # '<table class="table table-condensed" style="white-space: nowrap;"><tbody>')
                            numerrors = 0
                            numwarnings = 0
                            firsterror = True
                            ws = wb[ws_name]
                            for error in parseExcelWorksheet(
                                    model,
                                    ws,
                                    user=self.user,
                                    database=self.database,
                                    ping=True,
                            ):
                                if error[0] == logging.DEBUG:
                                    # Yield some result so we can detect disconnected clients and interrupt the upload
                                    # yield ' '
                                    continue
                                if firsterror and error[0] in (
                                        logging.ERROR,
                                        logging.WARNING,
                                ):
                                    print("%s %s %s %s %s%s%s" % (
                                        capfirst(_("worksheet")),
                                        capfirst(_("row")),
                                        capfirst(_("field")),
                                        capfirst(_("value")),
                                        capfirst(_("error")),
                                        " / ",
                                        capfirst(_("warning")),
                                    ))
                                    # yield '<tr><th class="sr-only">%s</th><th>%s</th><th>%s</th><th>%s</th><th>%s%s%s</th></tr>' % (
                                    #   capfirst(_("worksheet")), capfirst(_("row")),
                                    #   capfirst(_("field")), capfirst(_("value")),
                                    #   capfirst(_("error")), " / ", capfirst(_("warning"))
                                    #   )
                                    firsterror = False
                                if error[0] == logging.ERROR:
                                    print("%s %s %s %s %s: %s" % (
                                        ws_name,
                                        error[1] if error[1] else "",
                                        error[2] if error[2] else "",
                                        error[3] if error[3] else "",
                                        capfirst(_("error")),
                                        error[4],
                                    ))
                                    # yield '<tr><td class="sr-only">%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s: %s</td></tr>' % (
                                    #   ws_name,
                                    #   error[1] if error[1] else '',
                                    #   error[2] if error[2] else '',
                                    #   error[3] if error[3] else '',
                                    #   capfirst(_('error')),
                                    #   error[4]
                                    #   )
                                    numerrors += 1
                                elif error[0] == logging.WARNING:
                                    print("%s %s %s %s %s: %s" % (
                                        ws_name,
                                        error[1] if error[1] else "",
                                        error[2] if error[2] else "",
                                        error[3] if error[3] else "",
                                        capfirst(_("warning")),
                                        error[4],
                                    ))
                                    # yield '<tr><td class="sr-only">%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s: %s</td></tr>' % (
                                    #   ws_name,
                                    #   error[1] if error[1] else '',
                                    #   error[2] if error[2] else '',
                                    #   error[3] if error[3] else '',
                                    #   capfirst(_('warning')),
                                    #   error[4]
                                    #   )
                                    numwarnings += 1
                                else:
                                    print("%s %s %s %s %s %s" % (
                                        "danger"
                                        if numerrors > 0 else "success",
                                        ws_name,
                                        error[1] if error[1] else "",
                                        error[2] if error[2] else "",
                                        error[3] if error[3] else "",
                                        error[4],
                                    ))
                            #     yield '<tr class=%s><td class="sr-only">%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td></tr>' % (
                            #       "danger" if numerrors > 0 else 'success',
                            #       ws_name,
                            #       error[1] if error[1] else '',
                            #       error[2] if error[2] else '',
                            #       error[3] if error[3] else '',
                            #       error[4]
                            #       )
                            # yield '</tbody></table></div>'
                        print("%s" % _("Done"))
                        # yield '<div><strong>%s</strong></div>' % _("Done")
            except GeneratorExit:
                logger.warning("Connection Aborted")
        except Exception as e:
            if task:
                task.status = "Failed"
                task.message = "%s" % e
                task.finished = datetime.now()
            raise e

        finally:
            setattr(_thread_locals, "database", None)
            if task:
                task.save(using=self.database)

        # Task update
        task.status = "Done"
        task.finished = datetime.now()
        task.processid = None
        task.save(using=self.database, update_fields=["status", "finished", "processid"])

        return _("Done")
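A note on the loop above: parseExcelWorksheet is consumed as a stream of 5-tuples, and the indexing (error[0] for the severity, error[1]..error[3] for row/field/value, error[4] for the message) is the only structure the command relies on. A minimal, self-contained sketch of that tallying logic, assuming the same tuple layout:

import logging

def summarize_parse_feedback(feedback):
    # Tally errors and warnings from an iterable of 5-tuples shaped like
    # (severity, row, field, value, message) - the layout the loop above
    # appears to assume for parseExcelWorksheet output.
    numerrors = numwarnings = 0
    for severity, row, field, value, message in feedback:
        if severity == logging.ERROR:
            numerrors += 1
        elif severity == logging.WARNING:
            numwarnings += 1
    return numerrors, numwarnings

# Two rows fail, one row only triggers a warning:
print(summarize_parse_feedback([
    (logging.ERROR, 2, "name", "", "missing value"),
    (logging.ERROR, 5, "quantity", "-1", "negative quantity"),
    (logging.WARNING, 7, "category", "misc", "unknown category"),
]))  # -> (2, 1)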
Example #10
File: views.py  Project: albertca/frePPLe
def LaunchTask(request, action):
  # Allow only post
  if request.method != 'POST':
    raise Http404('Only post requests allowed')

  # Parse the posted parameters as arguments for an asynchronous task to add to the queue.    TODO MAKE MODULAR WITH SEPARATE TASK CLASS
  worker_database = request.database
  try:
    now = datetime.now()
    # A
    if action == 'generate plan':
      constraint = 0
      for value in request.POST.getlist('constraint'):
        try: constraint += int(value)
        except: pass
      task = Task(name='generate plan', submitted=now, status='Waiting', user=request.user)
      task.arguments = "--constraint=%s --plantype=%s" % (constraint, request.POST.get('plantype'))
      task.save(using=request.database)
      # Update the session object   TODO REPLACE WITH PREFERENCE INFO
      request.session['plantype'] = request.POST.get('plantype')
      request.session['constraint'] = constraint
    # B
    elif action == 'generate model':
      task = Task(name='generate model', submitted=now, status='Waiting', user=request.user)
      task.arguments = "--cluster=%s --demand=%s --forecast_per_item=%s --level=%s --resource=%s " \
        "--resource_size=%s --components=%s --components_per=%s --deliver_lt=%s --procure_lt=%s" % (
        request.POST['clusters'], request.POST['demands'], request.POST['fcst'], request.POST['levels'],
        request.POST['rsrc_number'], request.POST['rsrc_size'], request.POST['components'],
        request.POST['components_per'], request.POST['deliver_lt'], request.POST['procure_lt']
        )
      task.save(using=request.database)
    # C
    elif action == 'empty database':
      task = Task(name='empty database', submitted=now, status='Waiting', user=request.user)
      task.save(using=request.database)
    # D
    elif action == 'load dataset':
      task = Task(name='load dataset', submitted=now, status='Waiting', user=request.user, arguments=request.POST['datafile'])
      task.save(using=request.database)
    # E
    elif action == 'manage scenarios':
      worker_database = DEFAULT_DB_ALIAS
      if 'copy' in request.POST:
        source = request.POST.get('source', DEFAULT_DB_ALIAS)
        for sc in Scenario.objects.all():
          if request.POST.get(sc.name,'off') == 'on' and sc.status == u'Free':
            task = Task(name='copy scenario', submitted=now, status='Waiting', user=request.user, arguments="%s %s" % (source, sc.name))
            task.save()
      elif 'release' in request.POST:
        # Note: release is immediate and synchronous.
        for sc in Scenario.objects.all():
          if request.POST.get(sc.name,'off') == u'on' and sc.status != u'Free':
            sc.status = u'Free'
            sc.lastrefresh = now
            sc.save()
            if request.database == sc.name:
              # Erasing the database that is currently selected.
              request.prefix = ''
      elif 'update' in request.POST:
        # Note: update is immediate and synchronous.
        for sc in Scenario.objects.all():
          if request.POST.get(sc.name, 'off') == 'on':
            sc.description = request.POST.get('description',None)
            sc.save()
      else:
        raise Http404('Invalid scenario task')
    # F
    elif action == 'backup database':
      task = Task(name='backup database', submitted=now, status='Waiting', user=request.user)
      task.save(using=request.database)
    # G
    elif action == 'generate buckets':
      task = Task(name='generate buckets', submitted=now, status='Waiting', user=request.user)
      task.arguments = "--start=%s --end=%s --weekstart=%s" % (
        request.POST['start'], request.POST['end'], request.POST['weekstart']
        )
      task.save(using=request.database)
    # H
    elif action == 'exportworkbook':
      return exportWorkbook(request)
    # I
    elif action == 'importworkbook':
      return importWorkbook(request)
    # J
    elif action == 'openbravo_import' and 'freppledb.openbravo' in settings.INSTALLED_APPS:
      task = Task(name='Openbravo import', submitted=now, status='Waiting', user=request.user)
      task.arguments = "--delta=%s" % request.POST['delta']
      task.save(using=request.database)
    # K
    elif action == 'openbravo_export' and 'freppledb.openbravo' in settings.INSTALLED_APPS:
      task = Task(name='Openbravo export', submitted=now, status='Waiting', user=request.user)
      task.save(using=request.database)
    # L
    elif action == 'openerp_import' and 'freppledb.openerp' in settings.INSTALLED_APPS:
      task = Task(name='OpenERP import', submitted=now, status='Waiting', user=request.user)
      task.arguments = "--delta=%s" % request.POST['delta']
      task.save(using=request.database)
    # M
    elif action == 'openerp_export' and 'freppledb.openerp' in settings.INSTALLED_APPS:
      task = Task(name='OpenERP export', submitted=now, status='Waiting', user=request.user)
      task.save(using=request.database)
    else:
      # Task not recognized
      raise Http404('Invalid launching task')

    # Launch a worker process
    if not checkActive(worker_database):
      if os.path.isfile(os.path.join(settings.FREPPLE_APP,"frepplectl.py")):
        # Development layout
        Popen([
          sys.executable, # Python executable
          os.path.join(settings.FREPPLE_APP,"frepplectl.py"),
          "frepple_runworker",
          "--database=%s" % worker_database
          ])
      elif sys.executable.find('freppleserver.exe') >= 0:
        # Py2exe executable
        Popen([
          sys.executable.replace('freppleserver.exe','frepplectl.exe'), # frepplectl executable
          "frepple_runworker",
          "--database=%s" % worker_database
          ], creationflags=0x08000000) # Do not create a console window
      else:
        # Linux standard installation
        Popen([
          "frepplectl",
          "frepple_runworker",
          "--database=%s" % worker_database
          ])

    # Task created successfully
    return HttpResponseRedirect('%s/execute/' % request.prefix)
  except Exception as e:
    messages.add_message(request, messages.ERROR,
        force_unicode(_('Failure launching action: %(msg)s') % {'msg':e}))
    return HttpResponseRedirect('%s/execute/' % request.prefix)
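The 'generate plan' branch above sums the posted constraint checkboxes into a single integer: each checkbox is expected to carry one integer flag, so the sum acts as a bitmask. A small sketch of that accumulation (the concrete flag values are made up for illustration):

posted_values = ["1", "4", "8", "not-a-number"]

constraint = 0
for value in posted_values:
    try:
        constraint += int(value)
    except ValueError:
        # the view uses a bare except for the same purpose: ignore junk input
        pass

print(constraint)  # 13, i.e. flags 1 + 4 + 8 combined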
Example #11
  def handle(self, **options):
    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    # Pick up options
    force = options['force']
    test = 'FREPPLE_TEST' in os.environ
    if options['user']:
      try:
        user = User.objects.all().get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    # Synchronize the scenario table with the settings
    Scenario.syncWithSettings()

    # Initialize the task
    source = options['source']
    try:
      sourcescenario = Scenario.objects.using(DEFAULT_DB_ALIAS).get(pk=source)
    except:
      raise CommandError("No source database defined with name '%s'" % source)
    now = datetime.now()
    task = None
    if 'task' in options and options['task']:
      try:
        task = Task.objects.all().using(source).get(pk=options['task'])
      except:
        raise CommandError("Task identifier not found")
      if task.started or task.finished or task.status != "Waiting" or task.name not in ('frepple_copy', 'scenario_copy'):
        raise CommandError("Invalid task identifier")
      task.status = '0%'
      task.started = now
    else:
      task = Task(name='scenario_copy', submitted=now, started=now, status='0%', user=user)
    task.processid = os.getpid()
    task.save(using=source)

    # Validate the arguments
    destination = options['destination']
    destinationscenario = None
    try:
      task.arguments = "%s %s" % (source, destination)
      if options['description']:
        task.arguments += '--description="%s"' % options['description'].replace('"', '\\"')
      if force:
        task.arguments += " --force"
      task.save(using=source)
      try:
        destinationscenario = Scenario.objects.using(DEFAULT_DB_ALIAS).get(pk=destination)
      except:
        raise CommandError("No destination database defined with name '%s'" % destination)
      if source == destination:
        raise CommandError("Can't copy a schema on itself")
      if settings.DATABASES[source]['ENGINE'] != settings.DATABASES[destination]['ENGINE']:
        raise CommandError("Source and destination scenarios have a different engine")
      if sourcescenario.status != 'In use':
        raise CommandError("Source scenario is not in use")
      if destinationscenario.status != 'Free' and not force:
        raise CommandError("Destination scenario is not free")

      # Logging message - always logging in the default database
      destinationscenario.status = 'Busy'
      destinationscenario.save(using=DEFAULT_DB_ALIAS)

      # Copying the data
      # Commenting the next line is a little more secure, but requires you to create a .pgpass file.
      if settings.DATABASES[source]['PASSWORD']:
        os.environ['PGPASSWORD'] = settings.DATABASES[source]['PASSWORD']
      if os.name == 'nt':
        # On windows restoring with pg_restore over a pipe is broken :-(
        cmd = "pg_dump -c -Fp %s%s%s%s | psql %s%s%s%s"
      else:
        cmd = "pg_dump -Fc %s%s%s%s | pg_restore -n public -Fc -c --if-exists %s%s%s -d %s"
      commandline = cmd % (
        settings.DATABASES[source]['USER'] and ("-U %s " % settings.DATABASES[source]['USER']) or '',
        settings.DATABASES[source]['HOST'] and ("-h %s " % settings.DATABASES[source]['HOST']) or '',
        settings.DATABASES[source]['PORT'] and ("-p %s " % settings.DATABASES[source]['PORT']) or '',
        test and settings.DATABASES[source]['TEST']['NAME'] or settings.DATABASES[source]['NAME'],
        settings.DATABASES[destination]['USER'] and ("-U %s " % settings.DATABASES[destination]['USER']) or '',
        settings.DATABASES[destination]['HOST'] and ("-h %s " % settings.DATABASES[destination]['HOST']) or '',
        settings.DATABASES[destination]['PORT'] and ("-p %s " % settings.DATABASES[destination]['PORT']) or '',
        test and settings.DATABASES[destination]['TEST']['NAME'] or settings.DATABASES[destination]['NAME'],
        )
      with subprocess.Popen(commandline, shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT) as p:
        try:
          task.processid = p.pid
          task.save(using=source)
          p.wait()
        except:
          p.kill()
          p.wait()
          # Consider the destination database free again
          destinationscenario.status = 'Free'
          destinationscenario.lastrefresh = datetime.today()
          destinationscenario.save(using=DEFAULT_DB_ALIAS)
          raise Exception("Database copy failed")

      # Update the scenario table
      destinationscenario.status = 'In use'
      destinationscenario.lastrefresh = datetime.today()
      if 'description' in options:
        destinationscenario.description = options['description']
      destinationscenario.save(using=DEFAULT_DB_ALIAS)

      # Give access to the destination scenario to:
      #  a) the user doing the copy
      #  b) all superusers from the source schema
      User.objects.using(destination).filter(is_superuser=True).update(is_active=True)
      User.objects.using(destination).filter(is_superuser=False).update(is_active=False)
      if user:
        User.objects.using(destination).filter(username=user.username).update(is_active=True)

      # Logging message
      task.processid = None
      task.status = 'Done'
      task.finished = datetime.now()

      # Update the task in the destination database
      task.message = "Scenario copied from %s" % source
      task.save(using=destination)
      task.message = "Scenario copied to %s" % destination

      # Delete any waiting tasks in the new copy.
      # This is needed for situations where the same source is copied to
      # multiple destinations at the same moment.
      Task.objects.all().using(destination).filter(id__gt=task.id).delete()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      if destinationscenario and destinationscenario.status == 'Busy':
        destinationscenario.status = 'Free'
        destinationscenario.save(using=DEFAULT_DB_ALIAS)
      raise e

    finally:
      if task:
        task.processid = None
        task.save(using=source)
      settings.DEBUG = tmp_debug
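The pg_dump/pg_restore command line above is assembled from optional connection settings: every empty setting contributes an empty string, every non-empty one contributes a flag. A minimal sketch of that composition, using a made-up settings dict rather than frePPLe's:

db = {"USER": "frepple", "HOST": "", "PORT": "5433", "NAME": "scenario1"}

fragment = "%s%s%s%s" % (
    db["USER"] and ("-U %s " % db["USER"]) or "",
    db["HOST"] and ("-h %s " % db["HOST"]) or "",
    db["PORT"] and ("-p %s " % db["PORT"]) or "",
    db["NAME"],
)
print(fragment)  # "-U frepple -p 5433 scenario1"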
Example #12
  def handle(self, **options):
    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    # Pick up options
    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if not database in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try: user = User.objects.all().using(database).get(username=options['user'])
      except: raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    transaction.enter_transaction_management(using=database)
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try: task = Task.objects.all().using(database).get(pk=options['task'])
        except: raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'empty database':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='empty database', submitted=now, started=now, status='0%', user=user)
      task.save(using=database)
      transaction.commit(using=database)

      # Create a database connection
      cursor = connections[database].cursor()

      # Get a list of all django tables in the database
      tables = set(connections[database].introspection.django_table_names(only_existing=True))

      # Some tables need to be handled a bit special
      cursor.execute('update common_user set horizonbuckets = null')
      tables.discard('auth_group_permissions')
      tables.discard('auth_permission')
      tables.discard('auth_group')
      tables.discard('django_session')
      tables.discard('common_user')
      tables.discard('common_user_groups')
      tables.discard('common_user_user_permissions')
      tables.discard('django_content_type')
      tables.discard('execute_log')
      tables.discard('execute_scenario')
      transaction.commit(using=database)

      # Delete all records from the tables.
      for stmt in connections[database].ops.sql_flush(no_style(), tables, []):
        cursor.execute(stmt)
      transaction.commit(using=database)

      # SQLite specials
      if settings.DATABASES[database]['ENGINE'] == 'django.db.backends.sqlite3':
        cursor.execute('vacuum')   # Shrink the database file

      # Task update
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      if task: task.save(using=database)
      try: transaction.commit(using=database)
      except: pass
      settings.DEBUG = tmp_debug
      transaction.leave_transaction_management(using=database)
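The flush above deliberately protects a handful of tables (users, permissions, sessions, the execution log) by discarding them from the set before the delete statements are generated. A tiny sketch of that filtering step, with a made-up starting set:

tables = {"demand", "item", "common_user", "django_session", "execute_log"}

for protected in (
        "auth_group_permissions", "auth_permission", "auth_group",
        "django_session", "common_user", "common_user_groups",
        "common_user_user_permissions", "django_content_type",
        "execute_log", "execute_scenario"):
    tables.discard(protected)

print(sorted(tables))  # ['demand', 'item'] - only application data is flushed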
Example #13
  def handle(self, **options):
    # Pick up the options
    now = datetime.now()
    self.database = options['database']
    if self.database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % self.database )
    if options['user']:
      try:
        self.user = User.objects.all().using(self.database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      self.user = None
    timestamp = now.strftime("%Y%m%d%H%M%S")
    if self.database == DEFAULT_DB_ALIAS:
      logfile = 'importworkbook-%s.log' % timestamp
    else:
      logfile = 'importworkbook_%s-%s.log' % (self.database, timestamp)

    task = None
    try:
      setattr(_thread_locals, 'database', self.database)
      # Initialize the task
      if options['task']:
        try:
          task = Task.objects.all().using(self.database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name not in ('frepple_importworkbook', 'importworkbook'):
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='importworkbook', submitted=now, started=now, status='0%', user=self.user)
      task.arguments = ' '.join(options['file'])
      task.save(using=self.database)

      all_models = [ (ct.model_class(), ct.pk) for ct in ContentType.objects.all() if ct.model_class() ]
      try:
        with transaction.atomic(using=self.database):
          # Find all models in the workbook
          for file in options['file']:
            wb = load_workbook(filename=file, read_only=True, data_only=True)
            models = []
            for ws_name in wb.sheetnames:
              # Find the model
              model = None
              contenttype_id = None
              for m, ct in all_models:
                if matchesModelName(ws_name, m):
                  model = m
                  contenttype_id = ct
                  break
              if not model or model in EXCLUDE_FROM_BULK_OPERATIONS:
                print(force_text(_("Ignoring data in worksheet: %s") % ws_name))
                # yield '<div class="alert alert-warning">' + force_text(_("Ignoring data in worksheet: %s") % ws_name) + '</div>'
              elif not self.user.has_perm('%s.%s' % (model._meta.app_label, get_permission_codename('add', model._meta))):
                # Check permissions
                print(force_text(_("You don't have permissions to add: %s") % ws_name))
                # yield '<div class="alert alert-danger">' + force_text(_("You don't have permissions to add: %s") % ws_name) + '</div>'
              else:
                deps = set([model])
                GridReport.dependent_models(model, deps)
                models.append( (ws_name, model, contenttype_id, deps) )

            # Sort the list of models, based on dependencies between models
            models = GridReport.sort_models(models)
            print('197----', models)
            # Process all rows in each worksheet
            for ws_name, model, contenttype_id, dependencies in models:
              print(force_text(_("Processing data in worksheet: %s") % ws_name))
              # yield '<strong>' + force_text(_("Processing data in worksheet: %s") % ws_name) + '</strong><br>'
              # yield ('<div class="table-responsive">'
                     # '<table class="table table-condensed" style="white-space: nowrap;"><tbody>')
              numerrors = 0
              numwarnings = 0
              firsterror = True
              ws = wb[ws_name]
              for error in parseExcelWorksheet(model, ws, user=self.user, database=self.database, ping=True):
                if error[0] == DEBUG:
                  # Yield some result so we can detect disconnect clients and interrupt the upload
                  # yield ' '
                  continue
                if firsterror and error[0] in (ERROR, WARNING):
                  print('%s %s %s %s %s%s%s' % (
                    capfirst(_("worksheet")), capfirst(_("row")),
                    capfirst(_("field")), capfirst(_("value")),
                    capfirst(_("error")), " / ", capfirst(_("warning"))
                    ))
                  # yield '<tr><th class="sr-only">%s</th><th>%s</th><th>%s</th><th>%s</th><th>%s%s%s</th></tr>' % (
                  #   capfirst(_("worksheet")), capfirst(_("row")),
                  #   capfirst(_("field")), capfirst(_("value")),
                  #   capfirst(_("error")), " / ", capfirst(_("warning"))
                  #   )
                  firsterror = False
                if error[0] == ERROR:
                  print('%s %s %s %s %s: %s' % (
                    ws_name,
                    error[1] if error[1] else '',
                    error[2] if error[2] else '',
                    error[3] if error[3] else '',
                    capfirst(_('error')),
                    error[4]
                    ))
                  # yield '<tr><td class="sr-only">%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s: %s</td></tr>' % (
                  #   ws_name,
                  #   error[1] if error[1] else '',
                  #   error[2] if error[2] else '',
                  #   error[3] if error[3] else '',
                  #   capfirst(_('error')),
                  #   error[4]
                  #   )
                  numerrors += 1
                elif error[0] == WARNING:
                  print('%s %s %s %s %s: %s' % (
                    ws_name,
                    error[1] if error[1] else '',
                    error[2] if error[2] else '',
                    error[3] if error[3] else '',
                    capfirst(_('warning')),
                    error[4]
                    ))
                  # yield '<tr><td class="sr-only">%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s: %s</td></tr>' % (
                  #   ws_name,
                  #   error[1] if error[1] else '',
                  #   error[2] if error[2] else '',
                  #   error[3] if error[3] else '',
                  #   capfirst(_('warning')),
                  #   error[4]
                  #   )
                  numwarnings += 1
                else:
                  print('%s %s %s %s %s %s' % (
                    "danger" if numerrors > 0 else 'success',
                    ws_name,
                    error[1] if error[1] else '',
                    error[2] if error[2] else '',
                    error[3] if error[3] else '',
                    error[4]
                    ))
              #     yield '<tr class=%s><td class="sr-only">%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td></tr>' % (
              #       "danger" if numerrors > 0 else 'success',
              #       ws_name,
              #       error[1] if error[1] else '',
              #       error[2] if error[2] else '',
              #       error[3] if error[3] else '',
              #       error[4]
              #       )
              # yield '</tbody></table></div>'
            print('%s' % _("Done"))
            # yield '<div><strong>%s</strong></div>' % _("Done")
      except GeneratorExit:
        logger.warning('Connection Aborted')
    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      setattr(_thread_locals, 'database', None)
      if task:
        task.save(using=self.database)

    return _("Done")
Example #14
  def handle(self, **options):
    '''
    Uploads approved operationplans to the ERP system.
    '''

    # Select the correct frePPLe scenario database
    self.database = options['database']
    if self.database not in settings.DATABASES.keys():
      raise CommandError("No database settings known for '%s'" % self.database)
    self.cursor_frepple = connections[self.database].cursor()

    # FrePPle user running this task
    if options['user']:
      try:
        self.user = User.objects.all().using(self.database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      self.user = None

    # FrePPLe task identifier
    if options['task']:
      try:
        self.task = Task.objects.all().using(self.database).get(pk=options['task'])
      except:
        raise CommandError("Task identifier not found")
      if self.task.started or self.task.finished or self.task.status != "Waiting" or self.task.name != 'frepple2erp':
        raise CommandError("Invalid task identifier")
    else:
      now = datetime.now()
      self.task = Task(name='frepple2erp', submitted=now, started=now, status='0%', user=self.user)
    self.task.processid = os.getpid()
    self.task.save(using=self.database)

    try:
      # Open database connection
      print("Connecting to the ERP database")
      with getERPconnection() as erp_connection:
        self.cursor_erp = erp_connection.cursor(self.database)
        try:
          self.extractPurchaseOrders()
          self.task.status = '33%'
          self.task.save(using=self.database)

          self.extractDistributionOrders()
          self.task.status = '66%'
          self.task.save(using=self.database)

          self.extractManufacturingOrders()
          self.task.status = '100%'
          self.task.save(using=self.database)

          # Optional extra planning output the ERP might be interested in:
          #  - planned delivery date of sales orders
          #  - safety stock (Enterprise Edition only)
          #  - reorder quantities (Enterprise Edition only)
          #  - forecast (Enterprise Edition only)
          self.task.status = 'Done'
        finally:
          self.cursor_erp.close()
    except Exception as e:
      self.task.status = 'Failed'
      self.task.message = 'Failed: %s' % e
    self.task.finished = datetime.now()
    self.task.processid = None
    self.task.save(using=self.database)
    self.cursor_frepple.close()
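The export above reports progress in coarse stages: after each extract method the task status jumps to 33%, 66% and finally 100%. A generic sketch of that pattern (the step callables and the record_status callback are placeholders, not frePPLe APIs):

def export_in_stages(steps, record_status):
    # Run each step in order and record a rough percentage after every one.
    total = len(steps)
    for index, step in enumerate(steps, start=1):
        step()
        record_status("%d%%" % (index * 100 // total))

statuses = []
export_in_stages(
    [lambda: None, lambda: None, lambda: None],  # stand-ins for the extract* methods
    statuses.append,
)
print(statuses)  # ['33%', '66%', '100%']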
Example #15
    def handle(self, **options):
        # Pick up the options
        now = datetime.now()
        self.database = options["database"]
        if self.database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               self.database)
        if options["user"]:
            try:
                self.user = (User.objects.all().using(
                    self.database).get(username=options["user"]))
            except Exception:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            self.user = None
        timestamp = now.strftime("%Y%m%d%H%M%S")
        if self.database == DEFAULT_DB_ALIAS:
            logfile = "importfromfolder-%s.log" % timestamp
        else:
            logfile = "importfromfolder_%s-%s.log" % (self.database, timestamp)

        try:
            handler = logging.FileHandler(os.path.join(settings.FREPPLE_LOGDIR,
                                                       logfile),
                                          encoding="utf-8")
            # handler.setFormatter(logging.Formatter(settings.LOGGING['formatters']['simple']['format']))
            logger.addHandler(handler)
            logger.propagate = False
        except Exception as e:
            print("%s Failed to open logfile %s: %s" %
                  (datetime.now(), logfile, e))

        task = None
        errors = [0, 0]
        try:
            setattr(_thread_locals, "database", self.database)
            # Initialize the task
            if options["task"]:
                try:
                    task = (Task.objects.all().using(
                        self.database).get(pk=options["task"]))
                except Exception:
                    raise CommandError("Task identifier not found")
                if (task.started or task.finished or task.status != "Waiting"
                        or task.name not in ("frepple_importfromfolder",
                                             "importfromfolder")):
                    raise CommandError("Invalid task identifier")
                task.status = "0%"
                task.started = now
                task.logfile = logfile
            else:
                task = Task(
                    name="importfromfolder",
                    submitted=now,
                    started=now,
                    status="0%",
                    user=self.user,
                    logfile=logfile,
                )
            task.processid = os.getpid()
            task.save(using=self.database)

            # Choose the right self.delimiter and language
            self.delimiter = (get_format("DECIMAL_SEPARATOR",
                                         settings.LANGUAGE_CODE, True) == ","
                              and ";" or ",")
            translation.activate(settings.LANGUAGE_CODE)
            self.SQLrole = settings.DATABASES[self.database].get(
                "SQL_ROLE", "report_role")

            # Execute
            if "FILEUPLOADFOLDER" in settings.DATABASES[
                    self.database] and os.path.isdir(
                        settings.DATABASES[self.database]["FILEUPLOADFOLDER"]):

                # Open the logfile
                logger.info("%s Started importfromfolder\n" %
                            datetime.now().replace(microsecond=0))

                all_models = [(ct.model_class(), ct.pk)
                              for ct in ContentType.objects.all()
                              if ct.model_class()]
                models = []
                for ifile in os.listdir(
                        settings.DATABASES[self.database]["FILEUPLOADFOLDER"]):
                    if not ifile.lower().endswith((
                            ".sql",
                            ".sql.gz",
                            ".csv",
                            ".csv.gz",
                            ".cpy",
                            ".cpy.gz",
                            ".xlsx",
                    )):
                        continue
                    filename0 = ifile.split(".")[0].split(" (")[0]

                    model = None
                    contenttype_id = None
                    for m, ct in all_models:
                        if matchesModelName(filename0, m):
                            model = m
                            contenttype_id = ct
                            break

                    if not model or model in EXCLUDE_FROM_BULK_OPERATIONS:
                        logger.info(
                            "%s Ignoring data in file: %s" %
                            (datetime.now().replace(microsecond=0), ifile))
                    elif self.user and not self.user.has_perm("%s.%s" % (
                            model._meta.app_label,
                            get_permission_codename("add", model._meta),
                    )):
                        # Check permissions
                        logger.info(
                            "%s You don't have permissions to add: %s" %
                            (datetime.now().replace(microsecond=0), ifile))
                    else:
                        deps = set([model])
                        GridReport.dependent_models(model, deps)

                        models.append((ifile, model, contenttype_id, deps))

                # Sort the list of models, based on dependencies between models
                models = GridReport.sort_models(models)

                i = 0
                cnt = len(models)
                for ifile, model, contenttype_id, dependencies in models:
                    task.status = str(int(10 + i / cnt * 80)) + "%"
                    task.message = "Processing data file %s" % ifile
                    task.save(using=self.database)
                    i += 1
                    filetoparse = os.path.join(
                        os.path.abspath(settings.DATABASES[self.database]
                                        ["FILEUPLOADFOLDER"]),
                        ifile,
                    )
                    if ifile.lower().endswith((".sql", ".sql.gz")):
                        logger.info(
                            "%s Started executing SQL statements from file: %s"
                            % (datetime.now().replace(microsecond=0), ifile))
                        errors[0] += self.executeSQLfile(filetoparse)
                        logger.info(
                            "%s Finished executing SQL statements from file: %s"
                            % (datetime.now().replace(microsecond=0), ifile))
                    elif ifile.lower().endswith((".cpy", ".cpy.gz")):
                        logger.info(
                            "%s Started uploading copy file: %s" %
                            (datetime.now().replace(microsecond=0), ifile))
                        errors[0] += self.executeCOPYfile(model, filetoparse)
                        logger.info(
                            "%s Finished uploading copy file: %s" %
                            (datetime.now().replace(microsecond=0), ifile))
                    elif ifile.lower().endswith(".xlsx"):
                        logger.info(
                            "%s Started processing data in Excel file: %s" %
                            (datetime.now().replace(microsecond=0), ifile))
                        returnederrors = self.loadExcelfile(model, filetoparse)
                        errors[0] += returnederrors[0]
                        errors[1] += returnederrors[1]
                        logger.info(
                            "%s Finished processing data in file: %s" %
                            (datetime.now().replace(microsecond=0), ifile))
                    else:
                        logger.info(
                            "%s Started processing data in CSV file: %s" %
                            (datetime.now().replace(microsecond=0), ifile))
                        returnederrors = self.loadCSVfile(model, filetoparse)
                        errors[0] += returnederrors[0]
                        errors[1] += returnederrors[1]
                        logger.info(
                            "%s Finished processing data in CSV file: %s" %
                            (datetime.now().replace(microsecond=0), ifile))
            else:
                errors[0] += 1
                cnt = 0
                logger.error("%s Failed, folder does not exist" %
                             datetime.now().replace(microsecond=0))

            # Task update
            if errors[0] > 0:
                task.status = "Failed"
                if not cnt:
                    task.message = "Destination folder does not exist"
                else:
                    task.message = (
                        "Uploaded %s data files with %s errors and %s warnings"
                        % (cnt, errors[0], errors[1]))
            else:
                task.status = "Done"
                task.message = "Uploaded %s data files with %s warnings" % (
                    cnt,
                    errors[1],
                )
            task.finished = datetime.now()

        except KeyboardInterrupt:
            if task:
                task.status = "Cancelled"
                task.message = "Cancelled"
            logger.info("%s Cancelled\n" %
                        datetime.now().replace(microsecond=0))

        except Exception as e:
            logger.error("%s Failed" % datetime.now().replace(microsecond=0))
            if task:
                task.status = "Failed"
                task.message = "%s" % e
            raise e

        finally:
            setattr(_thread_locals, "database", None)
            if task:
                if errors[0] == 0:
                    task.status = "Done"
                else:
                    task.status = "Failed"
                task.processid = None
                task.finished = datetime.now()
                task.save(using=self.database)
            logger.info("%s End of importfromfolder\n" %
                        datetime.now().replace(microsecond=0))
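The progress formula above maps the per-file loop onto the 10%-90% band, leaving headroom for the setup before the loop and the bookkeeping afterwards. A quick worked example with four files:

cnt = 4
for i in range(cnt):
    print(str(int(10 + i / cnt * 80)) + "%")  # 10%, 30%, 50%, 70%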
Example #16
  def handle(self, **options):
    # Pick up the options
    now = datetime.now()

    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    timestamp = now.strftime("%Y%m%d%H%M%S")
    if database == DEFAULT_DB_ALIAS:
      logfile = 'frepple-%s.log' % timestamp
    else:
      logfile = 'frepple_%s-%s.log' % (database, timestamp)

    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name not in ('runplan', 'frepple_run'):
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
        task.logfile = logfile
      else:
        task = Task(name='runplan', submitted=now, started=now, status='0%', user=user, logfile=logfile)

      # Validate options
      if 'constraint' in options:
        constraint = int(options['constraint'])
        if constraint < 0 or constraint > 15:
          raise ValueError("Invalid constraint: %s" % options['constraint'])
      else:
        constraint = 15
      if 'plantype' in options:
        plantype = int(options['plantype'])
      else:
        plantype = 1

      # Reset environment variables
      # TODO avoid having to delete the environment variables. Use options directly?
      PlanTaskRegistry.autodiscover()
      for i in PlanTaskRegistry.reg:
        if 'env' in options:
          # Options specified
          if i.label and i.label[0] in os.environ:
            del os.environ[i.label[0]]
        elif i.label:
          # No options specified - default to activate them all
          os.environ[i.label[0]] = '1'

      # Set environment variables
      if options['env']:
        task.arguments = "--constraint=%d --plantype=%d --env=%s" % (constraint, plantype, options['env'])
        for i in options['env'].split(','):
          j = i.split('=')
          if len(j) == 1:
            os.environ[j[0]] = '1'
          else:
            os.environ[j[0]] = j[1]
      else:
        task.arguments = "--constraint=%d --plantype=%d" % (constraint, plantype)
      if options['background']:
        task.arguments += " --background"

      # Log task
      # Different from the other tasks the frepple engine will write the processid
      task.save(using=database)

      # Locate commands.py
      import freppledb.common.commands
      cmd = freppledb.common.commands.__file__

      def setlimits():
        import resource
        if settings.MAXMEMORYSIZE:
          resource.setrlimit(
            resource.RLIMIT_AS,
            (settings.MAXMEMORYSIZE * 1024 * 1024, (settings.MAXMEMORYSIZE + 10) * 1024 * 1024)
            )
        if settings.MAXCPUTIME:
          resource.setrlimit(
            resource.RLIMIT_CPU,
            (settings.MAXCPUTIME, settings.MAXCPUTIME + 5)
            )
        # Limiting the file size is a bit tricky as this limit not only applies to the log
        # file, but also to temp files during the export
        # if settings.MAXTOTALLOGFILESIZE:
        #  resource.setrlimit(
        #    resource.RLIMIT_FSIZE,
        #   (settings.MAXTOTALLOGFILESIZE * 1024 * 1024, (settings.MAXTOTALLOGFILESIZE + 1) * 1024 * 1024)
        #   )

      # Prepare environment
      os.environ['FREPPLE_PLANTYPE'] = str(plantype)
      os.environ['FREPPLE_CONSTRAINT'] = str(constraint)
      os.environ['FREPPLE_TASKID'] = str(task.id)
      os.environ['FREPPLE_DATABASE'] = database
      os.environ['FREPPLE_LOGFILE'] = logfile
      os.environ['FREPPLE_PROCESSNAME'] = settings.DATABASES[database]['NAME'].replace('demo', '')
      os.environ['PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ['PATH'] + os.pathsep + settings.FREPPLE_APP
      if os.path.isfile(os.path.join(settings.FREPPLE_HOME, 'libfrepple.so')):
        os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
      if 'DJANGO_SETTINGS_MODULE' not in os.environ:
        os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
      os.environ['PYTHONPATH'] = os.path.normpath(settings.FREPPLE_APP)
      libdir = os.path.join(os.path.normpath(settings.FREPPLE_HOME), 'lib')
      if os.path.isdir(libdir):
        # Folders used by the Windows version
        os.environ['PYTHONPATH'] += os.pathsep + libdir
        if os.path.isfile(os.path.join(libdir, 'library.zip')):
          os.environ['PYTHONPATH'] += os.pathsep + os.path.join(libdir, 'library.zip')

      if options['background']:
        # Execute as background process on Windows
        if os.name == 'nt':
          subprocess.Popen(['frepple', cmd], creationflags=0x08000000)
        else:
          # Execute as background process on Linux
          subprocess.Popen(['frepple', cmd], preexec_fn=setlimits)
      else:
        if os.name == 'nt':
          # Execute in foreground on Windows
          ret = subprocess.call(['frepple', cmd])
        else:
          # Execute in foreground on Linux
          ret = subprocess.call(['frepple', cmd], preexec_fn=setlimits)
        if ret != 0 and ret != 2:
          # Return code 0 is a successful run
          # Return code 2 is a run cancelled by a user. That's shown in the status field.
          raise Exception('Failed with exit code %d' % ret)

      # Reread the task from the database and update it
      if not options['background']:
        task = Task.objects.all().using(database).get(pk=task.id)
        task.processid = None
        task.status = 'Done'
        task.finished = datetime.now()
        task.save(using=database)

    except Exception as e:
      if task:
        task = Task.objects.all().using(database).get(pk=task.id)
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
        task.processid = None
        task.save(using=database)
      raise e
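The --env option above is parsed as a comma-separated list where each entry is either NAME (defaulting to '1') or NAME=VALUE, and every entry ends up as an environment variable for the planning engine. A small sketch of just that parsing step:

import os

def apply_env(spec):
    for entry in spec.split(','):
        parts = entry.split('=')
        if len(parts) == 1:
            os.environ[parts[0]] = '1'
        else:
            os.environ[parts[0]] = parts[1]

apply_env('supply,loglevel=2')
print(os.environ['supply'], os.environ['loglevel'])  # 1 2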
Example #17
    def handle(self, **options):
        # Make sure the debug flag is not set!
        # When it is set, the django database wrapper collects a list of all sql
        # statements executed and their timings. This consumes plenty of memory
        # and cpu time.
        tmp_debug = settings.DEBUG
        settings.DEBUG = False

        # Pick up the options
        start = options["start"]
        end = options["end"]
        weekstart = int(options["weekstart"])
        database = options["database"]
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               database)
        if options["user"]:
            try:
                user = User.objects.all().using(database).get(
                    username=options["user"])
            except Exception:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            user = None

        now = datetime.now()
        task = None
        try:
            # Initialize the task
            if options["task"]:
                if options["task"] > 0:
                    try:
                        task = (Task.objects.all().using(database).get(
                            pk=options["task"]))
                    except Task.DoesNotExist:
                        raise CommandError("Task identifier not found")
                    if (task.started or task.finished
                            or task.status != "Waiting" or task.name
                            not in ("frepple_createbuckets", "createbuckets")):
                        raise CommandError("Invalid task identifier")
                    task.status = "0%"
                    task.started = now
            else:
                task = Task(
                    name="createbuckets",
                    submitted=now,
                    started=now,
                    status="0%",
                    user=user,
                    arguments="--start=%s --end=%s --weekstart=%s" %
                    (start, end, weekstart),
                )
            if task:
                task.processid = os.getpid()
                task.save(using=database)

            # Validate the date arguments
            try:
                curdate = datetime.strptime(start, "%Y-%m-%d")
                enddate = datetime.strptime(end, "%Y-%m-%d")
            except Exception as e:
                raise CommandError("Date is not matching format YYYY-MM-DD")

            with transaction.atomic(using=database, savepoint=False):
                # Delete previous contents
                with connections[database].cursor() as cursor:
                    cursor.execute(
                        "delete from common_bucketdetail where bucket_id in ('year','quarter','month','week','day')"
                    )
                    cursor.execute(
                        "delete from common_bucket where name in ('year','quarter','month','week','day')"
                    )

                # Create buckets
                y = Bucket(name="year",
                           description="Yearly time buckets",
                           level=1)
                q = Bucket(name="quarter",
                           description="Quarterly time buckets",
                           level=2)
                m = Bucket(name="month",
                           description="Monthly time buckets",
                           level=3)
                w = Bucket(name="week",
                           description="Weeky time buckets",
                           level=4)
                d = Bucket(name="day",
                           description="Daily time buckets",
                           level=5)
                y.save(using=database)
                q.save(using=database)
                m.save(using=database)
                w.save(using=database)
                d.save(using=database)

                # Loop over all days in the chosen horizon
                prev_year = None
                prev_quarter = None
                prev_month = None
                prev_week = None
                while curdate < enddate:
                    month = int(curdate.strftime(
                        "%m"))  # an integer in the range 1 - 12
                    quarter = (month -
                               1) // 3 + 1  # an integer in the range 1 - 4
                    year = int(curdate.strftime("%Y"))
                    dayofweek = int(curdate.strftime(
                        "%w"))  # day of the week, 0 = sunday, 1 = monday, ...
                    year_start = datetime(year, 1, 1)
                    year_end = datetime(year + 1, 1, 1)
                    week_start = curdate - timedelta((dayofweek + 6) % 7 + 1 -
                                                     weekstart)
                    week_end = curdate - timedelta((dayofweek + 6) % 7 - 6 -
                                                   weekstart)

                    # Create buckets
                    if year != prev_year:
                        prev_year = year
                        BucketDetail(
                            bucket=y,
                            name=self.formatDate(curdate,
                                                 options["format_year"]),
                            startdate=year_start,
                            enddate=year_end,
                        ).save(using=database)
                    if quarter != prev_quarter:
                        prev_quarter = quarter
                        BucketDetail(
                            bucket=q,
                            name=self.formatDate(curdate,
                                                 options["format_quarter"]),
                            startdate=date(year, quarter * 3 - 2, 1),
                            enddate=date(
                                year + quarter // 4,
                                quarter * 3 + 1 - 12 * (quarter // 4),
                                1,
                            ),
                        ).save(using=database)
                    if month != prev_month:
                        prev_month = month
                        BucketDetail(
                            bucket=m,
                            name=self.formatDate(curdate,
                                                 options["format_month"]),
                            startdate=date(year, month, 1),
                            enddate=date(year + month // 12,
                                         month + 1 - 12 * (month // 12), 1),
                        ).save(using=database)
                    if week_start != prev_week:
                        prev_week = week_start
                        # we need to avoid weeks 00
                        # we will therefore take the name of the week starting the monday
                        # included in that week
                        BucketDetail(
                            bucket=w,
                            name=self.formatDate(
                                week_start +
                                timedelta(days=(7 - week_start.weekday()) % 7),
                                options["format_week"],
                            ),
                            startdate=week_start,
                            enddate=week_end,
                        ).save(using=database)
                    BucketDetail(
                        bucket=d,
                        name=self.formatDate(curdate.date(),
                                             options["format_day"]),
                        startdate=curdate,
                        enddate=curdate + timedelta(1),
                    ).save(using=database)

                    # Next date
                    curdate = curdate + timedelta(1)

            # Log success
            if task:
                task.status = "Done"
                task.finished = datetime.now()

        except Exception as e:
            if task:
                task.status = "Failed"
                task.message = "%s" % e
                task.finished = datetime.now()
            raise e

        finally:
            if task:
                task.processid = None
                task.save(using=database)
            settings.DEBUG = tmp_debug
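The week-boundary arithmetic above leans on strftime("%w"), where 0 is Sunday, and shifts by the configured weekstart; with weekstart=1 the formula lands on the preceding Monday. A worked example for a Wednesday:

from datetime import datetime, timedelta

curdate = datetime(2024, 5, 15)            # a Wednesday
weekstart = 1                              # as passed to the command
dayofweek = int(curdate.strftime("%w"))    # 3

week_start = curdate - timedelta((dayofweek + 6) % 7 + 1 - weekstart)
week_end = curdate - timedelta((dayofweek + 6) % 7 - 6 - weekstart)
print(week_start.date(), week_end.date())  # 2024-05-13 2024-05-20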
Example #18
def wrapTask(request, action):
    # Allow only post
    if request.method != 'POST':
        raise Exception('Only post requests allowed')
    # Parse the posted parameters as arguments for an asynchronous task to add to the queue.    TODO MAKE MODULAR WITH SEPARATE TASK CLASS
    worker_database = request.database

    now = datetime.now()
    task = None
    args = request.POST or request.GET

    # A
    if action in ('frepple_run', 'runplan'):
        if not request.user.has_perm('auth.generate_plan'):
            raise Exception('Missing execution privileges')
        constraint = 0
        for value in args.getlist('constraint'):
            try:
                constraint += int(value)
            except:
                pass
        task = Task(name='runplan',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        task.arguments = "--constraint=%s --plantype=%s" % (
            constraint, args.get('plantype', 1))
        env = []
        for value in args.getlist('env'):
            env.append(value)
        if env:
            task.arguments = "%s --env=%s" % (task.arguments, ','.join(env))
        request.session['env'] = env
        task.save(using=request.database)
        # Update the session object
        request.session['plantype'] = args.get('plantype')
        request.session['constraint'] = constraint
    # C
    elif action in ('frepple_flush', 'empty'):
        if not request.user.has_perm('auth.run_db'):
            raise Exception('Missing execution privileges')
        task = Task(name='empty',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        models = ','.join(args.getlist('models'))
        if models:
            task.arguments = "--models=%s" % (models)
        task.save(using=request.database)
    # D
    elif action == 'loaddata':
        if not request.user.has_perm('auth.run_db'):
            raise Exception('Missing execution privileges')
        task = Task(name='loaddata',
                    submitted=now,
                    status='Waiting',
                    user=request.user,
                    arguments=args['fixture'])
        task.save(using=request.database)
        # Also run the workflow upon loading of manufacturing_demo or distribution_demo
        if (args['regenerateplan'] == 'true'):
            active_modules = 'supply'
            task = Task(name='runplan',
                        submitted=now,
                        status='Waiting',
                        user=request.user)
            task.arguments = "--constraint=15 --plantype=1 --env=%s --background" % (
                active_modules, )
            task.save(using=request.database)
    # E
    elif action in ('frepple_copy', 'scenario_copy'):
        worker_database = DEFAULT_DB_ALIAS
        if 'copy' in args:
            if not request.user.has_perm('auth.copy_scenario'):
                raise Exception('Missing execution privileges')
            source = args.get('source', DEFAULT_DB_ALIAS)
            worker_database = source
            destination = args.getlist('destination')
            force = args.get('force', False)
            for sc in Scenario.objects.using(DEFAULT_DB_ALIAS):
                arguments = "%s %s" % (source, sc.name)
                if force:
                    arguments += ' --force'
                if args.get(sc.name, 'off') == 'on' or sc.name in destination:
                    task = Task(name='scenario_copy',
                                submitted=now,
                                status='Waiting',
                                user=request.user,
                                arguments=arguments)
                    task.save(using=source)
        elif 'release' in args:
            # Note: release is immediate and synchronous.
            if not request.user.has_perm('auth.release_scenario'):
                raise Exception('Missing execution privileges')
            for sc in Scenario.objects.using(DEFAULT_DB_ALIAS):
                if args.get(sc.name, 'off') == 'on' and sc.status != 'Free':
                    sc.status = 'Free'
                    sc.lastrefresh = now
                    sc.save(using=DEFAULT_DB_ALIAS)
                    if request.database == sc.name:
                        # Erasing the database that is currently selected.
                        request.prefix = ''
        elif 'update' in args:
            # Note: update is immediate and synchronous.
            if not request.user.has_perm('auth.release_scenario'):
                raise Exception('Missing execution privileges')
            for sc in Scenario.objects.using(DEFAULT_DB_ALIAS):
                if args.get(sc.name, 'off') == 'on':
                    sc.description = args.get('description', None)
                    sc.save(using=DEFAULT_DB_ALIAS)
        else:
            raise Exception('Invalid scenario task')
    # G
    elif action in ('frepple_createbuckets', 'createbuckets'):
        if not request.user.has_perm('auth.run_db'):
            raise Exception('Missing execution privileges')
        task = Task(name='createbuckets',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        arguments = []
        start = args.get('start', None)
        if start:
            arguments.append("--start=%s" % start)
        end = args.get('end', None)
        if end:
            arguments.append("--end=%s" % end)
        weekstart = args.get('weekstart', None)
        if weekstart:
            arguments.append("--weekstart=%s" % weekstart)
        format_day = args.get('format-day', None)
        if format_day:
            arguments.append('--format-day="%s"' % format_day)
        format_week = args.get('format-week', None)
        if format_week:
            arguments.append('--format-week="%s"' % format_week)
        format_month = args.get('format-month', None)
        if format_month:
            arguments.append('--format-month="%s"' % format_month)
        format_quarter = args.get('format-quarter', None)
        if format_quarter:
            arguments.append('--format-quarter="%s"' % format_quarter)
        format_year = args.get('format-year', None)
        if format_year:
            arguments.append('--format-year="%s"' % format_year)
        if arguments:
            task.arguments = " ".join(arguments)
        task.save(using=request.database)
    else:
        # Generic task wrapper

        # Find the command and verify we have permissions to run it
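        # Only commands that appear in the execution screen qualify: the class must
        # have a non-negative index and a getHTML() method (on itself or on a parent
        # class) that returns content for this request.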
        command = None
        for commandname, appname in get_commands().items():
            if commandname == action:
                try:
                    c = getattr(
                        import_module('%s.management.commands.%s' %
                                      (appname, commandname)), 'Command')
                    if c.index >= 0:
                        if getattr(c, 'getHTML', None) and c.getHTML(request):
                            # Command class has getHTML method
                            command = c
                            break
                        else:
                            for p in c.__bases__:
                                # Parent command class has getHTML method
                                if getattr(p, 'getHTML',
                                           None) and p.getHTML(request):
                                    command = c
                                    break
                            if command:
                                break
                except Exception:
                    pass  # Silently ignore failures
        if not command:
            raise Exception("Invalid task name '%s'" % action)
        # Create a task
        arguments = []
        for arg, val in args.lists():
            if arg != 'csrfmiddlewaretoken':
                arguments.append('--%s=%s' % (arg, ','.join(val)))
        task = Task(name=action,
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        if arguments:
            task.arguments = " ".join(arguments)
        task.save(using=request.database)

    # Launch a worker process, making sure it inherits the right
    # environment variables from this parent
    os.environ['FREPPLE_CONFIGDIR'] = settings.FREPPLE_CONFIGDIR
    if task and not checkActive(worker_database):
        if os.path.isfile(os.path.join(settings.FREPPLE_APP, "frepplectl.py")):
            if "python" in sys.executable:
                # Development layout
                Popen([
                    sys.executable,  # Python executable
                    os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
                    "runworker",
                    "--database=%s" % worker_database
                ])
            else:
                # Deployment on Apache web server
                Popen([
                    "python",
                    os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
                    "runworker",
                    "--database=%s" % worker_database
                ],
                      creationflags=0x08000000)
        elif sys.executable.find('freppleserver.exe') >= 0:
            # Py2exe executable
            Popen(
                [
                    sys.executable.replace(
                        'freppleserver.exe',
                        'frepplectl.exe'),  # frepplectl executable
                    "runworker",
                    "--database=%s" % worker_database
                ],
                creationflags=0x08000000)  # Do not create a console window
        else:
            # Linux standard installation
            Popen(
                ["frepplectl", "runworker",
                 "--database=%s" % worker_database])
    return task
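A minimal sketch of how a view might hand control to wrapTask; the view name, the redirect target and the error handling are assumptions for illustration, not code taken from this collection.

from django.contrib import messages
from django.http import HttpResponseRedirect

def LaunchTask(request, action):
    # Queue (or, for synchronous actions, immediately execute) the requested
    # action, then return to the execution screen; wrapTask raises on missing
    # permissions or invalid input.
    try:
        wrapTask(request, action)
    except Exception as e:
        messages.add_message(request, messages.ERROR, "Couldn't launch task: %s" % e)
    return HttpResponseRedirect('%s/execute/' % request.prefix)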
Example #19
    def handle(self, **options):
        # Make sure the debug flag is not set!
        # When it is set, the django database wrapper collects a list of all sql
        # statements executed and their timings. This consumes plenty of memory
        # and cpu time.
        tmp_debug = settings.DEBUG
        settings.DEBUG = False

        # Pick up options
        force = options["force"]
        promote = options["promote"]
        test = "FREPPLE_TEST" in os.environ
        if options["user"]:
            try:
                user = User.objects.all().get(username=options["user"])
            except Exception:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            user = None

        # Synchronize the scenario table with the settings
        Scenario.syncWithSettings()

        # Initialize the task
        source = options["source"]
        try:
            sourcescenario = Scenario.objects.using(DEFAULT_DB_ALIAS).get(
                pk=source)
        except Exception:
            raise CommandError("No source database defined with name '%s'" %
                               source)
        now = datetime.now()
        task = None
        if "task" in options and options["task"]:
            try:
                task = Task.objects.all().using(source).get(pk=options["task"])
            except Exception:
                raise CommandError("Task identifier not found")
            if (task.started or task.finished or task.status != "Waiting"
                    or task.name != "scenario_copy"):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
        else:
            task = Task(name="scenario_copy",
                        submitted=now,
                        started=now,
                        status="0%",
                        user=user)
        task.processid = os.getpid()
        task.save(using=source)

        # Validate the arguments
        destination = options["destination"]
        destinationscenario = None
        try:
            task.arguments = "%s %s" % (source, destination)
            if options["description"]:
                task.arguments += ' --description="%s"' % options[
                    "description"].replace('"', '\\"')
            if force:
                task.arguments += " --force"
            task.save(using=source)
            try:
                destinationscenario = Scenario.objects.using(
                    DEFAULT_DB_ALIAS).get(pk=destination)
            except Exception:
                raise CommandError(
                    "No destination database defined with name '%s'" %
                    destination)
            if source == destination:
                raise CommandError("Can't copy a schema on itself")
            if sourcescenario.status != "In use":
                raise CommandError("Source scenario is not in use")
            if destinationscenario.status != "Free" and not force and not promote:
                # Make sure the destination scenario is properly built; otherwise it is considered Free.
                scenario_is_free = False
                try:
                    User.objects.using(destination).all().count(
                    )  # fails if scenario not properly built
                except Exception:
                    scenario_is_free = True
                if not scenario_is_free:
                    raise CommandError("Destination scenario is not free")
            if promote and (destination != DEFAULT_DB_ALIAS
                            or source == DEFAULT_DB_ALIAS):
                raise CommandError(
                    "Incorrect source or destination database with promote flag"
                )

            # Logging message - always logging in the default database
            destinationscenario.status = "Busy"
            destinationscenario.save(using=DEFAULT_DB_ALIAS)

            # Copying the data
            # Commenting the next line is a little more secure, but requires you to create a .pgpass file.
            if settings.DATABASES[source]["PASSWORD"]:
                os.environ["PGPASSWORD"] = settings.DATABASES[source][
                    "PASSWORD"]
            if os.name == "nt":
                # On Windows, restoring with pg_restore over a pipe is broken :-(
                cmd = "pg_dump -c -Fp %s%s%s%s%s | psql %s%s%s%s"
            else:
                cmd = "pg_dump -Fc %s%s%s%s%s | pg_restore -n public -Fc -c --if-exists %s%s%s -d %s"
            commandline = cmd % (
                settings.DATABASES[source]["USER"] and
                ("-U %s " % settings.DATABASES[source]["USER"]) or "",
                settings.DATABASES[source]["HOST"] and
                ("-h %s " % settings.DATABASES[source]["HOST"]) or "",
                settings.DATABASES[source]["PORT"] and
                ("-p %s " % settings.DATABASES[source]["PORT"]) or "",
                """
                -T common_user
                -T common_scenario
                -T auth_group
                -T auth_group_permission
                -T auth_permission
                -T common_user_groups
                -T common_user_user_permissions
                -T common_preferences
                -T reportmanager_report
                """ if destination == DEFAULT_DB_ALIAS else "",
                test and settings.DATABASES[source]["TEST"]["NAME"]
                or settings.DATABASES[source]["NAME"],
                settings.DATABASES[destination]["USER"] and
                ("-U %s " % settings.DATABASES[destination]["USER"]) or "",
                settings.DATABASES[destination]["HOST"] and
                ("-h %s " % settings.DATABASES[destination]["HOST"]) or "",
                settings.DATABASES[destination]["PORT"] and
                ("-p %s " % settings.DATABASES[destination]["PORT"]) or "",
                test and settings.DATABASES[destination]["TEST"]["NAME"]
                or settings.DATABASES[destination]["NAME"],
            )
            with subprocess.Popen(
                    commandline,
                    shell=True,
                    stdout=subprocess.DEVNULL,
                    stderr=subprocess.STDOUT,
            ) as p:
                try:
                    task.processid = p.pid
                    task.save(using=source)
                    p.wait()
                except Exception:
                    p.kill()
                    p.wait()
                    # Consider the destination database free again
                    destinationscenario.status = "Free"
                    destinationscenario.lastrefresh = datetime.today()
                    destinationscenario.save(using=DEFAULT_DB_ALIAS)
                    raise Exception("Database copy failed")

            # Update the scenario table
            destinationscenario.status = "In use"
            destinationscenario.lastrefresh = datetime.today()
            if options["description"]:
                destinationscenario.description = options["description"]
            destinationscenario.save(using=DEFAULT_DB_ALIAS)

            # Give access to the destination scenario to:
            #  a) the user doing the copy
            #  b) all superusers from the source schema
            # unless it's a promotion
            if destination != DEFAULT_DB_ALIAS:
                User.objects.using(destination).filter(
                    is_superuser=True).update(is_active=True)
                User.objects.using(destination).filter(
                    is_superuser=False).update(is_active=False)
                if user:
                    User.objects.using(destination).filter(
                        username=user.username).update(is_active=True)

            # Logging message
            task.processid = None
            task.status = "Done"
            task.finished = datetime.now()

            # Update the task in the destination database
            task.message = "Scenario %s from %s" % (
                "promoted" if promote else "copied",
                source,
            )
            task.save(using=destination)
            task.message = "Scenario copied to %s" % destination

            # Delete any waiting tasks in the new copy.
            # This is needed for situations where the same source is copied to
            # multiple destinations at the same moment.
            Task.objects.all().using(destination).filter(
                id__gt=task.id).delete()

            # Don't automate any task in the new copy
            if not promote:
                for i in ScheduledTask.objects.all().using(destination):
                    i.next_run = None
                    i.data.pop("starttime", None)
                    i.data.pop("monday", None)
                    i.data.pop("tuesday", None)
                    i.data.pop("wednesday", None)
                    i.data.pop("thursday", None)
                    i.data.pop("friday", None)
                    i.data.pop("saturday", None)
                    i.data.pop("sunday", None)
                    i.save(using=destination)

        except Exception as e:
            if task:
                task.status = "Failed"
                task.message = "%s" % e
                task.finished = datetime.now()
            if destinationscenario and destinationscenario.status == "Busy":
                destinationscenario.status = "Free"
                destinationscenario.save(using=DEFAULT_DB_ALIAS)
            raise e

        finally:
            if task:
                task.processid = None
                task.save(using=source)
            settings.DEBUG = tmp_debug
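For a concrete feel of the command assembled above, here is a stripped-down sketch for a hypothetical local copy that relies on a .pgpass file for authentication and omits the optional -U/-h/-p flags and -T exclusions; it mirrors the non-Windows branch only and is not the exact command frePPLe generates.

import subprocess

# Hypothetical database names standing in for settings.DATABASES[...]["NAME"].
source_name, destination_name = "frepple", "scenario1"
pipeline = "pg_dump -Fc %s | pg_restore -n public -Fc -c --if-exists -d %s" % (
    source_name, destination_name)
subprocess.run(pipeline, shell=True, check=True,
               stdout=subprocess.DEVNULL, stderr=subprocess.STDOUT)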
Example #20
    def handle(self, **options):
        # Make sure the debug flag is not set!
        # When it is set, the django database wrapper collects a list of all sql
        # statements executed and their timings. This consumes plenty of memory
        # and cpu time.
        tmp_debug = settings.DEBUG
        settings.DEBUG = False

        # Pick up the options
        if 'verbosity' in options:
            verbosity = int(options['verbosity'])
        else:
            verbosity = 1
        if 'cluster' in options:
            cluster = int(options['cluster'])
        else:
            cluster = 100
        if 'demand' in options:
            demand = int(options['demand'])
        else:
            demand = 30
        if 'forecast_per_item' in options:
            forecast_per_item = int(options['forecast_per_item'])
        else:
            forecast_per_item = 50
        if 'level' in options:
            level = int(options['level'])
        else:
            level = 5
        if 'resource' in options:
            resource = int(options['resource'])
        else:
            resource = 60
        if 'resource_size' in options:
            resource_size = int(options['resource_size'])
        else:
            resource_size = 5
        if 'components' in options:
            components = int(options['components'])
        else:
            components = 200
        if 'components_per' in options:
            components_per = int(options['components_per'])
        else:
            components_per = 5
        if components == 0:
            components_per = 0
        if 'deliver_lt' in options:
            deliver_lt = int(options['deliver_lt'])
        else:
            deliver_lt = 30
        if 'procure_lt' in options:
            procure_lt = int(options['procure_lt'])
        else:
            procure_lt = 40
        if 'currentdate' in options:
            currentdate = options['currentdate'] or datetime.strftime(
                date.today(), '%Y-%m-%d')
        else:
            currentdate = datetime.strftime(date.today(), '%Y-%m-%d')
        if 'database' in options:
            database = options['database'] or DEFAULT_DB_ALIAS
        else:
            database = DEFAULT_DB_ALIAS
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               database)
        if 'user' in options and options['user']:
            try:
                user = User.objects.all().using(database).get(
                    username=options['user'])
            except:
                raise CommandError("User '%s' not found" % options['user'])
        else:
            user = None

        random.seed(100)  # Initialize random seed to get reproducible results

        now = datetime.now()
        task = None
        try:
            # Initialize the task
            if 'task' in options and options['task']:
                try:
                    task = Task.objects.all().using(database).get(
                        pk=options['task'])
                except:
                    raise CommandError("Task identifier not found")
                if task.started or task.finished or task.status != "Waiting" or task.name != 'generate model':
                    raise CommandError("Invalid task identifier")
                task.status = '0%'
                task.started = now
            else:
                task = Task(name='generate model',
                            submitted=now,
                            started=now,
                            status='0%',
                            user=user)
            task.arguments = "--cluster=%s --demand=%s --forecast_per_item=%s --level=%s --resource=%s " \
              "--resource_size=%s --components=%s --components_per=%s --deliver_lt=%s --procure_lt=%s" % (
                cluster, demand, forecast_per_item, level, resource,
                resource_size, components, components_per, deliver_lt, procure_lt
              )
            task.save(using=database)

            # Pick up the startdate
            try:
                startdate = datetime.strptime(currentdate, '%Y-%m-%d')
            except:
                raise CommandError(
                    "current date is not matching format YYYY-MM-DD")

            # Check whether the database is empty
            if Buffer.objects.using(database).count(
            ) > 0 or Item.objects.using(database).count() > 0:
                raise CommandError(
                    "Database must be empty before creating a model")

            # Plan start date
            if verbosity > 0:
                print("Updating current date...")
            param = Parameter.objects.using(database).create(
                name="currentdate")
            param.value = datetime.strftime(startdate, "%Y-%m-%d %H:%M:%S")
            param.save(using=database)

            # Planning horizon
            # minimum 10 daily buckets, weekly buckets till 40 days after current
            if verbosity > 0:
                print("Updating buckets...")
            management.call_command('frepple_createbuckets',
                                    user=user,
                                    database=database)
            task.status = '2%'
            task.save(using=database)

            # Weeks calendar
            if verbosity > 0:
                print("Creating weeks calendar...")
            with transaction.atomic(using=database):
                weeks = Calendar.objects.using(database).create(name="Weeks",
                                                                defaultvalue=0)
                for i in BucketDetail.objects.using(database).filter(
                        bucket="week").all():
                    CalendarBucket(startdate=i.startdate,
                                   enddate=i.enddate,
                                   value=1,
                                   calendar=weeks).save(using=database)
                task.status = '4%'
                task.save(using=database)

            # Working days calendar
            if verbosity > 0:
                print("Creating working days...")
            with transaction.atomic(using=database):
                workingdays = Calendar.objects.using(database).create(
                    name="Working Days", defaultvalue=0)
                minmax = BucketDetail.objects.using(database).filter(
                    bucket="week").aggregate(Min('startdate'),
                                             Max('startdate'))
                CalendarBucket(startdate=minmax['startdate__min'],
                               enddate=minmax['startdate__max'],
                               value=1,
                               calendar=workingdays,
                               priority=1,
                               saturday=False,
                               sunday=False).save(using=database)
                task.status = '6%'
                task.save(using=database)

            # Create a random list of categories to choose from
            categories = [
                'cat A', 'cat B', 'cat C', 'cat D', 'cat E', 'cat F', 'cat G'
            ]

            # Create customers
            if verbosity > 0:
                print("Creating customers...")
            with transaction.atomic(using=database):
                cust = []
                for i in range(100):
                    c = Customer.objects.using(database).create(
                        name='Cust %03d' % i)
                    cust.append(c)
                task.status = '8%'
                task.save(using=database)

            # Create resources and their calendars
            if verbosity > 0:
                print("Creating resources and calendars...")
            with transaction.atomic(using=database):
                res = []
                for i in range(resource):
                    loc = Location(name='Loc %05d' %
                                   int(random.uniform(1, cluster)))
                    loc.save(using=database)
                    cal = Calendar(name='capacity for res %03d' % i,
                                   category='capacity',
                                   defaultvalue=0)
                    bkt = CalendarBucket(startdate=startdate,
                                         value=resource_size,
                                         calendar=cal)
                    cal.save(using=database)
                    bkt.save(using=database)
                    r = Resource.objects.using(database).create(
                        name='Res %03d' % i,
                        maximum_calendar=cal,
                        location=loc)
                    res.append(r)
                task.status = '10%'
                task.save(using=database)
                random.shuffle(res)

            # Create the components
            if verbosity > 0:
                print("Creating raw materials...")
            with transaction.atomic(using=database):
                comps = []
                comploc = Location.objects.using(database).create(
                    name='Procured materials')
                for i in range(components):
                    it = Item.objects.using(database).create(
                        name='Component %04d' % i,
                        category='Procured',
                        price=str(round(random.uniform(0, 100))))
                    ld = abs(
                        round(random.normalvariate(procure_lt,
                                                   procure_lt / 3)))
                    c = Buffer.objects.using(database).create(
                        name='Component %04d' % i,
                        location=comploc,
                        category='Procured',
                        item=it,
                        type='procure',
                        min_inventory=20,
                        max_inventory=100,
                        size_multiple=10,
                        leadtime=str(ld * 86400),
                        onhand=str(
                            round(forecast_per_item * random.uniform(1, 3) *
                                  ld / 30)),
                    )
                    comps.append(c)
                task.status = '12%'
                task.save(using=database)

            # Loop over all clusters
            durations = [86400, 86400 * 2, 86400 * 3, 86400 * 5, 86400 * 6]
            progress = 88.0 / cluster
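            # The setup steps above account for the first 12%; the remaining 88%
            # of the progress bar is spread evenly over the clusters.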
            for i in range(cluster):
                with transaction.atomic(using=database):
                    if verbosity > 0:
                        print("Creating supply chain for end item %d..." % i)

                    # location
                    loc = Location.objects.using(database).get_or_create(
                        name='Loc %05d' % i)[0]
                    loc.available = workingdays
                    loc.save(using=database)

                    # Item and delivery operation
                    oper = Operation.objects.using(database).create(
                        name='Del %05d' % i, sizemultiple=1, location=loc)
                    it = Item.objects.using(database).create(
                        name='Itm %05d' % i,
                        operation=oper,
                        category=random.choice(categories),
                        price=str(round(random.uniform(100, 200))))

                    # Level 0 buffer
                    buf = Buffer.objects.using(database).create(
                        name='Buf %05d L00' % i,
                        item=it,
                        location=loc,
                        category='00')
                    Flow.objects.using(database).create(operation=oper,
                                                        thebuffer=buf,
                                                        quantity=-1)

                    # Demand
                    for j in range(demand):
                        Demand.objects.using(database).create(
                            name='Dmd %05d %05d' % (i, j),
                            item=it,
                            quantity=int(random.uniform(1, 6)),
                            # Exponential distribution of due dates, with an average of deliver_lt days.
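                            # expovariate(1 / deliver_lt / 24) draws a delay in hours
                            # with mean deliver_lt * 24; rounding to whole hours and
                            # dividing by 24 turns it into a fractional number of days.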
                            due=startdate + timedelta(days=round(
                                random.expovariate(float(1) / deliver_lt /
                                                   24)) / 24),
                            # Orders have higher priority than forecast
                            priority=random.choice([1, 2]),
                            customer=random.choice(cust),
                            category=random.choice(categories))

                    # Create upstream operations and buffers
                    ops = []
                    for k in range(level):
                        if k == 1 and res:
                            # Create a resource load for operations on level 1
                            oper = Operation.objects.using(database).create(
                                name='Oper %05d L%02d' % (i, k),
                                type='time_per',
                                location=loc,
                                duration_per=86400,
                                sizemultiple=1,
                            )
                            if resource < cluster and i < resource:
                                # When there are more clusters than resources, we try to ensure
                                # that each resource is loaded by at least one operation.
                                Load.objects.using(database).create(
                                    resource=res[i], operation=oper)
                            else:
                                Load.objects.using(database).create(
                                    resource=random.choice(res),
                                    operation=oper)
                        else:
                            oper = Operation.objects.using(database).create(
                                name='Oper %05d L%02d' % (i, k),
                                duration=random.choice(durations),
                                sizemultiple=1,
                                location=loc,
                            )
                        ops.append(oper)
                        buf.producing = oper
                        # Some inventory in random buffers
                        if random.uniform(0, 1) > 0.8:
                            buf.onhand = int(random.uniform(5, 20))
                        buf.save(using=database)
                        Flow(operation=oper,
                             thebuffer=buf,
                             quantity=1,
                             type="end").save(using=database)
                        if k != level - 1:
                            # Consume from the next level in the bill of material
                            buf = Buffer.objects.using(database).create(
                                name='Buf %05d L%02d' % (i, k + 1),
                                item=it,
                                location=loc,
                                category='%02d' % (k + 1))
                            Flow.objects.using(database).create(operation=oper,
                                                                thebuffer=buf,
                                                                quantity=-1)

                    # Consume raw materials / components
                    c = []
                    for j in range(components_per):
                        o = random.choice(ops)
                        b = random.choice(comps)
                        while (o, b) in c:
                            # A flow with the same operation and buffer already exists
                            o = random.choice(ops)
                            b = random.choice(comps)
                        c.append((o, b))
                        Flow.objects.using(database).create(
                            operation=o,
                            thebuffer=b,
                            quantity=random.choice([-1, -1, -1, -2, -3]))

                    # Commit the current cluster
                    task.status = '%d%%' % (12 + progress * (i + 1))
                    task.save(using=database)

            # Task update
            task.status = 'Done'
            task.finished = datetime.now()

        except Exception as e:
            if task:
                task.status = 'Failed'
                task.message = '%s' % e
                task.finished = datetime.now()
                task.save(using=database)
            raise e

        finally:
            if task:
                task.save(using=database)
            settings.DEBUG = tmp_debug
Example #21
  def handle(self, **options):
    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    # Pick up options
    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if not database in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try: user = User.objects.all().using(database).get(username=options['user'])
      except: raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    transaction.enter_transaction_management(using=database)
    transaction.managed(True, using=database)
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try: task = Task.objects.all().using(database).get(pk=options['task'])
        except: raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'empty database':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='empty database', submitted=now, started=now, status='0%', user=user)
      task.save(using=database)
      transaction.commit(using=database)

      # Create a database connection
      cursor = connections[database].cursor()

      # Delete all records from the tables.
      # We split the tables in groups to speed things up in postgreSQL.
      cursor.execute('update common_user set horizonbuckets = null')
      transaction.commit(using=database)
      tables = [
        ['out_demandpegging'],
        ['out_problem','out_resourceplan','out_constraint'],
        ['out_loadplan','out_flowplan','out_operationplan'],
        ['out_demand',],
        ['demand','customer','resourceskill','skill',
         'setuprule','setupmatrix','resourceload','resource',
         'flow','buffer','operationplan','item',
         'suboperation','operation',  # TODO For the Enterprise Edition on postgresql, also add: 'forecast','forecastdemand'
         'location','calendarbucket','calendar',],
        ['common_parameter','common_bucketdetail','common_bucket'],
        ['common_comment','django_admin_log'],
        ]
      for group in tables:
        sql_list = connections[database].ops.sql_flush(no_style(), group, [] )
        for sql in sql_list:
          cursor.execute(sql)
          transaction.commit(using=database)

      # SQLite specials
      if settings.DATABASES[database]['ENGINE'] == 'django.db.backends.sqlite3':
        cursor.execute('vacuum')   # Shrink the database file

      # Task update
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      if task: task.save(using=database)
      try: transaction.commit(using=database)
      except: pass
      settings.DEBUG = tmp_debug
      transaction.leave_transaction_management(using=database)
Example #22
  def handle(self, **options):
    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    # Pick up the options
    if 'start' in options: start = options['start'] or '2008-1-1'
    else: start = '2008-1-1'
    if 'end' in options: end = options['end'] or '2016-1-1'
    else: end = '2016-1-1'
    if 'weekstart' in options:
      weekstart = options['weekstart']
      if weekstart < 0 or weekstart > 6:
        raise CommandError("Invalid weekstart %s" % weekstart)
    else: weekstart = 1
    if 'database' in options: database = options['database'] or DEFAULT_DB_ALIAS
    else: database = DEFAULT_DB_ALIAS
    if not database in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try: user = User.objects.all().using(database).get(username=options['user'])
      except: raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    ac = transaction.get_autocommit(using=database)
    transaction.set_autocommit(False, using=database)
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try: task = Task.objects.all().using(database).get(pk=options['task'])
        except: raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'generate buckets':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='generate buckets', submitted=now, started=now, status='0%', user=user, arguments="--start=%s --end=%s --weekstart=%s" % (start, end, weekstart))
      task.save(using=database)
      transaction.commit(using=database)

      # Validate the date arguments
      try:
        curdate = datetime.strptime(start,'%Y-%m-%d')
        enddate = datetime.strptime(end,'%Y-%m-%d')
      except Exception as e:
        raise CommandError("Date is not matching format YYYY-MM-DD")

      # Delete previous contents
      connections[database].cursor().execute(
        "delete from common_bucketdetail where bucket_id in ('year', 'quarter','month','week','day')"
        )
      connections[database].cursor().execute(
        "delete from common_bucket where name in ('year', 'quarter','month','week','day')"
        )

      # Create buckets
      y = Bucket(name='year',description='Yearly time buckets')
      q = Bucket(name='quarter',description='Quarterly time buckets')
      m = Bucket(name='month',description='Monthly time buckets')
      w = Bucket(name='week',description='Weekly time buckets')
      d = Bucket(name='day',description='Daily time buckets')
      y.save(using=database)
      q.save(using=database)
      m.save(using=database)
      w.save(using=database)
      d.save(using=database)

      # Loop over all days in the chosen horizon
      prev_year = None
      prev_quarter = None
      prev_month = None
      prev_week = None
      while curdate < enddate:
        month = int(curdate.strftime("%m"))  # an integer in the range 1 - 12
        quarter = (month-1) // 3 + 1         # an integer in the range 1 - 4
        year = int(curdate.strftime("%Y"))
        dayofweek = int(curdate.strftime("%w")) # day of the week, 0 = sunday, 1 = monday, ...
        year_start = datetime(year,1,1)
        year_end = datetime(year+1,1,1)
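        # Start of the week that contains curdate, given the configured week start
        # day; the end is 7 days later. Both are clipped to the year boundaries.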
        week_start = curdate - timedelta((dayofweek+6)%7 + 1 - weekstart)
        week_end = curdate - timedelta((dayofweek+6)%7-7 + 1 - weekstart)
        if week_start < year_start: week_start = year_start
        if week_end > year_end: week_end = year_end

        # Create buckets
        if year != prev_year:
          prev_year = year
          BucketDetail(
            bucket = y,
            name = str(year),
            startdate = year_start,
            enddate = year_end
            ).save(using=database)
        if quarter != prev_quarter:
          prev_quarter = quarter
          BucketDetail(
            bucket = q,
            name = "%02d Q%s" % (year-2000,quarter),
            startdate = date(year, quarter*3-2, 1),
            enddate = date(year+quarter//4, quarter*3+1-12*(quarter//4), 1)
            ).save(using=database)
        if month != prev_month:
          prev_month = month
          BucketDetail(
            bucket = m,
            name = curdate.strftime("%b %y"),
            startdate = date(year, month, 1),
            enddate = date(year+month//12, month+1-12*(month//12), 1),
            ).save(using=database)
        if week_start != prev_week:
          prev_week = week_start
          BucketDetail(
            bucket = w,
            name = curdate.strftime("%y W%W"),
            startdate = week_start,
            enddate = week_end,
            ).save(using=database)
        BucketDetail(
          bucket = d,
          name = str(curdate.date()),
          startdate = curdate,
          enddate = curdate + timedelta(1),
          ).save(using=database)

        # Next date
        curdate = curdate + timedelta(1)

      # Log success
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      if task: task.save(using=database)
      try: transaction.commit(using=database)
      except: pass
      settings.DEBUG = tmp_debug
      transaction.set_autocommit(ac, using=database)
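The bucket end dates above rely on integer division to roll December and the fourth quarter into the next year; a quick standalone check of that arithmetic (plain Python, no Django required):

from datetime import date

for year, month in ((2015, 11), (2015, 12)):
    end = date(year + month // 12, month + 1 - 12 * (month // 12), 1)
    print(month, "->", end)   # 11 -> 2015-12-01, 12 -> 2016-01-01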
Example #23
  def handle(self, **options):

    # Pick up the options
    if 'verbosity' in options: self.verbosity = int(options['verbosity'] or '1')
    else: self.verbosity = 1
    if 'user' in options: user = options['user']
    else: user = ''
    if 'database' in options: self.database = options['database'] or DEFAULT_DB_ALIAS
    else: self.database = DEFAULT_DB_ALIAS
    if not self.database in settings.DATABASES.keys():
      raise CommandError("No database settings known for '%s'" % self.database )
    if 'delta' in options: self.delta = float(options['delta'] or '3650')
    else: self.delta = 3650
    self.date = datetime.now()

    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    now = datetime.now()
    ac = transaction.get_autocommit(using=self.database)
    transaction.set_autocommit(False, using=self.database)
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try: task = Task.objects.all().using(self.database).get(pk=options['task'])
        except: raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'Odoo import':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='Odoo import', submitted=now, started=now, status='0%', user=user,
          arguments="--delta=%s" % self.delta)
      task.save(using=self.database)
      transaction.commit(using=self.database)

      # Find the connector class
      # We look for a module called "odoo_import" in each of the installed
      # applications, and expect to find a class called "Connector" in it.
      connector = None
      for app in reversed(settings.INSTALLED_APPS):
        try:
          connector = getattr(import_module('%s.odoo_import' % app),'Connector')
        except ImportError as e:
          # Silently ignore if it's the module which isn't found in the app
          if str(e) != 'No module named odoo_import': raise e
      if not connector:
          raise CommandError("No odoo_import connector found")

      # Instantiate the connector and upload all data
      connector(task, self.delta, self.database, self.verbosity).run()

      # Log success
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      if task: task.save(using=self.database)
      try: transaction.commit(using=self.database)
      except: pass
      settings.DEBUG = tmp_debug
      transaction.set_autocommit(ac, using=self.database)
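The command only assumes that the connector class accepts (task, delta, database, verbosity) in its constructor and exposes a run() method; a bare-bones placeholder satisfying that contract might look as follows (purely illustrative, not an actual frePPLe/Odoo connector).

class Connector:
    """Skeleton of an odoo_import connector; a real one would talk to Odoo."""

    def __init__(self, task, delta, database, verbosity):
        self.task = task              # frePPLe Task record to update with progress
        self.delta = delta            # import window in days
        self.database = database      # frePPLe scenario database alias
        self.verbosity = verbosity

    def run(self):
        # A real connector would pull data from Odoo here and write it into the
        # frePPLe database, periodically saving self.task with a new status.
        if self.verbosity:
            print("Importing %s days of Odoo data into '%s'" % (self.delta, self.database))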
Example #24
    def handle(self, **options):

        # Pick up the options
        if 'verbosity' in options:
            self.verbosity = int(options['verbosity'] or '1')
        else:
            self.verbosity = 1
        if 'user' in options:
            user = options['user']
        else:
            user = ''
        if 'database' in options:
            self.database = options['database'] or DEFAULT_DB_ALIAS
        else:
            self.database = DEFAULT_DB_ALIAS
        if not self.database in settings.DATABASES.keys():
            raise CommandError("No database settings known for '%s'" %
                               self.database)

        # Pick up configuration parameters
        self.openbravo_user = Parameter.getValue("openbravo.user",
                                                 self.database)
        self.openbravo_password = Parameter.getValue("openbravo.password",
                                                     self.database)
        self.openbravo_host = Parameter.getValue("openbravo.host",
                                                 self.database)
        self.openbravo_organization = Parameter.getValue(
            "openbravo.organization", self.database)
        if not self.openbravo_user:
            raise CommandError("Missing or invalid parameter openbravo_user")
        if not self.openbravo_password:
            raise CommandError(
                "Missing or invalid parameter openbravo_password")
        if not self.openbravo_host:
            raise CommandError("Missing or invalid parameter openbravo_host")
        if not self.openbravo_organization:
            raise CommandError(
                "Missing or invalid parameter openbravo_organization")

        # Make sure the debug flag is not set!
        # When it is set, the django database wrapper collects a list of all SQL
        # statements executed and their timings. This consumes plenty of memory
        # and cpu time.
        tmp_debug = settings.DEBUG
        settings.DEBUG = False

        now = datetime.now()
        ac = transaction.get_autocommit(using=self.database)
        transaction.set_autocommit(False, using=self.database)
        task = None
        try:
            # Initialize the task
            if 'task' in options and options['task']:
                try:
                    task = Task.objects.all().using(
                        self.database).get(pk=options['task'])
                except:
                    raise CommandError("Task identifier not found")
                if task.started or task.finished or task.status != "Waiting" or task.name != 'Openbravo export':
                    raise CommandError("Invalid task identifier")
                task.status = '0%'
                task.started = now
            else:
                task = Task(name='Openbravo export',
                            submitted=now,
                            started=now,
                            status='0%',
                            user=user)
            task.save(using=self.database)
            transaction.commit(using=self.database)

            # Create a database connection to the frePPLe database
            cursor = connections[self.database].cursor()

            # Look up the id of the Openbravo user
            query = urllib.quote("name='%s'" %
                                 self.openbravo_user.encode('utf8'))
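            # urllib.quote URL-encodes the DAL where clause, e.g. "name='Admin'"
            # becomes "name%3D%27Admin%27" before being interpolated into the URL.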
            conn = self.get_data(
                "/openbravo/ws/dal/ADUser?where=%s&includeChildren=false" %
                query)[0]
            self.user_id = None
            for event, elem in conn:
                if event != 'end' or elem.tag != 'ADUser':
                    continue
                self.user_id = elem.get('id')
            if not self.user_id:
                raise CommandError("Can't find user id in Openbravo")

            # Look up the id of the Openbravo organization
            query = urllib.quote("name='%s'" %
                                 self.openbravo_organization.encode('utf8'))
            conn = self.get_data(
                "/openbravo/ws/dal/Organization?where=%s&includeChildren=false"
                % query)[0]
            self.organization_id = None
            for event, elem in conn:
                if event != 'end' or elem.tag != 'Organization':
                    continue
                self.organization_id = elem.get('id')
            if not self.organization_id:
                raise CommandError("Can't find organization id in Openbravo")

            # Upload all data
            self.export_procurement_order(cursor)
            self.export_work_order(cursor)
            self.export_sales_order(cursor)

            # Log success
            task.status = 'Done'
            task.finished = datetime.now()

        except Exception as e:
            if task:
                task.status = 'Failed'
                task.message = '%s' % e
                task.finished = datetime.now()
            raise e

        finally:
            if task:
                task.save(using=self.database)
            try:
                transaction.commit(using=self.database)
            except:
                pass
            settings.DEBUG = tmp_debug
            transaction.set_autocommit(ac, using=self.database)
Example #25
  def handle(self, **options):
    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    # Pick up the options
    if 'verbosity' in options:
      verbosity = int(options['verbosity'])
    else:
      verbosity = 1
    if 'cluster' in options:
      cluster = int(options['cluster'])
    else:
      cluster = 100
    if 'demand' in options:
      demand = int(options['demand'])
    else:
      demand = 30
    if 'forecast_per_item' in options:
      forecast_per_item = int(options['forecast_per_item'])
    else:
      forecast_per_item = 50
    if 'level' in options:
      level = int(options['level'])
    else:
      level = 5
    if 'resource' in options:
      resource = int(options['resource'])
    else:
      resource = 60
    if 'resource_size' in options:
      resource_size = int(options['resource_size'])
    else:
      resource_size = 5
    if 'components' in options:
      components = int(options['components'])
    else:
      components = 200
    if 'components_per' in options:
      components_per = int(options['components_per'])
    else:
      components_per = 5
    if components == 0:
      components_per = 0
    if 'deliver_lt' in options:
      deliver_lt = int(options['deliver_lt'])
    else:
      deliver_lt = 30
    if 'procure_lt' in options:
      procure_lt = int(options['procure_lt'])
    else:
      procure_lt = 40
    if 'currentdate' in options:
      currentdate = options['currentdate'] or datetime.strftime(date.today(), '%Y-%m-%d')
    else:
      currentdate = datetime.strftime(date.today(), '%Y-%m-%d')
    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if not database in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    random.seed(100)  # Initialize random seed to get reproducible results

    now = datetime.now()
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'generate model':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='generate model', submitted=now, started=now, status='0%', user=user)
      task.arguments = "--cluster=%s --demand=%s --forecast_per_item=%s --level=%s --resource=%s " \
        "--resource_size=%s --components=%s --components_per=%s --deliver_lt=%s --procure_lt=%s" % (
          cluster, demand, forecast_per_item, level, resource,
          resource_size, components, components_per, deliver_lt, procure_lt
        )
      task.save(using=database)
      transaction.commit(using=database)

      # Pick up the startdate
      try:
        startdate = datetime.strptime(currentdate, '%Y-%m-%d')
      except:
        raise CommandError("current date is not matching format YYYY-MM-DD")

      # Check whether the database is empty
      if Buffer.objects.using(database).count() > 0 or Item.objects.using(database).count() > 0:
        raise CommandError("Database must be empty before creating a model")

      # Plan start date
      if verbosity > 0:
        print("Updating current date...")
      param = Parameter.objects.using(database).create(name="currentdate")
      param.value = datetime.strftime(startdate, "%Y-%m-%d %H:%M:%S")
      param.save(using=database)

      # Planning horizon
      # minimum 10 daily buckets, weekly buckets till 40 days after current
      if verbosity > 0:
        print("Updating buckets...")
      management.call_command('frepple_createbuckets', user=user, database=database)
      if verbosity > 0:
        print("Updating horizon telescope...")
      updateTelescope(10, 40, 730, database)
      task.status = '2%'
      task.save(using=database)

      # Weeks calendar
      if verbosity > 0:
        print("Creating weeks calendar...")
      with transaction.atomic(using=database):
        weeks = Calendar.objects.using(database).create(name="Weeks", defaultvalue=0)
        for i in BucketDetail.objects.using(database).filter(bucket="week").all():
          CalendarBucket(
            startdate=i.startdate, enddate=i.enddate, value=1, calendar=weeks
            ).save(using=database)
        task.status = '4%'
        task.save(using=database)

      # Working days calendar
      if verbosity > 0:
        print("Creating working days...")
      with transaction.atomic(using=database):
        workingdays = Calendar.objects.using(database).create(name="Working Days", defaultvalue=0)
        minmax = BucketDetail.objects.using(database).filter(bucket="week").aggregate(Min('startdate'), Max('startdate'))
        CalendarBucket(
          startdate=minmax['startdate__min'], enddate=minmax['startdate__max'],
          value=1, calendar=workingdays, priority=1, saturday=False, sunday=False
          ).save(using=database)
        task.status = '6%'
        task.save(using=database)

      # Create a random list of categories to choose from
      categories = [
        'cat A', 'cat B', 'cat C', 'cat D', 'cat E', 'cat F', 'cat G'
        ]

      # Create customers
      if verbosity > 0:
        print("Creating customers...")
      with transaction.atomic(using=database):
        cust = []
        for i in range(100):
          c = Customer.objects.using(database).create(name='Cust %03d' % i)
          cust.append(c)
        task.status = '8%'
        task.save(using=database)

      # Create resources and their calendars
      if verbosity > 0:
        print("Creating resources and calendars...")
      with transaction.atomic(using=database):
        res = []
        for i in range(resource):
          loc = Location(name='Loc %05d' % int(random.uniform(1, cluster)))
          loc.save(using=database)
          cal = Calendar(name='capacity for res %03d' % i, category='capacity', defaultvalue=0)
          bkt = CalendarBucket(startdate=startdate, value=resource_size, calendar=cal)
          cal.save(using=database)
          bkt.save(using=database)
          r = Resource.objects.using(database).create(
            name='Res %03d' % i, maximum_calendar=cal, location=loc
            )
          res.append(r)
        task.status = '10%'
        task.save(using=database)
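        # Shuffle the resources so they are assigned to the clusters in a random order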
        random.shuffle(res)

      # Create the components
      if verbosity > 0:
        print("Creating raw materials...")
      with transaction.atomic(using=database):
        comps = []
        comploc = Location.objects.using(database).create(name='Procured materials')
        for i in range(components):
          it = Item.objects.using(database).create(
            name='Component %04d' % i,
            category='Procured',
            price=str(round(random.uniform(0, 100)))
            )
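          # Procurement lead time in days, drawn from a normal distribution around procure_lt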
          ld = abs(round(random.normalvariate(procure_lt, procure_lt / 3)))
          c = Buffer.objects.using(database).create(
            name='Component %04d' % i,
            location=comploc,
            category='Procured',
            item=it,
            type='procure',
            min_inventory=20,
            max_inventory=100,
            size_multiple=10,
            leadtime=str(ld * 86400),
            onhand=str(round(forecast_per_item * random.uniform(1, 3) * ld / 30)),
            )
          comps.append(c)
        task.status = '12%'
        task.save(using=database)

      # Loop over all clusters
      durations = [ 86400, 86400 * 2, 86400 * 3, 86400 * 5, 86400 * 6 ]
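      # The setup steps above consumed the first 12% of the task status;
      # spread the remaining 88% evenly over the clusters.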
      progress = 88.0 / cluster
      for i in range(cluster):
        with transaction.atomic(using=database):
          if verbosity > 0:
            print("Creating supply chain for end item %d..." % i)

          # location
          loc = Location.objects.using(database).get_or_create(name='Loc %05d' % i)[0]
          loc.available = workingdays
          loc.save(using=database)

          # Item and delivery operation
          oper = Operation.objects.using(database).create(name='Del %05d' % i, sizemultiple=1, location=loc)
          it = Item.objects.using(database).create(
            name='Itm %05d' % i,
            operation=oper,
            category=random.choice(categories),
            price=str(round(random.uniform(100, 200)))
            )

          # Level 0 buffer
          buf = Buffer.objects.using(database).create(
            name='Buf %05d L00' % i,
            item=it,
            location=loc,
            category='00'
            )
          Flow.objects.using(database).create(operation=oper, thebuffer=buf, quantity=-1)

          # Demand
          for j in range(demand):
            Demand.objects.using(database).create(
              name='Dmd %05d %05d' % (i, j),
              item=it,
              quantity=int(random.uniform(1, 6)),
              # Exponential distribution of due dates, with an average of deliver_lt days.
              due=startdate + timedelta(days=round(random.expovariate(float(1) / deliver_lt / 24)) / 24),
              # Orders have higher priority than forecast
              priority=random.choice([1, 2]),
              customer=random.choice(cust),
              category=random.choice(categories)
              )

          # Create upstream operations and buffers
          ops = []
          for k in range(level):
            if k == 1 and res:
              # Create a resource load for operations on level 1
              oper = Operation.objects.using(database).create(
                name='Oper %05d L%02d' % (i, k),
                type='time_per',
                location=loc,
                duration_per=86400,
                sizemultiple=1,
                )
              if resource < cluster and i < resource:
                # When there are more clusters than resources, make sure that
                # each resource is loaded by at least one operation.
                Load.objects.using(database).create(resource=res[i], operation=oper)
              else:
                Load.objects.using(database).create(resource=random.choice(res), operation=oper)
            else:
              oper = Operation.objects.using(database).create(
                name='Oper %05d L%02d' % (i, k),
                duration=random.choice(durations),
                sizemultiple=1,
                location=loc,
                )
            ops.append(oper)
            buf.producing = oper
            # Some inventory in random buffers
            if random.uniform(0, 1) > 0.8:
              buf.onhand = int(random.uniform(5, 20))
            buf.save(using=database)
            Flow(operation=oper, thebuffer=buf, quantity=1, type="end").save(using=database)
            if k != level - 1:
              # Consume from the next level in the bill of material
              buf = Buffer.objects.using(database).create(
                name='Buf %05d L%02d' % (i, k + 1),
                item=it,
                location=loc,
                category='%02d' % (k + 1)
                )
              Flow.objects.using(database).create(operation=oper, thebuffer=buf, quantity=-1)

          # Consume raw materials / components
          c = []
          for j in range(components_per):
            o = random.choice(ops)
            b = random.choice(comps)
            while (o, b) in c:
              # A flow with the same operation and buffer already exists
              o = random.choice(ops)
              b = random.choice(comps)
            c.append( (o, b) )
            Flow.objects.using(database).create(
              operation=o, thebuffer=b,
              quantity=random.choice([-1, -1, -1, -2, -3])
              )

          # Commit the current cluster
          task.status = '%d%%' % (12 + progress * (i + 1))
          task.save(using=database)

      # Task update
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
        task.save(using=database)
      raise e

    finally:
      if task:
        task.save(using=database)
      settings.DEBUG = tmp_debug
Example #26
    def handle(self, **options):
        # Pick up options
        database = options["database"]
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               database)
        if options["user"]:
            try:
                user = User.objects.all().using(database).get(
                    username=options["user"])
            except Exception:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            user = None
        if options["models"]:
            models = options["models"].split(",")
        else:
            models = None

        now = datetime.now()
        task = None
        try:
            # Initialize the task
            if options["task"]:
                try:
                    task = Task.objects.all().using(database).get(
                        pk=options["task"])
                except Exception:
                    raise CommandError("Task identifier not found")
                if (task.started or task.finished or task.status != "Waiting"
                        or task.name not in ("frepple_flush", "empty")):
                    raise CommandError("Invalid task identifier")
                task.status = "0%"
                task.started = now
            else:
                task = Task(name="empty",
                            submitted=now,
                            started=now,
                            status="0%",
                            user=user)
                task.arguments = "%s%s" % (
                    "--user=%s " % options["user"] if options["user"] else "",
                    "--models=%s " %
                    options["models"] if options["models"] else "",
                )
            task.processid = os.getpid()
            task.save(using=database)

            # Create a database connection
            cursor = connections[database].cursor()

            # Get a list of all django tables in the database
            tables = set(
                connections[database].introspection.django_table_names(
                    only_existing=True))
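            # Content type keys used below to filter which django_admin_log records are deleted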
            ContentTypekeys = set()
            # Validate the user list of tables
            if models:
                hasDemand = "input.demand" in models
                hasOperation = "input.operation" in models
                hasPO = "input.purchaseorder" in models
                hasDO = "input.distributionorder" in models
                hasMO = "input.manufacturingorder" in models
                hasDeO = "input.deliveryorder" in models

                if not hasOperation:
                    if hasDemand:
                        models.remove("input.demand")
                        cursor.execute(
                            "update operationplan set demand_id = null where demand_id is not null"
                        )
                        cursor.execute("delete from demand")
                        key = ContentType.objects.get_for_model(
                            inputmodels.Demand, for_concrete_model=False).pk
                        cursor.execute(
                            "delete from django_admin_log where content_type_id = %s",
                            (key, ),
                        )

                    if not (hasPO and hasDO and hasMO and hasDeO):
                        if "input.operationplanmaterial" in models:
                            models.remove("input.operationplanmaterial")
                        if "input.operationplanresource" in models:
                            models.remove("input.operationplanresource")

                    if hasPO and not (hasDO and hasMO and hasDeO):
                        models.remove("input.purchaseorder")
                        cursor.execute(
                            "delete from operationplan where type = 'PO'")
                        key = ContentType.objects.get_for_model(
                            inputmodels.PurchaseOrder,
                            for_concrete_model=False).pk
                        cursor.execute(
                            "delete from django_admin_log where content_type_id = %s",
                            (key, ),
                        )

                    if hasDO and not (hasPO and hasMO and hasDeO):
                        models.remove("input.distributionorder")
                        cursor.execute(
                            "delete from operationplan where type = 'DO'")
                        key = ContentType.objects.get_for_model(
                            inputmodels.DistributionOrder,
                            for_concrete_model=False).pk
                        cursor.execute(
                            "delete from django_admin_log where content_type_id = %s",
                            (key, ),
                        )

                    if hasMO and not (hasPO and hasDO and hasDeO):
                        models.remove("input.manufacturingorder")
                        cursor.execute(
                            "delete from operationplan where type = 'MO'")
                        key = ContentType.objects.get_for_model(
                            inputmodels.ManufacturingOrder,
                            for_concrete_model=False).pk
                        cursor.execute(
                            "delete from django_admin_log where content_type_id = %s",
                            (key, ),
                        )

                    if hasDeO and not (hasPO and hasDO and hasMO):
                        models.remove("input.deliveryorder")
                        cursor.execute(
                            "delete from operationplan where type = 'DLVR'")
                        key = ContentType.objects.get_for_model(
                            inputmodels.DeliveryOrder,
                            for_concrete_model=False).pk
                        cursor.execute(
                            "delete from django_admin_log where content_type_id = %s",
                            (key, ),
                        )

                    if (hasPO or hasDO or hasMO or hasDeO
                        ) and not (hasPO and hasDO and hasMO and hasDeO):
                        # Keep the database in shape
                        cursor.execute("vacuum analyze")

                models2tables = set()
                admin_log_positive = True
                for m in models:
                    try:
                        x = m.split(".", 1)
                        x = apps.get_model(x[0], x[1])
                        if x in EXCLUDE_FROM_BULK_OPERATIONS:
                            continue

                        ContentTypekeys.add(
                            ContentType.objects.get_for_model(x).pk)

                        x = x._meta.db_table
                        if x not in tables:
                            raise
                        models2tables.add(x)
                    except Exception as e:
                        raise CommandError("Invalid model to erase: %s" % m)
                tables = models2tables
            else:
                admin_log_positive = False
                tables.discard("django_admin_log")
                for i in EXCLUDE_FROM_BULK_OPERATIONS:
                    tables.discard(i._meta.db_table)
                    ContentTypekeys.add(
                        ContentType.objects.get_for_model(i).pk)
            # Some tables need to be handled a bit special
            if "operationplan" in tables:
                tables.add("operationplanmaterial")
                tables.add("operationplanresource")
                tables.add("out_problem")
            if "resource" in tables and "out_resourceplan" not in tables:
                tables.add("out_resourceplan")
            if "demand" in tables and "out_constraint" not in tables:
                tables.add("out_constraint")
            tables.discard("auth_group_permissions")
            tables.discard("auth_permission")
            tables.discard("auth_group")
            tables.discard("django_session")
            tables.discard("common_user")
            tables.discard("common_user_groups")
            tables.discard("common_user_user_permissions")
            tables.discard("common_preference")
            tables.discard("django_content_type")
            tables.discard("execute_log")
            tables.discard("execute_schedule")
            tables.discard("common_scenario")

            # Delete all records from the tables.
            with transaction.atomic(using=database, savepoint=False):
                if ContentTypekeys:
                    if admin_log_positive:
                        cursor.execute(
                            "delete from django_admin_log where content_type_id = any(%s)",
                            (list(ContentTypekeys), ),
                        )
                    else:
                        cursor.execute(
                            "delete from django_admin_log where content_type_id != any(%s)",
                            (list(ContentTypekeys), ),
                        )
                if "common_bucket" in tables:
                    cursor.execute(
                        "update common_user set horizonbuckets = null")
                for stmt in connections[database].ops.sql_flush(
                        no_style(), tables, []):
                    cursor.execute(stmt)

            # Task update
            task.status = "Done"
            task.finished = datetime.now()
            task.processid = None
            task.save(using=database)

        except Exception as e:
            if task:
                task.status = "Failed"
                task.message = "%s" % e
                task.finished = datetime.now()
                task.processid = None
                task.save(using=database)
            raise CommandError("%s" % e)
Example #27
  def handle(self, **options):
    # Pick up options
    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database)
    if 'user' in options and options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except Exception:
        raise CommandError("User '%s' not found" % options['user'])
    else:
      user = None
    if 'models' in options and options['models']:
      models = options['models'].split(',')
    else:
      models = None

    now = datetime.now()
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except Exception:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'empty database':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='empty database', submitted=now, started=now, status='0%', user=user)
      task.save(using=database)

      # Create a database connection
      cursor = connections[database].cursor()

      # Get a list of all django tables in the database
      tables = set(connections[database].introspection.django_table_names(only_existing=True))

      # Validate the user list of tables
      if models:
        models2tables = set()
        for m in models:
          try:
            x = m.split('.', 1)
            x = apps.get_model(x[0], x[1])
            if x in EXCLUDE_FROM_BULK_OPERATIONS:
              continue
            x = x._meta.db_table
            if x not in tables:
              raise
            models2tables.add(x)
          except Exception as e:
            raise CommandError("Invalid model to erase: %s" % m)
        tables = models2tables
      else:
        for i in EXCLUDE_FROM_BULK_OPERATIONS:
          tables.discard(i._meta.db_table)

      # Some tables need to be handled a bit special
      if "setupmatrix" in tables:
        tables.add("setuprule")
      tables.discard('auth_group_permissions')
      tables.discard('auth_permission')
      tables.discard('auth_group')
      tables.discard('django_session')
      tables.discard('common_user')
      tables.discard('common_user_groups')
      tables.discard('common_user_user_permissions')
      tables.discard('django_admin_log')
      tables.discard('django_content_type')
      tables.discard('execute_log')
      tables.discard('common_scenario')

      # Delete all records from the tables.
      with transaction.atomic(using=database, savepoint=False):
        if "common_bucket" in tables:
          cursor.execute('update common_user set horizonbuckets = null')
        for stmt in connections[database].ops.sql_flush(no_style(), tables, []):
          cursor.execute(stmt)

      # Task update
      task.status = 'Done'
      task.finished = datetime.now()
      task.save(using=database)

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
        task.save(using=database)
      raise CommandError('%s' % e)
Example #28
    def handle(self, **options):
        # Pick up the options
        database = options["database"]
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" % database)
        if options["user"]:
            try:
                user = User.objects.all().using(database).get(username=options["user"])
            except Exception:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            user = None

        now = datetime.now()
        task = None
        try:
            # Initialize the task
            if "task" in options and options["task"]:
                try:
                    task = Task.objects.all().using(database).get(pk=options["task"])
                except Exception:
                    raise CommandError("Task identifier not found")
                if (
                    task.started
                    or task.finished
                    or task.status != "Waiting"
                    or task.name not in ("frepple_backup", "backup")
                ):
                    raise CommandError("Invalid task identifier")
                task.status = "0%"
                task.started = now
            else:
                task = Task(
                    name="backup", submitted=now, started=now, status="0%", user=user
                )

            # Choose the backup file name
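            # The %s is filled in first; the doubled %% escapes are left for strftime to expand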
            backupfile = now.strftime("database.%s.%%Y%%m%%d.%%H%%M%%S.dump" % database)
            task.message = "Backup to file %s" % backupfile

            # Run the backup command
            # Commenting the next line is a little more secure, but requires you to
            # create a .pgpass file.
            os.environ["PGPASSWORD"] = settings.DATABASES[database]["PASSWORD"]
            args = [
                "pg_dump",
                "-Fc",
                "-w",
                "--username=%s" % settings.DATABASES[database]["USER"],
                "--file=%s"
                % os.path.abspath(os.path.join(settings.FREPPLE_LOGDIR, backupfile)),
            ]
            if settings.DATABASES[database]["HOST"]:
                args.append("--host=%s" % settings.DATABASES[database]["HOST"])
            if settings.DATABASES[database]["PORT"]:
                args.append("--port=%s" % settings.DATABASES[database]["PORT"])
            args.append(settings.DATABASES[database]["NAME"])
            with subprocess.Popen(args) as p:
                try:
                    task.processid = p.pid
                    task.save(using=database)
                    p.wait()
                except Exception:
                    p.kill()
                    p.wait()
                    raise Exception("Run of run pg_dump failed")

            # Task update
            task.processid = None
            task.status = "99%"
            task.save(using=database)

            # Delete backups older than a month
            pattern = re.compile("database.*.*.*.dump")
            for f in os.listdir(settings.FREPPLE_LOGDIR):
                if os.path.isfile(os.path.join(settings.FREPPLE_LOGDIR, f)):
                    # Note this is NOT 100% correct on UNIX. st_ctime is not always the creation date...
                    created = datetime.fromtimestamp(
                        os.stat(os.path.join(settings.FREPPLE_LOGDIR, f)).st_ctime
                    )
                    if pattern.match(f) and (now - created).days > 31:
                        try:
                            os.remove(os.path.join(settings.FREPPLE_LOGDIR, f))
                        except Exception:
                            pass

            # Task update
            task.status = "Done"
            task.finished = datetime.now()
            task.processid = None

        except Exception as e:
            if task:
                task.status = "Failed"
                task.message = "%s" % e
                task.finished = datetime.now()
                task.processid = None
            raise e

        finally:
            if task:
                task.save(using=database)
Example #29
  def handle(self, *args, **options):
    # Pick up the options
    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database)
    if 'user' in options and options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except Exception:
        raise CommandError("User '%s' not found" % options['user'])
    else:
      user = None

    now = datetime.now()
    transaction.enter_transaction_management(using=database)
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except Exception:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'load XML file':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='load XML file', submitted=now, started=now, status='0%', user=user)
      task.arguments = ' '.join(['"%s"' % i for i in args])
      task.save(using=database)
      transaction.commit(using=database)

      if not args:
        raise CommandError("No XML input file given")

      # Execute
      # TODO: if frePPLe is available as a module, we don't really need to spawn another process.
      os.environ['FREPPLE_HOME'] = settings.FREPPLE_HOME.replace('\\', '\\\\')
      os.environ['FREPPLE_APP'] = settings.FREPPLE_APP
      os.environ['FREPPLE_DATABASE'] = database
      os.environ['PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ['PATH'] + os.pathsep + settings.FREPPLE_APP
      os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
      if 'DJANGO_SETTINGS_MODULE' not in os.environ.keys():
        os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
      if os.path.exists(os.path.join(os.environ['FREPPLE_HOME'], 'python27.zip')):
        # For the py2exe executable
        os.environ['PYTHONPATH'] = os.path.join(os.environ['FREPPLE_HOME'], 'python27.zip') + ';' + os.path.normpath(os.environ['FREPPLE_APP'])
      else:
        # Other executables
        os.environ['PYTHONPATH'] = os.path.normpath(os.environ['FREPPLE_APP'])
      cmdline = [ '"%s"' % i for i in args ]
      cmdline.insert(0, 'frepple')
      cmdline.append( '"%s"' % os.path.join(settings.FREPPLE_APP, 'freppledb', 'execute', 'loadxml.py') )
      ret = os.system(' '.join(cmdline))
      if ret:
        raise Exception('Exit code of the batch run is %d' % ret)

      # Task update
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      if task:
        task.save(using=database)
      try:
        transaction.commit(using=database)
      except Exception:
        pass
      transaction.leave_transaction_management(using=database)
Example #30
    def handle(self, *args, **options):
        # Pick up the options
        self.database = options['database']
        if self.database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               self.database)

        if options['user']:
            try:
                self.user = User.objects.all().using(
                    self.database).get(username=options['user'])
            except Exception:
                raise CommandError("User '%s' not found" % options['user'])
        else:
            self.user = None

        now = datetime.now()

        task = None
        self.logfile = None
        errors = 0
        try:
            # Initialize the task
            if options['task']:
                try:
                    task = Task.objects.all().using(
                        self.database).get(pk=options['task'])
                except Exception:
                    raise CommandError("Task identifier not found")
                if task.started or task.finished or task.status != "Waiting" or task.name != 'export to folder':
                    raise CommandError("Invalid task identifier")
                task.status = '0%'
                task.started = now
            else:
                task = Task(name='export to folder',
                            submitted=now,
                            started=now,
                            status='0%',
                            user=self.user)
            task.arguments = ' '.join(['"%s"' % i for i in args])
            task.save(using=self.database)

            # Execute
            if os.path.isdir(
                    settings.DATABASES[self.database]['FILEUPLOADFOLDER']):

                # Open the logfile
                # The log file remains in the upload folder as different folders can be specified
                # We do not want to create one log file per folder
                self.logfile = open(
                    os.path.join(
                        settings.DATABASES[self.database]['FILEUPLOADFOLDER'],
                        'exporttofolder.log'), "a")
                print("%s Started export to folder\n" % datetime.now(),
                      file=self.logfile)

                cursor = connections[self.database].cursor()

                task.status = '0%'
                task.save(using=self.database)

                i = 0
                cnt = len(self.statements)

                for filename, export, sqlquery in self.statements:
                    print("%s Started export of %s" %
                          (datetime.now(), filename),
                          file=self.logfile)

                    # Make sure the export folder exists
                    exportFolder = os.path.join(
                        settings.DATABASES[self.database]['FILEUPLOADFOLDER'],
                        export)
                    if not os.path.isdir(exportFolder):
                        os.makedirs(exportFolder)

                    try:
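                        # Open the target file (gzipped if requested) and stream the COPY output into it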
                        if filename.lower().endswith(".gz"):
                            csv_datafile = gzip.open(
                                os.path.join(exportFolder, filename), "w")
                        else:
                            csv_datafile = open(
                                os.path.join(exportFolder, filename), "w")

                        cursor.copy_expert(sqlquery, csv_datafile)

                        csv_datafile.close()
                        i += 1

                    except Exception as e:
                        errors += 1
                        print("%s Failed to export to %s" %
                              (datetime.now(), filename),
                              file=self.logfile)
                        if task:
                            task.message = '%s' % e

                    task.status = str(int(i / cnt * 100)) + '%'
                    task.save(using=self.database)

                print("%s Exported %s file(s)\n" %
                      (datetime.now(), cnt - errors),
                      file=self.logfile)

            else:
                errors += 1
                print("%s Failed, folder does not exist" % datetime.now(),
                      file=self.logfile)
                task.message = "Destination folder does not exist"
                task.save(using=self.database)

        except Exception as e:
            print("%s Failed" % datetime.now(), file=self.logfile)
            errors += 1
            if task:
                task.message = '%s' % e

        finally:
            if task:
                if not errors:
                    task.status = '100%'
                    task.message = "Exported %s data files" % (cnt)
                else:
                    task.status = 'Failed'
                    #task.message = "Exported %s data files, %s failed" % (cnt-errors, errors)
                task.finished = datetime.now()
                task.save(using=self.database)

            if self.logfile:
                print('%s End of export to folder\n' % datetime.now(),
                      file=self.logfile)
                self.logfile.close()
Example #31
    def handle(self, **options):
        # Pick up the options
        database = options['database']
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               database)
        if options['user']:
            try:
                user = User.objects.all().using(database).get(
                    username=options['user'])
            except Exception:
                raise CommandError("User '%s' not found" % options['user'])
        else:
            user = None

        now = datetime.now()
        task = None
        try:
            # Initialize the task
            if options['task']:
                try:
                    task = Task.objects.all().using(database).get(
                        pk=options['task'])
                except Exception:
                    raise CommandError("Task identifier not found")
                if task.started or task.finished or task.status != "Waiting" or task.name != 'frepple_loadxml':
                    raise CommandError("Invalid task identifier")
                task.status = '0%'
                task.started = now
            else:
                task = Task(name='frepple_loadxml',
                            submitted=now,
                            started=now,
                            status='0%',
                            user=user)
            task.arguments = ' '.join(options['file'])
            task.save(using=database)

            # Execute
            # TODO: if frePPLe is available as a module, we don't really need to spawn another process.
            os.environ['FREPPLE_HOME'] = settings.FREPPLE_HOME.replace(
                '\\', '\\\\')
            os.environ['FREPPLE_APP'] = settings.FREPPLE_APP
            os.environ['FREPPLE_DATABASE'] = database
            os.environ[
                'PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ[
                    'PATH'] + os.pathsep + settings.FREPPLE_APP
            os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
            if 'DJANGO_SETTINGS_MODULE' not in os.environ:
                os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
            if os.path.exists(
                    os.path.join(os.environ['FREPPLE_HOME'], 'python35.zip')):
                # For the py2exe executable
                os.environ['PYTHONPATH'] = os.path.join(
                    os.environ['FREPPLE_HOME'], 'python%d%d.zip' %
                    (sys.version_info[0], sys.version_info[1])
                ) + os.pathsep + os.path.normpath(os.environ['FREPPLE_APP'])
            else:
                # Other executables
                os.environ['PYTHONPATH'] = os.path.normpath(
                    os.environ['FREPPLE_APP'])
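            # Build the command line: the frepple executable, the quoted XML files and the loadxml.py script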
            cmdline = ['"%s"' % i for i in options['file']]
            cmdline.insert(0, 'frepple')
            cmdline.append('"%s"' % os.path.join(
                settings.FREPPLE_APP, 'freppledb', 'execute', 'loadxml.py'))
            proc = subprocess.run(' '.join(cmdline))
            if proc.returncode:
                raise Exception('Exit code of the batch run is %d' %
                                proc.returncode)

            # Task update
            task.status = 'Done'
            task.finished = datetime.now()

        except Exception as e:
            if task:
                task.status = 'Failed'
                task.message = '%s' % e
                task.finished = datetime.now()
            raise e

        finally:
            if task:
                task.save(using=database)
Example #32
    def handle(self, **options):

        # Pick up the options
        if 'verbosity' in options:
            self.verbosity = int(options['verbosity'] or '1')
        else:
            self.verbosity = 1
        if 'user' in options:
            user = options['user']
        else:
            user = ''
        if 'database' in options:
            self.database = options['database'] or DEFAULT_DB_ALIAS
        else:
            self.database = DEFAULT_DB_ALIAS
        if self.database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               self.database)

        # Pick up configuration parameters
        self.openerp_user = Parameter.getValue("openerp.user", self.database)
        self.openerp_password = Parameter.getValue("openerp.password",
                                                   self.database)
        self.openerp_db = Parameter.getValue("openerp.db", self.database)
        self.openerp_url = Parameter.getValue("openerp.url", self.database)

        # Make sure the debug flag is not set!
        # When it is set, the django database wrapper collects a list of all SQL
        # statements executed and their timings. This consumes plenty of memory
        # and cpu time.
        tmp_debug = settings.DEBUG
        settings.DEBUG = False

        now = datetime.now()
        ac = transaction.get_autocommit(using=self.database)
        transaction.set_autocommit(False, using=self.database)
        task = None
        try:
            # Initialize the task
            if 'task' in options and options['task']:
                try:
                    task = Task.objects.all().using(
                        self.database).get(pk=options['task'])
                except Exception:
                    raise CommandError("Task identifier not found")
                if task.started or task.finished or task.status != "Waiting" or task.name != 'OpenERP export':
                    raise CommandError("Invalid task identifier")
                task.status = '0%'
                task.started = now
            else:
                task = Task(name='OpenERP export',
                            submitted=now,
                            started=now,
                            status='0%',
                            user=user)
            task.save(using=self.database)
            transaction.commit(using=self.database)

            # Log in to the openerp server
            sock_common = xmlrpclib.ServerProxy(self.openerp_url +
                                                'xmlrpc/common')
            self.uid = sock_common.login(self.openerp_db, self.openerp_user,
                                         self.openerp_password)

            # Connect to openerp server
            self.sock = xmlrpclib.ServerProxy(self.openerp_url +
                                              'xmlrpc/object')

            # Create a database connection to the frePPLe database
            self.cursor = connections[self.database].cursor()

            # Upload all data
            self.export_procurement_order()
            task.status = '50%'
            task.save(using=self.database)
            transaction.commit(using=self.database)
            self.export_sales_order()
            task.status = '100%'
            task.save(using=self.database)
            transaction.commit(using=self.database)

            # Log success
            task.status = 'Done'
            task.finished = datetime.now()

        except Exception as e:
            if task:
                task.status = 'Failed'
                task.message = '%s' % e
                task.finished = datetime.now()
            raise e

        finally:
            if task:
                task.save(using=self.database)
            try:
                transaction.commit(using=self.database)
            except Exception:
                pass
            settings.DEBUG = tmp_debug
            transaction.set_autocommit(ac, using=self.database)
Example #33
  def handle(self, *fixture_labels, **options):

    # get the database object
    database = options['database']
    if database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )

    now = datetime.now()
    task = None
    try:
      # Initialize the task
      if options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except Exception:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'loaddata':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
        task.save(using=database, update_fields=['started', 'status'])
      else:
        if options['user']:
          try:
            user = User.objects.all().using(database).get(username=options['user'])
          except Exception:
            raise CommandError("User '%s' not found" % options['user'] )
        else:
          user = None
        task = Task(
          name='loaddata', submitted=now, started=now, status='0%',
          user=user, arguments=' '.join(fixture_labels)
          )
        task.save(using=database)

      # Execute the standard Django command
      super(Command, self).handle(*fixture_labels, **options)

      # If the fixture name doesn't contain '_demo', skip the post-load adjustments below
      for f in fixture_labels:
        if '_demo' not in f.lower():
          return

      with transaction.atomic(using=database, savepoint=False):
        print('updating fixture to current date')

        cursor = connections[database].cursor()
        cursor.execute('''
          select to_timestamp(value,'YYYY-MM-DD hh24:mi:ss') from common_parameter where name = 'currentdate'
        ''')
        currentDate = cursor.fetchone()[0]
        now = datetime.now()
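        # Number of days to shift all dates in the demo fixture forward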
        offset = (now - currentDate).days

        # Update currentdate to now
        cursor.execute('''
          update common_parameter set value = 'now' where name = 'currentdate'
        ''')

        # Update demand due dates
        cursor.execute('''
          update demand set due = due + %s * interval '1 day'
        ''', (offset,))

        # Shift PO/DO/MO start and end dates
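        # 2 * (offset,) supplies the same offset for both %s placeholders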
        cursor.execute('''
          update operationplan set startdate = startdate + %s * interval '1 day', enddate = enddate + %s * interval '1 day'
        ''', 2 * (offset,))

        # Task update
        task.status = 'Done'
        task.finished = datetime.now()
        task.save(using=database, update_fields=['status', 'finished'])

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
        task.save(using=database, update_fields=['status', 'finished', 'message'])
      raise CommandError('%s' % e)
Example #34
    def handle(self, **options):
        # Pick up the options
        now = datetime.now()

        if 'database' in options:
            database = options['database'] or DEFAULT_DB_ALIAS
        else:
            database = DEFAULT_DB_ALIAS
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               database)
        if 'user' in options and options['user']:
            try:
                user = User.objects.all().using(database).get(
                    username=options['user'])
            except Exception:
                raise CommandError("User '%s' not found" % options['user'])
        else:
            user = None

        timestamp = now.strftime("%Y%m%d%H%M%S")
        if database == DEFAULT_DB_ALIAS:
            logfile = 'frepple-%s.log' % timestamp
        else:
            logfile = 'frepple_%s-%s.log' % (database, timestamp)

        task = None
        try:
            # Initialize the task
            if 'task' in options and options['task']:
                try:
                    task = Task.objects.all().using(database).get(
                        pk=options['task'])
                except Exception:
                    raise CommandError("Task identifier not found")
                if task.started or task.finished or task.status != "Waiting" or task.name not in (
                        'runplan', 'frepple_run'):
                    raise CommandError("Invalid task identifier")
                task.status = '0%'
                task.started = now
                task.logfile = logfile
            else:
                task = Task(name='runplan',
                            submitted=now,
                            started=now,
                            status='0%',
                            user=user,
                            logfile=logfile)

            # Validate options
            if 'constraint' in options:
                constraint = int(options['constraint'])
                if constraint < 0 or constraint > 15:
                    raise ValueError("Invalid constraint: %s" %
                                     options['constraint'])
            else:
                constraint = 15
            if 'plantype' in options:
                plantype = int(options['plantype'])
            else:
                plantype = 1

            # Reset environment variables
            # TODO avoid having to delete the environment variables. Use options directly?
            PlanTaskRegistry.autodiscover()
            for i in PlanTaskRegistry.reg:
                if 'env' in options:
                    # Options specified
                    if i.label and i.label[0] in os.environ:
                        del os.environ[i.label[0]]
                elif i.label:
                    # No options specified - default to activate them all
                    os.environ[i.label[0]] = '1'

            # Set environment variables
            if options['env']:
                task.arguments = "--constraint=%d --plantype=%d --env=%s" % (
                    constraint, plantype, options['env'])
                for i in options['env'].split(','):
                    j = i.split('=')
                    if len(j) == 1:
                        os.environ[j[0]] = '1'
                    else:
                        os.environ[j[0]] = j[1]
            else:
                task.arguments = "--constraint=%d --plantype=%d" % (constraint,
                                                                    plantype)
            if options['background']:
                task.arguments += " --background"

            # Log task
            task.save(using=database)

            # Locate commands.py
            import freppledb.common.commands
            cmd = freppledb.common.commands.__file__

            # Prepare environment
            os.environ['FREPPLE_PLANTYPE'] = str(plantype)
            os.environ['FREPPLE_CONSTRAINT'] = str(constraint)
            os.environ['FREPPLE_TASKID'] = str(task.id)
            os.environ['FREPPLE_DATABASE'] = database
            os.environ['FREPPLE_LOGFILE'] = logfile
            os.environ[
                'PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ[
                    'PATH'] + os.pathsep + settings.FREPPLE_APP
            if os.path.isfile(
                    os.path.join(settings.FREPPLE_HOME, 'libfrepple.so')):
                os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
            if 'DJANGO_SETTINGS_MODULE' not in os.environ:
                os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
            os.environ['PYTHONPATH'] = os.path.normpath(settings.FREPPLE_APP)

            if options['background']:
                # Execute as background process on Windows
                if os.name == 'nt':
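                    # 0x08000000 is CREATE_NO_WINDOW: don't open a console window for the child process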
                    subprocess.Popen(['frepple', cmd],
                                     creationflags=0x08000000)
                else:
                    # Execute as background process on Linux
                    subprocess.Popen(['frepple', cmd])
            else:
                # Execute in foreground
                ret = subprocess.call(['frepple', cmd])
                if ret != 0 and ret != 2:
                    # Return code 0 is a successful run
                    # Return code 2 is a run cancelled by a user. That's shown in the status field.
                    raise Exception('Failed with exit code %d' % ret)

                # Task update
                task.status = 'Done'
                task.finished = datetime.now()

        except Exception as e:
            if task:
                task.status = 'Failed'
                task.message = '%s' % e
                task.finished = datetime.now()
            raise e

        finally:
            if task:
                task.save(using=database)
Example #35
class Command(BaseCommand):
    help = """
      Update the ERP system with frePPLe planning information.
      """

    # For the display in the execution screen
    title = _("Export data to %(erp)s") % {"erp": "erp"}

    # For the display in the execution screen
    index = 1500

    requires_system_checks = False

    def get_version(self):
        return VERSION

    def add_arguments(self, parser):
        parser.add_argument("--user", help="User running the command")
        parser.add_argument(
            "--database",
            default=DEFAULT_DB_ALIAS,
            help="Nominates the frePPLe database to load",
        )
        parser.add_argument(
            "--task",
            type=int,
            help="Task identifier (generated automatically if not provided)",
        )

    @staticmethod
    def getHTML(request):
        if "freppledb.erpconnection" in settings.INSTALLED_APPS:
            context = RequestContext(request)

            template = Template("""
        {% load i18n %}
        <form role="form" method="post" action="{{request.prefix}}/execute/launch/erp2frepple/">{% csrf_token %}
        <table>
          <tr>
            <td style="vertical-align:top; padding: 15px">
               <button  class="btn btn-primary"  type="submit" value="{% trans "launch"|capfirst %}">{% trans "launch"|capfirst %}</button>
            </td>
            <td  style="padding: 0px 15px;">{% trans "Export erp data to frePPLe." %}
            </td>
          </tr>
        </table>
        </form>
      """)
            return template.render(context)
        else:
            return None

    def handle(self, **options):
        """
        Uploads approved operationplans to the ERP system.
        """

        # Select the correct frePPLe scenario database
        self.database = options["database"]
        if self.database not in settings.DATABASES.keys():
            raise CommandError("No database settings known for '%s'" %
                               self.database)
        self.cursor_frepple = connections[self.database].cursor()

        # FrePPle user running this task
        if options["user"]:
            try:
                self.user = (User.objects.all().using(
                    self.database).get(username=options["user"]))
            except Exception:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            self.user = None

        # FrePPLe task identifier
        if options["task"]:
            try:
                self.task = (Task.objects.all().using(
                    self.database).get(pk=options["task"]))
            except Exception:
                raise CommandError("Task identifier not found")
            if (self.task.started or self.task.finished
                    or self.task.status != "Waiting"
                    or self.task.name != "frepple2erp"):
                raise CommandError("Invalid task identifier")
        else:
            now = datetime.now()
            self.task = Task(
                name="frepple2erp",
                submitted=now,
                started=now,
                status="0%",
                user=self.user,
            )
        self.task.processid = os.getpid()
        self.task.save(using=self.database)

        try:
            # Open database connection
            print("Connecting to the ERP database")
            with getERPconnection() as erp_connection:
                self.cursor_erp = erp_connection.cursor(self.database)
                try:
                    self.extractPurchaseOrders()
                    self.task.status = "33%"
                    self.task.save(using=self.database)

                    self.extractDistributionOrders()
                    self.task.status = "66%"
                    self.task.save(using=self.database)

                    self.extractManufacturingOrders()
                    self.task.status = "100%"
                    self.task.save(using=self.database)

                    # Optional extra planning output the ERP might be interested in:
                    #  - planned delivery date of sales orders
                    #  - safety stock (Enterprise Edition only)
                    #  - reorder quantities (Enterprise Edition only)
                    #  - forecast (Enterprise Edition only)
                    self.task.status = "Done"
                finally:
                    self.cursor_erp.close()
        except Exception as e:
            self.task.status = "Failed"
            self.task.message = "Failed: %s" % e
        self.task.finished = datetime.now()
        self.task.processid = None
        self.task.save(using=self.database)
        self.cursor_frepple.close()

    def extractPurchaseOrders(self):
        """
        Export purchase orders from frePPLe.
        We export:
          - approved purchase orders.
          - proposed purchase orders that start within the next day and with a total cost less than 500$.
        """
        print("Start exporting purchase orders")
        self.cursor_frepple.execute("""
      select
        item_id, location_id, supplier_id, quantity, startdate, enddate
      from operationplan
      inner join item on item_id = item.name
      where type = 'PO'
        and (
          status = 'approved'
          or (status = 'proposed' and quantity * cost < 500 and startdate < now() + interval '1 day')
          )
      order by supplier_id
      """)
        output = [i for i in self.cursor_frepple.fetchall()]
        execute_batch(
            self.cursor_erp,
            """
            insert into test
            (item, location, location2, quantity, startdate, enddate)
            values (?, ?, ?, ?, ?, ?)
            """,
            output,
        )

    def extractDistributionOrders(self):
        """
        Export distribution orders from frePPLe.
        We export:
          - approved distribution orders.
          - proposed distribution orders that start within the next day and with a total cost less than 500$.
        """
        print("Start exporting distribution orders")
        self.cursor_frepple.execute("""
      select
        item_id, destination_id, origin_id, quantity, startdate, enddate
      from operationplan
      inner join item on item_id = item.name
      where type = 'DO'
        and (
          status = 'approved' 
          or (status = 'proposed' and quantity * cost < 500 and startdate < now() + interval '1 day')
          )
      order by origin_id, destination_id
      """)
        output = [i for i in self.cursor_frepple.fetchall()]
        execute_batch(
            self.cursor_erp,
            """
            insert into test
            (item, location, location2, quantity, startdate, enddate)
            values (%s, %s, %s, %s, %s, %s)
            """,
            output,
        )

    def extractManufacturingOrders(self):
        """
    Export manufacturing orders from frePPle.
    We export:
      - approved manufacturing orders.
      - proposed manufacturing orders that start within the next day.
    """
        print("Start exporting manufacturing orders")
        self.cursor_frepple.execute("""
      select
        item_id, location_id, operation_id, quantity, startdate, enddate
      from operationplan
      where type = 'MO'
        and (
          status = 'approved'
          or (status = 'proposed' and startdate < now() + interval '1 day')
          )
      order by operation_id
      """)
        output = [i for i in self.cursor_frepple.fetchall()]
        execute_batch(
            self.cursor_erp,
            """
            insert into test
            (item, location, location2, quantity, startdate, enddate)
            values (%s, %s, %s, %s, %s, %s)
            """,
            output,
        )
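
The three extract methods above all follow the same pattern: fetch planning rows with the frePPLe cursor, then push them to an ERP staging table in batches with psycopg2's execute_batch. Below is a minimal, self-contained sketch of that pattern; the DSNs and the erp_staging table are placeholders, not part of the frePPLe schema.

# Hedged sketch of the fetch-then-batch-insert pattern used above.
# psycopg2's execute_batch expects %s-style placeholders and groups the
# inserts into pages (100 rows by default) to cut down network round trips.
import psycopg2
from psycopg2.extras import execute_batch

def copy_approved_purchase_orders(frepple_dsn, erp_dsn):
    with psycopg2.connect(frepple_dsn) as frepple_conn, psycopg2.connect(erp_dsn) as erp_conn:
        with frepple_conn.cursor() as c_frepple, erp_conn.cursor() as c_erp:
            c_frepple.execute("""
                select item_id, location_id, supplier_id, quantity, startdate, enddate
                from operationplan
                where type = 'PO' and status = 'approved'
                """)
            execute_batch(
                c_erp,
                """
                insert into erp_staging
                  (item, location, supplier, quantity, startdate, enddate)
                values (%s, %s, %s, %s, %s, %s)
                """,
                c_frepple.fetchall(),
            )
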
Example #36
  def handle(self, **options):
    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    # Pick up options
    force = options['force']
    test = 'FREPPLE_TEST' in os.environ
    if options['user']:
      try:
        user = User.objects.all().get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    # Synchronize the scenario table with the settings
    Scenario.syncWithSettings()

    # Initialize the task
    source = options['source']
    try:
      sourcescenario = Scenario.objects.get(pk=source)
    except:
      raise CommandError("No source database defined with name '%s'" % source)
    now = datetime.now()
    task = None
    if 'task' in options and options['task']:
      try:
        task = Task.objects.all().using(source).get(pk=options['task'])
      except:
        raise CommandError("Task identifier not found")
      if task.started or task.finished or task.status != "Waiting" or task.name != 'frepple_copy':
        raise CommandError("Invalid task identifier")
      task.status = '0%'
      task.started = now
    else:
      task = Task(name='frepple_copy', submitted=now, started=now, status='0%', user=user)
    task.save(using=source)

    # Validate the arguments
    destination = options['destination']
    destinationscenario = None
    try:
      task.arguments = "%s %s" % (source, destination)
      if options['description']:
        task.arguments += ' --description="%s"' % options['description'].replace('"', '\\"')
      if force:
        task.arguments += " --force"
      task.save(using=source)
      try:
        destinationscenario = Scenario.objects.get(pk=destination)
      except:
        raise CommandError("No destination database defined with name '%s'" % destination)
      if source == destination:
        raise CommandError("Can't copy a schema on itself")
      if settings.DATABASES[source]['ENGINE'] != settings.DATABASES[destination]['ENGINE']:
        raise CommandError("Source and destination scenarios have a different engine")
      if sourcescenario.status != 'In use':
        raise CommandError("Source scenario is not in use")
      if destinationscenario.status != 'Free' and not force:
        raise CommandError("Destination scenario is not free")

      # Logging message - always logging in the default database
      destinationscenario.status = 'Busy'
      destinationscenario.save()

      # Copying the data
      # Commenting out the next line is a little more secure, but requires you to create a .pgpass file.
      if settings.DATABASES[source]['PASSWORD']:
        os.environ['PGPASSWORD'] = settings.DATABASES[source]['PASSWORD']
      commandline = "pg_dump -c -Fp %s%s%s%s | psql %s%s%s%s" % (
        settings.DATABASES[source]['USER'] and ("-U %s " % settings.DATABASES[source]['USER']) or '',
        settings.DATABASES[source]['HOST'] and ("-h %s " % settings.DATABASES[source]['HOST']) or '',
        settings.DATABASES[source]['PORT'] and ("-p %s " % settings.DATABASES[source]['PORT']) or '',
        test and settings.DATABASES[source]['TEST']['NAME'] or settings.DATABASES[source]['NAME'],
        settings.DATABASES[destination]['USER'] and ("-U %s " % settings.DATABASES[destination]['USER']) or '',
        settings.DATABASES[destination]['HOST'] and ("-h %s " % settings.DATABASES[destination]['HOST']) or '',
        settings.DATABASES[destination]['PORT'] and ("-p %s " % settings.DATABASES[destination]['PORT']) or '',
        test and settings.DATABASES[destination]['TEST']['NAME'] or settings.DATABASES[destination]['NAME'],
        )

      ret = subprocess.call(commandline, shell=True, stdout=subprocess.DEVNULL , stderr=subprocess.STDOUT)

      if ret:
        raise Exception('Exit code of the database copy command is %d' % ret)

      # Update the scenario table
      destinationscenario.status = 'In use'
      destinationscenario.lastrefresh = datetime.today()
      if 'description' in options:
        destinationscenario.description = options['description']
      destinationscenario.save()

      # Give access to the destination scenario to:
      #  a) the user doing the copy
      #  b) all superusers from the source schema
      User.objects.using(destination).filter(is_superuser=True).update(is_active=True)
      User.objects.using(destination).filter(is_superuser=False).update(is_active=False)
      if user:
        User.objects.using(destination).filter(username=user.username).update(is_active=True)

      # Logging message
      task.status = 'Done'
      task.finished = datetime.now()

      # Update the task in the destination database
      task.message = "Scenario copied from %s" % source
      task.save(using=destination)
      task.message = "Scenario copied to %s" % destination
      
      # Delete any waiting tasks in the new copy.
      # This is needed for situations where the same source is copied to
      # multiple destinations at the same moment.
      Task.objects.all().using(destination).filter(id__gt=task.id).delete()
      
    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      if destinationscenario and destinationscenario.status == 'Busy':
        destinationscenario.status = 'Free'
        destinationscenario.save()
      raise e

    finally:
      if task:
        task.save(using=source)
      settings.DEBUG = tmp_debug
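
The copy in this example is a single pg_dump piped into psql. A small helper such as the sketch below (not part of frePPLe) can make the command construction easier to follow than the eight-placeholder format string above; it takes plain dicts shaped like Django's settings.DATABASES entries.

# Hedged sketch: build the "pg_dump ... | psql ..." pipeline from two
# Django-style database settings dicts. Keys mirror settings.DATABASES.
def connection_flags(db):
    flags = []
    if db.get('USER'):
        flags.append('-U %s' % db['USER'])
    if db.get('HOST'):
        flags.append('-h %s' % db['HOST'])
    if db.get('PORT'):
        flags.append('-p %s' % db['PORT'])
    return ' '.join(flags)

def build_copy_command(source_db, destination_db):
    return "pg_dump -c -Fp %s %s | psql %s %s" % (
        connection_flags(source_db), source_db['NAME'],
        connection_flags(destination_db), destination_db['NAME'],
    )

# Example:
#   build_copy_command({'USER': 'frepple', 'NAME': 'scenario1'},
#                      {'USER': 'frepple', 'NAME': 'scenario2'})
#   -> 'pg_dump -c -Fp -U frepple scenario1 | psql -U frepple scenario2'
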
Example #37
    def handle(self, **options):
        # Pick up the options
        database = options['database']
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               database)
        if options['user']:
            try:
                user = User.objects.all().using(database).get(
                    username=options['user'])
            except:
                raise CommandError("User '%s' not found" % options['user'])
        else:
            user = None

        now = datetime.now()
        task = None
        param = None
        try:
            # Initialize the task
            if options['task']:
                try:
                    task = Task.objects.all().using(database).get(
                        pk=options['task'])
                except:
                    raise CommandError("Task identifier not found")
                if task.started or task.finished or task.status != "Waiting" or task.name not in (
                        'frepple_simulation', 'simulation'):
                    raise CommandError("Invalid task identifier")
                task.status = '0%'
                task.started = now
            else:
                task = Task(name='simulation',
                            submitted=now,
                            started=now,
                            status='0%',
                            user=user)

            # Validate options
            task.arguments = ""
            horizon = int(options['horizon'])
            if horizon < 0:
                raise ValueError("Invalid horizon: %s" % options['horizon'])
            task.arguments += "--horizon=%d" % horizon
            step = int(options['step'])
            if step < 0:
                raise ValueError("Invalid step: %s" % options['step'])
            task.arguments += " --step=%d" % step
            verbosity = int(options['verbosity'])

            # Log task
            task.save(using=database)

            # Load the initial status
            if options.get('initial', None):
                if verbosity > 0:
                    print("Erasing simulation database")
                management.call_command('empty',
                                        database=database,
                                        verbosity=verbosity)
                if verbosity > 0:
                    print("Loading initial data")
                management.call_command('loaddata',
                                        options.get('initial'),
                                        database=database,
                                        verbosity=verbosity)

            # Get current date
            param = Parameter.objects.all().using(database).get_or_create(
                name='currentdate')[0]
            try:
                curdate = datetime.strptime(param.value, "%Y-%m-%d %H:%M:%S")
            except:
                curdate = datetime.now()
            curdate = curdate.date()

            # Compute how many simulation steps we need
            bckt_list = []
            tmp = 0
            while tmp <= horizon:
                bckt_list.append(curdate + timedelta(days=tmp))
                tmp += step
            bckt_list_len = len(bckt_list)

            # Create the simulator class
            if options.get('simulator', None):
                cls = load_class(options['simulator'])
                simulator = cls(database=database, verbosity=verbosity)
            else:
                simulator = Simulator(database=database, verbosity=verbosity)
            simulator.buckets = 1

            # Loop over all dates in the simulation horizon
            idx = 0
            strt = None
            nd = None
            for bckt in bckt_list:
                if nd:
                    strt = nd
                    nd = bckt
                else:
                    nd = bckt
                    continue

                # Start message
                task.status = "%.0f%%" % (100.0 * idx / bckt_list_len)
                task.message = 'Simulating bucket from %s to %s ' % (strt, nd)
                task.save(using=database)
                idx += 1
                simulator.buckets += 1

                if verbosity > 0:
                    print(
                        "\nStart simulating bucket from %s to %s (%s out of %s)"
                        % (strt, nd, idx, bckt_list_len))

                # Update currentdate parameter
                param.value = strt.strftime("%Y-%m-%d %H:%M:%S")
                param.save(using=database)

                # Initialization of the bucket
                if verbosity > 1:
                    print("  Starting the bucket")
                with transaction.atomic(using=database):
                    simulator.start_bucket(strt, nd)

                # Generate new demand records
                if verbosity > 1:
                    print("  Receive new orders from customers")
                with transaction.atomic(using=database):
                    simulator.generate_customer_demand(strt, nd)

                # Generate the constrained plan
                if verbosity > 1:
                    print("  Generating plan...")
                management.call_command('runplan', database=database)

                if options['pause']:
                    print(
                        "\nYou can analyze the plan in the bucket in the user interface now..."
                    )
                    input("\nPress Enter to continue the simulation...\n")

                # Release new purchase orders
                if verbosity > 1:
                    print("  Create new purchase orders")
                with transaction.atomic(using=database):
                    simulator.create_purchase_orders(strt, nd)

                # Release new manufacturing orders
                if verbosity > 1:
                    print("  Create new manufacturing orders")
                with transaction.atomic(using=database):
                    simulator.create_manufacturing_orders(strt, nd)

                # Release new distribution orders
                if verbosity > 1:
                    print("  Create new distribution orders")
                with transaction.atomic(using=database):
                    simulator.create_distribution_orders(strt, nd)

                # Receive open purchase orders
                if verbosity > 1:
                    print("  Receive open purchase orders")
                with transaction.atomic(using=database):
                    simulator.receive_purchase_orders(strt, nd)

                # Receive open distribution orders
                if verbosity > 1:
                    print("  Receive open distribution orders")
                with transaction.atomic(using=database):
                    simulator.receive_distribution_orders(strt, nd)

                # Finish open manufacturing orders
                if verbosity > 1:
                    print("  Finish open manufacturing orders")
                with transaction.atomic(using=database):
                    simulator.finish_manufacturing_orders(strt, nd)

                # Ship demand to customers
                if verbosity > 1:
                    print("  Ship orders to customers")
                with transaction.atomic(using=database):
                    simulator.ship_customer_demand(strt, nd)

                # Finish of the bucket
                if verbosity > 1:
                    print("  Ending the bucket")
                with transaction.atomic(using=database):
                    simulator.end_bucket(strt, nd)

            # Report statistics from the simulation.
            # The simulator class collected these results during its run.
            if verbosity > 1:
                print("Displaying final simulation metrics")
            with transaction.atomic(using=database):
                simulator.show_metrics()

            # Task update
            task.status = 'Done'
            task.message = "Simulated from %s till %s" % (bckt_list[0],
                                                          bckt_list[-1])
            task.finished = datetime.now()

        except Exception as e:
            if task:
                task.status = 'Failed'
                task.message = '%s' % e
                task.finished = datetime.now()
            raise e

        finally:
            # Final task status
            if task:
                task.save(using=database)
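
The simulation walks over consecutive (start, end) date pairs derived from the --horizon and --step options. The sketch below isolates that bucket computation with plain datetime objects; it mirrors the bckt_list loop above and is illustrative only.

# Sketch of the bucket computation driving the simulation loop above:
# turn a horizon (in days) and a step size into consecutive (start, end) pairs.
from datetime import date, timedelta

def simulation_buckets(curdate, horizon, step):
    edges = []
    tmp = 0
    while tmp <= horizon:
        edges.append(curdate + timedelta(days=tmp))
        tmp += step
    # The command skips the first edge and simulates each [previous, current) interval.
    return list(zip(edges, edges[1:]))

# Example: simulation_buckets(date(2024, 1, 1), horizon=21, step=7)
# -> [(2024-01-01, 2024-01-08), (2024-01-08, 2024-01-15), (2024-01-15, 2024-01-22)]
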
Example #38
    def handle(self, *args, **options):
        # Pick up the options
        if 'database' in options:
            self.database = options['database'] or DEFAULT_DB_ALIAS
        else:
            self.database = DEFAULT_DB_ALIAS
        if self.database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               self.database)
        if 'user' in options and options['user']:
            try:
                self.user = User.objects.all().using(
                    self.database).get(username=options['user'])
            except:
                raise CommandError("User '%s' not found" % options['user'])
        else:
            self.user = None

        now = datetime.now()

        task = None
        self.logfile = None
        errors = 0  # initialized up front so the finally block below can always reference it
        try:
            # Initialize the task
            if 'task' in options and options['task']:
                try:
                    task = Task.objects.all().using(
                        self.database).get(pk=options['task'])
                except:
                    raise CommandError("Task identifier not found")
                if task.started or task.finished or task.status != "Waiting" or task.name != 'import from folder':
                    raise CommandError("Invalid task identifier")
                task.status = '0%'
                task.started = now
            else:
                task = Task(name='import from folder',
                            submitted=now,
                            started=now,
                            status='0%',
                            user=self.user)
            task.arguments = ' '.join(['"%s"' % i for i in args])
            task.save(using=self.database)

            # Choose the right self.delimiter and language
            self.delimiter = get_format('DECIMAL_SEPARATOR',
                                        settings.LANGUAGE_CODE,
                                        True) == ',' and ';' or ','
            translation.activate(settings.LANGUAGE_CODE)

            # Execute
            errors = 0
            if os.path.isdir(
                    settings.DATABASES[self.database]['FILEUPLOADFOLDER']):

                # Open the logfile
                self.logfile = open(
                    os.path.join(
                        settings.DATABASES[self.database]['FILEUPLOADFOLDER'],
                        'importfromfolder.log'), "a")
                print("%s Started import from folder\n" % datetime.now(),
                      file=self.logfile)

                all_models = [(ct.model_class(), ct.pk)
                              for ct in ContentType.objects.all()
                              if ct.model_class()]
                models = []
                for ifile in os.listdir(
                        settings.DATABASES[self.database]['FILEUPLOADFOLDER']):
                    if not ifile.endswith('.csv'):
                        continue
                    filename0 = ifile.split('.')[0]

                    model = None
                    contenttype_id = None
                    for m, ct in all_models:
                        if filename0.lower() in (
                                m._meta.model_name.lower(),
                                m._meta.verbose_name.lower(),
                                m._meta.verbose_name_plural.lower()):
                            model = m
                            contenttype_id = ct
                            print("%s Matched a model to file: %s" %
                                  (datetime.now(), ifile),
                                  file=self.logfile)
                            break

                    if not model or model in EXCLUDE_FROM_BULK_OPERATIONS:
                        print("%s Ignoring data in file: %s" %
                              (datetime.now(), ifile),
                              file=self.logfile)
                    elif self.user and not self.user.has_perm(
                            '%s.%s' %
                        (model._meta.app_label,
                         get_permission_codename('add', model._meta))):
                        # Check permissions
                        print("%s You don't have permissions to add: %s" %
                              (datetime.now(), ifile),
                              file=self.logfile)
                    else:
                        deps = set([model])
                        GridReport.dependent_models(model, deps)

                        models.append((ifile, model, contenttype_id, deps))

                # Sort the list of models, based on dependencies between models
                models = GridReport.sort_models(models)

                i = 0
                cnt = len(models)
                for ifile, model, contenttype_id, dependencies in models:
                    i += 1
                    print("%s Started processing data in file: %s" %
                          (datetime.now(), ifile),
                          file=self.logfile)
                    filetoparse = os.path.join(
                        os.path.abspath(settings.DATABASES[self.database]
                                        ['FILEUPLOADFOLDER']), ifile)
                    errors += self.parseCSVloadfromfolder(model, filetoparse)
                    print("%s Finished processing data in file: %s\n" %
                          (datetime.now(), ifile),
                          file=self.logfile)
                    task.status = str(int(10 + i / cnt * 80)) + '%'
                    task.save(using=self.database)

            else:
                errors += 1
                cnt = 0
                print("%s Failed, folder does not exist" % datetime.now(),
                      file=self.logfile)

            # Task update
            if errors:
                task.status = 'Failed'
                if not cnt:
                    task.message = "Destination folder does not exist"
                else:
                    task.message = "Uploaded %s data files with %s errors" % (
                        cnt, errors)
            else:
                task.status = 'Done'
                task.message = "Uploaded %s data files" % cnt
            task.finished = datetime.now()

        except Exception as e:
            print("%s Failed" % datetime.now(), file=self.logfile)
            if task:
                task.status = 'Failed'
                task.message = '%s' % e
            raise e

        finally:
            if task:
                if not errors:
                    task.status = '100%'
                else:
                    task.status = 'Failed'
                task.finished = datetime.now()
                task.save(using=self.database)
            if self.logfile:
                print('%s End of import from folder\n' % datetime.now(),
                      file=self.logfile)
                self.logfile.close()
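
The import matches each *.csv file in the upload folder to a Django model by comparing the base filename against the model name and its verbose names. The sketch below reproduces that matching rule without Django; the alias table is a hypothetical stand-in for the ContentType registry.

# Sketch of the filename-to-model matching rule used above.
MODEL_ALIASES = [
    ('item', {'item', 'items'}),
    ('location', {'location', 'locations'}),
    ('purchaseorder', {'purchase order', 'purchase orders'}),
]

def match_model(filename):
    base = filename.split('.')[0].lower()
    for model_name, aliases in MODEL_ALIASES:
        if base == model_name or base in aliases:
            return model_name
    return None

# Example: match_model('Purchase Orders.csv') -> 'purchaseorder'
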
Example #39
  def handle(self, *args, **options):
    # Pick up the options
    now = datetime.now()
    self.database = options['database']
    if self.database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % self.database )

    if options['user']:
      try:
        self.user = User.objects.all().using(self.database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      self.user = None

    timestamp = now.strftime("%Y%m%d%H%M%S")
    if self.database == DEFAULT_DB_ALIAS:
      logfile = 'exporttofolder-%s.log' % timestamp
    else:
      logfile = 'exporttofolder_%s-%s.log' % (self.database, timestamp)

    try:
      handler = logging.FileHandler(os.path.join(settings.FREPPLE_LOGDIR, logfile), encoding='utf-8')
      # handler.setFormatter(logging.Formatter(settings.LOGGING['formatters']['simple']['format']))
      logger.addHandler(handler)
      logger.propagate = False
    except Exception as e:
      print("%s Failed to open logfile %s: %s" % (datetime.now(), logfile, e))

    task = None
    errors = 0
    try:
      # Initialize the task
      if options['task']:
        try:
          task = Task.objects.all().using(self.database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name not in ('frepple_exporttofolder', 'exporttofolder'):
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
        task.logfile = logfile
      else:
        task = Task(name='exporttofolder', submitted=now, started=now, status='0%', user=self.user, logfile=logfile)
      task.arguments = ' '.join(['"%s"' % i for i in args])
      task.processid = os.getpid()
      task.save(using=self.database)

      # Execute
      if os.path.isdir(settings.DATABASES[self.database]['FILEUPLOADFOLDER']):
        if not os.path.isdir(os.path.join(settings.DATABASES[self.database]['FILEUPLOADFOLDER'], 'export')):
          try:
            os.makedirs(os.path.join(settings.DATABASES[self.database]['FILEUPLOADFOLDER'], 'export'))
          except OSError as exception:
            if exception.errno != errno.EEXIST:
              raise

        logger.info("%s Started export to folder\n" % datetime.now())

        cursor = connections[self.database].cursor()

        task.status = '0%'
        task.save(using=self.database)

        i = 0
        cnt = len(self.statements)

        # Calling all the pre-sql statements
        for stmt in self.pre_sql_statements:
          try:
            logger.info("Executing pre-statement '%s'" % stmt)
            cursor.execute(stmt)
            logger.info("%s record(s) modified" % cursor.rowcount)
          except:
            errors += 1
            logger.error("An error occurred when executing statement '%s'" % stmt)

        for cfg in self.statements:
          # Validate filename
          filename = cfg.get('filename', None)
          if not filename:
            raise Exception("Missing filename in export configuration")
          folder = cfg.get('folder', None)
          if not folder:
            raise Exception("Missing folder in export configuration for %s" % filename)
          logger.info("%s Started export of %s" % (datetime.now(), filename))

          # Make sure export folder exists
          exportFolder = os.path.join(settings.DATABASES[self.database]['FILEUPLOADFOLDER'], folder)
          if not os.path.isdir(exportFolder):
            os.makedirs(exportFolder)

          try:
            reportclass = cfg.get('report', None)
            sql = cfg.get('sql', None)
            if reportclass:
              # Export from report class

              # Create a dummy request
              factory = RequestFactory()
              request = factory.get("/dummy/", cfg.get('data', {}))
              if self.user:
                request.user = self.user
              else:
                request.user = User.objects.all().get(username="******")
              request.database = self.database
              request.LANGUAGE_CODE = settings.LANGUAGE_CODE
              request.prefs = cfg.get('prefs', None)

              # Initialize the report
              if hasattr(reportclass, "initialize"):
                reportclass.initialize(request)
              if not reportclass._attributes_added and reportclass.model:
                reportclass._attributes_added = True
                for f in reportclass.getAttributeFields(reportclass.model):
                  reportclass.rows += (f,)
              if reportclass.hasTimeBuckets:
                reportclass.getBuckets(request)

              # Write the report file
              datafile = open(os.path.join(exportFolder, filename), "wb")
              if filename.endswith(".xlsx"):
                reportclass._generate_spreadsheet_data(request, datafile, **cfg.get('data', {}))
              elif filename.endswith(".csv"):
                for r in reportclass._generate_csv_data(request, **cfg.get('data', {})):
                  datafile.write(
                    r.encode(settings.CSV_CHARSET)
                    if isinstance(r, str) else r
                    )
              else:
                raise Exception("Unknown output format for %s" % filename)
            elif sql:
              # Exporting using SQL
              if filename.lower().endswith(".gz"):
                datafile = gzip.open(os.path.join(exportFolder, filename), "w")
              else:
                datafile = open(os.path.join(exportFolder, filename), "w")
              cursor.copy_expert(sql, datafile)
            else:
              raise Exception("Unknown export type for %s" % filename)
            datafile.close()
            i += 1

          except Exception as e:
            errors += 1
            logger.error("%s Failed to export to %s" % (datetime.now(), filename))
            if task:
              task.message = 'Failed to export %s' % filename

          task.status = str(int(i / cnt * 100)) + '%'
          task.save(using=self.database)

        logger.info("%s Exported %s file(s)\n" % (datetime.now(), cnt - errors))

        for stmt in self.post_sql_statements:
          try:
            logger.info("Executing post-statement '%s'" % stmt)
            cursor.execute(stmt)
            logger.info("%s record(s) modified" % cursor.rowcount)
          except:
            errors += 1
            logger.error("An error occured when executing statement '%s'" % stmt)

      else:
        errors += 1
        logger.error("%s Failed, folder does not exist" % datetime.now())
        task.message = "Destination folder does not exist"
        task.save(using=self.database)

    except Exception as e:
      logger.error("%s Failed to export: %s" % (datetime.now(), e))
      errors += 1
      if task:
        task.message = 'Failed to export'

    finally:
      logger.info('%s End of export to folder\n' % datetime.now())
      if task:
        if not errors:
          task.status = '100%'
          task.message = "Exported %s data files" % (cnt)
        else:
          task.status = 'Failed'
          #  task.message = "Exported %s data files, %s failed" % (cnt-errors, errors)
        task.finished = datetime.now()
        task.processid = None
        task.save(using=self.database)
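
For the SQL-driven exports, the command streams a COPY statement straight into a file (optionally gzipped) with psycopg2's copy_expert. A minimal stand-alone sketch of that path is shown below; the DSN, query and filename are placeholders.

# Sketch of the SQL export path above: stream a COPY ... TO STDOUT result
# into a gzipped CSV file.
import gzip
import psycopg2

def export_query_to_gz(dsn, sql, filename):
    with psycopg2.connect(dsn) as conn:
        with conn.cursor() as cursor, gzip.open(filename, 'wb') as datafile:
            cursor.copy_expert(sql, datafile)

# Example:
# export_query_to_gz(
#     "dbname=frepple",
#     "copy (select name, cost from item) to stdout with csv header",
#     "/tmp/item.csv.gz",
# )
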
Example #40
  def handle(self, **options):
    # Pick up the options
    now = datetime.now()

    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    timestamp = now.strftime("%Y%m%d%H%M%S")
    if database == DEFAULT_DB_ALIAS:
      logfile = 'frepple-%s.log' % timestamp
    else:
      logfile = 'frepple_%s-%s.log' % (database, timestamp)

    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name not in ('runplan', 'frepple_run'):
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
        task.logfile = logfile
      else:
        task = Task(name='runplan', submitted=now, started=now, status='0%', user=user, logfile=logfile)

      # Validate options
      if 'constraint' in options:
        constraint = int(options['constraint'])
        if constraint < 0 or constraint > 15:
          raise ValueError("Invalid constraint: %s" % options['constraint'])
      else:
        constraint = 15
      if 'plantype' in options:
        plantype = int(options['plantype'])
      else:
        plantype = 1

      # Reset environment variables
      # TODO avoid having to delete the environment variables. Use options directly?
      PlanTaskRegistry.autodiscover()
      for i in PlanTaskRegistry.reg:
        if 'env' in options:
          # Options specified
          if i.label and i.label[0] in os.environ:
            del os.environ[i.label[0]]
        elif i.label:
          # No options specified - default to activate them all
          os.environ[i.label[0]] = '1'

      # Set environment variables
      if options['env']:
        task.arguments = "--constraint=%d --plantype=%d --env=%s" % (constraint, plantype, options['env'])
        for i in options['env'].split(','):
          j = i.split('=')
          if len(j) == 1:
            os.environ[j[0]] = '1'
          else:
            os.environ[j[0]] = j[1]
      else:
        task.arguments = "--constraint=%d --plantype=%d" % (constraint, plantype)
      if options['background']:
        task.arguments += " --background"

      # Log task
      # Different from the other tasks the frepple engine will write the processid
      task.save(using=database)

      # Locate commands.py
      import freppledb.common.commands
      cmd = freppledb.common.commands.__file__

      def setlimits():
        import resource
        if settings.MAXMEMORYSIZE:
          resource.setrlimit(
            resource.RLIMIT_AS,
            (settings.MAXMEMORYSIZE * 1024 * 1024, (settings.MAXMEMORYSIZE + 10) * 1024 * 1024)
            )
        if settings.MAXCPUTIME:
          resource.setrlimit(
            resource.RLIMIT_CPU,
            (settings.MAXCPUTIME, settings.MAXCPUTIME + 5)
            )
        # Limiting the file size is a bit tricky as this limit not only applies to the log
        # file, but also to temp files during the export
        # if settings.MAXTOTALLOGFILESIZE:
        #  resource.setrlimit(
        #    resource.RLIMIT_FSIZE,
        #   (settings.MAXTOTALLOGFILESIZE * 1024 * 1024, (settings.MAXTOTALLOGFILESIZE + 1) * 1024 * 1024)
        #   )

      # Prepare environment
      os.environ['FREPPLE_PLANTYPE'] = str(plantype)
      os.environ['FREPPLE_CONSTRAINT'] = str(constraint)
      os.environ['FREPPLE_TASKID'] = str(task.id)
      os.environ['FREPPLE_DATABASE'] = database
      os.environ['FREPPLE_LOGFILE'] = logfile
      os.environ['FREPPLE_PROCESSNAME'] = settings.DATABASES[database]['NAME'].replace('demo', '')
      os.environ['PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ['PATH'] + os.pathsep + settings.FREPPLE_APP
      if os.path.isfile(os.path.join(settings.FREPPLE_HOME, 'libfrepple.so')):
        os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
      if 'DJANGO_SETTINGS_MODULE' not in os.environ:
        os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
      os.environ['PYTHONPATH'] = os.path.normpath(settings.FREPPLE_APP)
      libdir = os.path.join(os.path.normpath(settings.FREPPLE_HOME), 'lib')
      if os.path.isdir(libdir):
        # Folders used by the Windows version
        os.environ['PYTHONPATH'] += os.pathsep + libdir
        if os.path.isfile(os.path.join(libdir, 'library.zip')):
          os.environ['PYTHONPATH'] += os.pathsep + os.path.join(libdir, 'library.zip')

      if options['background']:
        # Execute as background process on Windows
        if os.name == 'nt':
          subprocess.Popen(['frepple', cmd], creationflags=0x08000000)
        else:
          # Execute as background process on Linux
          subprocess.Popen(['frepple', cmd], preexec_fn=setlimits)
      else:
        if os.name == 'nt':
          # Execute in foreground on Windows
          ret = subprocess.call(['frepple', cmd])
        else:
          # Execute in foreground on Linux
          ret = subprocess.call(['frepple', cmd], preexec_fn=setlimits)
        if ret != 0 and ret != 2:
          # Return code 0 is a successful run
          # Return code 2 is a run cancelled by a user. That's shown in the status field.
          raise Exception('Failed with exit code %d' % ret)

      # Reread the task from the database and update it
      if not options['background']:
        task = Task.objects.all().using(database).get(pk=task.id)
        task.processid = None
        task.status = 'Done'
        task.finished = datetime.now()
        task.save(using=database)

    except Exception as e:
      if task:
        task = Task.objects.all().using(database).get(pk=task.id)
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
        task.processid = None
        task.save(using=database)
      raise e
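
On Linux the planning engine is launched with a preexec_fn that caps its memory and CPU usage through the resource module. The sketch below shows that pattern in isolation; the limits are placeholders and the 'frepple' executable is assumed to be on the PATH.

# Sketch of the setlimits pattern above (Linux/Unix only): preexec_fn runs in
# the child process right before exec, so the limits apply to the spawned
# planning engine and not to the Django process itself.
import resource

MAX_MEMORY_MB = 4096       # placeholder, mirrors settings.MAXMEMORYSIZE
MAX_CPU_SECONDS = 3600     # placeholder, mirrors settings.MAXCPUTIME

def set_limits():
    resource.setrlimit(
        resource.RLIMIT_AS,
        (MAX_MEMORY_MB * 1024 * 1024, (MAX_MEMORY_MB + 10) * 1024 * 1024),
    )
    resource.setrlimit(
        resource.RLIMIT_CPU,
        (MAX_CPU_SECONDS, MAX_CPU_SECONDS + 5),
    )

# Mirrors the foreground Linux branch above:
#   ret = subprocess.call(['frepple', cmd], preexec_fn=set_limits)
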
Example #41
def wrapTask(request, action):
    # Allow only post
    if request.method != "POST":
        raise Exception("Only post requests allowed")

    # Check user permissions
    if not request.user.has_perm("execute"):
        raise Exception("Missing execution privileges")

    # Parse the posted parameters as arguments for an asynchronous task to add to the queue.    TODO MAKE MODULAR WITH SEPARATE TASK CLASS
    worker_database = request.database

    now = datetime.now()
    task = None
    # A
    if action == "frepple_run":
        if not request.user.has_perm("execute.generate_plan"):
            raise Exception("Missing execution privileges")
        constraint = 0
        for value in request.POST.getlist("constraint"):
            try:
                constraint += int(value)
            except:
                pass
        task = Task(name="generate plan", submitted=now, status="Waiting", user=request.user)
        task.arguments = "--constraint=%s --plantype=%s" % (constraint, request.POST.get("plantype"))
        env = []
        if request.POST.get("odoo_read", None) == "1":
            env.append("odoo_read_1")
            request.session["odoo_read"] = True
        else:
            request.session["odoo_read"] = False
        if request.POST.get("odoo_write", None) == "1":
            env.append("odoo_write")
            request.session["odoo_write"] = True
        else:
            request.session["odoo_write"] = False
        if env:
            task.arguments = "%s --env=%s" % (task.arguments, ",".join(env))
        task.save(using=request.database)
        # Update the session object
        request.session["plantype"] = request.POST.get("plantype")
        request.session["constraint"] = constraint
    # B
    elif action == "frepple_createmodel":
        task = Task(name="generate model", submitted=now, status="Waiting", user=request.user)
        task.arguments = (
            "--cluster=%s --demand=%s --forecast_per_item=%s --level=%s --resource=%s "
            "--resource_size=%s --components=%s --components_per=%s --deliver_lt=%s --procure_lt=%s"
            % (
                request.POST["clusters"],
                request.POST["demands"],
                request.POST["fcst"],
                request.POST["levels"],
                request.POST["rsrc_number"],
                request.POST["rsrc_size"],
                request.POST["components"],
                request.POST["components_per"],
                request.POST["deliver_lt"],
                request.POST["procure_lt"],
            )
        )
        task.save(using=request.database)
    # C
    elif action == "frepple_flush":
        task = Task(name="empty database", submitted=now, status="Waiting", user=request.user)
        if not request.POST.get("all"):
            task.arguments = "--models=%s" % ",".join(request.POST.getlist("entities"))
        task.save(using=request.database)
    # D
    elif action == "loaddata":
        task = Task(
            name="load dataset", submitted=now, status="Waiting", user=request.user, arguments=request.POST["datafile"]
        )
        task.save(using=request.database)
    # E
    elif action == "frepple_copy":
        worker_database = DEFAULT_DB_ALIAS
        if "copy" in request.POST:
            if not request.user.has_perm("execute.copy_scenario"):
                raise Exception("Missing execution privileges")
            source = request.POST.get("source", DEFAULT_DB_ALIAS)
            for sc in Scenario.objects.all():
                if request.POST.get(sc.name, "off") == "on" and sc.status == "Free":
                    task = Task(
                        name="copy scenario",
                        submitted=now,
                        status="Waiting",
                        user=request.user,
                        arguments="%s %s" % (source, sc.name),
                    )
                    task.save()
        elif "release" in request.POST:
            # Note: release is immediate and synchronous.
            if not request.user.has_perm("execute.release_scenario"):
                raise Exception("Missing execution privileges")
            for sc in Scenario.objects.all():
                if request.POST.get(sc.name, "off") == "on" and sc.status != "Free":
                    sc.status = "Free"
                    sc.lastrefresh = now
                    sc.save()
                    if request.database == sc.name:
                        # Erasing the database that is currently selected.
                        request.prefix = ""
        elif "update" in request.POST:
            # Note: update is immediate and synchronous.
            for sc in Scenario.objects.all():
                if request.POST.get(sc.name, "off") == "on":
                    sc.description = request.POST.get("description", None)
                    sc.save()
        else:
            raise Exception("Invalid scenario task")
    # F
    elif action == "frepple_backup":
        task = Task(name="backup database", submitted=now, status="Waiting", user=request.user)
        task.save(using=request.database)
    # G
    elif action == "frepple_createbuckets":
        task = Task(name="generate buckets", submitted=now, status="Waiting", user=request.user)
        task.arguments = "--start=%s --end=%s --weekstart=%s" % (
            request.POST["start"],
            request.POST["end"],
            request.POST["weekstart"],
        )
        task.save(using=request.database)
    # H
    elif action == "openbravo_import" and "freppledb.openbravo" in settings.INSTALLED_APPS:
        task = Task(name="Openbravo import", submitted=now, status="Waiting", user=request.user)
        task.arguments = "--delta=%s" % request.POST["delta"]
        task.save(using=request.database)
    # I
    elif action == "openbravo_export" and "freppledb.openbravo" in settings.INSTALLED_APPS:
        task = Task(name="Openbravo export", submitted=now, status="Waiting", user=request.user)
        if "filter_export" in request.POST:
            task.arguments = "--filter"
        task.save(using=request.database)
    elif action == "odoo_import" and "freppledb.odoo" in settings.INSTALLED_APPS:
        task = Task(name="Odoo import", submitted=now, status="Waiting", user=request.user)
        # task.arguments = "--filter"
        task.save(using=request.database)
    # J
    elif action == "odoo_export" and "freppledb.odoo" in settings.INSTALLED_APPS:
        task = Task(name="Odoo export", submitted=now, status="Waiting", user=request.user)
        if "filter_export" in request.POST:
            task.arguments = "--filter"
        task.save(using=request.database)
    # K
    elif action == "frepple_loadfromfolder":
        task = Task(name="load from folder", submitted=now, status="Waiting", user=request.user)
        task.save(using=request.database)
    else:
        # Task not recognized
        raise Exception("Invalid launching task")

    # Launch a worker process, making sure it inherits the right
    # environment variables from this parent
    os.environ["FREPPLE_CONFIGDIR"] = settings.FREPPLE_CONFIGDIR
    if task and not checkActive(worker_database):
        if os.path.isfile(os.path.join(settings.FREPPLE_APP, "frepplectl.py")):
            if "python" in sys.executable:
                # Development layout
                Popen(
                    [
                        sys.executable,  # Python executable
                        os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
                        "frepple_runworker",
                        "--database=%s" % worker_database,
                    ]
                )
            else:
                # Deployment on Apache web server
                Popen(
                    [
                        "python",
                        os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
                        "frepple_runworker",
                        "--database=%s" % worker_database,
                    ],
                    creationflags=0x08000000,
                )
        elif sys.executable.find("freppleserver.exe") >= 0:
            # Py2exe executable
            Popen(
                [
                    sys.executable.replace("freppleserver.exe", "frepplectl.exe"),  # frepplectl executable
                    "frepple_runworker",
                    "--database=%s" % worker_database,
                ],
                creationflags=0x08000000,
            )  # Do not create a console window
        else:
            # Linux standard installation
            Popen(["frepplectl", "frepple_runworker", "--database=%s" % worker_database])
    return task
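
In the "frepple_run" branch above, the selected constraints arrive as a multi-value POST field and are summed into a single bitmask that ends up in Task.arguments. The sketch below replays that packing with a plain dict standing in for request.POST; the helper name and values are illustrative.

# Sketch of how the plan-generation branch above builds Task.arguments.
def build_runplan_arguments(posted):
    constraint = 0
    for value in posted.get('constraint', []):
        try:
            constraint += int(value)
        except ValueError:
            pass
    arguments = "--constraint=%s --plantype=%s" % (constraint, posted.get('plantype', 1))
    env = [flag for flag in ('odoo_read', 'odoo_write') if posted.get(flag) == '1']
    if env:
        arguments += " --env=%s" % ",".join(env)
    return arguments

# Example:
#   build_runplan_arguments({'constraint': ['1', '4'], 'plantype': '1', 'odoo_write': '1'})
#   -> '--constraint=5 --plantype=1 --env=odoo_write'
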
Example #42
    def handle(self, **options):
        # Pick up the options
        database = options["database"]
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               database)
        if options["user"]:
            try:
                user = User.objects.all().using(database).get(
                    username=options["user"])
            except:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            user = None

        now = datetime.now()
        task = None
        try:
            # Initialize the task
            if options["task"]:
                try:
                    task = Task.objects.all().using(database).get(
                        pk=options["task"])
                except:
                    raise CommandError("Task identifier not found")
                if (task.started or task.finished or task.status != "Waiting"
                        or task.name not in ("frepple_loadxml", "loadxml")):
                    raise CommandError("Invalid task identifier")
                task.status = "0%"
                task.started = now
            else:
                task = Task(name="loadxml",
                            submitted=now,
                            started=now,
                            status="0%",
                            user=user)
            task.arguments = " ".join(options["file"])
            task.processid = os.getpid()
            task.save(using=database)

            # Execute
            # TODO: if frePPLe is available as a module, we don't really need to spawn another process.
            os.environ["FREPPLE_HOME"] = settings.FREPPLE_HOME.replace(
                "\\", "\\\\")
            os.environ["FREPPLE_APP"] = settings.FREPPLE_APP
            os.environ["FREPPLE_DATABASE"] = database
            os.environ["PATH"] = (settings.FREPPLE_HOME + os.pathsep +
                                  os.environ["PATH"] + os.pathsep +
                                  settings.FREPPLE_APP)
            os.environ["LD_LIBRARY_PATH"] = settings.FREPPLE_HOME
            if "DJANGO_SETTINGS_MODULE" not in os.environ:
                os.environ["DJANGO_SETTINGS_MODULE"] = "freppledb.settings"
            if os.path.exists(
                    os.path.join(os.environ["FREPPLE_HOME"], "python36.zip")):
                # For the py2exe executable
                os.environ["PYTHONPATH"] = (os.path.join(
                    os.environ["FREPPLE_HOME"],
                    "python%d%d.zip" %
                    (sys.version_info[0], sys.version_info[1]),
                ) + os.pathsep + os.path.normpath(os.environ["FREPPLE_APP"]))
            else:
                # Other executables
                os.environ["PYTHONPATH"] = os.path.normpath(
                    os.environ["FREPPLE_APP"])
            cmdline = ['"%s"' % i for i in options["file"]]
            cmdline.insert(0, "frepple")
            cmdline.append('"%s"' % os.path.join(
                settings.FREPPLE_APP, "freppledb", "execute", "loadxml.py"))
            proc = subprocess.run(" ".join(cmdline))
            if proc.returncode:
                raise Exception("Exit code of the batch run is %d" %
                                proc.returncode)

            # Task update
            task.status = "Done"
            task.finished = datetime.now()

        except Exception as e:
            if task:
                task.status = "Failed"
                task.message = "%s" % e
                task.finished = datetime.now()
            raise e

        finally:
            if task:
                task.processid = None
                task.save(using=database)
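
The loadxml command above joins its arguments into one shell string before calling subprocess.run. A variant that passes the argument list directly, sketched below under the same assumptions (a 'frepple' executable on the PATH and the loadxml.py helper shipped with the app), avoids the manual quoting of file names.

# Hedged sketch of an argument-list variant of the loadxml invocation above.
import os
import subprocess

def run_loadxml(frepple_app, files):
    loadxml = os.path.join(frepple_app, "freppledb", "execute", "loadxml.py")
    proc = subprocess.run(["frepple"] + list(files) + [loadxml])
    if proc.returncode:
        raise Exception("Exit code of the batch run is %d" % proc.returncode)
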
Example #43
  def handle(self, *args, **options):
    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all sql
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    # Pick up options
    if 'force' in options:
      force = options['force']
    else:
      force = False
    test = 'FREPPLE_TEST' in os.environ
    if 'user' in options and options['user']:
      try:
        user = User.objects.all().get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    # Initialize the task
    now = datetime.now()
    task = None
    if 'task' in options and options['task']:
      try:
        task = Task.objects.all().get(pk=options['task'])
      except:
        raise CommandError("Task identifier not found")
      if task.started or task.finished or task.status != "Waiting" or task.name != 'copy scenario':
        raise CommandError("Invalid task identifier")
      task.status = '0%'
      task.started = now
    else:
      task = Task(name='copy scenario', submitted=now, started=now, status='0%', user=user)
    task.save()

    # Synchronize the scenario table with the settings
    Scenario.syncWithSettings()

    # Validate the arguments
    destinationscenario = None
    try:
      if len(args) != 2:
        raise CommandError("Command takes exactly 2 arguments.")
      task.arguments = "%s %s" % (args[0], args[1])
      task.save()
      source = args[0]
      try:
        sourcescenario = Scenario.objects.get(pk=source)
      except:
        raise CommandError("No source database defined with name '%s'" % source)
      destination = args[1]
      try:
        destinationscenario = Scenario.objects.get(pk=destination)
      except:
        raise CommandError("No destination database defined with name '%s'" % destination)
      if source == destination:
        raise CommandError("Can't copy a schema on itself")
      if settings.DATABASES[source]['ENGINE'] != settings.DATABASES[destination]['ENGINE']:
        raise CommandError("Source and destination scenarios have a different engine")
      if sourcescenario.status != 'In use':
        raise CommandError("Source scenario is not in use")
      if destinationscenario.status != 'Free' and not force:
        raise CommandError("Destination scenario is not free")

      # Logging message - always logging in the default database
      destinationscenario.status = 'Busy'
      destinationscenario.save()

      # Copying the data
      if settings.DATABASES[source]['ENGINE'] == 'django.db.backends.postgresql_psycopg2':
        # Commenting out the next line is a little more secure, but requires you to create a .pgpass file.
        os.environ['PGPASSWORD'] = settings.DATABASES[source]['PASSWORD']
        ret = os.system("pg_dump -c -U%s -Fp %s%s%s | psql -U%s %s%s%s" % (
          settings.DATABASES[source]['USER'],
          settings.DATABASES[source]['HOST'] and ("-h %s " % settings.DATABASES[source]['HOST']) or '',
          settings.DATABASES[source]['PORT'] and ("-p %s " % settings.DATABASES[source]['PORT']) or '',
          test and settings.DATABASES[source]['TEST']['NAME'] or settings.DATABASES[source]['NAME'],
          settings.DATABASES[destination]['USER'],
          settings.DATABASES[destination]['HOST'] and ("-h %s " % settings.DATABASES[destination]['HOST']) or '',
          settings.DATABASES[destination]['PORT'] and ("-p %s " % settings.DATABASES[destination]['PORT']) or '',
          test and settings.DATABASES[destination]['TEST']['NAME'] or settings.DATABASES[destination]['NAME'],
          ))
        if ret:
          raise Exception('Exit code of the database copy command is %d' % ret)
      elif settings.DATABASES[source]['ENGINE'] == 'django.db.backends.sqlite3':
        # A plain copy of the database file
        if test:
          shutil.copy2(settings.DATABASES[source]['TEST']['NAME'], settings.DATABASES[destination]['TEST']['NAME'])
        else:
          shutil.copy2(settings.DATABASES[source]['NAME'], settings.DATABASES[destination]['NAME'])
      else:
        raise Exception('Copy command not supported for database engine %s' % settings.DATABASES[source]['ENGINE'])

      # Update the scenario table
      destinationscenario.status = 'In use'
      destinationscenario.lastrefresh = datetime.today()
      if 'description' in options:
        destinationscenario.description = options['description']
      else:
        destinationscenario.description = "Copied from scenario '%s'" % source
      destinationscenario.save()

      # Logging message
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      if destinationscenario and destinationscenario.status == 'Busy':
        destinationscenario.status = 'Free'
        destinationscenario.save()
      raise e

    finally:
      if task:
        task.save()
      settings.DEBUG = tmp_debug
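
Unlike the newer scenario-copy command shown earlier, this older variant also supports SQLite back-ends, where a scenario copy is just a file copy. The core of that branch, including the test-database switch, is sketched below with placeholder settings dicts shaped like settings.DATABASES entries.

# Sketch of the SQLite branch above: copy the database file itself, picking
# the test database file when running under FREPPLE_TEST.
import shutil

def copy_sqlite_scenario(source_db, destination_db, test=False):
    if test:
        src, dst = source_db['TEST']['NAME'], destination_db['TEST']['NAME']
    else:
        src, dst = source_db['NAME'], destination_db['NAME']
    shutil.copy2(src, dst)

# Example:
#   copy_sqlite_scenario({'NAME': '/var/frepple/scenario1.sqlite', 'TEST': {'NAME': '/tmp/t1.sqlite'}},
#                        {'NAME': '/var/frepple/scenario2.sqlite', 'TEST': {'NAME': '/tmp/t2.sqlite'}})
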
Example #44
File: views.py  Project: frePPLe/frePPLe
def wrapTask(request, action):
  # Allow only post
  if request.method != 'POST':
    raise Exception('Only post requests allowed')
  # Parse the posted parameters as arguments for an asynchronous task to add to the queue.    TODO MAKE MODULAR WITH SEPARATE TASK CLASS
  worker_database = request.database

  now = datetime.now()
  task = None
  args = request.POST or request.GET

  # A
  if action in ('frepple_run', 'runplan'):
    if not request.user.has_perm('auth.generate_plan'):
      raise Exception('Missing execution privileges')
    constraint = 0
    for value in args.getlist('constraint'):
      try:
        constraint += int(value)
      except:
        pass
    task = Task(name='runplan', submitted=now, status='Waiting', user=request.user)
    task.arguments = "--constraint=%s --plantype=%s" % (constraint, args.get('plantype', 1))
    env = []
    for value in args.getlist('env'):
      env.append(value)
    if env:
      task.arguments = "%s --env=%s" % (task.arguments, ','.join(env))
    task.save(using=request.database)
  # C
  elif action in ('frepple_flush', 'empty'):
    if not request.user.has_perm('auth.run_db'):
      raise Exception('Missing execution privileges')
    task = Task(name='empty', submitted=now, status='Waiting', user=request.user)
    models = ','.join(args.getlist('models'))
    if models:
      task.arguments = "--models=%s" % (models)
    task.save(using=request.database)
  # D
  elif action == 'loaddata':
    if not request.user.has_perm('auth.run_db'):
      raise Exception('Missing execution privileges')
    task = Task(name='loaddata', submitted=now, status='Waiting', user=request.user, arguments=args['fixture'])
    task.save(using=request.database)
    # Also run the workflow upon loading of manufacturing_demo or distribution_demo
    if (args['regenerateplan'] == 'true'):
      active_modules = 'supply'
      task = Task(name='runplan', submitted=now, status='Waiting', user=request.user)
      task.arguments = "--constraint=15 --plantype=1 --env=%s --background" % (active_modules,)
      task.save(using=request.database)
  # E
  elif action in ('frepple_copy', 'scenario_copy'):
    worker_database = DEFAULT_DB_ALIAS
    if 'copy' in args:
      if not request.user.has_perm('auth.copy_scenario'):
        raise Exception('Missing execution privileges')
      source = args.get('source', DEFAULT_DB_ALIAS)
      worker_database = source
      destination = args.getlist('destination')
      force = args.get('force', False)
      for sc in Scenario.objects.using(DEFAULT_DB_ALIAS):
        arguments = "%s %s" % (source, sc.name)
        if force:
          arguments += ' --force'
        if args.get(sc.name, 'off') == 'on' or sc.name in destination:
          task = Task(name='scenario_copy', submitted=now, status='Waiting', user=request.user, arguments=arguments)
          task.save(using=source)
    elif 'release' in args:
      # Note: release is immediate and synchronous.
      if not request.user.has_perm('auth.release_scenario'):
        raise Exception('Missing execution privileges')
      for sc in Scenario.objects.using(DEFAULT_DB_ALIAS):
        if args.get(sc.name, 'off') == 'on' and sc.status != 'Free':
          sc.status = 'Free'
          sc.lastrefresh = now
          sc.save(using=DEFAULT_DB_ALIAS)
          if request.database == sc.name:
            # Erasing the database that is currently selected.
            request.prefix = ''
    elif 'update' in args:
      # Note: update is immediate and synchronous.
      if not request.user.has_perm('auth.release_scenario'):
        raise Exception('Missing execution privileges')
      for sc in Scenario.objects.using(DEFAULT_DB_ALIAS):
        if args.get(sc.name, 'off') == 'on':
          sc.description = args.get('description', None)
          sc.save(using=DEFAULT_DB_ALIAS)
    else:
      raise Exception('Invalid scenario task')
  # G
  elif action in ('frepple_createbuckets', 'createbuckets'):
    if not request.user.has_perm('auth.run_db'):
      raise Exception('Missing execution privileges')
    task = Task(name='createbuckets', submitted=now, status='Waiting', user=request.user)
    arguments = []
    start = args.get('start', None)
    if start:
      arguments.append("--start=%s" % start)
    end = args.get('end', None)
    if end:
      arguments.append("--end=%s" % end)
    weekstart = args.get('weekstart', None)
    if weekstart:
      arguments.append("--weekstart=%s" % weekstart)
    format_day = args.get('format-day', None)
    if format_day:
      arguments.append('--format-day="%s"' % format_day)
    format_week = args.get('format-week', None)
    if format_week:
      arguments.append('--format-week="%s"' % format_week)
    format_month = args.get('format-month', None)
    if format_month:
      arguments.append('--format-month="%s"' % format_month)
    format_quarter = args.get('format-quarter', None)
    if format_quarter:
      arguments.append('--format-quarter="%s"' % format_quarter)
    format_year = args.get('format-year', None)
    if format_year:
      arguments.append('--format-year="%s"' % format_year)
    if arguments:
      task.arguments = " ".join(arguments)
    task.save(using=request.database)
  else:
    # Generic task wrapper

    # Find the command and verify we have permissions to run it
    command = None
    for commandname, appname in get_commands().items():
      if commandname == action:
        try:
          c = getattr(import_module('%s.management.commands.%s' % (appname, commandname)), 'Command')
          if c.index >= 0:
            if getattr(c, 'getHTML', None) and c.getHTML(request):
              # Command class has getHTML method
              command = c
              break
            else:
              for p in c.__bases__:
                # Parent command class has getHTML method
                if getattr(p, 'getHTML', None) and p.getHTML(request):
                  command = c
                  break
              if command:
                break
        except Exception:
          pass  # Silently ignore failures
    if not command:
      raise Exception("Invalid task name '%s'" % action)
    # Create a task
    arguments = []
    for arg, val in args.lists():
      if arg != 'csrfmiddlewaretoken':
        arguments.append('--%s=%s' % (arg, ','.join(val)))
    task = Task(name=action, submitted=now, status='Waiting', user=request.user)
    if arguments:
      task.arguments = " ".join(arguments)
    task.save(using=request.database)

  # Launch a worker process, making sure it inherits the right
  # environment variables from this parent
  os.environ['FREPPLE_CONFIGDIR'] = settings.FREPPLE_CONFIGDIR
  if task and not checkActive(worker_database):
    if os.path.isfile(os.path.join(settings.FREPPLE_APP, "frepplectl.py")):
      if "python" in sys.executable:
        # Development layout
        Popen([
          sys.executable,  # Python executable
          os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
          "runworker",
          "--database=%s" % worker_database
          ])
      else:
        # Deployment on Apache web server
        Popen([
          "python",
          os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
          "runworker",
          "--database=%s" % worker_database
          ], creationflags=0x08000000)
    elif sys.executable.find('freppleserver.exe') >= 0:
      # Py2exe executable
      Popen([
        sys.executable.replace('freppleserver.exe', 'frepplectl.exe'),  # frepplectl executable
        "runworker",
        "--database=%s" % worker_database
        ], creationflags=0x08000000)  # Do not create a console window
    else:
      # Linux standard installation
      Popen([
        "frepplectl",
        "runworker",
        "--database=%s" % worker_database
        ])
  return task
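A minimal sketch (not part of the project code shown here) of how a launcher view could delegate to the wrapTask() helper above. The URL wiring, the redirect target and the request.prefix attribute are assumptions taken from the surrounding examples.

from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect
from django.views.decorators.csrf import csrf_protect


@login_required
@csrf_protect
def LaunchTask(request, action):
  # Queue the task and send the user back to the execution screen
  try:
    wrapTask(request, action)
  except Exception as e:
    messages.add_message(
      request, messages.ERROR,
      "Failure launching action: %s" % e
      )
  return HttpResponseRedirect('%s/execute/' % request.prefix)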
Example #45
  def handle(self, *args, **options):

    # Pick up the options
    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'restore database':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='restore database', submitted=now, started=now, status='0%', user=user)
      task.arguments = args and args[0] or None
      task.save(using=database)

      # Validate options
      if not args:
        raise CommandError("No dump file specified")
      if not os.path.isfile(os.path.join(settings.FREPPLE_LOGDIR, args[0])):
        raise CommandError("Dump file not found")

      # Run the restore command
      # Commenting the next line is a little more secure, but requires you to create a .pgpass file.
      if settings.DATABASES[database]['PASSWORD']:
        os.environ['PGPASSWORD'] = settings.DATABASES[database]['PASSWORD']
      cmd = [ "psql", ]
      if settings.DATABASES[database]['USER']:
        cmd.append("--username=%s" % settings.DATABASES[database]['USER'])
      if settings.DATABASES[database]['HOST']:
        cmd.append("--host=%s" % settings.DATABASES[database]['HOST'])
      if settings.DATABASES[database]['PORT']:
        cmd.append("--port=%s " % settings.DATABASES[database]['PORT'])
      cmd.append(settings.DATABASES[database]['NAME'])
      cmd.append('<%s' % os.path.abspath(os.path.join(settings.FREPPLE_LOGDIR, args[0])))
      ret = subprocess.call(" ".join(cmd), shell=True)  # Shell needs to be True in order to interpret the < character
      if ret:
        raise Exception("Run of psql failed")

      # Task update
      # We need to recreate a new task record, since the previous one is lost during the restoration.
      task = Task(
        name='restore database', submitted=task.submitted, started=task.started,
        arguments=task.arguments, status='Done', finished=datetime.now(),
        user=task.user
        )

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      # Commit it all, even in case of exceptions
      if task:
        task.save(using=database)
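A minimal sketch, not taken from the example above, of the same psql restore done without shell=True: the dump file is fed to psql through stdin instead of relying on the shell's '<' redirection. The settings keys mirror the ones used in the example.

import os
import subprocess

from django.conf import settings


def run_psql_restore(database, dumpfile):
  # Pass the password through the environment, like the example does
  env = os.environ.copy()
  if settings.DATABASES[database]['PASSWORD']:
    env['PGPASSWORD'] = settings.DATABASES[database]['PASSWORD']
  cmd = ["psql"]
  if settings.DATABASES[database]['USER']:
    cmd.append("--username=%s" % settings.DATABASES[database]['USER'])
  if settings.DATABASES[database]['HOST']:
    cmd.append("--host=%s" % settings.DATABASES[database]['HOST'])
  if settings.DATABASES[database]['PORT']:
    cmd.append("--port=%s" % settings.DATABASES[database]['PORT'])
  cmd.append(settings.DATABASES[database]['NAME'])
  with open(dumpfile, "rb") as f:
    # No shell involved: psql reads the SQL dump from its standard input
    return subprocess.call(cmd, stdin=f, env=env)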
Example #46
  def handle(self, **options):
    # Pick up the options
    database = options['database']
    if database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'frepple_backup':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='frepple_backup', submitted=now, started=now, status='0%', user=user)

      # Choose the backup file name
      backupfile = now.strftime("database.%s.%%Y%%m%%d.%%H%%M%%S.dump" % database)
      task.message = 'Backup to file %s' % backupfile
      task.save(using=database)

      # Run the backup command
      # Commenting the next line is a little more secure, but requires you to
      # create a .pgpass file.
      os.environ['PGPASSWORD'] = settings.DATABASES[database]['PASSWORD']
      args = [
        "pg_dump",
        "-b", "-w",
        '--username=%s' % settings.DATABASES[database]['USER'],
        '--file=%s' % os.path.abspath(os.path.join(settings.FREPPLE_LOGDIR, backupfile))
        ]
      if settings.DATABASES[database]['HOST']:
        args.append("--host=%s" % settings.DATABASES[database]['HOST'])
      if settings.DATABASES[database]['PORT']:
        args.append("--port=%s " % settings.DATABASES[database]['PORT'])
      args.append(settings.DATABASES[database]['NAME'])
      ret = subprocess.call(args)
      if ret:
        raise Exception("Run of run pg_dump failed")

      # Task update
      task.status = '99%'
      task.save(using=database)

      # Delete backups older than a month
      pattern = re.compile("database.*.*.*.dump")
      for f in os.listdir(settings.FREPPLE_LOGDIR):
        if os.path.isfile(os.path.join(settings.FREPPLE_LOGDIR, f)):
          # Note this is NOT 100% correct on UNIX. st_ctime is not always the creation date...
          created = datetime.fromtimestamp(os.stat(os.path.join(settings.FREPPLE_LOGDIR, f)).st_ctime)
          if pattern.match(f) and (now - created).days > 31:
            try:
              os.remove(os.path.join(settings.FREPPLE_LOGDIR, f))
            except:
              pass

      # Task update
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      if task:
        task.save(using=database)
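For reference, a minimal sketch of driving a command like the one above from Python instead of the command line. The command name 'frepple_backup' is inferred from the task name used in the example and may differ per installation; the option names mirror the options the handler reads.

from django.core.management import call_command

call_command('frepple_backup', database='default', user='admin')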
Example #47
  def handle(self, **options):

    # Pick up the options
    if 'verbosity' in options:
      self.verbosity = int(options['verbosity'] or '1')
    else:
      self.verbosity = 1
    if 'user' in options:
      user = options['user']
    else:
      user = ''
    if 'database' in options:
      self.database = options['database'] or DEFAULT_DB_ALIAS
    else:
      self.database = DEFAULT_DB_ALIAS
    if self.database not in settings.DATABASES.keys():
      raise CommandError("No database settings known for '%s'" % self.database )

    # Pick up configuration parameters
    self.openbravo_user = Parameter.getValue("openbravo.user", self.database)
    # A password set in the djangosettings file takes precedence over the
    # "openbravo.password" parameter stored in the database
    if not settings.OPENBRAVO_PASSWORDS.get(self.database):
        self.openbravo_password = Parameter.getValue("openbravo.password", self.database)
    else:
        self.openbravo_password = settings.OPENBRAVO_PASSWORDS.get(self.database)
    self.openbravo_host = Parameter.getValue("openbravo.host", self.database)
    self.openbravo_organization = Parameter.getValue("openbravo.organization", self.database)
    if not self.openbravo_user:
      raise CommandError("Missing or invalid parameter openbravo.user")
    if not self.openbravo_password:
      raise CommandError("Missing or invalid parameter openbravo.password")
    if not self.openbravo_host:
      raise CommandError("Missing or invalid parameter openbravo.host")
    if not self.openbravo_organization:
      raise CommandError("Missing or invalid parameter openbravo.organization")

    # Make sure the debug flag is not set!
    # When it is set, the django database wrapper collects a list of all SQL
    # statements executed and their timings. This consumes plenty of memory
    # and cpu time.
    tmp_debug = settings.DEBUG
    settings.DEBUG = False

    now = datetime.now()
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(self.database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'Openbravo export':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='Openbravo export', submitted=now, started=now, status='0%', user=user)
      task.save(using=self.database)

      # Create a database connection to the frePPLe database
      cursor = connections[self.database].cursor()

      # Look up the id of the Openbravo user
      query = urllib.quote("name='%s'" % self.openbravo_user.encode('utf8'))
      conn = self.get_data("/openbravo/ws/dal/ADUser?where=%s&includeChildren=false" % query)[0]
      self.user_id = None
      for event, elem in conn:
        if event != 'end' or elem.tag != 'ADUser':
          continue
        self.user_id = elem.get('id')
      if not self.user_id:
        raise CommandError("Can't find user id in Openbravo")

      # Look up the id of the Openbravo organization id
      query = urllib.quote("name='%s'" % self.openbravo_organization.encode('utf8'))
      conn = self.get_data("/openbravo/ws/dal/Organization?where=%s&includeChildren=false" % query)[0]
      self.organization_id = None
      for event, elem in conn:
        if event != 'end' or elem.tag != 'Organization':
          continue
        self.organization_id = elem.get('id')
      if not self.organization_id:
        raise CommandError("Can't find organization id in Openbravo")

      # Upload all data
      self.export_procurement_order(cursor)
      self.export_work_order(cursor)
      self.export_sales_order(cursor)

      # Log success
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      if task:
        task.save(using=self.database)
      settings.DEBUG = tmp_debug
Example #48
def wrapTask(request, action):
    # Allow only post
    if request.method != 'POST':
        raise Exception('Only post requests allowed')

    # Parse the posted parameters as arguments for an asynchronous task to add to the queue.    TODO MAKE MODULAR WITH SEPARATE TASK CLASS
    worker_database = request.database

    now = datetime.now()
    task = None
    # A
    if action == 'frepple_run':
        if not request.user.has_perm('auth.generate_plan'):
            raise Exception('Missing execution privileges')
        constraint = 0
        for value in request.POST.getlist('constraint'):
            try:
                constraint += int(value)
            except:
                pass
        task = Task(name='generate plan',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        task.arguments = "--constraint=%s --plantype=%s" % (
            constraint, request.POST.get('plantype', 1))
        env = []
        for value in request.POST.getlist('env'):
            env.append(value)
        if env:
            task.arguments = "%s --env=%s" % (task.arguments, ','.join(env))
        request.session['env'] = env
        task.save(using=request.database)
        # Update the session object
        request.session['plantype'] = request.POST.get('plantype')
        request.session['constraint'] = constraint
    # C
    elif action == 'frepple_flush':
        if not request.user.has_perm('auth.run_db'):
            raise Exception('Missing execution privileges')
        task = Task(name='empty database',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        models = ','.join(request.POST.getlist('models'))
        if models:
            task.arguments = "--models=%s" % models
        task.save(using=request.database)
    # D
    elif action == 'loaddata':
        if not request.user.has_perm('auth.run_db'):
            raise Exception('Missing execution privileges')
        task = Task(name='load dataset',
                    submitted=now,
                    status='Waiting',
                    user=request.user,
                    arguments=request.POST['fixture'])
        task.save(using=request.database)
    # E
    elif action == 'frepple_copy':
        worker_database = DEFAULT_DB_ALIAS
        if 'copy' in request.POST:
            if not request.user.has_perm('auth.copy_scenario'):
                raise Exception('Missing execution privileges')
            source = request.POST.get('source', DEFAULT_DB_ALIAS)
            destination = request.POST.getlist('destination')
            force = request.POST.get('force', False)
            for sc in Scenario.objects.all():
                arguments = "%s %s" % (source, sc.name)
                if force:
                    arguments += ' --force'
                if request.POST.get(sc.name,
                                    'off') == 'on' or sc.name in destination:
                    task = Task(name='copy scenario',
                                submitted=now,
                                status='Waiting',
                                user=request.user,
                                arguments=arguments)
                    task.save()
        elif 'release' in request.POST:
            # Note: release is immediate and synchronous.
            if not request.user.has_perm('auth.release_scenario'):
                raise Exception('Missing execution privileges')
            for sc in Scenario.objects.all():
                if request.POST.get(sc.name,
                                    'off') == 'on' and sc.status != 'Free':
                    sc.status = 'Free'
                    sc.lastrefresh = now
                    sc.save()
                    if request.database == sc.name:
                        # Erasing the database that is currently selected.
                        request.prefix = ''
        elif 'update' in request.POST:
            # Note: update is immediate and synchronous.
            if not request.user.has_perm('auth.release_scenario'):
                raise Exception('Missing execution privileges')
            for sc in Scenario.objects.all():
                if request.POST.get(sc.name, 'off') == 'on':
                    sc.description = request.POST.get('description', None)
                    sc.save()
        else:
            raise Exception('Invalid scenario task')
    # F
    elif action == 'frepple_backup':
        if not request.user.has_perm('auth.run_db'):
            raise Exception('Missing execution privileges')
        task = Task(name='backup database',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        task.save(using=request.database)
    # G
    elif action == 'frepple_createbuckets':
        if not request.user.has_perm('auth.run_db'):
            raise Exception('Missing execution privileges')
        task = Task(name='generate buckets',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        arguments = []
        start = request.POST.get('start', None)
        if start:
            arguments.append("--start=%s" % start)
        end = request.POST.get('end', None)
        if end:
            arguments.append("--end=%s" % end)
        weekstart = request.POST.get('weekstart', None)
        if weekstart:
            arguments.append("--weekstart=%s" % weekstart)
        if arguments:
            task.arguments = " ".join(arguments)
        task.save(using=request.database)
    # J
    elif action == 'odoo_import' and 'freppledb.odoo' in settings.INSTALLED_APPS:
        task = Task(name='Odoo import',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        task.save(using=request.database)
    # M
    elif action == 'frepple_importfromfolder':
        if not request.user.has_perm('auth.run_db'):
            raise Exception('Missing execution privileges')
        task = Task(name='import from folder',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        task.save(using=request.database)
    # N
    elif action == 'frepple_exporttofolder':
        if not request.user.has_perm('auth.run_db'):
            raise Exception('Missing execution privileges')
        task = Task(name='export to folder',
                    submitted=now,
                    status='Waiting',
                    user=request.user)
        task.save(using=request.database)
    else:
        # Task not recognized
        raise Exception("Invalid task name '%s'" % action)

    # Launch a worker process, making sure it inherits the right
    # environment variables from this parent
    os.environ['FREPPLE_CONFIGDIR'] = settings.FREPPLE_CONFIGDIR
    if task and not checkActive(worker_database):
        if os.path.isfile(os.path.join(settings.FREPPLE_APP, "frepplectl.py")):
            if "python" in sys.executable:
                # Development layout
                Popen([
                    sys.executable,  # Python executable
                    os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
                    "frepple_runworker",
                    "--database=%s" % worker_database
                ])
            else:
                # Deployment on Apache web server
                Popen([
                    "python",
                    os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
                    "frepple_runworker",
                    "--database=%s" % worker_database
                ],
                      creationflags=0x08000000)
        elif sys.executable.find('freppleserver.exe') >= 0:
            # Py2exe executable
            Popen(
                [
                    sys.executable.replace(
                        'freppleserver.exe',
                        'frepplectl.exe'),  # frepplectl executable
                    "frepple_runworker",
                    "--database=%s" % worker_database
                ],
                creationflags=0x08000000)  # Do not create a console window
        else:
            # Linux standard installation
            Popen([
                "frepplectl", "frepple_runworker",
                "--database=%s" % worker_database
            ])
    return task
Example #49
  def handle(self, *args, **options):
    # Pick up the options
    if 'database' in options:
      self.database = options['database'] or DEFAULT_DB_ALIAS
    else:
      self.database = DEFAULT_DB_ALIAS
    if self.database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % self.database )
    if 'user' in options and options['user']:
      try:
        self.user = User.objects.all().using(self.database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      self.user = None

    now = datetime.now()

    task = None
    self.logfile = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(self.database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'load from folder':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='load from folder', submitted=now, started=now, status='0%', user=self.user)
      task.arguments = ' '.join(['"%s"' % i for i in args])
      task.save(using=self.database)

      # Choose the right self.delimiter and language
      self.delimiter = get_format('DECIMAL_SEPARATOR', settings.LANGUAGE_CODE, True) == ',' and ';' or ','
      translation.activate(settings.LANGUAGE_CODE)

      # Execute
      cnt = 0
      errors = 0
      if os.path.isdir(settings.DATABASES[self.database]['FILEUPLOADFOLDER']):

        # Open the logfile
        self.logfile = open(os.path.join(settings.DATABASES[self.database]['FILEUPLOADFOLDER'], 'loadfromfolder.log'), "a")
        print("%s Started upload from folder\n" % datetime.now(), file=self.logfile)

        all_models = [ (ct.model_class(), ct.pk) for ct in ContentType.objects.all() if ct.model_class() ]
        models = []
        for ifile in os.listdir(settings.DATABASES[self.database]['FILEUPLOADFOLDER']):
          if not ifile.endswith('.csv'):
            continue
          filename0 = ifile.split('.')[0]

          model = None
          contenttype_id = None
          for m, ct in all_models:
            if filename0.lower() in (m._meta.model_name.lower(), m._meta.verbose_name.lower(), m._meta.verbose_name_plural.lower()):
              model = m
              contenttype_id = ct
              print("%s Matched a model to file: %s" % (datetime.now(),ifile), file=self.logfile)
              break

          if not model or model in EXCLUDE_FROM_BULK_OPERATIONS:
            print("%s Ignoring data in file: %s" % (datetime.now(),ifile), file=self.logfile)
          elif self.user and not self.user.has_perm('%s.%s' % (model._meta.app_label, get_permission_codename('add', model._meta))):
            # Check permissions
            print("%s You don't have permissions to add: %s" % (datetime.now(),ifile), file=self.logfile)
          else:
            deps = set([model])
            GridReport.dependent_models(model, deps)

            models.append( (ifile, model, contenttype_id, deps) )

        # Sort the list of models, based on dependencies between models
        cnt = len(models)
        ok = False
        while not ok:
          ok = True
          for i in range(cnt):
            for j in range(i + 1, cnt):
              if models[i][1] != models[j][1] and models[i][1] in models[j][3]:
                # Models i and j are not ordered according to their dependencies. The list ordering is
                # thus not ok yet. We move this element to the end of the list.
                models.append(models.pop(i))
                ok = False
        task.status = '10%'
        task.save(using=self.database)

        i = 0
        errors = 0
        for ifile, model, contenttype_id, dependencies in models:
          i += 1
          print("%s Started processing data in file: %s" % (datetime.now(),ifile), file=self.logfile)
          filetoparse=os.path.join(os.path.abspath(settings.DATABASES[self.database]['FILEUPLOADFOLDER']), ifile)
          errors += self.parseCSVloadfromfolder(model, filetoparse)
          print("%s Finished processing data in file: %s\n" % (datetime.now(),ifile), file=self.logfile)
          task.status = str(int(10+i/cnt*80))+'%'
          task.save(using=self.database)

      # Task update
      if errors:
        task.status = 'Failed'
        task.message = "Uploaded %s data files with %s errors" % (cnt, errors)
      else:
        task.status = 'Done'
        task.message = "Uploaded %s data file" % cnt
      task.finished = datetime.now()

    except Exception as e:
      print("%s Failed" % datetime.now(), file=self.logfile)
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      if task:
        task.status = '100%'
        task.save(using=self.database)
      if self.logfile:
        print('%s End of upload from folder\n' % datetime.now(), file=self.logfile)
        self.logfile.close()
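A standalone sketch of the reordering loop used in the example above: items are (name, related) pairs, and an earlier item is pushed to the back of the list as long as its name appears in a later item's related set. The data is purely illustrative, and the loop assumes there are no cyclic dependencies (a cycle would never terminate).

def order_items(items):
  cnt = len(items)
  ok = False
  while not ok:
    ok = True
    for i in range(cnt):
      for j in range(i + 1, cnt):
        if items[i][0] != items[j][0] and items[i][0] in items[j][1]:
          # Move item i behind the later item whose related set contains it
          items.append(items.pop(i))
          ok = False
  return items


print([name for name, _ in order_items([
  ('operation', {'item'}),
  ('item', set()),
  ('demand', {'item', 'operation'}),
  ])])
# prints: ['demand', 'operation', 'item']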
Example #50
    def handle(self, **options):

        if options["user"]:
            try:
                user = User.objects.all().get(username=options["user"])
            except Exception:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            user = None

        # Synchronize the scenario table with the settings
        Scenario.syncWithSettings()

        now = datetime.now()
        task = None
        if "task" in options and options["task"]:
            try:
                task = (
                    Task.objects.all().using(DEFAULT_DB_ALIAS).get(pk=options["task"])
                )
            except Exception:
                raise CommandError("Task identifier not found")
            if (
                task.started
                or task.finished
                or task.status != "Waiting"
                or task.name != "scenario_release"
            ):
                raise CommandError("Invalid task identifier")
            task.status = "0%"
            task.started = now
        else:
            task = Task(
                name="scenario_release",
                submitted=now,
                started=now,
                status="0%",
                user=user,
            )
        task.processid = os.getpid()
        task.save(using=DEFAULT_DB_ALIAS)

        # Validate the arguments
        destination = options["destination"]
        destinationscenario = None
        try:
            task.arguments = "%s" % (destination,)
            task.save(using=DEFAULT_DB_ALIAS)
            try:
                destinationscenario = Scenario.objects.using(DEFAULT_DB_ALIAS).get(
                    pk=destination
                )
            except Exception:
                raise CommandError(
                    "No destination database defined with name '%s'" % destination
                )
            if destinationscenario.status != "In use":
                raise CommandError("Scenario to release is not in use")

            if destination == DEFAULT_DB_ALIAS:
                raise CommandError("Production scenario cannot be released")

            # Update the scenario table, set it free in the production database
            destinationscenario.status = "Free"
            destinationscenario.lastrefresh = datetime.today()
            destinationscenario.save(using=DEFAULT_DB_ALIAS)

            # Killing webservice
            if "freppledb.webservice" in settings.INSTALLED_APPS:
                management.call_command(
                    "stopwebservice", force=True, database=destination
                )

            # Logging message
            task.processid = None
            task.status = "Done"
            task.finished = datetime.now()

            # Update the task in the destination database
            task.message = "Scenario %s released" % (destination,)
            task.save(using=DEFAULT_DB_ALIAS)

        except Exception as e:
            if task:
                task.status = "Failed"
                task.message = "%s" % e
                task.finished = datetime.now()
            if destinationscenario and destinationscenario.status == "Busy":
                if destination == DEFAULT_DB_ALIAS:
                    destinationscenario.status = "In use"
                else:
                    destinationscenario.status = "Free"
                destinationscenario.save(using=DEFAULT_DB_ALIAS)
            raise e

        finally:
            if task:
                task.processid = None
                task.save(using=DEFAULT_DB_ALIAS)
Example #51
  def handle(self, **options):
    # Pick up the options
    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if not database in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try: user = User.objects.all().using(database).get(username=options['user'])
      except: raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    transaction.enter_transaction_management(managed=False, using=database)
    transaction.managed(False, using=database)
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try: task = Task.objects.all().using(database).get(pk=options['task'])
        except: raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'generate plan':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='generate plan', submitted=now, started=now, status='0%', user=user)

      # Validate options
      if 'constraint' in options:
        constraint = int(options['constraint'])
        if constraint < 0 or constraint > 15:
          raise ValueError("Invalid constraint: %s" % options['constraint'])
      else: constraint = 15
      if 'plantype' in options:
        plantype = int(options['plantype'])
        if plantype < 1 or plantype > 2:
          raise ValueError("Invalid plan type: %s" % options['plantype'])
      else: plantype = 1

      # Log task
      task.arguments = "--constraint=%d --plantype=%d" % (constraint, plantype)
      task.save(using=database)
      transaction.commit(using=database)

      # Locate commands.py
      cmd = None
      for app in settings.INSTALLED_APPS:
        mod = import_module(app)
        if os.path.exists(os.path.join(os.path.dirname(mod.__file__),'commands.py')):
          cmd = os.path.join(os.path.dirname(mod.__file__),'commands.py')
          break
      if not cmd: raise Exception("Can't locate commands.py")

      # Execute
      os.environ['FREPPLE_PLANTYPE'] = str(plantype)
      os.environ['FREPPLE_CONSTRAINT'] = str(constraint)
      os.environ['FREPPLE_TASKID'] = str(task.id)
      os.environ['FREPPLE_DATABASE'] = database
      os.environ['PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ['PATH'] + os.pathsep + settings.FREPPLE_APP
      if os.path.isfile(os.path.join(settings.FREPPLE_HOME,'libfrepple.so')):
        os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
      if 'DJANGO_SETTINGS_MODULE' not in os.environ:
        os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
      if os.path.exists(os.path.join(settings.FREPPLE_HOME,'python27.zip')):
        # For the py2exe executable
        os.environ['PYTHONPATH'] = os.path.join(settings.FREPPLE_HOME,'python27.zip') + os.pathsep + os.path.normpath(settings.FREPPLE_APP)
      else:
        # Other executables
        os.environ['PYTHONPATH'] = os.path.normpath(settings.FREPPLE_APP)
      ret = os.system('frepple "%s"' % cmd.replace('\\','\\\\'))
      if ret != 0 and ret != 2:
        # Return code 0 is a successful run
        # Return code is 2 is a run cancelled by a user. That's shown in the status field.
        raise Exception('Failed with exit code %d' % ret)

      # Task update
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      if task: task.save(using=database)
      try: transaction.commit(using=database)
      except: pass
      transaction.leave_transaction_management(using=database)
Example #52
  def handle(self, **options):
    # Pick up the options
    now = datetime.now()
    self.database = options['database']
    if self.database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % self.database )
    if options['user']:
      try:
        self.user = User.objects.all().using(self.database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      self.user = None
    timestamp = now.strftime("%Y%m%d%H%M%S")
    if self.database == DEFAULT_DB_ALIAS:
      logfile = 'importworkbook-%s.log' % timestamp
    else:
      logfile = 'importworkbook_%s-%s.log' % (self.database, timestamp)

    task = None
    try:
      setattr(_thread_locals, 'database', self.database)
      # Initialize the task
      if options['task']:
        try:
          task = Task.objects.all().using(self.database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'frepple_importworkbook':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='frepple_importworkbook', submitted=now, started=now, status='0%', user=self.user)
      task.arguments = ' '.join(options['file'])
      task.save(using=self.database)

      all_models = [ (ct.model_class(), ct.pk) for ct in ContentType.objects.all() if ct.model_class() ]
      try:
        with transaction.atomic(using=self.database):
          # Find all models in the workbook
          for file in options['file']:
            wb = load_workbook(filename=file, read_only=True, data_only=True)
            models = []
            for ws_name in wb.get_sheet_names():
              # Find the model
              model = None
              contenttype_id = None
              for m, ct in all_models:
                # Try with translated model names
                if ws_name.lower() in (m._meta.model_name.lower(), m._meta.verbose_name.lower(), m._meta.verbose_name_plural.lower()):
                  model = m
                  contenttype_id = ct
                  break
                # Try with English model names
                with translation.override('en'):
                  if ws_name.lower() in (m._meta.model_name.lower(), m._meta.verbose_name.lower(), m._meta.verbose_name_plural.lower()):
                    model = m
                    contenttype_id = ct
                    break
              if not model or model in EXCLUDE_FROM_BULK_OPERATIONS:
                print(force_text(_("Ignoring data in worksheet: %s") % ws_name))
                # yield '<div class="alert alert-warning">' + force_text(_("Ignoring data in worksheet: %s") % ws_name) + '</div>'
              elif self.user and not self.user.has_perm('%s.%s' % (model._meta.app_label, get_permission_codename('add', model._meta))):
                # Check permissions
                print(force_text(_("You don't permissions to add: %s") % ws_name))
                # yield '<div class="alert alert-danger">' + force_text(_("You don't permissions to add: %s") % ws_name) + '</div>'
              else:
                deps = set([model])
                GridReport.dependent_models(model, deps)
                models.append( (ws_name, model, contenttype_id, deps) )

            # Sort the list of models, based on dependencies between models
            models = GridReport.sort_models(models)
            print('197----', models)
            # Process all rows in each worksheet
            for ws_name, model, contenttype_id, dependencies in models:
              print(force_text(_("Processing data in worksheet: %s") % ws_name))
              # yield '<strong>' + force_text(_("Processing data in worksheet: %s") % ws_name) + '</strong></br>'
              # yield ('<div class="table-responsive">'
                     # '<table class="table table-condensed" style="white-space: nowrap;"><tbody>')
              numerrors = 0
              numwarnings = 0
              firsterror = True
              ws = wb.get_sheet_by_name(name=ws_name)
              for error in parseExcelWorksheet(model, ws, user=self.user, database=self.database, ping=True):
                if error[0] == DEBUG:
                  # Yield some result so we can detect disconnect clients and interrupt the upload
                  # yield ' '
                  continue
                if firsterror and error[0] in (ERROR, WARNING):
                  print('%s %s %s %s %s%s%s' % (
                    capfirst(_("worksheet")), capfirst(_("row")),
                    capfirst(_("field")), capfirst(_("value")),
                    capfirst(_("error")), " / ", capfirst(_("warning"))
                    ))
                  # yield '<tr><th class="sr-only">%s</th><th>%s</th><th>%s</th><th>%s</th><th>%s%s%s</th></tr>' % (
                  #   capfirst(_("worksheet")), capfirst(_("row")),
                  #   capfirst(_("field")), capfirst(_("value")),
                  #   capfirst(_("error")), " / ", capfirst(_("warning"))
                  #   )
                  firsterror = False
                if error[0] == ERROR:
                  print('%s %s %s %s %s: %s' % (
                    ws_name,
                    error[1] if error[1] else '',
                    error[2] if error[2] else '',
                    error[3] if error[3] else '',
                    capfirst(_('error')),
                    error[4]
                    ))
                  # yield '<tr><td class="sr-only">%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s: %s</td></tr>' % (
                  #   ws_name,
                  #   error[1] if error[1] else '',
                  #   error[2] if error[2] else '',
                  #   error[3] if error[3] else '',
                  #   capfirst(_('error')),
                  #   error[4]
                  #   )
                  numerrors += 1
                elif error[0] == WARNING:
                  print('%s %s %s %s %s: %s' % (
                    ws_name,
                    error[1] if error[1] else '',
                    error[2] if error[2] else '',
                    error[3] if error[3] else '',
                    capfirst(_('warning')),
                    error[4]
                    ))
                  # yield '<tr><td class="sr-only">%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s: %s</td></tr>' % (
                  #   ws_name,
                  #   error[1] if error[1] else '',
                  #   error[2] if error[2] else '',
                  #   error[3] if error[3] else '',
                  #   capfirst(_('warning')),
                  #   error[4]
                  #   )
                  numwarnings += 1
                else:
                  print('%s %s %s %s %s %s' % (
                    "danger" if numerrors > 0 else 'success',
                    ws_name,
                    error[1] if error[1] else '',
                    error[2] if error[2] else '',
                    error[3] if error[3] else '',
                    error[4]
                    ))
              #     yield '<tr class=%s><td class="sr-only">%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td></tr>' % (
              #       "danger" if numerrors > 0 else 'success',
              #       ws_name,
              #       error[1] if error[1] else '',
              #       error[2] if error[2] else '',
              #       error[3] if error[3] else '',
              #       error[4]
              #       )
              # yield '</tbody></table></div>'
            print('%s' % _("Done"))
            # yield '<div><strong>%s</strong></div>' % _("Done")
      except GeneratorExit:
        logger.warning('Connection Aborted')
    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      setattr(_thread_locals, 'database', None)
      if task:
        task.save(using=self.database)

    return _("Done")
Example #53
  def handle(self, **options):

    # Pick up the options
    database = options['database']
    if database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    task = None
    try:
      # Initialize the task
      if options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name not in ('frepple_restore', 'restore'):
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='restore', submitted=now, started=now, status='0%', user=user)
      task.arguments = options['dump']
      task.processid = os.getpid()
      task.save(using=database)

      # Validate options
      dumpfile = os.path.abspath(os.path.join(settings.FREPPLE_LOGDIR, options['dump']))
      if not os.path.isfile(dumpfile):
        raise CommandError("Dump file not found")

      # Run the restore command
      # Commenting the next line is a little more secure, but requires you to create a .pgpass file.
      if settings.DATABASES[database]['PASSWORD']:
        os.environ['PGPASSWORD'] = settings.DATABASES[database]['PASSWORD']
      cmd = [ "pg_restore", "-n", "public", "-Fc", "-c", "--if-exists" ]
      if settings.DATABASES[database]['USER']:
        cmd.append("--username=%s" % settings.DATABASES[database]['USER'])
      if settings.DATABASES[database]['HOST']:
        cmd.append("--host=%s" % settings.DATABASES[database]['HOST'])
      if settings.DATABASES[database]['PORT']:
        cmd.append("--port=%s " % settings.DATABASES[database]['PORT'])
      cmd.append("-d")
      cmd.append(settings.DATABASES[database]['NAME'])
      cmd.append('<%s' % dumpfile)
      # Shell needs to be True in order to interpret the < character
      with subprocess.Popen(" ".join(cmd), shell=True) as p:
        try:
          task.processid = p.pid
          task.save(using=database)
          p.wait()
        except:
          p.kill()
          p.wait()
          raise Exception("Database restoration failed")

      # Task update
      # We need to recreate a new task record, since the previous one is lost during the restoration.
      task = Task(
        name='restore', submitted=task.submitted, started=task.started,
        arguments=task.arguments, status='Done', finished=datetime.now(),
        user=task.user
        )

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      # Commit it all, even in case of exceptions
      if task:
        task.processid = None
        task.save(using=database)
Example #54
  def handle(self, **options):
    # Pick up the options
    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if not database in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try: user = User.objects.all().using(database).get(username=options['user'])
      except: raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    transaction.enter_transaction_management(using=database)
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try: task = Task.objects.all().using(database).get(pk=options['task'])
        except: raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'generate plan':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='generate plan', submitted=now, started=now, status='0%', user=user)

      # Validate options
      if 'constraint' in options:
        constraint = int(options['constraint'])
        if constraint < 0 or constraint > 15:
          raise ValueError("Invalid constraint: %s" % options['constraint'])
      else: constraint = 15
      if 'plantype' in options:
        plantype = int(options['plantype'])
        if plantype < 1 or plantype > 2:
          raise ValueError("Invalid plan type: %s" % options['plantype'])
      else: plantype = 1
      if options['env']:
        task.arguments = "--constraint=%d --plantype=%d --env=%s" % (constraint, plantype, options['env'])
        for i in options['env'].split(','):
          j = i.split('=')
          if len(j) == 1:
            os.environ[j[0]] = '1'
          else:
            os.environ[j[0]] = j[1]
      else:
        task.arguments = "--constraint=%d --plantype=%d" % (constraint, plantype)

      # Log task
      task.save(using=database)
      transaction.commit(using=database)

      # Locate commands.py
      cmd = None
      for app in settings.INSTALLED_APPS:
        mod = import_module(app)
        if os.path.exists(os.path.join(os.path.dirname(mod.__file__),'commands.py')):
          cmd = os.path.join(os.path.dirname(mod.__file__),'commands.py')
          break
      if not cmd: raise Exception("Can't locate commands.py")

      # Execute
      os.environ['FREPPLE_PLANTYPE'] = str(plantype)
      os.environ['FREPPLE_CONSTRAINT'] = str(constraint)
      os.environ['FREPPLE_TASKID'] = str(task.id)
      os.environ['FREPPLE_DATABASE'] = database
      os.environ['PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ['PATH'] + os.pathsep + settings.FREPPLE_APP
      if os.path.isfile(os.path.join(settings.FREPPLE_HOME,'libfrepple.so')):
        os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
      if 'DJANGO_SETTINGS_MODULE' not in os.environ:
        os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
      if os.path.exists(os.path.join(settings.FREPPLE_HOME,'python27.zip')):
        # For the py2exe executable
        os.environ['PYTHONPATH'] = os.path.join(settings.FREPPLE_HOME,'python27.zip') + os.pathsep + os.path.normpath(settings.FREPPLE_APP)
      else:
        # Other executables
        os.environ['PYTHONPATH'] = os.path.normpath(settings.FREPPLE_APP)
      ret = os.system('frepple "%s"' % cmd.replace('\\','\\\\'))
      if ret != 0 and ret != 2:
        # Return code 0 is a successful run
        # Return code is 2 is a run cancelled by a user. That's shown in the status field.
        raise Exception('Failed with exit code %d' % ret)

      # Task update
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      if task: task.save(using=database)
      try: transaction.commit(using=database)
      except: pass
      transaction.leave_transaction_management(using=database)
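A standalone sketch of the --env handling in the example above: each comma-separated entry becomes an environment variable, defaulting to "1" when no value is given. The option values are purely illustrative.

import os


def apply_env_option(env_option):
  for i in env_option.split(','):
    j = i.split('=')
    os.environ[j[0]] = j[1] if len(j) > 1 else '1'


apply_env_option("supply,MAXLOOP=3")  # sets supply=1 and MAXLOOP=3 in os.environ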
Example #55
  def handle(self, **options):

    # Pick up the options
    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'backup database':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='backup database', submitted=now, started=now, status='0%', user=user)

      # Choose the backup file name
      backupfile = now.strftime("database.%s.%%Y%%m%%d.%%H%%M%%S.dump" % database)
      task.message = 'Backup to file %s' % backupfile
      task.save(using=database)

      # Run the backup command
      # Commenting the next line is a little more secure, but requires you to
      # create a .pgpass file.
      os.environ['PGPASSWORD'] = settings.DATABASES[database]['PASSWORD']
      args = [
        "pg_dump",
        "-b", "-w",
        '--username=%s' % settings.DATABASES[database]['USER'],
        '--file=%s' % os.path.abspath(os.path.join(settings.FREPPLE_LOGDIR, backupfile))
        ]
      if settings.DATABASES[database]['HOST']:
        args.append("--host=%s" % settings.DATABASES[database]['HOST'])
      if settings.DATABASES[database]['PORT']:
        args.append("--port=%s " % settings.DATABASES[database]['PORT'])
      args.append(settings.DATABASES[database]['NAME'])
      ret = subprocess.call(args)
      if ret:
        raise Exception("Run of run pg_dump failed")

      # Task update
      task.status = '99%'
      task.save(using=database)

      # Delete backups older than a month
      pattern = re.compile("database.*.*.*.dump")
      for f in os.listdir(settings.FREPPLE_LOGDIR):
        if os.path.isfile(os.path.join(settings.FREPPLE_LOGDIR, f)):
          # Note this is NOT 100% correct on UNIX. st_ctime is not always the creation date...
          created = datetime.fromtimestamp(os.stat(os.path.join(settings.FREPPLE_LOGDIR, f)).st_ctime)
          if pattern.match(f) and (now - created).days > 31:
            try:
              os.remove(os.path.join(settings.FREPPLE_LOGDIR, f))
            except:
              pass

      # Task update
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      if task:
        task.save(using=database)
Example #56
    def handle(self, **options):
        # Pick up the options
        now = datetime.now()

        if "database" in options:
            database = options["database"] or DEFAULT_DB_ALIAS
        else:
            database = DEFAULT_DB_ALIAS
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               database)
        if "user" in options and options["user"]:
            try:
                user = User.objects.all().using(database).get(
                    username=options["user"])
            except Exception:
                raise CommandError("User '%s' not found" % options["user"])
        else:
            user = None

        timestamp = now.strftime("%Y%m%d%H%M%S")
        if database == DEFAULT_DB_ALIAS:
            logfile = "frepple-%s.log" % timestamp
        else:
            logfile = "frepple_%s-%s.log" % (database, timestamp)

        task = None
        try:
            # Initialize the task
            setattr(_thread_locals, "database", database)
            if "task" in options and options["task"]:
                try:
                    task = Task.objects.all().using(database).get(
                        pk=options["task"])
                except Exception:
                    raise CommandError("Task identifier not found")
                if (task.started or task.finished or task.status != "Waiting"
                        or task.name != "runplan"):
                    raise CommandError("Invalid task identifier")
                task.status = "0%"
                task.started = now
                task.logfile = logfile
            else:
                task = Task(
                    name="runplan",
                    submitted=now,
                    started=now,
                    status="0%",
                    user=user,
                    logfile=logfile,
                )

            # Validate options
            if "constraint" in options:
                constraint = int(options["constraint"])
                if constraint < 0 or constraint > 15:
                    raise ValueError("Invalid constraint: %s" %
                                     options["constraint"])
            else:
                constraint = 15
            if "plantype" in options:
                plantype = int(options["plantype"])
            else:
                plantype = 1

            # Reset environment variables
            # TODO avoid having to delete the environment variables. Use options directly?
            for label in freppledb.common.commands.PlanTaskRegistry.getLabels(
            ):
                if "env" in options:
                    # Options specified
                    if label[0] in os.environ:
                        del os.environ[label[0]]
                else:
                    # No options specified - default to activate them all
                    os.environ[label[0]] = "1"

            # Set environment variables
            if options["env"]:
                task.arguments = "--constraint=%d --plantype=%d --env=%s" % (
                    constraint,
                    plantype,
                    options["env"],
                )
                for i in options["env"].split(","):
                    j = i.split("=")
                    if len(j) == 1:
                        os.environ[j[0]] = "1"
                    else:
                        os.environ[j[0]] = j[1]
            else:
                task.arguments = "--constraint=%d --plantype=%d" % (
                    constraint,
                    plantype,
                )
            if options["background"]:
                task.arguments += " --background"

            # Log task
            # Unlike the other tasks, the frepple engine itself will write the processid
            task.save(using=database)

            # Locate commands.py
            cmd = freppledb.common.commands.__file__

            def setlimits():
                import resource

                if settings.MAXMEMORYSIZE:
                    resource.setrlimit(
                        resource.RLIMIT_AS,
                        (
                            settings.MAXMEMORYSIZE * 1024 * 1024,
                            (settings.MAXMEMORYSIZE + 10) * 1024 * 1024,
                        ),
                    )
                if settings.MAXCPUTIME:
                    resource.setrlimit(
                        resource.RLIMIT_CPU,
                        (settings.MAXCPUTIME, settings.MAXCPUTIME + 5),
                    )
                # Limiting the file size is a bit tricky as this limit not only applies to the log
                # file, but also to temp files during the export
                # if settings.MAXTOTALLOGFILESIZE:
                #  resource.setrlimit(
                #    resource.RLIMIT_FSIZE,
                #   (settings.MAXTOTALLOGFILESIZE * 1024 * 1024, (settings.MAXTOTALLOGFILESIZE + 1) * 1024 * 1024)
                #   )

            # Prepare environment
            os.environ["FREPPLE_PLANTYPE"] = str(plantype)
            os.environ["FREPPLE_CONSTRAINT"] = str(constraint)
            os.environ["FREPPLE_TASKID"] = str(task.id)
            os.environ["FREPPLE_DATABASE"] = database
            os.environ["FREPPLE_LOGFILE"] = logfile
            os.environ["FREPPLE_PROCESSNAME"] = settings.DATABASES[database][
                "NAME"].replace("demo", "")
            os.environ["PATH"] = (settings.FREPPLE_HOME + os.pathsep +
                                  os.environ["PATH"] + os.pathsep +
                                  settings.FREPPLE_APP)
            if os.path.isfile(
                    os.path.join(settings.FREPPLE_HOME, "libfrepple.so")):
                os.environ["LD_LIBRARY_PATH"] = settings.FREPPLE_HOME
            if "DJANGO_SETTINGS_MODULE" not in os.environ:
                os.environ["DJANGO_SETTINGS_MODULE"] = "freppledb.settings"
            os.environ["PYTHONPATH"] = os.path.normpath(settings.FREPPLE_APP)
            libdir = os.path.join(os.path.normpath(settings.FREPPLE_HOME),
                                  "lib")
            if os.path.isdir(libdir):
                # Folders used by the Windows version
                os.environ["PYTHONPATH"] += os.pathsep + libdir
                if os.path.isfile(os.path.join(libdir, "library.zip")):
                    os.environ["PYTHONPATH"] += os.pathsep + os.path.join(
                        libdir, "library.zip")

            if options["background"]:
                # Execute as background process on Windows
                if os.name == "nt":
                    startupinfo = subprocess.STARTUPINFO()
                    startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
                    subprocess.Popen(
                        ["frepple", cmd],
                        creationflags=0x08000000,
                        startupinfo=startupinfo,
                    )
                else:
                    # Execute as background process on Linux
                    subprocess.Popen(["frepple", cmd], preexec_fn=setlimits)
            else:
                if os.name == "nt":
                    # Execute in foreground on Windows
                    startupinfo = subprocess.STARTUPINFO()
                    startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
                    ret = subprocess.call(["frepple", cmd],
                                          startupinfo=startupinfo)
                else:
                    # Execute in foreground on Linux
                    ret = subprocess.call(["frepple", cmd],
                                          preexec_fn=setlimits)
                if ret != 0 and ret != 2:
                    # Return code 0 is a successful run
                    # Return code 2 is a run cancelled by the user; that is shown in the status field.
                    raise Exception("Failed with exit code %d" % ret)

            # Reread the task from the database and update it
            if not options["background"]:
                task = Task.objects.all().using(database).get(pk=task.id)
                task.processid = None
                task.status = "Done"
                task.finished = datetime.now()
                task.save(using=database)

        except Exception as e:
            if task:
                task = Task.objects.all().using(database).get(pk=task.id)
                task.status = "Failed"
                task.message = "%s" % e
                task.finished = datetime.now()
                task.processid = None
                task.save(using=database)
            raise e

        finally:
            setattr(_thread_locals, "database", None)
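The setlimits hook above only works because preexec_fn runs in the forked child just before exec, so the rlimits constrain the frepple engine without touching the parent process. A minimal standalone sketch of that pattern, assuming a POSIX system and illustrative hard-coded limits in place of settings.MAXMEMORYSIZE and settings.MAXCPUTIME:

import resource
import subprocess

def limit_child(max_memory_mb=2048, max_cpu_seconds=600):
    # Runs in the forked child before exec: cap address space and CPU time.
    resource.setrlimit(
        resource.RLIMIT_AS,
        (max_memory_mb * 1024 * 1024, (max_memory_mb + 10) * 1024 * 1024),
    )
    resource.setrlimit(
        resource.RLIMIT_CPU,
        (max_cpu_seconds, max_cpu_seconds + 5),
    )

# The limits apply only to the spawned process, not to the caller.
proc = subprocess.Popen(["sleep", "1"], preexec_fn=limit_child)
proc.wait()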
Example #57
0
  def handle(self, **options):
    # Pick up the options
    if 'database' in options:
      database = options['database'] or DEFAULT_DB_ALIAS
    else:
      database = DEFAULT_DB_ALIAS
    if database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % database )
    if 'user' in options and options['user']:
      try:
        user = User.objects.all().using(database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      user = None

    now = datetime.now()
    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'generate plan':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='generate plan', submitted=now, started=now, status='0%', user=user)

      # Validate options
      if 'constraint' in options:
        constraint = int(options['constraint'])
        if constraint < 0 or constraint > 15:
          raise ValueError("Invalid constraint: %s" % options['constraint'])
      else:
        constraint = 15
      if 'plantype' in options:
        plantype = int(options['plantype'])
        if plantype < 1 or plantype > 2:
          raise ValueError("Invalid plan type: %s" % options['plantype'])
      else:
        plantype = 1
      if options['env']:
        task.arguments = "--constraint=%d --plantype=%d --env=%s" % (constraint, plantype, options['env'])
        for i in options['env'].split(','):
          j = i.split('=')
          if len(j) == 1:
            os.environ[j[0]] = '1'
          else:
            os.environ[j[0]] = j[1]
      else:
        task.arguments = "--constraint=%d --plantype=%d" % (constraint, plantype)
      if options['background']:
        task.arguments += " --background"

      # Log task
      task.save(using=database)

      # Locate commands.py
      cmd = None
      for app in settings.INSTALLED_APPS:
        mod = import_module(app)
        if os.path.exists(os.path.join(os.path.dirname(mod.__file__), 'commands.py')):
          cmd = os.path.join(os.path.dirname(mod.__file__), 'commands.py')
          break
      if not cmd:
        raise Exception("Can't locate commands.py")

      # Prepare environment
      os.environ['FREPPLE_PLANTYPE'] = str(plantype)
      os.environ['FREPPLE_CONSTRAINT'] = str(constraint)
      os.environ['FREPPLE_TASKID'] = str(task.id)
      os.environ['FREPPLE_DATABASE'] = database
      os.environ['PATH'] = settings.FREPPLE_HOME + os.pathsep + os.environ['PATH'] + os.pathsep + settings.FREPPLE_APP
      if os.path.isfile(os.path.join(settings.FREPPLE_HOME, 'libfrepple.so')):
        os.environ['LD_LIBRARY_PATH'] = settings.FREPPLE_HOME
      if 'DJANGO_SETTINGS_MODULE' not in os.environ:
        os.environ['DJANGO_SETTINGS_MODULE'] = 'freppledb.settings'
      if os.path.exists(os.path.join(settings.FREPPLE_HOME, 'python27.zip')):
        # For the py2exe executable
        os.environ['PYTHONPATH'] = os.path.join(
          settings.FREPPLE_HOME,
          'python%d%d.zip' %(sys.version_info[0], sys.version_info[1])
          ) + os.pathsep + os.path.normpath(settings.FREPPLE_APP)
      else:
        # Other executables
        os.environ['PYTHONPATH'] = os.path.normpath(settings.FREPPLE_APP)

      if options['background']:
        # Execute as background process on Windows
        if os.name == 'nt':
          subprocess.Popen(['frepple', cmd], creationflags=0x08000000)
        else:
          # Execute as background process on Linux
          subprocess.Popen(['frepple', cmd]).pid
      else:
        # Execute in foreground
        ret = subprocess.call(['frepple', cmd])
        if ret != 0 and ret != 2:
          # Return code 0 is a successful run
          # Return code 2 is a run cancelled by the user; that is shown in the status field.
          raise Exception('Failed with exit code %d' % ret)

        # Task update
        task.status = 'Done'
        task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      if task:
        task.save(using=database)
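Both plan commands validate the constraint option as a value between 0 and 15, i.e. a four-bit mask that the engine reads back from FREPPLE_CONSTRAINT (the execution screen builds it by summing the checked constraint values, as the wrapTask example further below shows). A minimal sketch of decoding such a mask; the flag names are hypothetical placeholders, since the meaning of each bit is defined by the planning engine, not by this snippet:

def decode_constraints(constraint):
  # Hypothetical labels for illustration only; bit assignments belong to the engine.
  labels = ["flag_1", "flag_2", "flag_4", "flag_8"]
  if constraint < 0 or constraint > 15:
    raise ValueError("Invalid constraint: %s" % constraint)
  return [label for i, label in enumerate(labels) if constraint & (1 << i)]

print(decode_constraints(15))  # all four flags enabled
print(decode_constraints(5))   # ['flag_1', 'flag_4']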
Example #58
0
    def handle(self, **options):

        # Pick up the options
        database = options['database']
        if database not in settings.DATABASES:
            raise CommandError("No database settings known for '%s'" %
                               database)
        if options['user']:
            try:
                user = User.objects.all().using(database).get(
                    username=options['user'])
            except:
                raise CommandError("User '%s' not found" % options['user'])
        else:
            user = None

        now = datetime.now()
        task = None
        try:
            # Initialize the task
            if options['task']:
                try:
                    task = Task.objects.all().using(database).get(
                        pk=options['task'])
                except:
                    raise CommandError("Task identifier not found")
                if task.started or task.finished or task.status != "Waiting" or task.name not in (
                        'frepple_restore', 'restore'):
                    raise CommandError("Invalid task identifier")
                task.status = '0%'
                task.started = now
            else:
                task = Task(name='restore',
                            submitted=now,
                            started=now,
                            status='0%',
                            user=user)
            task.arguments = options['dump']
            task.save(using=database)

            # Validate options
            if not os.path.isfile(
                    os.path.join(settings.FREPPLE_LOGDIR, options['dump'])):
                raise CommandError("Dump file not found")

            # Run the restore command
            # Commenting the next line is a little more secure, but requires you to create a .pgpass file.
            if settings.DATABASES[database]['PASSWORD']:
                os.environ['PGPASSWORD'] = settings.DATABASES[database][
                    'PASSWORD']
            cmd = [
                "psql",
            ]
            if settings.DATABASES[database]['USER']:
                cmd.append("--username=%s" %
                           settings.DATABASES[database]['USER'])
            if settings.DATABASES[database]['HOST']:
                cmd.append("--host=%s" % settings.DATABASES[database]['HOST'])
            if settings.DATABASES[database]['PORT']:
                cmd.append("--port=%s " % settings.DATABASES[database]['PORT'])
            cmd.append(settings.DATABASES[database]['NAME'])
            cmd.append('<%s' % os.path.abspath(
                os.path.join(settings.FREPPLE_LOGDIR, options['dump'])))
            # Shell needs to be True in order to interpret the < character,
            # and with shell=True the command must be passed as a single string.
            ret = subprocess.call(" ".join(cmd), shell=True)
            if ret:
                raise Exception("Run of psql failed")

            # Task update
            # We need to recreate a new task record, since the previous one is lost during the restoration.
            task = Task(name='restore',
                        submitted=task.submitted,
                        started=task.started,
                        arguments=task.arguments,
                        status='Done',
                        finished=datetime.now(),
                        user=task.user)

        except Exception as e:
            if task:
                task.status = 'Failed'
                task.message = '%s' % e
                task.finished = datetime.now()
            raise e

        finally:
            # Commit it all, even in case of exceptions
            if task:
                task.save(using=database)
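The restore above relies on shell=True purely so the < redirection is interpreted by a shell. An alternative, shown here only as a minimal sketch with hypothetical connection parameters, is to open the dump file in Python and hand it to psql on stdin, so no shell is needed at all; authentication via PGPASSWORD or a .pgpass file works the same way:

import subprocess

def run_restore(dumpfile, dbname, user=None, host=None, port=None):
    # Feed the SQL dump to psql on stdin instead of using a shell redirection.
    cmd = ["psql"]
    if user:
        cmd.append("--username=%s" % user)
    if host:
        cmd.append("--host=%s" % host)
    if port:
        cmd.append("--port=%s" % port)
    cmd.append(dbname)
    with open(dumpfile, "rb") as fh:
        return subprocess.call(cmd, stdin=fh)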
Example #59
0
def wrapTask(request, action):
  # Allow only post
  if request.method != 'POST':
    raise Exception('Only post requests allowed')

  # Check user permissions
  if not request.user.has_perm('execute'):
    raise Exception('Missing execution privileges')

  # Parse the posted parameters as arguments for an asynchronous task to add to the queue.  TODO: MAKE MODULAR WITH SEPARATE TASK CLASS
  worker_database = request.database

  now = datetime.now()
  task = None
  # A: generate a plan
  if action == 'frepple_run':
    if not request.user.has_perm('execute.generate_plan'):
      raise Exception('Missing execution privileges')
    constraint = 0
    for value in request.POST.getlist('constraint'):
      try:
        constraint += int(value)
      except:
        pass
    task = Task(name='generate plan', submitted=now, status='Waiting', user=request.user)
    task.arguments = "--constraint=%s --plantype=%s" % (constraint, request.POST.get('plantype'))
    env = []
    if request.POST.get('odoo_read', None) == '1':
      env.append("odoo_read")
      request.session['odoo_read'] = True
    else:
      request.session['odoo_read'] = False
    if request.POST.get('odoo_write', None) == '1':
      env.append("odoo_write")
      request.session['odoo_write'] = True
    else:
      request.session['odoo_write'] = False
    if env:
      task.arguments = "%s --env=%s" % (task.arguments, ','.join(env))
    task.save(using=request.database)
    # Update the session object
    request.session['plantype'] = request.POST.get('plantype')
    request.session['constraint'] = constraint
  # B: generate a test model
  elif action == 'frepple_createmodel':
    task = Task(name='generate model', submitted=now, status='Waiting', user=request.user)
    task.arguments = "--cluster=%s --demand=%s --forecast_per_item=%s --level=%s --resource=%s " \
      "--resource_size=%s --components=%s --components_per=%s --deliver_lt=%s --procure_lt=%s" % (
        request.POST['clusters'], request.POST['demands'], request.POST['fcst'], request.POST['levels'],
        request.POST['rsrc_number'], request.POST['rsrc_size'], request.POST['components'],
        request.POST['components_per'], request.POST['deliver_lt'], request.POST['procure_lt']
        )
    task.save(using=request.database)
  # C: empty the database
  elif action == 'frepple_flush':
    task = Task(name='empty database', submitted=now, status='Waiting', user=request.user)
    if not request.POST.get('all'):
      task.arguments = "--models=%s" % ','.join(request.POST.getlist('entities'))
    task.save(using=request.database)
  # D: load a dataset fixture
  elif action == 'loaddata':
    task = Task(name='load dataset', submitted=now, status='Waiting', user=request.user, arguments=request.POST['datafile'])
    task.save(using=request.database)
  # E: scenario management (copy / release / update)
  elif action == 'frepple_copy':
    worker_database = DEFAULT_DB_ALIAS
    if 'copy' in request.POST:
      if not request.user.has_perm('execute.copy_scenario'):
        raise Exception('Missing execution privileges')
      source = request.POST.get('source', DEFAULT_DB_ALIAS)
      for sc in Scenario.objects.all():
        if request.POST.get(sc.name, 'off') == 'on' and sc.status == 'Free':
          task = Task(name='copy scenario', submitted=now, status='Waiting', user=request.user, arguments="%s %s" % (source, sc.name))
          task.save()
    elif 'release' in request.POST:
      # Note: release is immediate and synchronous.
      if not request.user.has_perm('execute.release_scenario'):
        raise Exception('Missing execution privileges')
      for sc in Scenario.objects.all():
        if request.POST.get(sc.name, 'off') == 'on' and sc.status != 'Free':
          sc.status = 'Free'
          sc.lastrefresh = now
          sc.save()
          if request.database == sc.name:
            # Erasing the database that is currently selected.
            request.prefix = ''
    elif 'update' in request.POST:
      # Note: update is immediate and synchronous.
      for sc in Scenario.objects.all():
        if request.POST.get(sc.name, 'off') == 'on':
          sc.description = request.POST.get('description', None)
          sc.save()
    else:
      raise Exception('Invalid scenario task')
  # F: back up the database
  elif action == 'frepple_backup':
    task = Task(name='backup database', submitted=now, status='Waiting', user=request.user)
    task.save(using=request.database)
  # G: generate time buckets
  elif action == 'frepple_createbuckets':
    task = Task(name='generate buckets', submitted=now, status='Waiting', user=request.user)
    task.arguments = "--start=%s --end=%s --weekstart=%s" % (
      request.POST['start'], request.POST['end'], request.POST['weekstart']
      )
    task.save(using=request.database)
  # H: import from Openbravo
  elif action == 'openbravo_import' and 'freppledb.openbravo' in settings.INSTALLED_APPS:
    task = Task(name='Openbravo import', submitted=now, status='Waiting', user=request.user)
    task.arguments = "--delta=%s" % request.POST['delta']
    task.save(using=request.database)
  # I: export to Openbravo
  elif action == 'openbravo_export' and 'freppledb.openbravo' in settings.INSTALLED_APPS:
    task = Task(name='Openbravo export', submitted=now, status='Waiting', user=request.user)
    task.save(using=request.database)
  else:
    # Task not recognized
    raise Exception('Invalid launching task')

  # Launch a worker process
  if task and not checkActive(worker_database):
    if os.path.isfile(os.path.join(settings.FREPPLE_APP, "frepplectl.py")):
      if "python" in sys.executable:
        # Development layout
        Popen([
          sys.executable,  # Python executable
          os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
          "frepple_runworker",
          "--database=%s" % worker_database
          ])
      else:
        # Deployment on Apache web server
        Popen([
          "python",
          os.path.join(settings.FREPPLE_APP, "frepplectl.py"),
          "frepple_runworker",
          "--database=%s" % worker_database
          ], creationflags=0x08000000)
    elif sys.executable.find('freppleserver.exe') >= 0:
      # Py2exe executable
      Popen([
        sys.executable.replace('freppleserver.exe', 'frepplectl.exe'),  # frepplectl executable
        "frepple_runworker",
        "--database=%s" % worker_database
        ], creationflags=0x08000000)  # Do not create a console window
    else:
      # Linux standard installation
      Popen([
        "frepplectl",
        "frepple_runworker",
        "--database=%s" % worker_database
        ])
  return task
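wrapTask is meant to be called from a view that receives the POST of the execution screen and then sends the user back to it. A minimal sketch of such a caller, assuming standard Django messages and redirect plumbing; the view name and URL are illustrative, not the project's actual routes:

from django.contrib import messages
from django.http import HttpResponseRedirect

def launch(request, action):
  # Queue the requested task (or run it synchronously for scenario actions)
  # and return to the execution screen.
  try:
    wrapTask(request, action)
  except Exception as e:
    messages.add_message(request, messages.ERROR, "Failure launching action: %s" % e)
  return HttpResponseRedirect("%s/execute/" % request.prefix)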
  def handle(self, *args, **options):
    # Pick up the options
    if 'database' in options:
      self.database = options['database'] or DEFAULT_DB_ALIAS
    else:
      self.database = DEFAULT_DB_ALIAS
    if self.database not in settings.DATABASES:
      raise CommandError("No database settings known for '%s'" % self.database )
    if 'user' in options and options['user']:
      try:
        self.user = User.objects.all().using(self.database).get(username=options['user'])
      except:
        raise CommandError("User '%s' not found" % options['user'] )
    else:
      self.user = None

    now = datetime.now()

    task = None
    try:
      # Initialize the task
      if 'task' in options and options['task']:
        try:
          task = Task.objects.all().using(self.database).get(pk=options['task'])
        except:
          raise CommandError("Task identifier not found")
        if task.started or task.finished or task.status != "Waiting" or task.name != 'load from folder':
          raise CommandError("Invalid task identifier")
        task.status = '0%'
        task.started = now
      else:
        task = Task(name='load from folder', submitted=now, started=now, status='0%', user=self.user)
      task.arguments = ' '.join(['"%s"' % i for i in args])
      task.save(using=self.database)
      
      # Choose the right CSV delimiter and activate the configured language
      self.delimiter = ';' if get_format('DECIMAL_SEPARATOR', settings.LANGUAGE_CODE, True) == ',' else ','
      translation.activate(settings.LANGUAGE_CODE)
      
      # Execute        
      filestoupload = list()
      if os.path.isdir(settings.DATABASES[self.database]['FILEUPLOADFOLDER']):
        thisfolder = settings.DATABASES[self.database]['FILEUPLOADFOLDER']
        for fileindir in os.listdir(settings.DATABASES[self.database]['FILEUPLOADFOLDER']):
          if fileindir.endswith('.csv'):
            filestoupload.append(fileindir)
            #filestoupload.append([file,strftime("%Y-%m-%d %H:%M:%S",localtime(os.stat(os.path.join(thisfolder, file)).st_mtime)),sizeof_fmt(os.stat(os.path.join(thisfolder, file)).st_size, 'B')])
        
        all_models = [ (ct.model_class(), ct.pk) for ct in ContentType.objects.all() if ct.model_class() ]
        models = []
        for ifile in filestoupload:
          
          filename0 = ifile.split('.')[0]
          
          model = None
          contenttype_id = None
          for m, ct in all_models:
            if filename0.lower() in (m._meta.model_name.lower(), m._meta.verbose_name.lower(), m._meta.verbose_name_plural.lower()):
              model = m
              contenttype_id = ct
              break
            
          if not model or model in EXCLUDE_FROM_BULK_OPERATIONS:
            print("Ignoring data in file: %s" % ifile)
          elif self.user is not None and not self.user.has_perm('%s.%s' % (model._meta.app_label, get_permission_codename('add', model._meta))):
            # Check permissions
            print("You don't have permission to add: %s" % ifile)
          else:
            deps = set([model])
            GridReport.dependent_models(model, deps)
            models.append( (ifile, model, contenttype_id, deps) )

        # Sort the list of models, based on dependencies between models
        cnt = len(models)
        ok = False
        while not ok:
          ok = True
          for i in range(cnt):
            for j in range(i + 1, cnt):
              if models[i][1] in models[j][3]:
                # Model i depends on a model that comes later in the list. The list
                # ordering is thus not ok yet. We move this element to the end of the list.
                models.append(models.pop(i))
                ok = False

        for ifile, model, contenttype_id, dependencies in models:
          
          print("Processing data in file: %s" % ifile)
          rownum = 0
          has_pk_field = False
          headers = []
          uploadform = None
          changed = 0
          added = 0
          numerrors = 0
          
          # Will the permissions have to be checked table by table?
          permname = get_permission_codename('add', model._meta)
          if not self.user == None and not self.user.has_perm('%s.%s' % (model._meta.app_label, permname)):
            print('Permission denied')
            return
          

          filetoparse=os.path.join(os.path.abspath(thisfolder), ifile)
          self.parseCSVloadfromfolder(model, filetoparse)
            
      # Task update
      task.status = 'Done'
      task.finished = datetime.now()

    except Exception as e:
      if task:
        task.status = 'Failed'
        task.message = '%s' % e
        task.finished = datetime.now()
      raise e

    finally:
      if task:
        task.save(using=self.database)
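The reordering loop in the last handler is in effect a dependency-driven sort: whenever an entry earlier in the list turns out to depend on a later one, it is pushed to the back and the scan restarts. A minimal standalone sketch of the same idea on plain (name, required-before) tuples, assuming an acyclic dependency graph:

def sort_by_dependencies(entries):
  # entries: list of (name, deps) where deps is the set of names that must
  # appear earlier in the result.
  result = list(entries)
  cnt = len(result)
  ok = False
  while not ok:
    ok = True
    for i in range(cnt):
      for j in range(i + 1, cnt):
        if result[j][0] in result[i][1]:
          # Entry i needs entry j first: move it to the back and rescan.
          result.append(result.pop(i))
          ok = False
          break
      if not ok:
        break
  return result

# 'operation' requires 'item' and 'location' to be loaded first,
# so both end up before it in the returned list.
print(sort_by_dependencies([
  ("operation", {"item", "location"}),
  ("item", set()),
  ("location", set()),
]))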