Пример #1
0
 def __init__(self, transientAttrs = set(), toPublicAttrs = set(), **kw):
   """Set up the streamer, recording which attributes are transient."""
   Logger.__init__(self, kw)
   # '_readVersion' is always treated as transient, whatever the caller asks
   self.transientAttrs = {'_readVersion'} | set(transientAttrs)
   self.toPublicAttrs = set(toPublicAttrs)
   from Gaugi import checkForUnusedVars
   checkForUnusedVars( kw, self._logger.warning )
Пример #2
0
    def __init__(self, generator, etbins, etabins, x_bin_size, y_bin_size,
                 ymin, ymax, false_alarm_limit=0.5, level=LoggingLevel.INFO,
                 xmin_percentage=1, xmax_percentage=99, plot_stage='Internal',
                 palette=kBlackBody, xmin=None, xmax=None):
        """Store the generator, binning and drawing options for later use."""
        # init base class
        Logger.__init__(self, level=level)
        # data source and binning definitions
        self.__generator = generator
        self.__etbins = etbins
        self.__etabins = etabins
        self.__x_bin_size = x_bin_size
        self.__y_bin_size = y_bin_size
        # y-axis range is fixed; x range may be given or derived from percentiles
        self.__ymin = ymin
        self.__ymax = ymax
        self.__xmin = xmin
        self.__xmax = xmax
        self.__xmin_percentage = xmin_percentage
        self.__xmax_percentage = xmax_percentage
        # quality threshold and cosmetics
        self.__false_alarm_limit = false_alarm_limit
        self.__plot_stage = plot_stage
        self.__palette = palette
Пример #3
0
    def __init__(self, db, args=None):
        """Keep the database handle and, when *args* is given, register the
        'pilot' command group (with its 'run' subcommand) on it.

        args: an argparse subparsers object to attach this CLI to, or None.
        """

        Logger.__init__(self)
        self.__db = db
        if args:

            # options for 'pilot run': which node this process represents
            run_parser = argparse.ArgumentParser(
                description='Run pilot command lines.', add_help=False)

            run_parser.add_argument(
                '-n',
                '--node',
                action='store',
                dest='node',
                required=False,
                default=socket.gethostname(),
                help="The node name registered into the database.")
            run_parser.add_argument(
                '-m',
                '--master',
                action='store_true',
                dest='master',
                required=False,
                help="This is a master branch. One node must be a master.")

            # wire up: pilot run [options]
            parent = argparse.ArgumentParser(description='', add_help=False)
            subparser = parent.add_subparsers(dest='option')
            subparser.add_parser('run', parents=[run_parser])
            args.add_parser('pilot', parents=[parent])
Пример #4
0
    def __init__(self, outputFile, **kw):
        """Create (or reopen, in restore mode) the ROOT file backing this store.

        outputFile: target path; a '.root' extension is appended when missing.
        kw: 'restoreStoreGate' (bool) reopens an existing file read-only;
            'filterDirs' optionally restricts which directories are restored.
        Raises ValueError in restore mode when the file does not exist.
        """
        Logger.__init__(self, **kw)
        if not outputFile.endswith('.root'):
            outputFile += '.root'
        from Gaugi.utilities import retrieve_kw
        # Use this property to rebuild the storegate from a root file
        self._restoreStoreGate = retrieve_kw(kw, 'restoreStoreGate', False)
        filterDirs = retrieve_kw(kw, 'filterDirs', None)
        # Create TFile object to hold everything
        from ROOT import TFile
        from Gaugi import expandPath
        import os  # single import serves both exists() and abspath() below
        outputFile = expandPath(outputFile)
        if self._restoreStoreGate:
            if not os.path.exists(outputFile):
                raise ValueError("File '%s' does not exist" % outputFile)
            self._file = TFile(outputFile, "read")
        else:
            self._file = TFile(outputFile, "recreate")

        self._currentDir = ""
        self._objects = dict()
        self._dirs = list()
        self._outputFile = os.path.abspath(outputFile)

        if self._restoreStoreGate:
            # rebuild the in-memory directory/object registry from the file
            retrievedObjs = self.__restore(self._file, filterDirs=filterDirs)
            for name, obj in retrievedObjs:
                self._dirs.append(name)
                self._objects[name] = obj
Пример #5
0
 def __init__(self, cls, id, queue_size=1):
     """Wrap *cls* in a worker process with a bounded result queue."""
     Logger.__init__(self)
     Process.__init__(self)
     self._cls = cls
     self._id = id
     # bounded queue: the producer blocks once queue_size items are pending
     self._queue = Queue(queue_size)
     self._is_alive_event = Event()
Пример #6
0
  def __init__(self , pattern_generator, crossval, **kw ):
    """Configure the tuning job: training knobs, model list, output file.

    Fixes: use isinstance() for the config-type test and avoid shadowing
    the builtin ``id`` when enumerating models.
    """
    Logger.__init__(self)

    self.__pattern_generator = pattern_generator
    self.crossval = crossval

    # training configuration (all optional, consumed from kw)
    self.optimizer      = retrieve_kw( kw, 'optimizer'    , 'adam'                )
    self.loss           = retrieve_kw( kw, 'loss'         , 'binary_crossentropy' )
    self.epochs         = retrieve_kw( kw, 'epochs'       , 1000                  )
    self.batch_size     = retrieve_kw( kw, 'batch_size'   , 1024                  )
    self.callbacks      = retrieve_kw( kw, 'callbacks'    , []                    )
    self.metrics        = retrieve_kw( kw, 'metrics'      , []                    )
    self.sorts          = retrieve_kw( kw, 'sorts'        , range(1)              )
    self.inits          = retrieve_kw( kw, 'inits'        , 1                     )
    job_auto_config     = retrieve_kw( kw, 'job'          , None                  )
    self.__verbose      = retrieve_kw( kw, 'verbose'      , True                  )
    self.__class_weight = retrieve_kw( kw, 'class_weight' , False                 )
    self.__save_history = retrieve_kw( kw, 'save_history' , True                  )
    self.decorators     = retrieve_kw( kw, 'decorators'   , []                    )

    # read the job configuration from file (path or pre-loaded object)
    if job_auto_config:
      if isinstance(job_auto_config, str):
        MSG_INFO( self, 'Reading job configuration from: %s', job_auto_config )
        from saphyra.core.readers import JobReader
        job = JobReader().load( job_auto_config )
      else:
        job = job_auto_config
      # retrieve sort/init lists from file
      self.sorts = job.getSorts()
      self.inits = job.getInits()
      self.__models, self.__id_models = job.getModels()
      self.__jobId = job.id()

    # an explicit model list overrides whatever the job file provided
    models = retrieve_kw( kw, 'models', None )
    if models:
      self.__models = models
      self.__id_models = list(range(len(models)))
      self.__jobId = 0

    self.__outputfile = retrieve_kw( kw, 'outputFile' , None )
    if self.__outputfile:
      from saphyra.core.readers.versions import TunedData_v1
      self.__tunedData = TunedData_v1()

    checkForUnusedVars(kw)

    from saphyra import Context
    self.__context = Context()
    self.__index_from_cv = None
Пример #7
0
 def __init__(self, t):
     """Hold the event tree plus per-event container/decoration registries.

     Fix: ``self._tree`` was assigned NotSet and then immediately
     overwritten with *t*; the dead store is removed.
     """
     Logger.__init__(self)
     import collections
     # ordered so containers iterate in registration order
     self._containers = collections.OrderedDict()
     self._decorations = dict()
     self._current_entry = NotSet
     self._tree = t
Пример #8
0
    def __init__(self, etbins, etabins, fill_colors=fill_colors, line_colors=line_colors):
        """Store the binning and colour palettes; histograms start empty."""
        Logger.__init__(self)
        self.__hist = {}
        self.__etbins = etbins
        self.__etabins = etabins
        # defaults come from the module-level palettes of the same names
        self.__these_fill_colors = fill_colors
        self.__these_line_colors = line_colors
Пример #9
0
    def __init__(self, db, args=None):
        """Keep the database handle and, when *args* is given, register the
        'castor' command group (registry/unregistry/list) on it.

        args: an argparse subparsers object to attach this CLI to, or None.
        """

        Logger.__init__(self)
        self.__db = db
        if args:
            # Upload dataset using the dataset CLI
            registry_parser = argparse.ArgumentParser(
                description='Dataset registry command lines.', add_help=False)
            registry_parser.add_argument(
                '-d',
                '--dataset',
                action='store',
                dest='datasetname',
                required=True,
                help=
                "The dataset name used to registry into the database. (e.g: user.jodafons...)"
            )
            registry_parser.add_argument('-p',
                                         '--path',
                                         action='store',
                                         dest='path',
                                         required=True,
                                         help="The path to the dataset")

            # Delete dataset using the dataset CLI
            unregistry_parser = argparse.ArgumentParser(
                description='Dataset unregistry command lines.',
                add_help=False)
            unregistry_parser.add_argument(
                '-d',
                '--dataset',
                action='store',
                dest='datasetname',
                required=True,
                help="The dataset name to be removed")

            # List datasets for a user
            list_parser = argparse.ArgumentParser(
                description='Dataset List command lines.', add_help=False)
            list_parser.add_argument(
                '-u',
                '--user',
                action='store',
                dest='username',
                required=False,
                default=config['username'],
                help="List all datasets for a selected user.")

            parent = argparse.ArgumentParser(description='', add_help=False)
            subparser = parent.add_subparsers(dest='option')

            # register subcommands under 'castor'
            subparser.add_parser('registry', parents=[registry_parser])
            subparser.add_parser('unregistry', parents=[unregistry_parser])
            subparser.add_parser('list', parents=[list_parser])
            args.add_parser('castor', parents=[parent])
Пример #10
0
 def __init__(self, nthreads, **kw):
     """Configure the merger: thread count, files per job, key filter."""
     Logger.__init__(self, **kw)
     import re
     self._nthreads = nthreads
     self._nFilesPerJob = 20
     # keys that must be copied, never merged, across input files
     self._skip_these_keys = [
         "features", "etBins", "etaBins", "etBinIdx", "etaBinIdx"
     ]
     # extracts the et/eta bin indices from names like '...et0.eta1...'
     self._pat = re.compile(
         r'.+(?P<binID>et(?P<etBinIdx>\d+).eta(?P<etaBinIdx>\d+))\..+$')
Пример #11
0
 def __init__(self, command, njobs, maxJobs, output):
     """Prepare a bounded pool that runs *command* once per job slot."""
     Logger.__init__(self)
     import random
     import time
     self.process_pipe = []
     self.output_to_merge = []
     self._command = command
     self._output = output
     self._maxJobs = maxJobs
     self._jobList = list(range(njobs))
     # seed with wall-clock time so base ids differ between launches
     random.seed(time.time())
     self._base_id = random.randrange(100000)
Пример #12
0
    def __init__(self, db, args=None):
        """Keep the database handle and, when *args* is given, register the
        'user' command group (create/delete/list/init) on it.

        args: an argparse subparsers object to attach this CLI to, or None.
        """

        Logger.__init__(self)
        self.__db = db
        if args:

            # Create a new user
            create_parser = argparse.ArgumentParser(
                description='User create command lines.', add_help=False)
            create_parser.add_argument('-n',
                                       '--name',
                                       action='store',
                                       dest='name',
                                       required=True,
                                       help="The name of the user.")
            create_parser.add_argument('-e',
                                       '--email',
                                       action='store',
                                       dest='email',
                                       required=True,
                                       help="The user email.")

            # Remove an existing user
            delete_parser = argparse.ArgumentParser(
                description='User remove command lines.', add_help=False)
            delete_parser.add_argument('-n',
                                       '--name',
                                       action='store',
                                       dest='name',
                                       required=True,
                                       help="The dataset name to be removed")

            # List users (optionally a single one)
            list_parser = argparse.ArgumentParser(
                description='List all users command lines.', add_help=False)
            list_parser.add_argument('-u',
                                     '--user',
                                     action='store',
                                     dest='name',
                                     required=False,
                                     help="List all attributes for this user")

            init_parser = argparse.ArgumentParser(
                description='Initialize the database.', add_help=False)

            parent = argparse.ArgumentParser(description='', add_help=False)
            subparser = parent.add_subparsers(dest='option')

            # register subcommands under 'user'
            subparser.add_parser('create', parents=[create_parser])
            subparser.add_parser('delete', parents=[delete_parser])
            subparser.add_parser('list', parents=[list_parser])
            subparser.add_parser('init', parents=[init_parser])
            args.add_parser('user', parents=[parent])
Пример #13
0
    def __init__(self, fList):
        """Expand *fList* (CSV string or list) into the final file list.

        Fix: expandFolders now receives the csv-parsed list; previously it
        was called on the raw argument, silently discarding the
        csvStr2List result (compare the sibling reader that parses first,
        then expands).
        """
        Logger.__init__(self)
        from Gaugi import csvStr2List
        from Gaugi import expandFolders
        self.fList = csvStr2List(fList)
        self.fList = expandFolders(self.fList)
        self.process_pipe = []
        self.output_stack = []
        import random
        import time
        # randomized base id so parallel runs do not collide on output names
        random.seed(time.time())
        self._base_id = random.randrange(100000)
0
    def __init__(self):
        """Reset all event-loop bookkeeping to its idle state."""
        Logger.__init__(self)
        self._idx = 0
        self._is_hlt = False
        self._tree = None
        self._event = None
        self._context = None
        self._decoration = dict()
        # metadata properties (disabled until configured)
        self._useMetadataParams = False
        self._metadataParams = {}
        # holds all branches declared by the concrete body class
        self._branches = list()
Пример #15
0
  def __init__(self, name):
    """Create the tool in its idle state: enabled, not yet initialized."""
    Logger.__init__(self)
    self._name = name
    # status flags
    self._wtd = StatusWTD.DISABLE
    self._status = StatusTool.ENABLE
    self._initialized = StatusTool.NOT_INITIALIZED
    self._finalized = StatusTool.NOT_FINALIZED
    # services and EDMs (filled in during initialization)
    self._context = NotSet
    self._storegateSvc = NotSet
    self._dataframe = NotSet
    # user-declared properties
    self.__property = {}
Пример #16
0
 def __init__(self, verbose=False, save_the_best=False, patience=False, **kw):
     """Reset the best-score/patience state tracked across epochs."""
     super(Callback, self).__init__()
     Logger.__init__(self, **kw)
     self.__verbose = verbose
     self.__save_the_best = save_the_best
     self.__patience = patience
     # counters start from scratch
     self.__ipatience = 0
     self.__best_sp = 0.0
     self.__best_epoch = 0
     self.__best_weights = NotSet
     self._validation_data = NotSet
0
 def __init__(self, ignoreAttrs = set(), toProtectedAttrs = set(), ignoreRawChildren = False, **kw ):
   """
     -> ignoreAttrs: attribute patterns to skip when reading dictionary values.
     -> toProtectedAttrs: public attributes to store as protected or private
       ones. E.g. when the dictionary value is 'val' and the class value
       should be _val or __val, add toProtectedAttrs = ['_val'] or '__val'.
     -> ignoreRawChildren: do not attempt to convert raw children to higher
       level objects.
   """
   Logger.__init__(self, kw)
   import re
   # always ignore the converter's base attributes as well
   merged = set(ignoreAttrs) | RawDictCnv.baseAttrs
   self.ignoreAttrs = [re.compile(pattern) for pattern in merged]
   self.toProtectedAttrs = set(toProtectedAttrs)
   self.ignoreRawChildren = ignoreRawChildren
   from Gaugi import checkForUnusedVars
   checkForUnusedVars( kw, self._logger.warning )
Пример #18
0
    def __init__(self, generator, etbins, etabins, x_bin_size, y_bin_size,
                 ymin, ymax, false_alarm_limit=0.5, level=LoggingLevel.INFO,
                 xmin_percentage=1, xmax_percentage=99):
        """Keep the generator, binning and axis settings for later plotting."""
        # init base class
        Logger.__init__(self, level=level)
        self.__generator = generator
        self.__etbins = etbins
        self.__etabins = etabins
        self.__x_bin_size = x_bin_size
        self.__y_bin_size = y_bin_size
        # y range is fixed; x range is derived from percentiles
        self.__ymin = ymin
        self.__ymax = ymax
        self.__xmin_percentage = xmin_percentage
        self.__xmax_percentage = xmax_percentage
        self.__false_alarm_limit = false_alarm_limit
Пример #19
0
    def __init__(self, fList, reader, nFilesPerJob, nthreads):
        """Expand *fList* and split it into jobs of *nFilesPerJob* files.

        The nested generator plus ``[l for l in ...]`` wrapper is replaced
        by a single slicing comprehension with identical output (the last
        chunk may be shorter).
        """
        Logger.__init__(self)
        from Gaugi import csvStr2List
        from Gaugi import expandFolders
        fList = csvStr2List(fList)
        expanded = expandFolders(fList)
        # fixed-size slices over the expanded list
        self._fList = [expanded[i:i + nFilesPerJob]
                       for i in range(0, len(expanded), nFilesPerJob)]
        self.process_pipe = []
        self._outputs = []
        self._nthreads = nthreads
        self._reader = reader
Пример #20
0
 def __init__(self, d=None, **kw):
     """Construct the tex object and register it with the active context.

     NOTE(review): the original default was the mutable ``d={}`` while the
     body mutates ``d`` via update()/pop(), so keyword state leaked across
     calls relying on the default. A fresh dict is now built per call;
     explicit callers see identical behavior.
     """
     d = {} if d is None else d
     if None in self._contextManager._acceptedTypes:
         self._contextManager._acceptedTypes = TexObject,
     # subclasses must define at least one tex-emitting section
     if (not isinstance(self, TexObjectCollection)
             and not hasattr(self, '_preamble')
             and not hasattr(self, '_enclosure')
             and not hasattr(self, '_body')
             and not hasattr(self, '_footer')
             and not hasattr(self, '_appendix')):
         raise TexException(
             self, 'Class %s does not write any tex code.' %
             self.__class__.__name__)
     d.update(kw)
     Logger.__init__(self, d)
     if hasattr(self, '_body'):
         self._body = formatTex(self._body, retrieve_kw(d, 'textWidth', 80))
     self._stream = kw.pop('stream', tss)
     # keyword substitutions: dict entries first, then public attributes
     self._keywords = {
         key: val
         for key, val in d.items() if not key.startswith('_')
     }
     self._keywords.update({
         key: val
         for key, val in self.__dict__.items() if not key.startswith('_')
     })
     if 'star' in self._keywords and self._keywords['star']:
         self._keywords['star'] = '*'
     else:
         self._keywords['star'] = ''
     if hasattr(self, '_assertVars'):
         for key in self._assertVars:
             if not key in self._keywords:
                 raise TexException(self, "Assert var %s failed." % key)
     gcc.set(self)
     self._contextManaged = d.pop('_contextManaged', True)
     self._context = self._contextManager()
     self._isInContext = self._context is not None
     # when created inside a collection, append self and share its stream
     if (self._isInContext
             and isinstance(self._context, TexObjectCollection)
             and self._contextManaged):
         self._context += self
     if self._isInContext:
         self._stream = self._context._stream
    def __init__(self, **kw):
        """Configure the disco training job from keyword arguments.

        Fixes: use isinstance() for the config-type test and avoid
        shadowing the builtin ``id`` when enumerating models.
        """
        Logger.__init__(self)

        self.epochs = retrieve_kw(kw, 'epochs', 1000)
        self.batch_size = retrieve_kw(kw, 'batch_size', 1024)
        self.lambda_disco = retrieve_kw(kw, 'lambda_disco', 300)
        self.callbacks = retrieve_kw(kw, 'callbacks', [])
        self.metrics = retrieve_kw(kw, 'metrics', [])
        job_auto_config = retrieve_kw(kw, 'job', None)
        self.sorts = retrieve_kw(kw, 'sorts', range(1))
        self.inits = retrieve_kw(kw, 'inits', 1)
        self.__verbose = retrieve_kw(kw, 'verbose', True)
        self.__model_generator = retrieve_kw(kw, 'model_generator', None)
        # sampling defaults for the generated dataset
        self.total = 100000
        self.background_percent = 0.99
        self.test_size = 0.3

        # read the job configuration from file (path or pre-loaded object)
        if job_auto_config:
            if isinstance(job_auto_config, str):
                MSG_INFO(self, 'Reading job configuration from: %s',
                         job_auto_config)
                from saphyra.core.readers import JobReader
                job = JobReader().load(job_auto_config)
            else:
                job = job_auto_config

            # retrieve sort/init lists from file
            self.sorts = job.getSorts()
            self.inits = job.getInits()
            self.__models, self.__id_models = job.getModels()
            self.__jobId = job.id()

        # an explicit model list overrides whatever the job file provided
        models = retrieve_kw(kw, 'models', None)
        if models:
            self.__models = models
            self.__id_models = list(range(len(models)))
            self.__jobId = 0

        checkForUnusedVars(kw)
Пример #22
0
    def __init__(self, db, args=None):
        """Keep the database handle and, when *args* is given, register the
        'node' command group (create/delete/list/stop) on it.

        args: an argparse subparsers object to attach this CLI to, or None.
        """

        Logger.__init__(self)
        self.__db = db
        if args:

            # Create a node: name plus enabled/maximum CPU and GPU slots
            create_parser = argparse.ArgumentParser(
                description='Node create command lines.', add_help=False)
            create_parser.add_argument('-n',
                                       '--name',
                                       action='store',
                                       dest='name',
                                       required=True,
                                       help="The name of the node.")
            create_parser.add_argument('-ec',
                                       '--enabledCPUSlots',
                                       action='store',
                                       dest='enabledCPUSlots',
                                       required=True,
                                       help="The number of CPU enabled slots.")
            create_parser.add_argument(
                '-mc',
                '--maxNumberOfCPUSlots',
                action='store',
                dest='maxNumberOfCPUSlots',
                required=True,
                help="The total number of CPU slots for this node.")

            create_parser.add_argument('-eg',
                                       '--enabledGPUSlots',
                                       action='store',
                                       dest='enabledGPUSlots',
                                       required=True,
                                       help="The number of GPU enabled slots.")
            create_parser.add_argument(
                '-mg',
                '--maxNumberOfGPUSlots',
                action='store',
                dest='maxNumberOfGPUSlots',
                required=True,
                help="The total number of GPU  slots for this node.")

            # Remove a node by name
            delete_parser = argparse.ArgumentParser(
                description='Node remove command lines.', add_help=False)
            delete_parser.add_argument('-n',
                                       '--name',
                                       action='store',
                                       dest='name',
                                       required=True,
                                       help="The node name to be removed")

            # Stop a node by name
            stop_parser = argparse.ArgumentParser(
                description='Node stop command lines.', add_help=False)
            stop_parser.add_argument('-n',
                                     '--name',
                                     action='store',
                                     dest='name',
                                     required=True,
                                     help="The node name to be stop")

            # List nodes (no extra options)
            list_parser = argparse.ArgumentParser(
                description='List all users command lines.', add_help=False)

            parent = argparse.ArgumentParser(description='', add_help=False)
            subparser = parent.add_subparsers(dest='option')

            # register subcommands under 'node'
            subparser.add_parser('create', parents=[create_parser])
            subparser.add_parser('delete', parents=[delete_parser])
            subparser.add_parser('list', parents=[list_parser])
            subparser.add_parser('stop', parents=[stop_parser])
            args.add_parser('node', parents=[parent])
Пример #23
0
 def __init__(self, name):
     """Create a named registry whose tools keep their insertion order."""
     Logger.__init__(self)
     import collections
     self._name = name
     # ordered: tools are iterated in the order they were added
     self._tools = collections.OrderedDict()
Пример #24
0
 def __init__(self, d = None, **kw):
   """Forward the ctor dict and keyword options to the Logger base.

   NOTE(review): the default was the mutable ``d = {}``; since the dict is
   handed to Logger (which may retain or mutate it), a fresh dict is now
   created per call to avoid cross-call state leakage.
   """
   Logger.__init__(self, {} if d is None else d, **kw)
Пример #25
0
    def __init__(self, **kw):
        """Forward all keyword options straight to the Logger base class."""

        Logger.__init__(self, **kw)
Пример #26
0
    def __init__(self, trigger):
        """Compile *trigger* immediately so the object is ready for use."""

        Logger.__init__(self)
        # Compile all internal variables
        self.compile(trigger)
Пример #27
0
 def __init__(self, outputFile):
     """Validate and normalize the target path for this streamer.

     Raises TexException when *outputFile* is empty/falsy.
     """
     Logger.__init__(self)
     if not outputFile:
         raise TexException(self, 'Cannot stream to empty file path.')
     # enforce the extension expected by this streamer type
     self.outputFile = ensureExtension(outputFile, self._outputExtension)
Пример #28
0
 def __init__(self):
   """Create an empty, insertion-ordered container registry."""
   Logger.__init__(self) 
   # ordered: containers are iterated in the order they were added
   self.__containers = collections.OrderedDict()
Пример #29
0
 def __init__(self, skip_these_keys, **kw):
     """Record which keys the merge step must leave untouched."""
     Logger.__init__(self, kw)
     # these keys are copied once, never merged across inputs
     self._skip_these_keys = skip_these_keys
Пример #30
0
    def __init__(self, db, args=None):

        Logger.__init__(self)
        self.__db = db

        if args:

            # Create Task
            create_parser = argparse.ArgumentParser(description='',
                                                    add_help=False)

            create_parser.add_argument('-v',
                                       '--volume',
                                       action='store',
                                       dest='volume',
                                       required=True,
                                       help="The volume")
            create_parser.add_argument(
                '-t',
                '--task',
                action='store',
                dest='taskname',
                required=True,
                help="The task name to be append into the db.")
            create_parser.add_argument(
                '-c',
                '--configFile',
                action='store',
                dest='configFile',
                required=True,
                help=
                "The job config file that will be used to configure the job (sort and init)."
            )
            create_parser.add_argument(
                '-d',
                '--dataFile',
                action='store',
                dest='dataFile',
                required=True,
                help="The data/target file used to train the model.")
            create_parser.add_argument(
                '--sd',
                '--secondaryDS',
                action='store',
                dest='secondaryDS',
                required=False,
                default="{}",
                help=
                "The secondary datasets to be append in the --exec command. This should be:"
                + "--secondaryData='{'REF':'path/to/my/extra/data',...}'")
            create_parser.add_argument('--exec',
                                       action='store',
                                       dest='execCommand',
                                       required=True,
                                       help="The exec command")
            create_parser.add_argument('--queue',
                                       action='store',
                                       dest='queue',
                                       required=True,
                                       default='gpu',
                                       help="The cluste queue [gpu or cpu]")
            create_parser.add_argument('--dry_run',
                                       action='store_true',
                                       dest='dry_run',
                                       required=False,
                                       default=False,
                                       help="Use this as debugger.")
            create_parser.add_argument('--bypass',
                                       action='store_true',
                                       dest='bypass',
                                       required=False,
                                       default=False,
                                       help="Bypass the job test.")

            # Create Task
            repro_parser = argparse.ArgumentParser(description='',
                                                   add_help=False)

            repro_parser.add_argument('-v',
                                      '--volume',
                                      action='store',
                                      dest='volume',
                                      required=True,
                                      help="The volume")
            repro_parser.add_argument(
                '--new_task',
                action='store',
                dest='new_taskname',
                required=True,
                help="The new task name after the reprocessing phase")
            repro_parser.add_argument(
                '--old_task',
                action='store',
                dest='old_taskname',
                required=True,
                help=
                "The old task name that will be used into the reprocessing phase"
            )
            repro_parser.add_argument(
                '-d',
                '--dataFile',
                action='store',
                dest='dataFile',
                required=True,
                help="The data/target file used to train the model.")
            repro_parser.add_argument(
                '--sd',
                '--secondaryDS',
                action='store',
                dest='secondaryDS',
                required=False,
                default="{}",
                help=
                "The secondary datasets to be append in the --exec command. This should be:"
                + "--secondaryData='{'REF':'path/to/my/extra/data',...}'")
            repro_parser.add_argument('--exec',
                                      action='store',
                                      dest='execCommand',
                                      required=True,
                                      help="The exec command")
            repro_parser.add_argument('--queue',
                                      action='store',
                                      dest='queue',
                                      required=True,
                                      default='gpu',
                                      help="The cluste queue [gpu or cpu]")
            repro_parser.add_argument('--dry_run',
                                      action='store_true',
                                      dest='dry_run',
                                      required=False,
                                      default=False,
                                      help="Use this as debugger.")

            # The retry/delete/kill sub-commands all select tasks the same way:
            # an explicit id list and/or an [id_min, id_max] range. Factor the
            # shared arguments into one local helper so the three parsers stay
            # consistent.
            def _add_task_id_args(parser, id_list_help):
                """Attach --id, --id_min and --id_max selection args to *parser*."""
                parser.add_argument('--id',
                                    action='store',
                                    nargs='+',
                                    dest='id_list',
                                    required=False,
                                    default=None,
                                    help=id_list_help,
                                    type=int)
                parser.add_argument('--id_min',
                                    action='store',
                                    dest='id_min',
                                    required=False,
                                    help="Lower task id limit to apply on the loop",
                                    type=int,
                                    default=None)
                parser.add_argument('--id_max',
                                    action='store',
                                    dest='id_max',
                                    required=False,
                                    help="Upper task id limit to apply on the loop",
                                    type=int,
                                    default=None)

            retry_parser = argparse.ArgumentParser(description='',
                                                   add_help=False)
            _add_task_id_args(retry_parser, "All task ids to be retried")

            delete_parser = argparse.ArgumentParser(description='',
                                                    add_help=False)
            _add_task_id_args(delete_parser, "All task ids to be removed")
            delete_parser.add_argument(
                '--remove',
                action='store_true',
                dest='remove',
                required=False,
                help="Remove all files for this task from the storage. Beware "
                     "when using this flag because you will lose your data too.")
            delete_parser.add_argument('--force',
                                       action='store_true',
                                       dest='force',
                                       required=False,
                                       help="Force delete.")

            list_parser = argparse.ArgumentParser(description='',
                                                  add_help=False)
            # Default to the locally-configured username; --all overrides the
            # per-user filter.
            list_parser.add_argument('-u',
                                     '--user',
                                     action='store',
                                     dest='username',
                                     required=False,
                                     default=config['username'],
                                     help="The username.")
            list_parser.add_argument('-a',
                                     '--all',
                                     action='store_true',
                                     dest='all',
                                     required=False,
                                     help="List all tasks.")
            list_parser.add_argument('-i',
                                     '--interactive',
                                     action='store_true',
                                     dest='interactive',
                                     required=False,
                                     help="List all tasks interactive mode.")

            kill_parser = argparse.ArgumentParser(description='',
                                                  add_help=False)
            _add_task_id_args(kill_parser, "All task ids to be killed")

            queue_parser = argparse.ArgumentParser(description='',
                                                   add_help=False)
            queue_parser.add_argument('-n',
                                      '--name',
                                      action='store',
                                      dest='name',
                                      required=False,
                                      help="The queue name")

            # Register every task sub-command under a common 'task' parent;
            # the chosen sub-command name lands in the 'option' attribute.
            parent = argparse.ArgumentParser(description='', add_help=False)
            subparser = parent.add_subparsers(dest='option')
            for option_name, option_parser in (('create', create_parser),
                                               ('repro', repro_parser),
                                               ('retry', retry_parser),
                                               ('delete', delete_parser),
                                               ('list', list_parser),
                                               ('kill', kill_parser),
                                               ('queue', queue_parser)):
                subparser.add_parser(option_name, parents=[option_parser])
            args.add_parser('task', parents=[parent])