Code Example #1
    def __init__(self, name=None, project=None, filename=None, folder=None):
        if name is None: name = 'Default'
        self.projectref = sc.Link(project)  # Store pointer for the project
        self.name = sc.uniquename(name, namelist=self.projectref().burdensets.keys())  # Name of the parameter set, e.g. 'default'
        self.uid = sc.uuid()  # ID
        self.created = sc.now()  # Date created
        self.modified = sc.now()  # Date modified

        # Define hard-coded column names
        self.colnames = sc.odict([
            ('active', 'Active'),
            #('code',       'Code'),
            ('cause', 'Cause'),
            ('dalys', 'DALYs'),
            ('deaths', 'Deaths'),
            ('prevalence', 'Prevalence')
        ])

        # Load data, if provided
        self.data = None
        if filename is not None:
            self.loaddata(filename=filename, folder=folder)

        return None
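
A side note on the sc.uniquename() call above: it is given a proposed name plus the list of existing names, so the new set's name does not collide with sets already in the project. A minimal usage sketch under that assumption (the candidate names below are hypothetical):

import sciris as sc

# Minimal sketch of sc.uniquename() as called in the constructor above; example names are hypothetical.
existing = ['Default', 'Scenario 1']
newname = sc.uniquename('Default', namelist=existing)
print(newname)  # A name not already present in `existing`; the exact suffix style is up to sciris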
Code Example #2
 def launch_task(task_id='', func_name='', args=[], kwargs={}):
     
     match_taskrec = datastore.loadtask(task_id) # Find a matching task record (if any) to the task_id.
     
     if match_taskrec is None: # If we did not find a match...
         if func_name not in task_func_dict: # If the function name is not in the task function dictionary, return an error.
             return_dict = {'error': 'Could not find requested async task function "%s"' % func_name}
         
         else:
             new_task_record = Task(task_id) # Create a new TaskRecord.
             
             # Initialize the TaskRecord with available information.
             new_task_record.status = 'queued'
             new_task_record.queue_time = sc.now()
             new_task_record.func_name = func_name
             new_task_record.args = args
             new_task_record.kwargs = kwargs
             datastore.savetask(new_task_record)  # Add the TaskRecord to the TaskDict.
             
             # Queue up run_task() for Celery.
             my_result = run_task.delay(task_id, func_name, args, kwargs)
             new_task_record.result_id = my_result.id # Add the result ID to the TaskRecord, and update the DataStore.
             datastore.savetask(new_task_record)
             return_dict = new_task_record.jsonify() # Create the return dict from the user repr.
     
     else: # Otherwise (there is a matching task)...
         if match_taskrec.status in ('completed', 'error'): # If the TaskRecord indicates the task has completed or thrown an error...
             if match_taskrec.result_id is not None: # If we have a result ID, erase the result from Redis.
                 result = celery_instance.AsyncResult(match_taskrec.result_id)
                 result.forget()
                 match_taskrec.result_id = None
                        
             # Initialize the TaskRecord to start the task again (though possibly with a new function and arguments).
             match_taskrec.status = 'queued'
             match_taskrec.queue_time = sc.now()
             match_taskrec.start_time = None
             match_taskrec.stop_time = None
             match_taskrec.pending_time = None
             match_taskrec.execution_time = None                
             match_taskrec.func_name = func_name
             match_taskrec.args = args
             match_taskrec.kwargs = kwargs
             
             # Queue up run_task() for Celery.   
             my_result = run_task.delay(task_id, func_name, args, kwargs)             
             match_taskrec.result_id = my_result.id # Add the new result ID to the TaskRecord, and update the DataStore.
             datastore.savetask(match_taskrec)
             return_dict = match_taskrec.jsonify() # Create the return dict from the user repr.
         
         else: # Else (the task is not completed)...
             return_dict = {'error': 'Task is already %s' % match_taskrec.status}
     
     return return_dict   # Return our result.
Code Example #3
File: sim.py Project: wpettine/covasim
 def set_metadata(self, filename):
     ''' Set the metadata for the simulation -- creation time and filename '''
     self.created = sc.now()
     if filename is None:
         datestr = sc.getdate(obj=self.created, dateformat='%Y-%b-%d_%H.%M.%S')
         self.filename = f'covasim_{datestr}.sim'
     return
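
A minimal standalone sketch of the timestamped-filename pattern used in set_metadata() above, built from the same sc.now()/sc.getdate() calls; the helper name and its defaults are hypothetical:

import sciris as sc

def make_default_filename(prefix='covasim', ext='.sim'):
    ''' Hypothetical helper mirroring set_metadata() above '''
    created = sc.now()  # Current datetime, as stored in self.created
    datestr = sc.getdate(obj=created, dateformat='%Y-%b-%d_%H.%M.%S')  # e.g. '2020-Apr-05_13.22.47'
    return f'{prefix}_{datestr}{ext}'

print(make_default_filename())  # e.g. 'covasim_2020-Apr-05_13.22.47.sim'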
Code Example #4
File: run.py Project: rnunez-IDM/covasim
    def __init__(self,
                 sim=None,
                 metapars=None,
                 scenarios=None,
                 basepars=None,
                 filename=None):

        # For this object, metapars are the foundation
        default_pars = make_metapars()  # Start with default pars
        super().__init__(default_pars)  # Initialize and set the parameters as attributes

        # Handle filename
        self.created = sc.now()
        if filename is None:
            datestr = sc.getdate(obj=self.created,
                                 dateformat='%Y-%b-%d_%H.%M.%S')
            filename = f'covasim_scenarios_{datestr}.scens'
        self.filename = filename

        # Handle scenarios -- by default, create a baseline scenario
        if scenarios is None:
            scenarios = sc.dcp(default_scenario)
        self.scenarios = scenarios

        # Handle metapars
        if metapars is None:
            metapars = {}
        self.metapars = metapars
        self.update_pars(self.metapars)

        # Create the simulation and handle basepars
        if sim is None:
            sim = cvsim.Sim()
        self.base_sim = sim
        if basepars is None:
            basepars = {}
        self.basepars = basepars
        self.base_sim.update_pars(self.basepars)
        self.base_sim.validate_pars()
        self.base_sim.init_results()

        # Copy quantities from the base sim to the main object
        self.npts = self.base_sim.npts
        self.tvec = self.base_sim.tvec
        self.reskeys = self.base_sim.reskeys

        # Create the results object; order is: results key, scenario, best/low/high
        self.sims = sc.objdict()
        self.allres = sc.objdict()
        for reskey in self.reskeys:
            self.allres[reskey] = sc.objdict()
            for scenkey in scenarios.keys():
                self.allres[reskey][scenkey] = sc.objdict()
                for nblh in ['name', 'best', 'low', 'high']:
                    self.allres[reskey][scenkey][nblh] = None  # This will get populated below
        return
Code Example #5
 def set_metadata(self, simfile, label):
     ''' Set the metadata for the simulation -- creation time and filename '''
     self.created = sc.now()
     self.version = cvv.__version__
     self.git_info = cvm.git_info()
     if simfile is None:
         datestr = sc.getdate(obj=self.created, dateformat='%Y-%b-%d_%H.%M.%S')
         self.simfile = f'covasim_{datestr}.sim'
     if label is not None:
         self.label = label
     return
Code Example #6
def check_task(task_id, verbose=False): 
    match_taskrec = datastore.loadtask(task_id) # Find a matching task record (if any) to the task_id.
    if match_taskrec is None: # Check to see if the task exists, and if not, return an error.
        errormsg = {'error': 'No task found for specified task ID (%s)' % task_id}
        if verbose: print(errormsg)
        return errormsg
    else: # Update the elapsed times.
        if match_taskrec.pending_time is not None: # If we are no longer pending...
            pending_time = match_taskrec.pending_time # Use the existing pending_time.
            if match_taskrec.execution_time is not None: # If we have finished executing...
                execution_time = match_taskrec.execution_time # Use the execution time in the record.
            else: # Else (we are still executing)...
                execution_time = (sc.now() - match_taskrec.start_time).total_seconds()
        else: # Else (we are still pending)...
            pending_time = (sc.now() - match_taskrec.queue_time).total_seconds()
            execution_time = 0
        taskrec_dict = match_taskrec.jsonify() # Create the return dict from the user repr.
        taskrec_dict['pendingTime'] = pending_time
        taskrec_dict['executionTime'] = execution_time
        if verbose: sc.pp(taskrec_dict)
        return taskrec_dict    # Return the task record information and elapsed times.
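
The elapsed-time arithmetic in check_task() relies on sc.now() returning a datetime object, so subtracting two timestamps gives a timedelta whose total_seconds() is the elapsed time. A minimal sketch of that arithmetic (the variable names are hypothetical stand-ins for the TaskRecord fields):

import sciris as sc

queue_time = sc.now()                                     # When the task was queued
# ... time passes while the task waits ...
start_time = sc.now()                                     # When the task actually started
pending_time = (start_time - queue_time).total_seconds()  # Seconds spent waiting, as in check_task()
print(f'Pending for {pending_time:.6f} s')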
Code Example #7
    def __init__(self,
                 project=None,
                 name=None,
                 burdenset=None,
                 intervset=None,
                 makepackage=None):
        self.name = name  # Name of the parameter set, e.g. 'default'
        self.uid = sc.uuid()  # ID
        self.projectref = sc.Link(project)  # Store pointer for the project, if available
        self.created = sc.now()  # Date created
        self.modified = sc.now()  # Date modified
        self.eps = 1e-4  # A nonzero value to help with division
        self.burdenset = burdenset
        self.intervset = intervset
        self.budget = None
        self.frpwt = None
        self.equitywt = None
        self.data = None  # The data

        if makepackage: self.makepackage()
        return None
Code Example #8
File: project.py Project: sciris/hiptool
    def __init__(self,
                 name='Default',
                 burdenfile=None,
                 interventionsfile=None,
                 country=None,
                 makepackage=True,
                 verbose=2):
        ''' Initialize the project '''

        ## Define the structure sets
        self.burdensets = sc.odict()
        self.intervsets = sc.odict()
        self.packagesets = sc.odict()

        ## Define other quantities
        self.name = name
        self.country = country
        self.uid = sc.uuid()
        self.created = sc.now()
        self.modified = sc.now()
        self.version = hp.version
        self.gitinfo = sc.gitinfo(__file__)
        self.filename = None  # File path, only present if self.save() is used

        ## Load burden spreadsheet, if available
        if burdenfile:
            self.loadburden(filename=burdenfile, verbose=verbose)

        ## Load interventions spreadsheet, if available
        if interventionsfile:
            self.loadinterventions(filename=interventionsfile, verbose=verbose)

        ## Combine into health package, if available
        if makepackage and burdenfile and interventionsfile:
            self.makepackage()

        return None
Code Example #9
 def __init__(self, name=None, project=None, filename=None, folder=None):
     if name is None: name = 'Default'
     self.projectref = sc.Link(project) # Store pointer for the project
     self.name       = sc.uniquename(name, namelist=self.projectref().intervsets.keys()) # Name of the parameter set, e.g. 'default'
     self.uid        = sc.uuid() # ID
     self.created    = sc.now() # Date created
     self.modified   = sc.now() # Date modified
     
     # Define hard-coded column names
     self.colnames = sc.odict([('active',   'Active'),
                               ('shortname','Short name'),
                               ('platform', 'Platform'),
                               ('burdencov','Causes of burden (max coverage)'),
                               ('icer',     'ICER'),
                               ('unitcost', 'Unit cost'),
                               ('spend',    'Spending'),
                               ('frp',      'FRP'),
                               ('equity',   'Equity'),
                               ])
     
     self.data       = None
     if filename is not None:
         self.loaddata(filename=filename, folder=folder)
     return None
Code Example #10
File: sw_datastore.py Project: sciris/scirisweb
    def __init__(self, obj=None, key=None, objtype=None, uid=None, force=True):
        # Handle input arguments
        if uid is None:
            if force:
                uid = sc.uuid()
            else:
                errormsg = 'DataStore: Not creating a new Blob UUID since force is set to False: key=%s, objtype=%s, uid=%s, obj=%s' % (
                    key, objtype, uid, obj)
                raise Exception(errormsg)
        if not key: key = '%s%s%s' % (objtype, default_separator, uid)

        # Set attributes
        self.key = key
        self.objtype = objtype
        self.uid = uid
        self.created = sc.now()
        self.modified = [self.created]
        self.obj = obj
        return None
Code Example #11
File: sim.py Project: haohu1/covasim
 def __init__(self, pars=None, datafile=None, filename=None):
     default_pars = cvpars.make_pars()  # Start with default pars
     super().__init__(default_pars)  # Initialize and set the parameters as attributes
     self.datafile = datafile  # Store this
     self.data = None
     if datafile is not None:  # If a data file is provided, load it
         self.data = cvpars.load_data(datafile)
     self.created = sc.now()
     if filename is None:
         datestr = sc.getdate(obj=self.created,
                              dateformat='%Y-%b-%d_%H.%M.%S')
         filename = f'covasim_{datestr}.sim'
     self.filename = filename
     self.stopped = None  # If the simulation has stopped
     self.results_ready = False  # Whether or not results are ready
     self.people = {}
     self.results = {}
     self.calculated = {}
     if pars is not None:
         self.update_pars(pars)
     return
Code Example #12
def get_fn():
    ''' Get a filename from the date '''
    string = sc.sanitizefilename(str(sc.now())).replace(' ', '_')
    return string
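
A hypothetical usage of get_fn() above: because the timestamp string has been sanitized, it can be dropped straight into an output filename (the exact string format depends on sc.sanitizefilename()).

fn = get_fn()                   # Sanitized timestamp string
outfile = f'results_{fn}.json'  # Hypothetical output filename
print(outfile)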
Code Example #13
def save_project(project, die=None):  # NB, only for saving an existing project
    project.modified = sc.now()
    output = datastore.saveblob(obj=project, objtype='project', die=die)
    return output
Code Example #14
def set_metadata(obj):
    ''' Set standard metadata for an object '''
    obj.version = spv.__version__
    obj.created = sc.now()
    obj.git_info = sc.gitinfo(__file__, verbose=False)
    return
Code Example #15
File: sw_datastore.py Project: sciris/scirisweb
 def update(self):
     ''' When the object is updated, append the current time to the modified list '''
     now = sc.now()
     self.modified.append(now)
     return now
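
Taken together, the two sw_datastore.py examples (the Blob constructor in example #10 and update() here) form a simple audit-trail pattern: created holds the construction time and modified accumulates every subsequent sc.now(). A minimal sketch of the same idea, with a hypothetical Record class standing in for Blob:

import sciris as sc

class Record:
    ''' Hypothetical stand-in for the Blob class in example #10 '''
    def __init__(self):
        self.created = sc.now()         # Construction timestamp
        self.modified = [self.created]  # Audit trail of modification times

    def update(self):
        ''' Append the current time to the modified list, as in example #15 '''
        now = sc.now()
        self.modified.append(now)
        return now

rec = Record()
rec.update()
print(rec.created, len(rec.modified))  # One creation time, two entries in the trail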
Code Example #16
File: sw_app.py Project: sciris/scirisweb
    def _do_RPC(self, verbose=False):
        # Check to see whether the RPC is being passed in via request.form.
        # If so, we are doing an upload, and we want to pull the RPC
        # request info from the form, not from request.data.
        if 'funcname' in request.form:  # Pull out the function name, args, and kwargs
            fn_name = request.form.get('funcname')
            try:
                args = json.loads(request.form.get('args', "[]"),
                                  object_pairs_hook=OrderedDict)
            except:
                args = []  # May or may not be present
            try:
                kwargs = json.loads(request.form.get('kwargs', "{}"),
                                    object_pairs_hook=OrderedDict)
            except:
                kwargs = {}  # May or may not be present
        else:  # Otherwise, we have a normal or download RPC, which means we pull the RPC request info from request.data.
            reqdict = json.loads(request.data, object_pairs_hook=OrderedDict)
            fn_name = reqdict['funcname']
            args = reqdict.get('args', [])
            kwargs = reqdict.get('kwargs', {})
        if verbose:
            print('RPC(): RPC properties:')
            print('  fn_name: %s' % fn_name)
            print('     args: %s' % args)
            print('   kwargs: %s' % kwargs)

        # If the function name is not in the RPC dictionary, return an error.
        if not sc.isstring(fn_name):
            return robustjsonify(
                {'error': 'Invalid RPC - must be a string (%s)' % fn_name})

        if fn_name not in self.RPC_dict:
            return robustjsonify(
                {'error': 'Could not find requested RPC "%s"' % fn_name})

        found_RPC = self.RPC_dict[fn_name]  # Get the RPC we've found.

        ## Do any validation checks we need to do and return errors if they don't pass.

        # If the RPC is disabled, always return a Status 403 (Forbidden)
        if found_RPC.validation == 'disabled':
            if verbose: print('RPC(): RPC disabled')
            abort(403)

        # Only do other validation if DataStore and users are included -- NOTE: Any "unknown" validation values are treated like 'none'.
        if self.config['USE_DATASTORE'] and self.config['USE_USERS']:
            if found_RPC.validation == 'any' and not (current_user.is_anonymous or current_user.is_authenticated):
                # If the RPC should be executable by any user, including an anonymous one, but there is no authorization or anonymous login, return a Status 401 (Unauthorized)
                abort(401)
            elif found_RPC.validation == 'named' and (current_user.is_anonymous or not current_user.is_authenticated):
                # Else if the RPC should be executable by any non-anonymous user, but there is no authorization or there is an anonymous login, return a Status 401 (Unauthorized)
                abort(401)
            elif found_RPC.validation == 'admin':  # Else if the RPC should be executable by any admin user, but there is no admin login or it's an anonymous login...
                if current_user.is_anonymous or not current_user.is_authenticated:
                    abort(401)  # If the user is anonymous or no authenticated user is logged in, return Status 401 (Unauthorized).
                elif not current_user.is_admin:
                    abort(403)  # Else, if the user is not an admin user, return Status 403 (Forbidden).

        # If we are doing an upload...
        if found_RPC.call_type == 'upload':
            if verbose: print('Starting upload...')
            thisfile = request.files['uploadfile']  # Grab the formData file that was uploaded.
            filename = secure_filename(thisfile.filename)  # Extract a sanitized filename from the one we start with.
            try:
                uploaded_fname = os.path.join(self.datastore.tempfolder, filename)  # Generate a full upload path/file name.
            except Exception as E:
                errormsg = 'Could not create filename for uploaded file: %s' % str(E)
                raise Exception(errormsg)
            try:
                thisfile.save(uploaded_fname)  # Save the file to the uploads directory
            except Exception as E:
                errormsg = 'Could not save uploaded file: %s' % str(E)
                raise Exception(errormsg)
            args.insert(0, uploaded_fname)  # Prepend the file name to the args list.

        # Show the call of the function.
        callcolor = ['cyan', 'bgblue']
        successcolor = ['green', 'bgblue']
        failcolor = ['gray', 'bgred']
        timestr = '[%s]' % sc.now(astype='str')
        try:
            userstr = ' <%s>' % current_user.username
        except:
            userstr = ' <no user>'
        RPCinfo = sc.objdict({
            'time': timestr,
            'user': userstr,
            'module': found_RPC.call_func.__module__,
            'name': found_RPC.funcname
        })

        if self.config['LOGGING_MODE'] == 'FULL':
            string = '%s%s RPC called: "%s.%s"' % (
                RPCinfo.time, RPCinfo.user, RPCinfo.module, RPCinfo.name)
            sc.colorize(callcolor, string, enable=self.colorize)

        # Execute the function to get the results, putting it in a try block in case there are errors in what's being called.
        try:
            if verbose: print('RPC(): Starting RPC...')
            T = sc.tic()
            result = found_RPC.call_func(*args, **kwargs)
            if isinstance(result, dict) and 'error' in result:  # If the RPC returns an error, return it
                return robustjsonify({'error': result['error']})
            elapsed = sc.toc(T, output=True)
            if self.config['LOGGING_MODE'] == 'FULL':
                string = '%s%s RPC finished in %0.2f s: "%s.%s"' % (
                    RPCinfo.time, RPCinfo.user, elapsed, RPCinfo.module,
                    RPCinfo.name)
                sc.colorize(successcolor, string, enable=self.colorize)
        except Exception as E:
            if verbose: print('RPC(): Exception encountered...')
            shortmsg = str(E)
            exception = traceback.format_exc()  # Grab the traceback stack
            hostname = '|%s| ' % socket.gethostname()
            tracemsg = '%s%s%s Exception during RPC "%s.%s" \nRequest: %s \n%.10000s' % (
                hostname, RPCinfo.time, RPCinfo.user, RPCinfo.module,
                RPCinfo.name, request, exception)
            # Post an error to the Flask logger, limiting the exception information to 10000 characters maximum (to prevent monstrous sqlalchemy outputs)
            sc.colorize(failcolor, tracemsg, enable=self.colorize)
            if self.config['SLACK']:
                self.slacknotification(tracemsg)
            if isinstance(E, HTTPException):  # If we have a werkzeug exception, pass it on up to werkzeug to resolve and reply to.
                raise E
            code = 500  # Send back a response with status 500 that includes the exception traceback.
            fullmsg = shortmsg + '\n\nException details:\n' + tracemsg
            reply = {
                'exception': fullmsg
            }  # NB, not sure how to actually access 'traceback' on the FE, but keeping it here for future
            return make_response(robustjsonify(reply), code)

        # If we are doing a download, prepare the response and send it off.
        if found_RPC.call_type == 'download':
            # To download a file, use `this.$sciris.download` instead of `this.$sciris.rpc`. Decorate the RPC with
            # `@RPC(call_type='download')`. Finally, the RPC needs to specify the file and optionally the filename.
            # This is done with tuple unpacking. The following outputs are supported from `rpc_function()`
            #
            # 1 - filename_on_disk
            # 2 - BytesIO
            # 3 - filename_on_disk, download_filename
            # 4 - BytesIO, download_filename
            #
            # Example return values from the RPC are as follows:
            #
            # 1 - "E:/test.xlsx" (uses "test.xlsx")
            # 2 - <BytesIO> (default filename will be generated in this function)
            # 3 - ("E:/test.xlsx","foo.xlsx")
            # 4 - (<BytesIO>,"foo.xlsx")
            #
            # On the RPC end, the most common cases might look like
            #
            # return "E:/test.xlsx"
            #
            # OR
            #
            # return Blobject.to_file(), "foo.xlsx"

            if verbose: print('RPC(): Starting download...')

            if result is None:  # If we got None for a result (the full file name), return an error to the client.
                return robustjsonify({
                    'error':
                    'Could not find resource to download from RPC "%s": result is None'
                    % fn_name
                })
            elif sc.isstring(result):
                from_file = True
                dir_name, file_name = os.path.split(result)
                output_name = file_name
            elif isinstance(result, io.BytesIO):
                from_file = False
                bytesio = result
                output_name = 'download.obj'
            else:
                try:
                    content = result[0]
                    output_name = result[1]
                    if sc.isstring(content):
                        from_file = True
                        dir_name, file_name = os.path.split(content)
                    elif isinstance(content, io.BytesIO):
                        from_file = False
                        bytesio = content
                    else:
                        return robustjsonify(
                            {'error': 'Unrecognized RPC output'})
                except Exception as E:
                    return robustjsonify(
                        {'error': 'Error reading RPC result (%s)' % E})

            if from_file:
                response = send_from_directory(dir_name,
                                               file_name,
                                               as_attachment=True)
                response.status_code = 201  # Status 201 = Created
                # Unfortunately, we cannot remove the actual file at this point
                # because it is in use during the actual download, so we rely on
                # later cleanup to remove download files.
            else:
                response = send_file(bytesio,
                                     as_attachment=True,
                                     attachment_filename=output_name)
            response.headers['filename'] = output_name
            print(response)
            return response  # Return the response message.

        # Otherwise (normal and upload RPCs),
        else:
            if found_RPC.call_type == 'upload':  # If we are doing an upload....
                try:
                    os.remove(uploaded_fname)  # Erase the physical uploaded file, since it is no longer needed.
                    if verbose:
                        print('RPC(): Removed uploaded file: %s' %
                              uploaded_fname)
                except Exception as E:
                    if verbose:
                        print('RPC(): Could not remove uploaded file: %s' %
                              str(E))  # Probably since moved by the user
            if result is None:  # If None was returned by the RPC function, return ''.
                if verbose: print('RPC(): RPC finished, returning None')
                return ''
            else:  # Otherwise, convert the result (probably a dict) to JSON and return it.
                output = robustjsonify(result)
                if verbose: print('RPC(): RPC finished, returning result')
                return output
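
One more sc.now() idiom from _do_RPC() above is building a log timestamp as a string via sc.now(astype='str'). A minimal sketch (the module and function names in the log line are hypothetical):

import sciris as sc

timestr = '[%s]' % sc.now(astype='str')  # Current time formatted as a string, as in _do_RPC()
print('%s <no user> RPC called: "%s.%s"' % (timestr, 'mymodule', 'myfunc'))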
Code Example #17
def set_metadata(obj):
    ''' Set standard metadata for an object '''
    obj.created = sc.now()
    obj.version = cvv.__version__
    obj.git_info = cvm.git_info()
    return
Code Example #18
    def run_task(task_id, func_name, args, kwargs):
        if kwargs is None: kwargs = {} # So **kwargs works below
        
        if verbose: print('C>> Starting run_task() for %s' % task_id)

        # Check if run_task() locked and wait until it isn't, then lock it for
        # other run_task() instances in this Celery worker.
        # NOTE: We may want to resurrect this, perhaps using the task_id as the lock, if we find out there are
        # conflicts with access to the TaskRecords.
        # lock_run_task(task_id)

        # Find a matching task record (if any) to the task_id.
        match_taskrec = datastore.loadtask(task_id)
        if match_taskrec is None:
            if verbose: print('C>> Failed to find task record for %s' % task_id)
            # unlock_run_task(task_id)  # uncomment if there are conflicts
            return { 'error': 'Could not access Task' }
    
        # Set the TaskRecord to indicate start of the task.
        match_taskrec.status = 'started'
        match_taskrec.start_time = sc.now()
        match_taskrec.pending_time = (match_taskrec.start_time - match_taskrec.queue_time).total_seconds()
            
        # Do the actual update of the TaskRecord.
        datastore.savetask(match_taskrec)
        if verbose: print('C>> Saved task for %s' % task_id)
        
        # Make the actual function call, inside a try block in case there is 
        # an exception thrown.
        # NOTE: This block is likely to run for several seconds or even 
        # minutes or hours, depending on the task.
        try:
            result = task_func_dict[func_name](*args, **kwargs)
            match_taskrec.status = 'completed'
            if verbose: print('C>> Successfully completed task %s! :)' % task_id)
        except Exception as e: # If there's an exception, grab the stack trace and set the TaskRecord to indicate it stopped with an error.
            error_text = traceback.format_exc()
            match_taskrec.status = 'error'
            match_taskrec.error_text = error_text
            match_taskrec.error_msg = str(e)
            result = error_text
            if verbose: print('C>> Failed task %s! :(' % task_id)
        
        # Set the TaskRecord to indicate end of the task.
        match_taskrec.stop_time = sc.now()
        match_taskrec.execution_time = (match_taskrec.stop_time - match_taskrec.start_time).total_seconds()
        
        # Do the actual update of the TaskRecord.  Do this in a try / except 
        # block because this step may fail.  For example, if a TaskRecord is 
        # deleted by the webapp, the update here will crash.
        try:
            datastore.savetask(match_taskrec)
        except Exception as e:  # If there's an exception, grab the stack trace and set the TaskRecord to indicate it stopped with an error.
            error_text = traceback.format_exc()
            match_taskrec.status = 'error'
            match_taskrec.error_text = error_text
            match_taskrec.error_msg = str(e)
            result = error_text
            if verbose: print('C>> Failed to save task %s! :(' % task_id)            
            
        if verbose: print('C>> End of run_task() for %s' % task_id)

        # Unlock run-task() for other run_task() instances running on the same
        # Celery worker.
        # unlock_run_task(task_id)  # uncomment if there are conflicts

        # Return the result.
        return result
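
For completeness, a hedged sketch of the full timing lifecycle that launch_task() and run_task() record on a TaskRecord (queue_time at launch, start_time and pending_time when the worker picks the task up, stop_time and execution_time when it finishes); the plain variables below are hypothetical stand-ins for those fields:

import time
import sciris as sc

queue_time = sc.now()                                       # Set by launch_task() when the task is queued
start_time = sc.now()                                       # Set by run_task() when the worker starts
pending_time = (start_time - queue_time).total_seconds()    # Time spent waiting in the queue
time.sleep(0.1)                                             # Stand-in for the actual task function
stop_time = sc.now()                                        # Set by run_task() when the task finishes
execution_time = (stop_time - start_time).total_seconds()   # Time spent running
print(f'pending: {pending_time:.3f} s, execution: {execution_time:.3f} s')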