Example #1
def loadAllDatasets(request):
    """ Updates the database from datasets stored in the FTP area.
        If multiple versions of a dataset are in the FTP area, only the latest
        will be loaded.
    """
    error_message = None # initially.
    #
    if request.method == "GET":
        form = forms.LoadAllDatasetsForm()
        contextinstance = {'form'   : form,
                           'error_message' : error_message}
        contextinstance.update(csrf(request))
        return render_to_response("datasets_load_all.html", contextinstance)
    elif request.method == "POST":
        error_message = None # initially.
        #
        form = forms.LoadAllDatasetsForm(request.POST)
        if form.is_valid():
            #
            user = request.POST['user']
            password = request.POST['password']
            if password != settings.APPS_VALID_USERS_AND_PASSWORDS_FOR_TEST.get(user, None):
                error_message = u'Not a valid user or password. Please try again...'   
            #
            if error_message == None:
                logrow_id = admin_models.createLogRow(command = u'Load all datasets from FTP to DB', status = u'RUNNING', user = user)
                try:
                    error_counter = dataset_utils.DatasetUtils().writeLatestDatasetsInfoToDb(logrow_id, user)
                    if error_counter > 0:
                        admin_models.changeLogRowStatus(logrow_id, status = u'FINISHED (Errors: ' + unicode(error_counter) + u')')
                    else:
                        admin_models.changeLogRowStatus(logrow_id, status = u'FINISHED')
                except Exception as e:
                    error_message = u"Can't load datasets and save to the database."
                    admin_models.changeLogRowStatus(logrow_id, status = u'FAILED')
                    admin_models.addResultLog(logrow_id, result_log = error_message)
                    if settings.DEBUG: print(u'\nError: ' + error_message + u'\nException: ' + unicode(e) + u'\n')
                    settings.LOGGER.error(u'\nError: ' + error_message + u'\nException: ' + unicode(e) + u'\n')                    
            #
#             if error_message == None:
#                 if ('update_metadata' in request.POST) and (request.POST['update_metadata'] == u'on'):
#                     dataset_utils.DatasetUtils().updateMetadataForAllDatasetsInThread(user)
            #
#             if error_message == None:
#                 if ('generate_archives' in request.POST) and (request.POST['generate_archives'] == u'on'):
#                     dataset_utils.DatasetUtils().generateArchivesForAllDatasetsInThread(user)
            # OK.
            if error_message == None:
                return HttpResponseRedirect("/sharkdataadmin")
        #
        contextinstance = {'form'   : form,
                           'error_message' : error_message}
        contextinstance.update(csrf(request))
        return render_to_response("datasets_load_all.html", contextinstance)
    # Not a valid request method.
    return HttpResponseRedirect("/sharkdataadmin")
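# A minimal sketch of how a view like loadAllDatasets() could be exercised with
# Django's test client, assuming the view is routed at the hypothetical URL
# '/sharkdataadmin/load_all_datasets/' and that 'testuser'/'testpassword' exist
# in settings.APPS_VALID_USERS_AND_PASSWORDS_FOR_TEST (all of these names are
# assumptions, not part of the code above):
from django.test import Client

def sketch_load_all_datasets_roundtrip():
    client = Client()
    # GET should render the form page.
    response = client.get('/sharkdataadmin/load_all_datasets/')
    assert response.status_code == 200
    # POST with valid credentials should redirect to /sharkdataadmin.
    response = client.post('/sharkdataadmin/load_all_datasets/',
                           {'user': 'testuser', 'password': 'testpassword'})
    assert response.status_code == 302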
Example #2
def deleteResource(request, resource_id):
    """ Deletes one row in the database. The FTP area is not affected. """
    resource = models.Resources.objects.get(id=resource_id)
    #
    if request.method == "GET":
        form = forms.DeleteResourceForm()
        contextinstance = {'form'   : form,
                           'resource' : resource,
                           'error_message' : None}
        contextinstance.update(csrf(request))
        return render_to_response("delete_resource.html",  contextinstance)
    elif request.method == "POST":
        # Reloads db-stored data.
        resources_utils.ResourcesUtils().clear()
        #
        error_message = None # initially.
        #
        form = forms.DeleteResourceForm(request.POST)
        if form.is_valid():
            #
            user = request.POST['user']
            if user not in settings.APPS_VALID_USERS_AND_PASSWORDS_FOR_TEST:
                error_message = u'Not a valid user. Please try again...'   
            #
            if error_message == None:
                if ('delete_ftp' in request.POST) and (request.POST['delete_ftp'] == u'on'):
                    logrow_id = admin_models.createLogRow(command = u'Delete resource (FTP)', status = u'RUNNING', user = user)
                    try:
                        resources_utils.ResourcesUtils().deleteFileFromFtp(resource.resource_file_name)
                        admin_models.changeLogRowStatus(logrow_id, status = u'FINISHED')
                    except Exception:
                        error_message = u"Can't delete the resource file from the FTP area."
                        admin_models.changeLogRowStatus(logrow_id, status = u'FAILED')
                        admin_models.addResultLog(logrow_id, result_log = error_message)
            #
            if error_message == None:
                logrow_id = admin_models.createLogRow(command = u'Delete resource (DB)', status = u'RUNNING', user = user)
                try:
                    resource = models.Resources.objects.get(id=resource_id)
                    resource.delete()
                    admin_models.changeLogRowStatus(logrow_id, status = u'FINISHED')
                except Exception:
                    error_message = u"Can't delete resource from the database."
                    admin_models.changeLogRowStatus(logrow_id, status = u'FAILED')
                    admin_models.addResultLog(logrow_id, result_log = error_message)
            # OK.
            if error_message == None:
                return HttpResponseRedirect("/resources")
        #
        contextinstance = {'form'   : form,
                           'resource' : resource,
                           'error_message' : error_message}
        contextinstance.update(csrf(request))
        return render_to_response("delete_resource.html", contextinstance)
    # Not a valid request method.
    return HttpResponseRedirect("/resources")
Example #3
def deleteResources(request):
    """ Deletes all rows in the database. The FTP area is not affected. """
    error_message = None # initially.
    #
    if request.method == "GET":
        #
        form = forms.DeleteAllResourcesForm()
        contextinstance = {'form'   : form,
                           'error_message' : error_message}
        contextinstance.update(csrf(request))
        return render_to_response("resources_delete_all.html", contextinstance)
    elif request.method == "POST":
        # Reloads db-stored data.
        resources_utils.ResourcesUtils().clear()
        #
        error_message = None # initially.
        #
        form = forms.DeleteAllResourcesForm(request.POST)
        if form.is_valid():
            #
            user = request.POST['user']
            password = request.POST['password']
            if password != settings.APPS_VALID_USERS_AND_PASSWORDS_FOR_TEST.get(user, None):
                error_message = u'Not a valid user or password. Please try again...'   
            #
            if error_message == None:
                if ('delete_ftp' in request.POST) and (request.POST['delete_ftp'] == u'on'):
                    logrow_id = admin_models.createLogRow(command = u'Delete all resources (FTP)', status = u'RUNNING', user = user)
                    try:
                        resources_utils.ResourcesUtils().deleteAllFilesFromFtp()
                        admin_models.changeLogRowStatus(logrow_id, status = u'FINISHED')
                    except Exception:
                        error_message = u"Can't delete resources from the FTP area."
                        admin_models.changeLogRowStatus(logrow_id, status = u'FAILED')
                        admin_models.addResultLog(logrow_id, result_log = error_message)
            #
            if error_message == None:
                logrow_id = admin_models.createLogRow(command = u'Delete all resources (DB)', status = u'RUNNING', user = user)
                try:
                    resources_models.Resources.objects.all().delete()
                    admin_models.changeLogRowStatus(logrow_id, status = u'FINISHED')
                except Exception:
                    error_message = u"Can't delete resources from the database."
                    admin_models.changeLogRowStatus(logrow_id, status = u'FAILED')
                    admin_models.addResultLog(logrow_id, result_log = error_message)
            # OK.
            if error_message == None:
                return HttpResponseRedirect("/sharkdataadmin")
        #
        contextinstance = {'form'   : form,
                           'error_message' : error_message}
        contextinstance.update(csrf(request))
        return render_to_response("resources_delete_all.html", contextinstance)
    # Not a valid request method.
    return HttpResponseRedirect("/sharkdataadmin")
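# The user/password check against settings.APPS_VALID_USERS_AND_PASSWORDS_FOR_TEST
# is repeated in every POST handler in these examples. A small helper along these
# lines could remove the duplication (a sketch only; the helper name is not part
# of the original code):
from django.conf import settings

def checkUserAndPassword(request):
    """ Returns an error message, or None if the posted credentials are valid. """
    user = request.POST.get('user', '')
    password = request.POST.get('password', '')
    if password != settings.APPS_VALID_USERS_AND_PASSWORDS_FOR_TEST.get(user, None):
        return u'Not a valid user or password. Please try again...'
    return None # Valid credentials.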
Example #4
def deleteDataset(request, dataset_id):
    """ Deletes one row in the database. """
    dataset = models.Datasets.objects.get(id=dataset_id)
    #
    if request.method == "GET":
        form = forms.DeleteDatasetForm()
        contextinstance = {'form'   : form,
                           'dataset' : dataset,
                           'error_message' : None}
        contextinstance.update(csrf(request))
        return render_to_response("delete_dataset.html", contextinstance)
    elif request.method == "POST":
        error_message = None # initially.
        #
        form = forms.DeleteDatasetForm(request.POST)
        if form.is_valid():
            #
            user = request.POST['user']
            password = request.POST['password']
            if password != settings.APPS_VALID_USERS_AND_PASSWORDS_FOR_TEST.get(user, None):
                error_message = 'Not a valid user or password. Please try again...'   
            #
            if error_message == None:
                if ('delete_ftp' in request.POST) and (request.POST['delete_ftp'] == 'on'):
                    # Delete the marked dataset version. Earlier versions will be used, if there are any.
                    logrow_id = admin_models.createLogRow(command = 'Delete dataset (FTP)', status = 'RUNNING', user = user)
                    try:
                        error_message = sharkdata_core.DatasetUtils().deleteFileFromFtp(dataset.dataset_file_name)
                        if error_message == None:
                            error_message = sharkdata_core.DatasetUtils().writeLatestDatasetsInfoToDb(user)
                        admin_models.changeLogRowStatus(logrow_id, status = 'FINISHED')
                    except Exception:
                        error_message = u"Can't delete dataset from the ftp."
                        admin_models.changeLogRowStatus(logrow_id, status = 'FAILED')
                        admin_models.addResultLog(logrow_id, result_log = error_message)
                else:
                    logrow_id = admin_models.createLogRow(command = 'Delete dataset (DB)', status = 'RUNNING', user = user)
                    try:
                        dataset = models.Datasets.objects.get(id=dataset_id)
                        dataset.delete()
                        admin_models.changeLogRowStatus(logrow_id, status = 'FINISHED')
                    except Exception:
                        error_message = u"Can't delete dataset from the database."
                        admin_models.changeLogRowStatus(logrow_id, status = 'FAILED')
                        admin_models.addResultLog(logrow_id, result_log = error_message)
            # OK.
            if error_message == None:
                return HttpResponseRedirect("/datasets")
        #
        contextinstance = {'form'   : form,
                           'dataset' : dataset,
                           'error_message' : error_message}
        contextinstance.update(csrf(request))
        return render_to_response("delete_dataset.html", contextinstance)
    # Not a valid request method.
    return HttpResponseRedirect("/datasets")
Example #5
 def writeFileInfoToDb(self, file_name, logrow_id = None, user = ''):
     """ Extracts info from the dataset filename and from the zip file content and adds to database. """
     try:
         #
         ftp_file_path = os.path.join(self._ftp_dir_path, file_name)
         # Extract info from file name.
         dataset_name, datatype, version = self.splitFilename(file_name)
         # Extract metadata parts.
         metadata = ''
         metadata_auto = ''
         columndata_available = False
         #
         zipreader = sharkdata_core.SharkArchiveFileReader(file_name, self._ftp_dir_path)
         try:
             zipreader.open()
             #
             try:
                 metadata = zipreader.getMetadataAsText()            
                 encoding = 'cp1252'
                 metadata = unicode(metadata, encoding, 'strict')   
             except Exception as e:
                 if logrow_id:
                     admin_models.addResultLog(logrow_id, result_log = 'WARNING: ' + unicode(e))
             #
             try:
                 metadata_auto = zipreader.getMetadataAutoAsText()                       
                 encoding = 'cp1252'
                 metadata_auto = unicode(metadata_auto, encoding, 'strict')
             except Exception as e:
                 if logrow_id:
                     admin_models.addResultLog(logrow_id, result_log = 'WARNING: ' + unicode(e))
             #
             columndata_available = zipreader.isDataColumnsAvailable()
         finally:
             zipreader.close()                        
         # Save to db.
         dataset = datasets_models.Datasets(
                       dataset_name = dataset_name,
                       datatype = datatype,
                       version = version,
                       dataset_file_name = file_name,
                       ftp_file_path = ftp_file_path,
                       content_data = 'NOT USED',
                       content_metadata = metadata,
                       content_metadata_auto = metadata_auto,
                       #
                       column_data_available = columndata_available,
                       dwc_archive_eurobis_available = False,
                       dwc_archive_eurobis_file_path = '',
                       )
         dataset.save()
         #
         return None # No error message.
     #
     except Exception as e:
         return unicode(e)                
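# writeFileInfoToDb() relies on self.splitFilename(), which is not shown in these
# examples. A sketch of what it might look like, assuming SHARK archive names
# follow the pattern '<dataset_name>_version_<version>.zip' with the datatype as
# the second underscore-separated token (the exact naming convention is an
# assumption):
import os

def splitFilename(self, file_name):
    """ Returns (dataset_name, datatype, version) extracted from the file name. """
    base_name = os.path.splitext(file_name)[0] # Removes '.zip'.
    dataset_name, _, version = base_name.partition('_version_')
    parts = dataset_name.split('_')
    datatype = parts[1] if len(parts) > 1 else ''
    return dataset_name, datatype, version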
Example #6
def deleteExportFiles(request):
    """ Deletes rows in the database. """
    error_message = None
    #
    if request.method == "GET":
        #
        form = forms.DeleteExportFilesForm()
        contextinstance = {'form'   : form,
                           'error_message' : error_message}
        contextinstance.update(csrf(request))
        return render_to_response("exportfiles_delete_all.html", contextinstance)
    elif request.method == "POST":
        #
        form = forms.DeleteExportFilesForm(request.POST)
        if form.is_valid():
            #
            user = request.POST['user']
            password = request.POST['password']
            if password != settings.APPS_VALID_USERS_AND_PASSWORDS_FOR_TEST.get(user, None):
                error_message = 'Not a valid user or password. Please try again...'   
            #
#             if error_message == None:
#                 if ('delete_ftp' in request.POST) and (request.POST['delete_ftp'] == 'on'):
#                     logrow_id = admin_models.createLogRow(command = 'Delete all datasets from FTP', status = 'RUNNING', user = user)
#                     try:
#                         error_message = dataset_utils.DatasetUtils().deleteAllFilesFromFtp()
#                         admin_models.changeLogRowStatus(logrow_id, status = 'FINISHED')
#                     except:
#                         error_message = u"Can't delete datasets from the FTP area."
#                         admin_models.changeLogRowStatus(logrow_id, status = 'FAILED')
#                         admin_models.addResultLog(logrow_id, result_log = error_message)
            #
            if error_message == None:
                logrow_id = admin_models.createLogRow(command = 'Delete all ICES-XML files from DB', status = 'RUNNING', user = user)
                try:
                    exportformats_models.ExportFiles.objects.all().delete()
                    admin_models.changeLogRowStatus(logrow_id, status = 'FINISHED')
                except Exception:
                    error_message = u"Can't delete export files from the database."
                    admin_models.changeLogRowStatus(logrow_id, status = 'FAILED')
                    admin_models.addResultLog(logrow_id, result_log = error_message)
            # OK.
            if error_message == None:
                return HttpResponseRedirect("/sharkdataadmin")
        #
        contextinstance = {'form'   : form,
                           'error_message' : error_message}
        contextinstance.update(csrf(request))
        return render_to_response("exportfiles_delete_all.html", contextinstance)
    # Not a valid request method.
    return HttpResponseRedirect("/sharkdataadmin")
Example #7
def loadAllResources(request):
    """ Updates the database from resources stored in the FTP area.
    """
    error_message = None # initially.
    #
    if request.method == "GET":
        form = forms.LoadAllResourcesForm()
        contextinstance = {'form': form,
                           'error_message': error_message}
        contextinstance.update(csrf(request))
        return render_to_response("resources_load_all.html", contextinstance)
    elif request.method == "POST":
        # Reloads db-stored data.
        resources_utils.ResourcesUtils().clear()
        #
        error_message = None # initially.
        #
        form = forms.LoadAllResourcesForm(request.POST)
        if form.is_valid():
            #
            user = request.POST['user']
            password = request.POST['password']
            if password != settings.APPS_VALID_USERS_AND_PASSWORDS_FOR_TEST.get(user, None):
                error_message = u'Not a valid user or password. Please try again...'   
            #
            if error_message == None:
                logrow_id = admin_models.createLogRow(command = u'Load all resources from FTP to DB', status = u'RUNNING', user = user)
                try:
                    resources_utils.ResourcesUtils().writeResourcesInfoToDb(user)
                    admin_models.changeLogRowStatus(logrow_id, status = u'FINISHED')
                except Exception:
                    error_message = u"Can't load resources and save to the database."
                    admin_models.changeLogRowStatus(logrow_id, status = u'FAILED')
                    admin_models.addResultLog(logrow_id, result_log = error_message)
            # OK.
            if error_message == None:
                return HttpResponseRedirect("/sharkdataadmin")
        #
        contextinstance = {'form': form,
                           'error_message': error_message}
        contextinstance.update(csrf(request))
        return render_to_response("resources_load_all.html", contextinstance)
    # Not a valid request method.
    return HttpResponseRedirect("/sharkdataadmin")
Example #8
def deleteExportFile(request, export_name):
    """ Deletes one row in the database. """
    exportfile = models.ExportFiles.objects.get(export_name = export_name)
    #
    if request.method == "GET":
        form = forms.DeleteExportFileForm()
        contextinstance = {'form'   : form,
                           'exportfile' : exportfile,
                           'error_message' : None}
        contextinstance.update(csrf(request))
        return render_to_response("delete_exportfile.html", contextinstance)
    elif request.method == "POST":
        error_message = None # initially.
        #
        form = forms.DeleteExportFileForm(request.POST)
        if form.is_valid():
            #
            user = request.POST['user']
            password = request.POST['password']
            if password != settings.APPS_VALID_USERS_AND_PASSWORDS_FOR_TEST.get(user, None):
                error_message = 'Not a valid user or password. Please try again...'   
            #
            if error_message == None:
                logrow_id = admin_models.createLogRow(command = 'Delete exportfile (DB)', status = 'RUNNING', user = user)
                try:
                    exportfile = models.ExportFiles.objects.get(export_name = export_name)
                    exportfile.delete()
                    admin_models.changeLogRowStatus(logrow_id, status = 'FINISHED')
                except Exception:
                    error_message = u"Can't delete exportfile from the database."
                    admin_models.changeLogRowStatus(logrow_id, status = 'FAILED')
                    admin_models.addResultLog(logrow_id, result_log = error_message)
            # OK.
            if error_message == None:
                return HttpResponseRedirect("/exportformats")
        #
        contextinstance = {'form'   : form,
                           'exportfile' : exportfile,
                           'error_message' : error_message}
        contextinstance.update(csrf(request))
        return render_to_response("delete_exportfile.html", contextinstance)
    # Not a valid request method.
    return HttpResponseRedirect("/exportformats")
Example #9
def cleanUpSpeciesObs(request):
    """ Removes species observations with status='DELETED'. """
    error_message = None # initially.
    #
    if request.method == "GET":
        #
        form = forms.CleanUpSpeciesObsForm()
        contextinstance = {'form': form,
                           'error_message': error_message}
        contextinstance.update(csrf(request))
        return render_to_response("speciesobs_cleanup.html", contextinstance)
    elif request.method == "POST":
        form = forms.CleanUpSpeciesObsForm(request.POST)
        if form.is_valid():
            #
            user = request.POST['user']
            password = request.POST['password']
            if password != settings.APPS_VALID_USERS_AND_PASSWORDS_FOR_TEST.get(user, None):
                error_message = u'Not a valid user or password. Please try again...'   
            #
            if error_message == None:
                logrow_id = admin_models.createLogRow(command = u'Clean up species observations', status = u'RUNNING', user = user)
                try:
                    error_message = speciesobs_utils.SpeciesObsUtils().cleanUpSpeciesObsInThread(logrow_id)
                    # admin_models.changeLogRowStatus(logrow_id, status = u'FINISHED')
                except Exception:
                    error_message = u"Can't clean up species observations."
                    admin_models.changeLogRowStatus(logrow_id, status = u'FAILED')
                    admin_models.addResultLog(logrow_id, result_log = error_message)
            # OK.
            if error_message == None:
                return HttpResponseRedirect("/sharkdataadmin")
        #
        contextinstance = {'form': form,
                           'error_message': error_message}
        contextinstance.update(csrf(request))
        return render_to_response("speciesobs_cleanup.html", contextinstance)
    # Not a valid request method.
    return HttpResponseRedirect("/sharkdataadmin")
Example #10
 def writeLatestDatasetsInfoToDb(self, logrow_id = None, user = u''):
     """ Updates the database from datasets stored in the FTP area.
         If multiple versions of a dataset are in the FTP area, only the latest
         will be loaded.
     """
     error_counter = 0
     # Remove all db rows. 
     datasets_models.Datasets.objects.all().delete()
     # Get latest datasets from FTP archive.
     archive = shark_utils.SharkArchive(self._ftp_dir_path)
     for file_name in sorted(archive.getLatestSharkArchiveFilenames()):
         if logrow_id:
             admin_models.addResultLog(logrow_id, result_log = u'Loading file: ' + file_name + u'...')                
         try:
             error_message = self.writeFileInfoToDb(file_name, logrow_id, user)
             if error_message:
                 error_counter += 1
                 admin_models.addResultLog(logrow_id, result_log = u'ERROR: Failed to load: ' + file_name + u' (' + error_message + u').')
         except Exception:
             error_counter += 1
             admin_models.addResultLog(logrow_id, result_log = u'ERROR: Failed to load: ' + file_name + u'.')
     #
     return error_counter
Example #11
 def generateArchivesForAllDatasetsInThread(self, user):
     """ """
     
     logrow_id = admin_models.createLogRow(command = u'Generate archives (DwC, etc.)', status = u'RUNNING', user = user)
     try:
         # Check if generate_archives thread is running.
         if self._generate_archives_thread:
             if self._generate_archives_thread.is_alive():
                 error_message = u"Generate archives is already running. Please try again later."
                 admin_models.changeLogRowStatus(logrow_id, status = u'FAILED')
                 admin_models.addResultLog(logrow_id, result_log = error_message)
                 #
                 return
         # Use a thread to release the user. This will take some time.
         self._generate_archives_thread = threading.Thread(target = self.generateArchivesForAllDatasets, args=(logrow_id, user, ))
         self._generate_archives_thread.start()
     except:
         error_message = u"Can't generate_archives."
         admin_models.changeLogRowStatus(logrow_id, status = u'FAILED')
         admin_models.addResultLog(logrow_id, result_log = error_message)
     #
     return None # No error message.
Example #12
 def validateIcesXmlInThread(self, datatype_list, user):
     """ """
     logrow_id = admin_models.createLogRow(command = 'Validate ICES-XML file.', status = 'RUNNING', user = user)
     try:
         # Check if thread is running.
         if self._validate_ices_xml_thread:
             if self._validate_ices_xml_thread.is_alive():
                 error_message = u"Validate ICES-XML file is already running. Please try again later."
                 admin_models.changeLogRowStatus(logrow_id, status = 'FAILED')
                 admin_models.addResultLog(logrow_id, result_log = error_message)
                 #
                 return
         # Use a thread to release the user.
         self._validate_ices_xml_thread = threading.Thread(target = sharkdata_core.ValidateIcesXml().validateIcesXml, 
                                                           args=(logrow_id, datatype_list, user ))
         self._validate_ices_xml_thread.start()
     except Exception as e:
         error_message = u"Can't validate ICES-XML file." + '\nException: ' + unicode(e) + '\n'
         admin_models.changeLogRowStatus(logrow_id, status = 'FAILED')
         admin_models.addResultLog(logrow_id, result_log = error_message)
     #
     return None # No error message.
Example #13
 def generateIcesXmlInThread(self, datatype_list, year_from, year_to, status, user):
     """ """
     logrow_id = admin_models.createLogRow(command="Generate ICES-XML file.", status="RUNNING", user=user)
     try:
         # Check if thread is running.
         if self._generate_ices_xml_thread:
             if self._generate_ices_xml_thread.is_alive():
                 error_message = "Generate ICES-XML file is already running. Please try again later."
                 admin_models.changeLogRowStatus(logrow_id, status="FAILED")
                 admin_models.addResultLog(logrow_id, result_log=error_message)
                 #
                 return
         # Use a thread to release the user.
         self._generate_ices_xml_thread = threading.Thread(
             target=sharkdata_core.GenerateIcesXml().generateIcesXml,
             args=(logrow_id, datatype_list, year_from, year_to, status, user),
         )
         self._generate_ices_xml_thread.start()
     except Exception as e:
         error_message = "Can't generate ICES-XML file." + "\nException: " + unicode(e) + "\n"
         admin_models.changeLogRowStatus(logrow_id, status="FAILED")
         admin_models.addResultLog(logrow_id, result_log=error_message)
     #
     return None  # No error message.
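# The three ...InThread() methods above repeat the same guard-and-spawn pattern:
# refuse to start if the previous worker thread is still alive, otherwise run the
# real work in a background thread. A generic helper could capture it (a sketch;
# the helper name and the attribute-based bookkeeping are not part of the original
# code, and admin_models is the same module used by the surrounding examples):
import threading

def startWorkerThreadOnce(self, attr_name, command, user, target, extra_args):
    """ Starts target(logrow_id, *extra_args) in a thread, unless the thread
        stored under attr_name is still alive. Returns an error message or None. """
    logrow_id = admin_models.createLogRow(command = command, status = u'RUNNING', user = user)
    old_thread = getattr(self, attr_name, None)
    if old_thread and old_thread.is_alive():
        error_message = command + u' is already running. Please try again later.'
        admin_models.changeLogRowStatus(logrow_id, status = u'FAILED')
        admin_models.addResultLog(logrow_id, result_log = error_message)
        return error_message
    new_thread = threading.Thread(target = target, args = (logrow_id, ) + tuple(extra_args))
    setattr(self, attr_name, new_thread)
    new_thread.start()
    return None # No error message.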
Example #14
    def updateSpeciesObs(self, log_row_id):
        """ """
        #
        print('Species observations update. Started.')
        
        try:
            # Load resource file containing WoRMS info for taxa.
            worms_info_object = sharkdata_core.SpeciesWormsInfo()
            worms_info_object.loadSpeciesFromResource()
            
            # Mark all rows in db table before update starts.
            speciesobs_models.SpeciesObs.objects.all().update(last_update_date = '0000-00-00')
    
            # Loop over all datasets.
            valid_datatypes = [
                               'Epibenthos', 
                               'GreySeal', 
                               'HarbourSeal', 
                               'Phytoplankton', 
                               'RingedSeal',
                               'Zoobenthos', 
                               'Zooplankton', 
                               ###'Speciesobs', 
                               ]
            #
            for dataset_queryset in datasets_models.Datasets.objects.all():
                
                if dataset_queryset.datatype in valid_datatypes: 
                    print('Loading data from: ' + dataset_queryset.ftp_file_path)
                else:
                    print('Skipped (wrong datatype): ' + dataset_queryset.ftp_file_path)
                    continue
                #

                admin_models.addResultLog(log_row_id, result_log = 'Extracting species obs from: ' + dataset_queryset.dataset_file_name + '...')

                #
                zipreader = sharkdata_core.SharkArchiveFileReader(dataset_queryset.ftp_file_path)
                try:
                    zipreader.open()
                    data = zipreader.getDataAsText()
                finally:
                    zipreader.close()                        
                #
                encoding = 'cp1252'
                rowseparator = '\n'
                fieldseparator = '\t'
                #
                data = unicode(data, encoding, 'strict')
                datarows = (item.strip() for item in data.split(rowseparator)) # Generator instead of list.
                #
                for rowindex, datarow in enumerate(datarows):
                    #
                    try:
                        if len(datarow) == 0:
                            continue
                        #  
                        row = [item.strip() for item in datarow.split(fieldseparator)]
                        if rowindex == 0:
                            header = self.cleanUpHeader(row)
                        else:
                            rowdict = dict(zip(header, row))
                            
                            # Check if position is valid. Skip row if not.
                            lat_dd = rowdict.get('latitude_dd', '').replace(',', '.')
                            long_dd = rowdict.get('longitude_dd', '').replace(',', '.')
                            if self.isFloat(lat_dd) and self.isFloat(long_dd):
                                if (float(lat_dd) > 70.0) or (float(lat_dd) < 50.0) or (float(long_dd) > 25.0) or (float(long_dd) < 5.0):
                                    # Don't add to SpeciesObs if lat_dd/long_dd is outside the box.
                                    print('Row skipped, position outside box. Latitude: ' + lat_dd + ' Longitude: ' + long_dd + ' Row: ' + unicode(rowindex))
                                    continue
                            else:
                                # Don't add to SpeciesObs if lat_dd/long_dd is invalid.
                                continue
                            #
                            tmp_date = rowdict.get('sampling_date', '')
                            tmp_year = ''
                            tmp_month = ''
                            tmp_day = ''
                            if len(tmp_date) >= 10:
                                tmp_year = tmp_date[0:4]
                                tmp_month = tmp_date[5:7]
                                tmp_day = tmp_date[8:10] 
                                
                            scientificname = rowdict.get('scientific_name', '-') if rowdict.get('scientific_name') else '-'
                            scientificauthority = rowdict.get('scientific_authority', '-') if rowdict.get('scientific_authority') else '-'
                            taxon_worms_info = worms_info_object.getTaxonInfoDict(scientificname)
                            if taxon_worms_info:
                                taxonkingdom = taxon_worms_info.get('kingdom', '-') if taxon_worms_info.get('kingdom') else '-'
                                taxonphylum = taxon_worms_info.get('phylum', '-') if taxon_worms_info.get('phylum') else '-'
                                taxonclass = taxon_worms_info.get('class', '-') if taxon_worms_info.get('class') else '-'
                                taxonorder = taxon_worms_info.get('order', '-') if taxon_worms_info.get('order') else '-'
                                taxonfamily = taxon_worms_info.get('family', '-') if taxon_worms_info.get('family') else '-'
                                taxongenus = taxon_worms_info.get('genus', '-') if taxon_worms_info.get('genus') else '-'
                            else:
                                taxonkingdom = '-'
                                taxonphylum = '-'
                                taxonclass = '-'
                                taxonorder = '-'
                                taxonfamily = '-'
                                taxongenus = '-'

                              
                            speciesobs = speciesobs_models.SpeciesObs(
                                data_type = rowdict.get('data_type', ''),
                                scientific_name = scientificname, 
                                scientific_authority = scientificauthority, 
        #                         latitude_dd = rowdict.get('sample_latitude_dd', '').replace(',', '.'),
        #                         longitude_dd = rowdict.get('sample_longitude_dd', '').replace(',', '.'),
                                latitude_dd = rowdict.get('latitude_dd', '').replace(',', '.'),
                                longitude_dd = rowdict.get('longitude_dd', '').replace(',', '.'),
                                sampling_date = rowdict.get('sampling_date', ''),
                                sampling_year = tmp_year,
                                sampling_month = tmp_month,
                                sampling_day = tmp_day,
                                sample_min_depth = rowdict.get('sample_min_depth', ''),
                                sample_max_depth = rowdict.get('sample_max_depth', ''),
                                sampler_type = rowdict.get('sampler_type', ''),
                                dyntaxa_id = rowdict.get('dyntaxa_id', '') if rowdict.get('dyntaxa_id') else '-',

#                                 taxon_kingdom = rowdict.get('taxon_kingdom', '-') if rowdict.get('taxon_kingdom') else '-',
#                                 taxon_phylum = rowdict.get('taxon_phylum', '-') if rowdict.get('taxon_phylum') else '-',
#                                 taxon_class = rowdict.get('taxon_class', '-') if rowdict.get('taxon_class') else '-',
#                                 taxon_order = rowdict.get('taxon_order', '-') if rowdict.get('taxon_order') else '-',
#                                 taxon_family = rowdict.get('taxon_family', '-') if rowdict.get('taxon_family') else '-',
#                                 taxon_genus = rowdict.get('taxon_genus', '-') if rowdict.get('taxon_genus') else '-',
#                                 taxon_species = rowdict.get('taxon_species', '-') if rowdict.get('taxon_species') else '-',

                                taxon_kingdom = taxonkingdom,
                                taxon_phylum = taxonphylum,
                                taxon_class = taxonclass,
                                taxon_order = taxonorder,
                                taxon_family = taxonfamily,
                                taxon_genus = taxongenus,
#                                 taxon_species = rowdict.get('species', '-') if rowdict.get('species') else '-',

                                orderer = rowdict.get('orderer', '') if rowdict.get('orderer') else '-',
                                reporting_institute = rowdict.get('reporting_institute', '') if rowdict.get('reporting_institute') else '-',
                                sampling_laboratory = rowdict.get('sampling_laboratory', '') if rowdict.get('sampling_laboratory') else '-',
                                analytical_laboratory = rowdict.get('analytical_laboratory', '') if rowdict.get('analytical_laboratory') else '-',
                                #
                                occurrence_id = '', # Added below.
                                #
                                dataset_name = unicode(dataset_queryset.dataset_name),
                                dataset_file_name = unicode(dataset_queryset.dataset_file_name),
                 
#                                 ##### Example: 'POINT(-73.9869510 40.7560540)', Note: Order longitude - latitude.
#                                 geometry = 'POINT(' + rowdict.get('longitude_dd', '0.0').replace(',', '.') + ' ' + rowdict.get('latitude_dd', '0.0').replace(',', '.') + ')',
                 
                                )
                            
                            # Calculate DarwinCore Observation Id.
                            generated_occurrence_id = speciesobs.calculateDarwinCoreObservationIdAsMD5()
                            #
        #                     if speciesobs_models.SpeciesObs(
        #                         speciesobs.save()
                            #speciesobs_models.SpeciesObs.objects.filter(status='DELETED').delete()
                            try: 
                                obs_existing_row = speciesobs_models.SpeciesObs.objects.get(occurrence_id = generated_occurrence_id)
                            except speciesobs_models.SpeciesObs.DoesNotExist:
                                obs_existing_row = None # Does not exist.
                            #
                            current_date = unicode(time.strftime('%Y-%m-%d'))
                            #
                            if obs_existing_row:
                                if obs_existing_row.status == 'ACTIVE':
                                    obs_existing_row.last_update_date = current_date
                                else:
                                    obs_existing_row.status= 'ACTIVE'
                                    obs_existing_row.last_update_date = current_date
                                    obs_existing_row.last_status_change_date = current_date
                                #    
                                obs_existing_row.save()
                            else:
                                speciesobs.occurrence_id = generated_occurrence_id
                                speciesobs.status = 'ACTIVE'
                                speciesobs.last_update_date = current_date
                                speciesobs.last_status_change_date = current_date
                                #
                                speciesobs.save()
                            
                    except Exception as e:
                        admin_models.addResultLog(log_row_id, result_log = '- Error in row ' + unicode(rowindex) + ': ' + unicode(e))
            #
            print('Species observations update. Mark not updated rows as DELETED.')
            #
            current_date = unicode(time.strftime('%Y-%m-%d'))
    #         speciesobs_models.SpeciesObs.objects.filter(last_update_date = '0000-00-00').update(status = 'DELETED')
    #         speciesobs_models.SpeciesObs.objects.filter(last_update_date = '0000-00-00').update(last_update_date = current_date)
            #
            datarows = speciesobs_models.SpeciesObs.objects.filter(last_update_date = '0000-00-00')
            for datarow in datarows:
                if datarow.status == 'DELETED':
                    datarow.last_update_date = current_date
                else:
                    datarow.status = 'DELETED'
                    datarow.last_update_date = current_date
                    datarow.last_status_change_date = current_date
                #
                datarow.save()
                
            #
            admin_models.changeLogRowStatus(log_row_id, status = 'FINISHED')
            #
            print('Species observations update. Finished.')    
        #
        except Exception as e:
            admin_models.addResultLog(log_row_id, result_log = '- Failed. Error: ' + unicode(e))
            admin_models.changeLogRowStatus(log_row_id, status = 'FAILED')
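# calculateDarwinCoreObservationIdAsMD5() is called above but not shown in these
# examples. A sketch of one way such a stable occurrence id could be derived,
# hashing fields that identify an observation (the exact field list is an
# assumption):
import hashlib

def calculateDarwinCoreObservationIdAsMD5(self):
    """ Returns a stable occurrence id built from identifying fields. """
    key_fields = [self.data_type, self.scientific_name,
                  self.latitude_dd, self.longitude_dd,
                  self.sampling_date, self.sample_min_depth,
                  self.sample_max_depth, self.dataset_name]
    key_string = u'+'.join(key_fields)
    return hashlib.md5(key_string.encode('utf8')).hexdigest()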
Example #15
 def generateOneIcesXml(self, logrow_id, error_counter, 
                        datatype, year, status, user):
     """ """        
     # Add all rows from all datasets that match datatype and year.
     icesxmlgenerator = IcesXmlGenerator(self._translate_taxa)
     #
     db_datasets = datasets_models.Datasets.objects.all()
     for db_dataset in db_datasets:
         if db_dataset.datatype.upper() != datatype.upper():
             continue
         #
         try:
             zip_file_name = db_dataset.dataset_file_name
             admin_models.addResultLog(logrow_id, result_log = 'Reading archive file: ' + zip_file_name + '...')
             if settings.DEBUG:
                 print('DEBUG: ICES-ZIP processing: ' + zip_file_name)
             #
             dataset = sharkdata_core.Dataset()
             dataset.loadDataFromZipFile(zip_file_name,
                                         dataset_dir_path = self._ftp_dir_path,
                                         encoding = 'cp1252')
             #
             dataheader = dataset.data_header
             if settings.DEBUG: print(dataheader)
             #
             datarows = dataset.data_rows
             for datarow in datarows:
                 datarow_dict = dict(zip(dataheader, map(unicode, datarow)))
                 #
                 if datarow_dict.get('visit_year', '') == year:
                     # TODO: Should be codes.
                     national_monitoring_list = ['National marine monitoring',
                                                 'Nationella programmet Bottniska Viken från 2015',
                                                 'Nationella programmet Egentliga Östersjön, Utsjön 2007-']
                     #
                     if datarow_dict.get('sample_project_name_sv', '') in national_monitoring_list:
                         icesxmlgenerator.add_row(datarow_dict)
         #
         except Exception as e:
             error_counter += 1 
             traceback.print_exc()
             admin_models.addResultLog(logrow_id, result_log = 'ERROR: Failed to generate ICES-XML from: ' + zip_file_name + '.')                
     #
     try:
         # Create and save the result.
         out_rows = icesxmlgenerator.create_xml()
         #
         export_name = 'ICES-XML' + '_SMHI_' + datatype + '_' + year
         export_file_name = export_name + '.xml'
         export_file_path = os.path.join(self._export_dir_path, export_file_name)
         error_log_file = export_name + '_log.txt'
         error_log_file_path = os.path.join(self._export_dir_path, error_log_file)
         #
         icesxmlgenerator.save_xml_file(out_rows, export_file_path)
         # Update database.
         # Delete row if exists.
         export_db_rows = export_models.ExportFiles.objects.filter(export_name = export_name)
         for db_row in export_db_rows: 
             db_row.delete()
         #
         approved = False
         if status == 'Checked by DC':
             approved = True # Will not be validated via DATSU.
             
         # Write row.
         dbrow = export_models.ExportFiles(
                         format = 'ICES-XML',
                         datatype = datatype,
                         year = year,
                         approved = approved,
                         status = status,
                         export_name = export_name,
                         export_file_name = export_file_name,
                         export_file_path = export_file_path,
                         error_log_file = error_log_file,
                         error_log_file_path = error_log_file_path,
                         generated_by = user,
                       )
         dbrow.save()
         
         # Log file.
         log_rows = []
         log_rows.append('')
         log_rows.append('')
         log_rows.append('Generate ICES-XML files. ' + unicode(datetime.datetime.now()))
         log_rows.append('')
         log_rows.append('- Format: ' + dbrow.format)
         log_rows.append('- Datatype: ' + unicode(dbrow.datatype))
         log_rows.append('- Year: ' + unicode(dbrow.year))
         log_rows.append('- Status: ' + unicode(dbrow.status))
         log_rows.append('- Approved: ' + unicode(dbrow.approved))
         log_rows.append('- Export name: ' + unicode(dbrow.export_name))
         log_rows.append('- Export file name: ' + unicode(dbrow.export_file_name))
         log_rows.append('')
         #
         icesxmlgenerator.save_log_file(log_rows, error_log_file_path)
     #
     except Exception as e:
         error_counter += 1 
         traceback.print_exc()
         admin_models.addResultLog(logrow_id, result_log = 'ERROR: Failed to generate ICES-XML files. Exception: ' + unicode(e))              
Example #16
    def generateArchivesForAllDatasets(self, logrow_id, user):
        """ """
        # Load resource file containing WoRMS info for taxa.
        worms_info_object = misc_utils.SpeciesWormsInfo()
        worms_info_object.loadSpeciesFromResource()
        #
        error_counter = 0
        datasets = models.Datasets.objects.all()
        for dataset in datasets:
            zip_file_name = dataset.dataset_file_name
            #
            admin_models.addResultLog(logrow_id, result_log = u'Generating archive file for: ' + zip_file_name + u'...')
            
            if settings.DEBUG:
                print(u'DEBUG: ===== Processing: ' + zip_file_name)            
            
            #
            archive = None
            dwca_config_dir = u''
            if zip_file_name.startswith(u'SHARK_Zooplankton'):
                archive = dwca_eurobis_zooplankton.DwcaEurObisZooplankton()
            elif zip_file_name.startswith(u'SHARK_Phytoplankton'):
                archive = dwca_eurobis_phytoplankton.DwcaEurObisPhytoplankton()
            elif zip_file_name.startswith(u'SHARK_Zoobenthos'):
                archive = dwca_eurobis_zoobenthos.DwcaEurObisZoobenthos()
            elif zip_file_name.startswith(u'SHARK_Bacterioplankton'): 
                archive = dwca_eurobis_bacterioplankton.DwcaEurObisBacterioplankton()
            #
            if not archive:
                continue # Skip if other datatypes.

            # === Test for GBIF-Occurrence, GBIF-EurOBIS (EMODnet-Bio) and GBIF for Sample Data. ===
            try:
                dataset = misc_utils.Dataset()
                dataset.loadDataFromZipFile(zip_file_name,
                                            dataset_dir_path = self._ftp_dir_path,
                                            encoding = u'cp1252')

#                 # === Test for GBIF-Occurrence. ===
#                 try:
#                     admin_models.addResultLog(logrow_id, result_log = u'   - Darwin Core Archive.')
#                     archive = biological_data_exchange_util.DarwinCoreArchiveFormat(
#                                                                             datatype, 
#                                                                             u'settings_dwca.json',
#                                                                             dwca_config_dir,
#                                                                             meta_file_name = u'meta.xml',
#                                                                             eml_file_name = u'eml.xml',
#                                                                             worms_info_object = worms_info_object)
#                     archive.createArchiveParts(dataset)
#                     # Save generated archive file.
#                     generated_archives_path = os.path.join(settings.APP_DATASETS_FTP_PATH, u'generated_archives')
#                     achive_file_name = zip_file_name.replace(u'.zip', u'_DwC-A.zip')
#                     if not os.path.exists(generated_archives_path):
#                         os.makedirs(generated_archives_path)
#                     archive.saveToArchiveFile(achive_file_name, zip_dir_path = generated_archives_path, 
#                                               settings_dir_path = dwca_config_dir)
#                     # Update database.
#                     db_dataset = models.Datasets.objects.get(dataset_file_name = zip_file_name)
#                     db_dataset.dwc_archive_available = True
#                     db_dataset.dwc_archive_file_path = os.path.join(generated_archives_path, achive_file_name)
#                     db_dataset.save()
#                 except Exception as e:
#                     error_counter += 1 
#                     admin_models.addResultLog(logrow_id, result_log = u'ERROR: Failed to generate DwC-A from: ' + zip_file_name + u'.')                
    
                # === Test for GBIF-EurOBIS (EMODnet-Bio). ===
                try:
                    admin_models.addResultLog(logrow_id, result_log = u'   - Darwin Core Archive (EurOBIS format).')
#                     archive = biological_data_exchange_util.DarwinCoreArchiveFormatForEurObis(
#                                                                             datatype, 
#                                                                             u'settings_dwca_eurobis.json',
#                                                                             dwca_config_dir,
#                                                                             meta_file_name = u'meta_eurobis.xml',
#                                                                             eml_file_name = u'eml_eurobis.xml',
#                                                                             worms_info_object = worms_info_object)
                    # Update database before.
                    db_dataset = models.Datasets.objects.get(dataset_file_name = zip_file_name)
                    db_dataset.dwc_archive_eurobis_available = False
                    db_dataset.dwc_archive_eurobis_file_path = u''
                    db_dataset.save()
                    
                    if worms_info_object:
                        archive.setWormsInfoObject(worms_info_object)
                    #    
                    archive.createArchiveParts(dataset)
                    # Save generated archive file.
                    generated_archives_path = os.path.join(settings.APP_DATASETS_FTP_PATH, u'generated_archives')
                    archive_file_name = zip_file_name.replace(u'.zip', u'_DwC-A-EurOBIS.zip')
                    if not os.path.exists(generated_archives_path):
                        os.makedirs(generated_archives_path)
                    archive.saveToArchiveFile(archive_file_name, zip_dir_path = generated_archives_path, 
                                              settings_dir_path = dwca_config_dir)
                    # Update database after.
                    db_dataset = models.Datasets.objects.get(dataset_file_name = zip_file_name)
                    db_dataset.dwc_archive_eurobis_available = True
                    db_dataset.dwc_archive_eurobis_file_path = os.path.join(generated_archives_path, archive_file_name)
                    db_dataset.save()
                except Exception as e:
                    error_counter += 1 
                    print(e)
                    admin_models.addResultLog(logrow_id, result_log = u'ERROR: Failed to generate DwC-A (eurOBIS format) from: ' + zip_file_name + u'.')                
    
#                 # === Test for GBIF for Sample Data. ===
#                 try:
#                     admin_models.addResultLog(logrow_id, result_log = u'   - Darwin Core Archive (Sample data format).')
#                     archive = biological_data_exchange_util.DarwinCoreArchiveFormatForSampleData(
#                                                                             datatype, 
#                                                                             u'settings_dwca_sampledata.json',
#                                                                             dwca_config_dir,
#                                                                             meta_file_name = u'meta_sampledata.xml',
#                                                                             eml_file_name = u'eml_sampledata.xml',
#                                                                             worms_info_object = worms_info_object)
#                     archive.createArchiveParts(dataset)
#                     # Save generated archive file.
#                     generated_archives_path = os.path.join(settings.APP_DATASETS_FTP_PATH, u'generated_archives')
#                     achive_file_name = zip_file_name.replace(u'.zip', u'_DwC-A-SampleData.zip')
#                     if not os.path.exists(generated_archives_path):
#                         os.makedirs(generated_archives_path)
#                     archive.saveToArchiveFile(achive_file_name, zip_dir_path = generated_archives_path, 
#                                               settings_dir_path = dwca_config_dir)
#                     # Update database.
#                     db_dataset = models.Datasets.objects.get(dataset_file_name = zip_file_name)
#                     db_dataset.dwc_archive_sampledata_available = True
#                     db_dataset.dwc_archive_sampledata_file_path = os.path.join(generated_archives_path, achive_file_name)
#                     db_dataset.save()
#                 except Exception as e:
#                     error_counter += 1 
#                     admin_models.addResultLog(logrow_id, result_log = u'ERROR: Failed to generate DwC-A (Sample data format) from: ' + zip_file_name + u'.')                
    
                
            except Exception as e:
                error_counter += 1 
                traceback.print_exc()
                admin_models.addResultLog(logrow_id, result_log = u'ERROR: Failed to generate archive files from: ' + zip_file_name + u'.')                
    
        #
        if error_counter > 0:
            admin_models.changeLogRowStatus(logrow_id, status = u'FINISHED (Errors: ' + unicode(error_counter) + u')')
        else:
            admin_models.changeLogRowStatus(logrow_id, status = u'FINISHED')

        if settings.DEBUG:
            print(u'DEBUG: Archive generation FINISHED')            
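# The if/elif chain above that picks a DwC-A generator per datatype could also be
# expressed as a prefix-to-class table (a sketch; it is meant to behave exactly
# like the chain in generateArchivesForAllDatasets(), and the dwca_eurobis_*
# modules are the ones imported by the surrounding code):
EUROBIS_ARCHIVE_CLASSES = [
    (u'SHARK_Zooplankton', dwca_eurobis_zooplankton.DwcaEurObisZooplankton),
    (u'SHARK_Phytoplankton', dwca_eurobis_phytoplankton.DwcaEurObisPhytoplankton),
    (u'SHARK_Zoobenthos', dwca_eurobis_zoobenthos.DwcaEurObisZoobenthos),
    (u'SHARK_Bacterioplankton', dwca_eurobis_bacterioplankton.DwcaEurObisBacterioplankton),
]

def archiveForZipFile(zip_file_name):
    """ Returns a new archive generator for the file, or None for other datatypes. """
    for prefix, archive_class in EUROBIS_ARCHIVE_CLASSES:
        if zip_file_name.startswith(prefix):
            return archive_class()
    return None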
Example #17
    def validateOneIcesXml(self, logrow_id, error_counter, db_export, user):
        """ 
        Status = Checked by DC     (No DATSU check)      --> Approved = True
        Status = Not checked   --> Status = DATSU-ok     --> Approved = True
        Status = Not checked   --> Status = DATSU-failed --> Approved = False
        Status = Test          --> Status = Test-DATSU-ok     --> Approved = False
        Status = Test          --> Status = Test-DATSU-failed --> Approved = False
        """
        
        try:
    #         if (db_export.status == 'Checked by DC'):
    #             # Don't perform DATSU check.
    #             db_export.approved = True
    #             db_export.save()
    #             self.append_to_log_file(db_export, datsu_response = None)
    #         el
            if (db_export.status == 'Not checked'):
                # http://datsu.ices.dk/DatsuRest/api/ScreenFile/test.sharkdata,se!exportformats!ICES-XML_SMHI_zoobenthos_2005,xml/carlos!ices,dk/zb
                # http://datsu.ices.dk/DatsuRest/api/ScreenFile/test.sharkdata,se!exportformats!ICES-XML_SMHI_zoobenthos_2014,xml/arnold,andreasson!smhi,se/zb
                url_part_1 = 'http://datsu.ices.dk/DatsuRest/api/ScreenFile/test,sharkdata,se!exportformats!'
                url_part_2 = db_export.export_file_name.replace('.', ',')
                url_part_3 = '/arnold,andreasson!smhi,se' # TODO: [email protected]
                url_part_4 = '/zb' 
                #
                if settings.DEBUG: print(url_part_1 + url_part_2 + url_part_3 + url_part_4)
                #
                datsu_response_json = urllib2.urlopen(url_part_1 + url_part_2 + url_part_3 + url_part_4)
                datsu_response = json.load(datsu_response_json)
#                 # For test:
#                 datsu_response = dict({u'SessionID': u'484', u'NoErrors': -1, u'ScreenResultURL': u'datsu.ices.dk/test/ScreenResult.aspx?groupError=0&sessionid=484'})            
                if settings.DEBUG: print('DEBUG: \n' + json.dumps(datsu_response, sort_keys = True, indent = 2))
                #
                if datsu_response['NoErrors'] == -1:
                    db_export.status = 'DATSU-ok'
                    db_export.approved = True
                else:    
                    db_export.status = 'DATSU-failed'
                    db_export.approved = False
                #
                db_export.save()
                self.append_to_log_file(db_export, datsu_response = datsu_response)
            elif (db_export.status == 'Test'):
                # http://datsu.ices.dk/DatsuRest/api/ScreenFile/test.sharkdata,se!exportformats!ICES-XML_SMHI_zoobenthos_2005,xml/carlos!ices,dk/zb
                # http://datsu.ices.dk/DatsuRest/api/ScreenFile/test.sharkdata,se!exportformats!ICES-XML_SMHI_zoobenthos_2014,xml/arnold,andreasson!smhi,se/zb
                url_part_1 = 'http://datsu.ices.dk/DatsuRest/api/ScreenFile/test,sharkdata,se!exportformats!'
                url_part_2 = db_export.export_file_name.replace('.', ',')
                url_part_3 = '/arnold,andreasson!smhi,se' # TODO: [email protected]
                url_part_4 = '/zb' 
                #
                if settings.DEBUG: print(url_part_1 + url_part_2 + url_part_3 + url_part_4)
                #
                datsu_response_json = urllib2.urlopen(url_part_1 + url_part_2 + url_part_3 + url_part_4)
                datsu_response = json.load(datsu_response_json)
#                 # For test:
#                 datsu_response = dict({u'SessionID': u'484', u'NoErrors': -1, u'ScreenResultURL': u'datsu.ices.dk/test/ScreenResult.aspx?groupError=0&sessionid=484'})            
                if settings.DEBUG: print('DEBUG: \n' + json.dumps(datsu_response, sort_keys = True, indent = 2))
                #
                if datsu_response['NoErrors'] == -1:
                    db_export.status = 'Test-DATSU-ok'
                    db_export.approved = False
                else:    
                    db_export.status = 'Test-DATSU-failed'
                    db_export.approved = False
                    # Logging.
                    admin_models.addResultLog(logrow_id, result_log = 'ERROR: Failed to validate ICES-XML file. DATSU errors URL: ' + datsu_response['ScreenResultURL'])        
                #
                db_export.save()
                self.append_to_log_file(db_export, datsu_response = datsu_response)            
            else:
                if settings.DEBUG: print('DEBUG: ' + db_export.export_file_name + '   ' + db_export.status + '   ' + unicode(db_export.approved))    

        except Exception as e:
            error_counter += 1 
            traceback.print_exc()
            admin_models.addResultLog(logrow_id, result_log = 'ERROR: Failed to validate ICES-XML file. Exception: ' + unicode(e))   
            
        return error_counter
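# The 'Not checked' and 'Test' branches in validateOneIcesXml() build the same
# DATSU screening request. The shared part could be factored out roughly like
# this (a sketch; it reuses the urllib2/json calls and the URL layout from the
# code above, where '.' must be replaced by ',' in URL path parts):
import json
import urllib2

def screenFileViaDatsu(self, db_export):
    """ Sends the export file to the DATSU screening service and returns the
        parsed JSON response. """
    url = ('http://datsu.ices.dk/DatsuRest/api/ScreenFile/'
           'test,sharkdata,se!exportformats!'
           + db_export.export_file_name.replace('.', ',')
           + '/arnold,andreasson!smhi,se' # Reporter address, '.' replaced by ','.
           + '/zb')
    datsu_response_json = urllib2.urlopen(url)
    return json.load(datsu_response_json)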