Exemple #1
0
def run_idss(self,msg):
    """Celery task body: run a long IDSS seriation job described by *msg*.

    *msg* is a pipe-delimited string of exactly four fields:
    ``"filename|filepath|jobname|email"`` (a malformed message raises
    ValueError on the unpack below).  Progress is reported through Celery's
    ``update_state``; the job's status flag is toggled busy/free around the
    run, the results are zipped, and the download URL is emailed to the
    requester.
    """
    (filename,filepath,jobname,email)=msg.split("|")

    with app.app_context():
        log.debug("now going to do long IDSS job via celery")

        self.update_state(state='PROGRESS', meta={'status': 'processing...'})

        # Mark the job busy so the web front end can reflect the running state.
        set_status_to_busy(jobname)

        seriation = IDSS()
        arguments={'inputfile': filename, 'outputdirectory': filepath, 'debug':0 }
        seriation.initialize(arguments)
        # BUG FIX: the original passed a single tuple for two %s placeholders
        # (log.debug(fmt, (filename, jobname))), which makes the logging module
        # raise "not enough arguments for format string" at emit time.  Lazy
        # %-style args must be passed as separate positional arguments.
        log.debug("seriation initialized with args. Now running IDSS for %s and job %s",
                  filename, jobname)
        (frequencyResults, continuityResults, exceptionList, statsMap) = seriation.seriate()

        self.update_state(state='PROGRESS', meta={'status': 'done. zipping results....'})
        log.debug('Finished with IDSS processing for job %s', jobname)
        zipFileURL = zipresults(jobname)
        send_email(zipFileURL, email, jobname)
        self.update_state(state='PROGRESS', meta={'status': 'done. emailing results....'})
        # Release the busy flag before reporting completion.
        set_status_to_free(jobname)
        self.update_state(state='PROGRESS', meta={'status': 'complete.'})
Exemple #2
0
def run_idss(self, msg):
    """Celery task body: run a long IDSS seriation job described by *msg*.

    *msg* is a pipe-delimited string of exactly four fields:
    ``"filename|filepath|jobname|email"``.  Progress is reported through
    Celery's ``update_state``; the job status flag is toggled busy/free
    around the run, results are zipped, and the download URL is emailed
    to the requester.
    """
    (filename, filepath, jobname, email) = msg.split("|")

    with app.app_context():
        log.debug("now going to do long IDSS job via celery")

        message = 'processing...'
        self.update_state(state='PROGRESS', meta={'status': message})

        # Mark the job busy so the web front end can reflect the running state.
        set_status_to_busy(jobname)

        seriation = IDSS()
        arguments = {
            'inputfile': filename,
            'outputdirectory': filepath,
            'debug': 0
        }
        seriation.initialize(arguments)
        # BUG FIX: the original passed a single tuple (filename, jobname) for
        # two %s placeholders, which makes the logging module raise
        # "not enough arguments for format string" at emit time.  Lazy
        # %-style args must be passed as separate positional arguments.
        log.debug(
            "seriation initialized with args. Now running IDSS for %s and job %s",
            filename, jobname)
        (frequencyResults, continuityResults, exceptionList,
         statsMap) = seriation.seriate()

        message = 'done. zipping results....'
        self.update_state(state='PROGRESS', meta={'status': message})
        log.debug('Finished with IDSS processing for job %s', jobname)
        zipFileURL = zipresults(jobname)
        send_email(zipFileURL, email, jobname)
        message = 'done. emailing results....'
        self.update_state(state='PROGRESS', meta={'status': message})
        # Release the busy flag before reporting completion.
        set_status_to_free(jobname)
        message = 'complete.'
        self.update_state(state='PROGRESS', meta={'status': message})
    # NOTE(review): torn fragment — the enclosing `def` (an argparse-builder,
    # presumably parse_arguments) and its earlier add_argument calls were lost
    # in this dump; only the tail of the option declarations is visible here.
    parser.add_argument('--excel', default=0,
                        help="Will create excel files with the assemblages in seriation order.")
    parser.add_argument('--noheader',default=None,
                        help="If you do not use type names as the first line of the input file, use this option to read the data.")
    parser.add_argument('--frequencyseriation', default=None, help="Generates graphical output for the results in a frequency seriation form.")
    parser.add_argument('--verbose',default=True, help='Provides output for your information')
    # '--occurrence' is the only numeric flag here with an explicit type=int;
    # the other defaults (0 / None / True / 100) are left untyped, so argparse
    # will deliver them as strings when supplied on the command line.
    parser.add_argument('--occurrence', default=0, type=int, help="Treats data as just occurrence information and produces valid occurrence solutions.")
    parser.add_argument('--occurrenceseriation', default=None, help="Generates graphical output for occurrence seriation.")
    parser.add_argument('--spatialsignificance', default=None, help="Calculate the significance of the spatial aspect of the final solution. Default is None.")
    parser.add_argument('--spatialbootstrapN',default=100, help='Set the number of resamples used for calculating the spatial significance. Default is 100.')
    parser.add_argument('--minmaxbycount',default=None, help='Create a minmax solution from the aggregate set by weighting on the basis of # of times edges appear in solutions. Default is None.')
    parser.add_argument('--delimiter', help="character delimiting the fields in inputfile", default='\t')
    parser.add_argument('--preservepickle', help="Do not delete pickle directory, to allow for debugging", type=int, default=0)
    parser.add_argument("--source_identifier", help="Identifier for the source of the input data, often used in large batches of simulation/seriation pairs to tie steps of an experiment together", type=str, default="none")


    # Parse sys.argv against the options declared above and hand the
    # Namespace back to the caller.
    return parser.parse_args()



# Script entry point: parse CLI options and run a full IDSS seriation.
if __name__ == "__main__":



    seriation = IDSS()
    args = parse_arguments()
    # This variant of initialize() receives both the parsed Namespace and the
    # raw argv list (other fragments in this dump call it with args alone).
    seriation.initialize(args,sys.argv)
    # NOTE(review): this call site unpacks FIVE results (including fileMap),
    # whereas other fragments in this dump unpack four — the IDSS versions
    # behind the fragments evidently differ; confirm against the actual
    # seriate() signature in use.
    (frequencyResults, continuityResults, exceptionList, statsMap, fileMap) = seriation.seriate()


    # NOTE(review): torn fragment — the enclosing `def` (an argparse-builder)
    # was lost in this dump; only these option declarations and the final
    # return are visible.
    parser.add_argument('--xyfile', default=None,
                        help="Enter the name of the XY file that contains the name of the assemblage and the X and Y coordinates for each.")

    # '--inputfile' is the only required option in this fragment.
    parser.add_argument('--inputfile',
                        help="<REQUIRED> Enter the name of the data file with the assemblage data to process.", required=True)
    parser.add_argument('--outputdirectory', default='./',
                        help="If you want the output to go someplace other than the /output directory, specify that here.")
    parser.add_argument('--delimiter', default='\t', help="delimiter for input file, defaults to tab character")
    # NOTE(review): no type= is given, so a command-line '--noheader 1'
    # arrives as the string "1" rather than the int default 0.
    parser.add_argument('--noheader', default=0, help="Flag, value 1 if there is no header row with type names, 0 otherwise")


    # Parse sys.argv against the options declared above.
    return parser.parse_args()

# Script entry point: load an assemblage data file (and optionally its XY
# coordinate file), then pickle the parsed assemblage structures for later use.
if __name__ == "__main__":

    ser = IDSS()
    args = parse_arguments()
    ser.initialize(args,sys.argv)
    filename = args.inputfile
    ser.openFile(filename)

    # The XY file (assemblage name + coordinates) is optional.
    if args.xyfile is not None:
        ser.openXYFile(args.xyfile)

    # Strip the directory and extension to get the output file stem.
    base_file = os.path.splitext(os.path.basename(filename))[0]

    afile = args.outputdirectory + "/" + base_file + "-assemblages.pkl"
    # BUG FIX: the original called pickle.dump(..., open(afile,'wb')) without
    # ever closing the handle, relying on interpreter teardown to flush the
    # file; a context manager guarantees the pickle is flushed and the
    # descriptor released.
    with open(afile, 'wb') as fh:
        pickle.dump(ser.assemblages, fh)

    # NOTE(review): the dump is truncated here — affile is assigned but the
    # code that writes it is not visible in this fragment.
    affile = args.outputdirectory + '/' + base_file + "-assemblage_freq.pkl"
                        # NOTE(review): torn fragment — this help= line is the
                        # continuation of a parser.add_argument(...) call whose
                        # opening line was lost in the dump; as it stands this
                        # line is a syntax error and cannot run.
                        help="The root of the graph figures (i.e., name of assemblage you want to treat as one end in the graphs.")
    parser.add_argument('--continuityroot', default=None,
                        help="If you have a outgroup or root of the graph, set that here.")
    parser.add_argument('--atlas', default=1,
                        help="If you want to have a figure that shows all of the results independently, set that here.")
    # NOTE(review): '--excel' defaults to 1 here but to 0 in the other
    # argparse fragment of this dump — the fragments come from different
    # versions of the builder.
    parser.add_argument('--excel', default=1,
                        help="Will create excel files with the assemblages in seriation order.")
    parser.add_argument('--noheader',default=None,
                        help="If you do not use type names as the first line of the input file, use this option to read the data.")
    parser.add_argument('--frequencyseriation', default=None, help="Generates graphical output for the results in a frequency seriation form.")
    parser.add_argument('--verbose',default=True, help='Provides output for your information')
    parser.add_argument('--occurrence', default=None, help="Treats data as just occurrence information and produces valid occurrence solutions.")
    parser.add_argument('--occurrenceseriation', default=None, help="Generates graphical output for occurrence seriation.")
    parser.add_argument('--spatialsignificance', default=None, help="Calculate the significance of the spatial aspect of the final solution. Default is None.")
    parser.add_argument('--spatialbootstrapN',default=100, help='Set the number of resamples used for calculating the spatial significance. Default is 100.')
    parser.add_argument('--minmaxbycount',default=None, help='Create a minmax solution from the aggregate set by weighting on the basis of # of times edges appear in solutions. Default is None.')


    # Parse sys.argv against the options declared above.
    return parser.parse_args()



# Script entry point: parse CLI options and run a full IDSS seriation.
if __name__ == "__main__":

    seriation = IDSS()
    args = parse_arguments()
    # NOTE(review): this variant calls initialize(args) without sys.argv,
    # and unpacks four results below — other fragments in this dump use
    # initialize(args, sys.argv) and a five-value unpack; the fragments
    # target different IDSS versions.
    seriation.initialize(args)
    (frequencyResults, continuityResults, exceptionList, statsMap) = seriation.seriate()