def handle_dataset_form(dataset_form_):
    """Dispatch a dataset-management form submission.

    Exactly one of the three submit buttons is expected to carry the value
    'submit!'; that branch is taken:
      * create -- split the keyword field and start a background thread that
        builds a new dataset under the given prefix;
      * shrink -- filter an existing dataset down to entries meeting the
        integer degree threshold, writing it under a new prefix;
      * merge  -- merge the datasets whose checkboxes are ticked into one
        dataset named by the merge prefix.

    NOTE(review): the original source ended with a dangling ''' that opened an
    unterminated triple-quoted string (leftover from commenting out code); it
    has been removed so the module parses.
    """
    keywords = dataset_form_['keywords'].value
    if 'submit!' == dataset_form_[create_button_txt].value:
        print('creating!')
        keywords = keywords.split()
        prefix = dataset_form_['prefix'].value
        print('before starting thread: %s' % (threading.enumerate(),))
        # getThreadForCreateOutput is presumably a factory returning the
        # thread's target callable -- TODO confirm it does not run the work
        # eagerly here.
        t = Thread(target=getThreadForCreateOutput(keywords, prefix,
                                                   create_outputs),
                   name=prefix)
        t.start()
        print('thread started %s' % (threading.enumerate(),))
    elif 'submit!' == dataset_form_[shrink_button_txt].value:
        print('shrinking!!')
        dataset = dataset_form_['degree_dataset'].value
        new_prefix = dataset_form_['new_prefix'].value
        threshold = int(dataset_form_['threshold'].value)
        create_outputs1(dataset, 'data', new_prefix, threshold)
    elif 'submit!' == dataset_form_[merge_button_txt].value:
        print('merging!!')
        lst = check_checkboxes(dataset_form_)
        merge_prefix = dataset_form_['merge_prefix'].value
        # lazy import kept from the original: avoids paying for abstractparser
        # unless a merge is actually requested
        from abstractparser import merge_files
        merge_files('data', lst, merge_prefix)
def handle_job_input_form(job_input_form_):
    """Run the clustering programs selected on the job form.

    Collects the checked programs, optionally shrinks the dataset to the
    requested degree threshold (building a '<prefix>_<th>' variant), creates
    the per-program output directories, and starts one worker thread per
    selected program.
    """
    progs = []
    if job_input_form_['scluster'].checked:
        progs.append('scluster')
    if job_input_form_['vcluster'].checked:
        progs.append('vcluster')
    if job_input_form_['lda-tfidf'].checked:
        progs.append('lda_tfidf')
    if job_input_form_['lda-word-count'].checked:
        progs.append('lda_word_count')

    prefix = job_input_form_['dataset'].value
    th = job_input_form_['request_threshold'].value
    new_prefix = prefix
    if th != '0':
        new_prefix = prefix + '_' + th
    try:
        mkdir(join('data', new_prefix))
    except OSError:
        # directory already exists (best-effort mkdir, as before)
        print("%s is probably created" % (join('data', new_prefix)))
    if int(th) != 0:
        # materialize the thresholded dataset and work on it from here on
        create_outputs1(prefix, 'data', new_prefix, int(th))
        prefix = new_prefix

    for prog in progs:
        k = job_input_form_['k'].value
        web.debug('running %s with k = %s' % (prog, k))
        html_path = join('server', 'static', 'data', prog, prefix, k)
        try:
            mkdir(join('server', 'static', 'data', prog, prefix))
        except OSError:
            print('%s is already created'
                  % (join('server', 'static', 'data', prog, prefix)))
        try:
            mkdir(html_path)
        except OSError:
            print('%s is already created' % (html_path))
        # make minimum dynamic!
        # BUG FIX: the original passed run_prog(...)'s *return value* as the
        # Thread target, so the job ran synchronously here and the thread did
        # nothing.  Pass the callable plus args so the work runs in the thread.
        # getattr(runcluster, prog) replaces eval('runcluster.' + prog):
        # same attribute lookup, without eval.
        t = Thread(target=run_prog,
                   args=(getattr(runcluster, prog), k, prefix, html_path,
                         paper),
                   name='%s %s' % (prog, prefix))
        t.start()
        web.debug('started thread for running %s %s' % (prog, prefix))
        web.debug('Active threads: %s' % (threading.enumerate()))
def handle_high_degree_form(high_degree_form_):
    """Cluster a dataset and its degree-filtered variant, then redirect to the
    side-by-side comparison page for the two runs."""
    from runcluster import scluster
    from abstractparser import create_outputs1
    import paper

    prefix = high_degree_form_['high-degree-dataset'].value
    degree_limit = high_degree_form_['high-degree-l'].value
    cluster_k = high_degree_form_['high-degree-k'].value
    derived = prefix + '_' + degree_limit

    # materialize the degree-filtered copy of the dataset
    create_outputs1(prefix, 'data', derived, int(degree_limit))

    # run scluster with the same k on the original and the filtered dataset
    for dataset in (prefix, derived):
        out_path = make_html_path('scluster', dataset, cluster_k)
        scluster(int(cluster_k), dataset, '.', out_path, paper, 0)

    raise web.seeother('/degree_compare?prefix1=%s&prefix2=%s&k=%s&l=%s'
                       % (prefix, derived, cluster_k, degree_limit))