def indexer_connection(index_path=None):
    """Open a xappy IndexerConnection and register every field action.

    Falls back to configure() for the index location when none is given.
    """
    if not index_path:
        index_path = configure()
    conn = xappy.IndexerConnection(index_path)

    # -- searchable indexes --
    conn.add_field_action('searchable_text', xappy.FieldActions.INDEX_FREETEXT, nopos=True)
    conn.add_field_action('author', xappy.FieldActions.INDEX_EXACT)
    #conn.add_field_action('keywords', xappy.FieldActions.FACET)
    for exact_field in ('type', 'alpha', 'language', 'genre'):
        conn.add_field_action(exact_field, xappy.FieldActions.INDEX_EXACT)
    conn.add_field_action('sortable_title', xappy.FieldActions.SORTABLE)
    conn.add_field_action('hidden', xappy.FieldActions.INDEX_EXACT)
    #conn.add_field_action('modified', xappy.FieldActions.SORTABLE, type='data')

    # -- stored metadata --
    for stored_field in ('title', 'alpha', 'language', 'genre', 'type',
                         'searchable_text'):
        conn.add_field_action(stored_field, xappy.FieldActions.STORE_CONTENT)
    #conn.add_field_action('description', xappy.FieldActions.STORE_CONTENT)
    #conn.add_field_action('author', xappy.FieldActions.STORE_CONTENT)
    return conn
def indexer_connection(index_path=None):
    """Return an IndexerConnection with the project's field schema applied.

    When *index_path* is falsy the path is obtained from configure().
    """
    if not index_path:
        index_path = configure()
    indexer = xappy.IndexerConnection(index_path)
    actions = xappy.FieldActions
    # (field, action, kwargs) triples, in the exact registration order:
    # searchable indexes first, then stored metadata.
    schema = [
        ("searchable_text", actions.INDEX_FREETEXT, {"nopos": True}),
        ("author", actions.INDEX_EXACT, {}),
        # ("keywords", actions.FACET, {}),
        ("type", actions.INDEX_EXACT, {}),
        ("alpha", actions.INDEX_EXACT, {}),
        ("language", actions.INDEX_EXACT, {}),
        ("genre", actions.INDEX_EXACT, {}),
        ("sortable_title", actions.SORTABLE, {}),
        ("hidden", actions.INDEX_EXACT, {}),
        # ("modified", actions.SORTABLE, {"type": "data"}),
        ("title", actions.STORE_CONTENT, {}),
        ("alpha", actions.STORE_CONTENT, {}),
        ("language", actions.STORE_CONTENT, {}),
        ("genre", actions.STORE_CONTENT, {}),
        ("type", actions.STORE_CONTENT, {}),
        ("searchable_text", actions.STORE_CONTENT, {}),
        # ("description", actions.STORE_CONTENT, {}),
        # ("author", actions.STORE_CONTENT, {}),
    ]
    for field, action, kwargs in schema:
        indexer.add_field_action(field, action, **kwargs)
    return indexer
def main(path):
    """Index every (non-hidden) book directory found under *path*."""
    index_path = configure()
    # Sorted, deterministic order; dot-entries are skipped.
    book_ids = sorted(n for n in os.listdir(path) if not n.startswith('.'))
    msg = index_books(index_path, book_ids)
    log.info(msg)
def main():
    """Run the clothing-synthesis pipeline over every configured outfit."""
    confs = cf.configure()

    # Build outfit objects from the configured input paths, then drop any
    # outfit that lacks a top image.
    outfits = [ot.Outfit(p[0], p[1], p[3]) for p in confs['input_path']]
    outfits = [o for o in outfits if hasattr(o, 'top')]

    # Detect tops/bottoms shared between different outfits.
    fs.find_same(outfits)
    print('Finding same completed.')

    for i, outfit in enumerate(outfits):
        same = outfit.same
        with open(confs['same_path'], 'a') as f:
            f.write(str(same))

        # Segment the top — reuse the mask of an identical outfit when found.
        if same[0] != same[1]:
            outfit.top_mask = outfits[same[1]].top_mask
            cv2.imwrite(confs['mask_path'][i][0], outfit.top_mask)
        else:
            outfit.segment('top', confs['seed_top'], confs['thre_top'],
                           confs['rect_top'], confs['mask_path'][i][0])

        # Segment the bottom, likewise reusing a shared mask.
        if same[0] != same[2]:
            outfit.bottom_mask = outfits[same[2]].bottom_mask
            cv2.imwrite(confs['mask_path'][i][1], outfit.bottom_mask)
        else:
            outfit.segment('bottom', confs['seed_bottom'], confs['thre_bottom'],
                           confs['rect_bottom'], confs['mask_path'][i][1])

        # Segment the fourth input image.
        outfit.segment('input4', confs['seed_input4'], confs['thre_input4'],
                       confs['rect_top'], confs['mask_path'][i][3])
        print('{0:d} segmentation completed.'.format(i + 1))

        # Relative-position difference; shared when both the top and the
        # bottom come from the same other outfit.
        if same[0] != same[1] and same[1] == same[2]:
            outfit.diff = outfits[same[1]].diff
        else:
            outfit.cpt_diff(confs['rect_top'], confs['rect_bottom'],
                            confs['thre_feet'])
        with open(confs['diff_path'], 'a') as f:
            f.write(str(outfit.diff))
        print('{0:d} diff computation completed.'.format(i + 1))

        # Decide whether the top or the bottom sits above the waist line.
        outfit.judge_above(confs['waist_line'], confs['above_path'])
        print('{0:d} match judgement completed.'.format(i + 1))

        # Write the composed result image.
        outfit.gen_res(confs['output_path'][i])
        print('{0:d} output completed.'.format(i + 1))
def index(bookid, **kw):
    """Index one book; configuration keywords are forwarded to configure()."""
    return index_all(bookid, configure(**kw))
# NOTE(review): the first three statements below are the tail of a method
# whose `def` line is outside this chunk; indentation reconstructed — confirm.
        # Prefix the output line with a human-readable timestamp.
        st=datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')
        outline=st+','+outline
        self.parameters['outstream'].write(outline)
        return

    def check(self):
        # Sanity pass: scan for thesaurus entries that have no neighbours at
        # all; the diagnostic print is currently disabled, so this is a no-op.
        for myThes in self.collocdict.values():
            if len(myThes.neighdict.keys())==0:
                #print myThes.phrase
                pass

    def go(self):
        # Full experiment run: load phrases (plus optional baseline), load
        # neighbours, optionally sanity-check them, then compare.
        # Returns (results, neighboursets).
        if self.parameters['testing']:
            print self.parameters
        self.loadphrases()
        if self.parameters['baseline']:
            self.addbaseline()
        self.loadneighbours()
        if self.parameters['testing']:
            self.check()
        self.compareneighbours()
        return (self.results,self.neighboursets)

if __name__=='__main__':
    # Script entry point: build parameters from argv, seed the RNG for
    # reproducibility, and run the experiment suite.
    parameters=configure(sys.argv)
    print parameters
    random.seed(parameters['seed'])
    myexperiments=Experiments(parameters)
    myexperiments.run()
# Install the SIGINT handler so Ctrl-C is handled by `interrupt`.
signal.signal(signal.SIGINT, interrupt)


def getid(path):
    """Return the basename of *path* without its extension when it is a
    .txt file; otherwise return None."""
    stem, extension = os.path.splitext(os.path.basename(path))
    if extension == '.txt':
        return stem
    return None


if __name__=='__main__':
    arg = ''
    argc = len(sys.argv)
    if (argc > 1):
        index_path = configure()
        path = sys.argv[1]
        # Everything under `path` minus what the index already contains.
        candidates = [n for n in os.listdir(path) if not n.startswith('.')]
        already_done = [a.id for a in indexed(index_path)]
        for id in sorted(set(candidates) - set(already_done)):
            #print 'Sending: put', id
            #send('put', cmd='index', arg=id)
            msg = index_all(id, index_path)
            log.info(msg)
def __init__(self, index_path=None): if not index_path: index_path=configure() self.index_path = index_path self.connection = xappy.SearchConnection(index_path) print "index at", index_path
import sys, conf, AppAdj

if __name__ == "__main__":
    # Build a similarity matrix (adjustment disabled) from the configured
    # directory, then analyse and correlate it.
    params = conf.configure(sys.argv)
    params["adjust_flag"] = False
    simsfile = "neighbours.strings"
    matrix = AppAdj.SimMatrix(params["directory"],
                              params["k"],
                              params["adjust_flag"],
                              simsfile,
                              params["testing"])
    matrix.analyse()
    matrix.correlate()
import os

# Django requires DJANGO_SETTINGS_MODULE to be set and django.setup() to have
# run BEFORE any app models are imported; importing `models` first risks
# django.core.exceptions.AppRegistryNotReady.
os.environ['DJANGO_SETTINGS_MODULE'] = 'svnmanager.settings'  # the project that owns settings.py
import django
django.setup()

from app01 import models
import conf

# Look up the configured "ready" host and query for matching host records.
confdic = conf.configure()
cc = confdic['readyhost']
print(cc)
# NOTE(review): the queryset below is built but its result is unused —
# presumably a connectivity/sanity check; confirm intent.
models.hosts.objects.filter(host_w_ip=cc)
# NOTE(review): the block below is the tail of an index-building method whose
# `def` and enclosing loop header are outside this chunk; indentation
# reconstructed — confirm against the full file.
            # Accumulators for this entry: generality, feature count, and
            # total feature frequency.
            gen_of_entry=0
            width=0
            totalfreq=0
            # `fields` holds alternating feat/freq tokens; consume in pairs
            # from the end.
            while len(fields[1:])>0:
                freq=float(fields.pop())
                feat=fields.pop()
                width+=1
                totalfreq+=freq
                # Unknown features default to generality 1.
                gen_of_feat=self.featureindex.get(feat,1)
                gen_of_entry+=gen_of_feat
                #now for reverse feature index
                reversewidth[feat]=reversewidth.get(feat,0)+1
                reversefreq[feat]=reversefreq.get(feat,0)+freq
                newfeatureindex[feat]=newfeatureindex.get(feat,0)+self.entryindex.get(entry,1)
            # Entry score: summed feature generality scaled by width/frequency.
            newentryindex[entry]=gen_of_entry*width/totalfreq
        # Normalise each feature score by its reverse width/frequency.
        for feat in newfeatureindex.keys():
            newfeatureindex[feat]=newfeatureindex[feat]*reversewidth[feat]/reversefreq[feat]
        self.entryindex=newentryindex
        self.featureindex=newfeatureindex

    def run(self):
        # Pipeline: initialise the indices, consume the event file, then
        # write the resulting indices out.
        self.initialiseIndices()
        self.processEventFile()
        self.outputIndices()

if __name__=="__main__":
    # Build an Indexer from command-line configuration and run it.
    myIndex=Indexer(configure(sys.argv))
    myIndex.run()
def index(): if len(sys.argv)>1: bookid = sys.argv[1] index_path = configure() index_all(bookid, index_path) print 'reindexed %s in %s' % (bookid, index_path)
# NOTE(review): the loop below is the tail of a file-processing method whose
# `def` line is outside this chunk; indentation reconstructed — confirm.
        for line in instream:
            lines += 1
            line = line.rstrip()
            # Keep only the lines the analyser accepts.
            if self.processline(line):
                outstream.write(line + "\n")
            # Honour the optional line cap (0 or negative disables it).
            if Analyser.max > 0 and lines > Analyser.max:
                break

    def displaycandidates(self):
        # Dump every candidate with its synset distribution and, for each
        # synset, its tab-separated hyponym names.
        print "----Starting display of candidates----"
        for cand in self.candidates.keys():
            dist = self.candidates[cand]
            print cand
            for synset in dist.keys():
                print synset.name(), synset.definition(), dist[synset]
                hypstring = ""
                for hyp in synset.hyponyms():
                    hypstring += hyp.name() + "\t"
                print hypstring
            print "----"

    def run(self):
        # Process the input file, then report the collected candidates.
        self.processfile()
        self.displaycandidates()

if __name__ == "__main__":
    # Build an Analyser from command-line configuration and run it.
    myAnalyser = Analyser(configure(sys.argv))
    myAnalyser.run()
def index(): if len(sys.argv) > 1: bookid = sys.argv[1] index_path = configure() index_all(bookid, index_path) print 'reindexed %s in %s' % (bookid, index_path)
__author__ = 'juliewe'

import sys
from stsdata import STSData

# Set up configuration: parse all experiment flags from the command line.
import conf

(testing, at_home, on_apollo, windows, filtered, comptype, metric, setsim,
 threshold, threshtype, toyrun, use_cache, adja, adjb) = conf.configure(sys.argv)

# Data locations — default is the lab machine; overridden per environment.
parent = "/Volumes/LocalScratchHD/juliewe/Documents/workspace/STS/data/"
if at_home:
    parent = "C:/Users/Julie/Documents/GitHub/STS/data/"
if on_apollo:
    parent = "/mnt/lustre/scratch/inf/juliewe/STS/data/"

datadirname = parent + "trial/STS2012-train/STSinput-tagged"
gsdirname = parent + "trial/STS2012-train/gs"

# Vector file: filtered Gigaword vectors or the full set.
if filtered:
    vectorfilename = parent + "vectors_gw_filt/vectors_mi"
else:
    vectorfilename = parent + "allvectors/vectors_mi"

# Cross-validation settings.
cv_param = 10
cv_repeat = 10
# NOTE(review): the statements below are the tail of a list-filtering method
# whose `def` line is outside this chunk; indentation reconstructed — confirm.
        print "Filtered list = "+str(len(newlist))
        if self.parameters['testing']:
            print newlist
        # Write the filtered (phrase, freq) pairs out tab-separated.
        with open(outpath,'w') as outstream:
            for (phrase,freq) in newlist:
                outstream.write(phrase+'\t'+freq+'\n')
        return

    def get_names(self):
        # Discover runnable steps: methods whose names start with the GO_P
        # prefix, returned with the prefix stripped.
        members = inspect.getmembers(self, predicate=inspect.ismethod)
        return [x[0][len(PairGenerator.GO_P):] for x in members if x[0].startswith(PairGenerator.GO_P)]

    def run(self):
        # Execute each step named in parameters['run'], dispatching by name.
        gonames=self.get_names()
        print self.parameters['run']
        for runp in self.parameters['run']:
            #print runp
            if runp in gonames:
                method=getattr(self,PairGenerator.GO_P+runp)
                method()
            else:
                # NOTE(review): this prints the whole configured run list, not
                # the missing step `runp` — possibly intended to be runp; confirm.
                print "No method defined for "+self.parameters['run']

if __name__=='__main__':
    # Build a PairGenerator from command-line configuration and run it.
    myGen=PairGenerator(configure(sys.argv))
    myGen.run()
def _apply_logging_settings(config):
    """Copy the logging-related options into global settings.

    Must run BEFORE logger.initLogger(), which reads these values.
    """
    set_setting('LOG_LOCATION', config.log_location)
    set_setting('DEBUG', config.debug)
    set_setting('VERBOSE', config.verbose)


def _apply_service_settings(config):
    """Copy the registration, network, and database options into settings."""
    set_setting('REGISTRATION_HOST', config.reg_host)
    set_setting('REGISTRATION_PORT', config.reg_port)
    set_setting('PORT', config.port)
    set_setting('IP', config.ip)
    set_setting('DB_TYPE', config.db_type)
    set_setting('REDIS_SOCKET', config.redis_sock)
    set_setting('REDIS_HOST', config.redis_host)
    set_setting('REDIS_PORT', config.redis_port)
    set_setting('MONGO_NAME', config.mongo_name)
    set_setting('MONGO_HOST', config.mongo_host)
    set_setting('MONGO_PORT', config.mongo_port)
    set_setting('MONGO_READ', config.mongo_read)
    set_setting('MYSQL_NAME', config.mysql_name)
    set_setting('MYSQL_HOST', config.mysql_host)
    set_setting('MYSQL_PORT', config.mysql_port)
    set_setting('MYSQL_USER', config.mysql_user)
    set_setting('MYSQL_PASSWORD', config.mysql_password)
    set_setting('HTTP_SERVICE', config.server)
    set_setting('ROOT_KEY', config.root_key)


def _log_settings():
    """Emit every effective setting at info level for startup diagnostics.

    NOTE(review): this logs MYSQL_PASSWORD and ROOT_KEY in clear text —
    consider redacting secrets before they reach the log.
    """
    for name in ('LOG_LOCATION', 'DEBUG', 'VERBOSE', 'REGISTRATION_HOST',
                 'REGISTRATION_PORT', 'PORT', 'IP', 'DB_TYPE', 'REDIS_SOCKET',
                 'REDIS_HOST', 'REDIS_PORT', 'MONGO_NAME', 'MONGO_HOST',
                 'MONGO_PORT', 'MONGO_READ', 'MYSQL_NAME', 'MYSQL_HOST',
                 'MYSQL_PORT', 'MYSQL_USER', 'MYSQL_PASSWORD', 'HTTP_SERVICE',
                 'ROOT_KEY'):
        logger.LOG.info('%s => %s' % (name, getattr(settings, name)))


def main():
    """Configure global settings, initialise logging, then start the ReorJS
    query service (API connection, stacker, query loop)."""
    config = conf.configure()

    # Logging settings must be in place before the logger is initialised.
    _apply_logging_settings(config)
    logger.initLogger()

    _apply_service_settings(config)

    logger.LOG.log('ReorJS service starting...')
    logger.LOG.log('Initializing API')
    logger.LOG.info('Starting with the following settings:')
    _log_settings()

    # We need to configure our API database from settings.
    if api.connect():
        # ...and then our stacker.
        logger.LOG.log('Initializing stacker')
        stack.initStacker()

        # Next we create our query service...
        logger.LOG.log('Initializing query service')
        service = query.QueryService()

        # ...and run it.
        logger.LOG.log('Running service...')
        service.run()
    else:
        logger.LOG.log("Error connecting to API database, please check configuration.")
# NOTE(review): this chunk begins mid-statement — the opening parenthesis and
# the leading element(s) of this tuple-unpack are outside the visible chunk.
    at_home,
    on_apollo,
    windows,
    filtered,
    comptype,
    metric,
    setsim,
    threshold,
    threshtype,
    toyrun,
    use_cache,
    adja,
    adjb,
    simcache,
    byblo,
) = conf.configure(sys.argv)

# uni filenames
# parent="/Volumes/LocalScratchHD/juliewe/Documents/workspace/STS/data/"
parent = "/Volumes/LocalScratchHD/juliewe/Documents/workspace/ThesEval/"
# datadirname=parent+"trial/STS2012-train/STSinput-tagged"
datadirname = parent + "data/giga_t10/"

# Per-machine overrides.
if at_home:
    parent = "C:/Users/Julie/Documents/GitHub/STS/data/"
if on_apollo:
    # parent="/mnt/lustre/scratch/inf/juliewe/STS/data/"
    parent = "/mnt/lustre/scratch/inf/juliewe/ThesEval/"
    # NOTE(review): reconstructed as part of the on_apollo branch — confirm
    # this assignment's indentation against the original file.
    datadirname = "../FeatureExtractionToolkit/Byblo-2.1.0/giga_t10_nouns_deps/"