def register():
    """Register all add-on submodules and install the keyboard shortcuts.

    Called by Blender when the add-on is enabled.
    """
    base.register()
    gui.register()
    item.register()
    material.register()
    workshop.register()
    export.register()
    # register keyboard shortcuts
    wm = bpy.context.window_manager
    global keymap
    # remove a previously installed keymap so re-registration doesn't stack duplicates
    if keymap:
        wm.keyconfigs.addon.keymaps.remove(keymap)
    km = wm.keyconfigs.addon.keymaps.new(name="Object Mode")
    # overriding standard Blender shortcut for moving
    km.keymap_items.new("prk.move", "G", "PRESS")
    # shortcut for adding a new wall
    km.keymap_items.new("prk.wall_edit_add", "E", "PRESS", ctrl=True)
    # shortcut for extending the wall
    km.keymap_items.new("prk.wall_edit_extend", "E", "PRESS")
    # shortcut for completing the wall
    km.keymap_items.new("prk.wall_complete", "Q", "PRESS")
    # shortcut to make an area
    km.keymap_items.new("prk.area_make", "F", "PRESS", ctrl=True)
    # shortcut to work on an area
    km.keymap_items.new("prk.area_work", "F", "PRESS")
    #kmi.properties.name = ""
    keymap = km
def initialize():
    """Populate the global ``_modules`` registry.

    Starts from the core module classes, then extends the list with
    whatever each plugin package's ``register()`` returns.
    """
    global _modules
    _modules = [Machine, RQModule, RunPBSScript, RunCommand, RunJob,
                SyncDirectories, CopyFile]
    # plugin packages are imported lazily so they can self-register
    import base
    import hdfs
    import streaming
    _modules.extend(base.register())
    _modules.extend(hdfs.register())
    _modules.extend(streaming.register())
def initialize():
    """Build the global ``_modules`` list from core classes plus plugins."""
    global _modules
    _modules = [
        Machine,
        RQModule,
        RunPBSScript,
        RunCommand,
        RunJob,
        SyncDirectories,
        CopyFile,
    ]
    # deferred imports: each plugin package contributes via its register()
    import base
    import hdfs
    import streaming
    _modules.extend(base.register())
    _modules.extend(hdfs.register())
    _modules.extend(streaming.register())
# Do we always load form db? or just use the weights in the object? obj = self.store.load_one_by_id(self._id, self.table_name) self.__dict__.update(obj) return obj.weights def merge(self, delta): # Should we always load from database and merge. Necessary? # obj = self.store.load_one_by_id(self._id, self.table_name) # if obj: # for i in range(len(delta)): # obj.weights[i] += delta[i] # self.store.update_one_in_fields(obj, {'weights': obj.weights}) # self.__dict__.update(obj) # else: # logger.error('unable to merge in turtle {0}'.format(self._id)) # return False scaleFactor = self.m + 1 / self.rho for k in delta.keys(): if k in self.weights: self.weights[k] += delta[k] / scaleFactor else: self.weights[k] = delta[k] self.store.update_one_in_fields(self, {'weights': self.weights}) return True base.register(Turtle)
for t in self.confusionMatrix: for p in self.confusionMatrix[t]: d += self.confusionMatrix[t][p] * abs(t - p) total += self.confusionMatrix[t][p] return d / total class SelfTigress(Tigress): pass class SPNTigress(Tigress): pass class LexiconTigress(Tigress): pass class CoTigress(Tigress): pass base.register(Tigress) base.register(PatternTigress) base.register(MultiLabelTigress) base.register(SelfTigress) base.register(SPNTigress) base.register(LexiconTigress) base.register(CoTigress)
logger.debug("resetting mantis") try: self.mantis.reset() except: crane.mantisStore.update_in_fields( {Mantis.NAME: self.name, Mantis.CREATOR: self.creator}, {Mantis.FDUALS: [], Mantis.FQ: [], Mantis.FDQ: []}, ) def reset_data(self): logger.debug("resetting data in mantis") try: self.mantis.reset_data() except: crane.mantisStore.update_in_fields( {Mantis.NAME: self.name, Mantis.CREATOR: self.creator}, {Mantis.FDATA: {}} ) def set_mantis_parameter(self, para, value): try: self.mantis.set_mantis_parameter(para, value) except: self.load_mantis() self.mantis.set_mantis_parameter(para, value) base.register(Panda) base.register(ExistPanda) base.register(RegexPanda) base.register(LinearPanda)
return len(self.input_string) def __nonzero__(self): return bool(self.input_string) def _result_iter(self): for item in self.input_string: output = {} output['start'] = None output['end'] = None output['text'] = utils.strip_tags(item) yield output def to_internal(self): if not hasattr(self, 'sub_set'): self.sub_set = SubtitleSet(self.language) valid = False for item in self._result_iter(): item['text'] = item['text'].replace("\n", '<br/>') if not valid and ''.join(item['text'].split()): valid = True self.sub_set.append_subtitle(item['start'], item['end'], item['text'], escape=False) if not valid: raise SubtitleParserError("No subs") return self.sub_set register(TXTParser)
return output def _get_time(self, hour, min, sec, milliseconds): milliseconds = centiseconds_to_milliseconds(milliseconds) res = (1000 * ((int(hour) * 60 * 60) + (int(min) * 60) + int(sec))) + milliseconds if res >= self.MAX_SUB_TIME: return None return res def __replace(self, match): group = match.groupdict() if group['start'] != group['end']: raise ValueError("Unbalanced tags start: %(start)s, end: %(end)s" % group) base_span = '<span %s>%s</span>' if group['start'] == 'b': span = base_span % ('fontWeight="bold"', group['text']) elif group['start'] == 'i': span = base_span % ('fontStyle="italic"', group['text']) elif group['start'] == 'u': span = base_span % ('textDecoration="underline"', group['text']) return span register(SSAParser)
fields = {self.FEATURES: self._features.generic(), self.RAWS: self._raws} crane.entityStore.update_one_in_fields(self, fields) def __contains__(self, key): return key in self._features or key in self._raws def __setitem__(self, key, value): self._features[key] = value def __getitem__(self, key): return self._features[key] def get_raw(self, rawKey, default=0): if rawKey in self._raws: return self._raws[rawKey] else: return default def set_raw(self, rawKey, rawValue): if isinstance(rawKey, basestring): self._raws[rawKey.replace(".", "\uff0e").replace("$", "\uff04")] = rawValue def set_value(self, key, value): if value != 0 and key not in self._features: self._features[key] = value crane.entityStore.push_one_in_fields(self, {self.FEATURES: (key, value)}) return value base.register(Entity)
def __restore__(self):
    # Delegate restore-from-store behavior to the base class.
    super(Engine, self).__restore__()

def generic(self):
    """Return the generic dict representation, adding the time window fields."""
    result = super(Engine, self).generic()
    result[self.FSTARTTIME] = self.starttime
    result[self.FENDTIME] = self.endtime
    return result

def clone(self, userName):
    ''' Engine can not be replicated '''
    return None

def is_active(self):
    """Return True while the engine is active and has not exceeded its idle time.

    Side effect: when the idle window is exceeded, flips status to
    inactive and persists that change before returning False.
    """
    if not self.status:
        return False
    currTime = time.mktime(datetime.datetime.now().timetuple())
    lastTime = time.mktime(self.lastModified.timetuple())
    logger.debug('currTime={}, lastTime={}, idleTime={}'.format(currTime, lastTime, self.IDLE_TIME))
    if currTime - lastTime > self.IDLE_TIME:
        # idle for too long: mark inactive and persist the new status
        self.status = cons.STATUS_INACTIVE
        self.update_fields({self.FSTATUS: cons.STATUS_INACTIVE})
        return False
    return True

def add_user(self, userName):
    """Append userName to the in-memory user list and persist the addition."""
    self.users.append(userName)
    self.store.push_one_in_fields(self, {self.FUSERS: userName})

base.register(Engine)
self.lastName = DEFAULT_EMPTY self.midName = DEFAULT_EMPTY self.year = 1900 self.partition = -1 self.turtles = [] def __restore__(self): super(User, self).__restore__() def generic(self): result = super(User, self).generic() return result def clone(self, userName): ''' User can not be replicated ''' return None def age(self): thisyear = datetime.date.today().year return thisyear - self.year def set_partition(self, partition): self.partition = partition self.update_fields({self.PART:self.partition}) def add_turtle(self, turtleName): self.turtles.append(turtleName) self.store.push_one_in_fields(self, {self.FTURTLES: turtleName}) base.register(User)
def _get_time(self, hour, min, sec, secfr):
    """Convert hour/min/sec plus a fractional-second string to milliseconds.

    Returns None when the computed value equals the "unsynced" sentinel.
    """
    if secfr is None:
        secfr = '0'
    res = (int(hour) * 60 * 60 + int(min) * 60 + int(sec) + float('.' + secfr)) * 1000
    # NOTE(review): a sibling version of this parser compares with >= here —
    # confirm whether exact equality is the intended unsynced check.
    if res == utils.UNSYNCED_TIME_ONE_HOUR_DIGIT:
        res = None
    return res

def _get_data(self, match):
    """Build a {start, end, text} dict from a regex match group dict."""
    output = {}
    output['start'] = self._get_time(match['s_hour'], match['s_min'],
                                     match['s_sec'], match['s_secfr'])
    output['end'] = self._get_time(match['e_hour'], match['e_min'],
                                   match['e_sec'], match['e_secfr'])
    text = ('' if match['text'] is None
            else utils.escape_ampersands(match['text']))
    # [br] are linebreaks
    text = text.replace("[br]", "<br/>")
    output['text'] = text
    return output

register(SBVParser)
"""
The DFXPParser is in reality just a shim around the basic storage
mechanism we're using. So most things should be done over storage.py
"""

file_type = ['dfxp', 'xml']

NO_UNICODE = True

def __init__(self, input_string, language=None):
    try:
        self.subtitle_set = SubtitleSet(language, input_string,
                                        normalize_time=True)
    # `except X as e` is the modern, Py3-compatible spelling of `except X, e`
    except (XMLSyntaxError, ExpatError) as e:
        raise SubtitleParserError(
            "There was an error while we were parsing your xml", e)
    self.language = language

def __len__(self):
    return self.subtitle_set.__len__()

def __nonzero__(self):
    return self.subtitle_set.__nonzero__()

def to_internal(self):
    # The SubtitleSet built in __init__ already is the internal form.
    return self.subtitle_set

register(DFXPParser)
import base

# Console menu loop for the people-registration system: show the menu,
# dispatch on the chosen option, and exit on option 3.
while True:
    answer = base.menu(['Check Registered People', 'Register People', 'Exit System'])
    if answer == 1:
        base.title('REGISTERED PEOPLE')
        base.see()
    elif answer == 2:
        base.title('NEW REGISTER')
        base.register()
    elif answer == 3:
        base.title('Leaving the system... Bye!')
        break
    else:
        print('ERROR: please type a valid option!')
        print()
olduuid, (ind, oldy, oldc) = da.popitem() self.solver.setData(entity._features, y, c, ind) da[uuid] = (ind, y, c) def reset(self): self.mu.clear() self.q.clear() self.dq.clear() logger.debug('mu {0}'.format(self.mu)) logger.debug('q {0}'.format(self.q)) logger.debug('dq {0}'.format(self.dq)) self.commit() self.solver.initialize() def reset_data(self): self.data = {} self.solver.num_instances = 0 logger.debug('data {0}'.format(self.data)) self.update_fields({self.FDATA: {}}) def set_mantis_parameter(self, para, value): if (para == 'gamma'): self.gamma = value self.solver.setGamma(value) logger.debug('gamma is {0}'.format(self.gamma)) logger.debug('gamma of solver is {0}'.format(self.solver.gamma)) self.update_fields({self.FGAMMA: self.gamma}) base.register(Mantis)
def save(self):
    # Persist via the base-class implementation.
    super(Contract, self).save()

def delete(self, deep=False):
    # `deep` is currently unused; kept for interface compatibility.
    result = super(Contract, self).delete()
    return result

def checkout(self):
    """Reload this object's state from the store and return the loaded copy."""
    # Do we always load form db? or just use the weights in the object?
    obj = self.store.load_one_by_id(self._id, self.table_name)
    self.__dict__.update(obj)
    return obj

def merge(self, delta):
    """Add `delta` element-wise into the stored weights and persist.

    Returns True on success, False when the object cannot be loaded.
    """
    obj = self.store.load_one_by_id(self._id, self.table_name)
    if obj:
        for i in range(len(delta)):
            obj.weights[i] += delta[i]
        self.store.update_one_in_fields(obj, {'weights': obj.weights})
        self.__dict__.update(obj)
    else:
        logger.error('unable to merge in turtle {0}'.format(self._id))
        return False
    return True

base.register(Contract)
allTokens.update((('_'.join(t), 1) for t in tagged)) return allTokens class StemTurtle(DictionaryTurtle): def _process(self, field): allTokens = {} sents = nltk.tokenize.sent_tokenize(field) logger.debug(sents) port = PorterStemmer() for sent in sents: tokens = nltk.tokenize.word_tokenize(sent.lower()) stems = [ port.stem(t) for t in tokens if not self.is_stop(t) and not self.is_symbol(t) ] allTokens.update(((t, 1) for t in stems)) logger.debug(' '.join(allTokens)) return allTokens base.register(Turtle) base.register(SingleTurtle) base.register(MultiLabelTurtle) base.register(RankingTurtle) base.register(DictionaryTurtle) base.register(SPNTurtle) base.register(UniGramTurtle) base.register(POSTurtle) base.register(StemTurtle)
tokens = nltk.tokenize.word_tokenize(sent.lower()) tagged = nltk.pos_tag([t for t in tokens if not self.is_stop(t) and not self.is_symbol(t) and not self.is_single(t)]) allTokens.update((('_'.join(t),1) for t in tagged)) return allTokens class StemTurtle(DictionaryTurtle): def _process(self, field): allTokens = {} sents = nltk.tokenize.sent_tokenize(field) logger.debug(sents) port = PorterStemmer() for sent in sents: tokens = nltk.tokenize.word_tokenize(sent.lower()) stems = [port.stem(t) for t in tokens if not self.is_stop(t) and not self.is_symbol(t)] allTokens.update(((t,1) for t in stems)) logger.debug(' '.join(allTokens)) return allTokens base.register(Turtle) base.register(SingleTurtle) base.register(MultiLabelTurtle) base.register(RankingTurtle) base.register(DictionaryTurtle) base.register(SPNTurtle) base.register(UniGramTurtle) base.register(POSTurtle) base.register(StemTurtle)
# Upper bound on subtitle time values (just under 100 hours).
MAX_SUB_TIME = (60 * 60 * 100) - 1


class DFXPParser(BaseTextParser):
    """
    The DFXPParser is in reality just a shim around the basic storage
    mechanism we're using. So most things should be done over storage.py
    """
    file_type = ['dfxp', 'xml']
    NO_UNICODE = True

    def __init__(self, input_string, language=None):
        try:
            self.subtitle_set = SubtitleSet(language, input_string,
                                            normalize_time=True)
        # `as e` is the Py2.6+/Py3-compatible exception-binding syntax
        except (XMLSyntaxError, ExpatError) as e:
            raise SubtitleParserError(
                "There was an error while we were parsing your xml", e)
        self.language = language

    def __len__(self):
        return self.subtitle_set.__len__()

    def __nonzero__(self):
        return self.subtitle_set.__nonzero__()

    def to_internal(self):
        # The SubtitleSet built in __init__ already is the internal form.
        return self.subtitle_set


register(DFXPParser)
# TODO: should replace the most confident data olduuid, (ind, oldy, oldc) = da.popitem() self.solver.setData(entity._features, y, c, ind) da[uuid] = (ind, y, c) def reset(self): self.mu.clear() self.q.clear() self.dq.clear() logger.debug('mu {0}'.format(self.mu)) logger.debug('q {0}'.format(self.q)) logger.debug('dq {0}'.format(self.dq)) self.commit() self.solver.initialize() def reset_data(self): self.data = {} self.solver.num_instances = 0 logger.debug('data {0}'.format(self.data)) self.update_fields({self.FDATA : {}}) def set_mantis_parameter(self, para, value): if (para == 'gamma'): self.gamma = value self.solver.setGamma(value) logger.debug('gamma is {0}'.format(self.gamma)) logger.debug('gamma of solver is {0}'.format(self.solver.gamma)) self.update_fields({self.FGAMMA : self.gamma}) base.register(Mantis)
def __len__(self):
    return len(self.input_string)

def __nonzero__(self):
    return bool(self.input_string)

def _result_iter(self):
    """Yield one {start, end, text} dict per plain-text item (untimed)."""
    for item in self.input_string:
        output = {}
        output['start'] = None
        output['end'] = None
        output['text'] = utils.strip_tags(item)
        yield output

def to_internal(self):
    """Build (once) and return the SubtitleSet; raise if no non-blank subs."""
    if not hasattr(self, 'sub_set'):
        self.sub_set = SubtitleSet(self.language)
        valid = False
        for item in self._result_iter():
            # a single non-whitespace subtitle makes the whole set valid
            if not valid and ''.join(item['text'].split()):
                valid = True
            self.sub_set.append_subtitle(item['start'], item['end'], item['text'])
        if not valid:
            raise SubtitleParserError("No subs")
    return self.sub_set

register(TXTParser)
pass class DifferenceRelation(Relation): def compute(self): try: ent1 = self._arguments[0] ent2 = self._arguments[1] self._features.copyUpdate(ent1._features) self._features.difference(ent2._features) except Exception as e: logger.error(e.message) logger.error('failed to compute the difference relation') class MatchingRelation(Relation): def compute(self): try: ent1 = self._arguments[0] ent2 = self._arguments[1] self._features.copyUpdate(ent1._features) self._features.matching(ent2._features) except Exception as e: logger.error(e.message) logger.error('failed to compute the matching relation') base.register(Relation) base.register(DifferenceRelation) base.register(MatchingRelation)
def compute(self):
    # Base Relation computes nothing; subclasses override.
    pass


class DifferenceRelation(Relation):
    """Relation whose features are the first argument's minus the second's."""

    def compute(self):
        try:
            ent1 = self._arguments[0]
            ent2 = self._arguments[1]
            self._features.copyUpdate(ent1._features)
            self._features.difference(ent2._features)
        except Exception as e:
            # NOTE(review): e.message is Python 2-only — confirm runtime version
            logger.error(e.message)
            logger.error('failed to compute the difference relation')


class MatchingRelation(Relation):
    """Relation whose features are the overlap of both arguments' features."""

    def compute(self):
        try:
            ent1 = self._arguments[0]
            ent2 = self._arguments[1]
            self._features.copyUpdate(ent1._features)
            self._features.matching(ent2._features)
        except Exception as e:
            # NOTE(review): e.message is Python 2-only — confirm runtime version
            logger.error(e.message)
            logger.error('failed to compute the matching relation')


base.register(Relation)
base.register(DifferenceRelation)
base.register(MatchingRelation)
return output def _get_time(self, hour, min, sec, milliseconds): milliseconds = centiseconds_to_milliseconds(milliseconds) res = (1000 * ( (int(hour)*60*60 )+ (int(min)*60) + int(sec))) + milliseconds if res >= self.MAX_SUB_TIME: return None return res def __replace(self, match): group = match.groupdict() if group['start'] != group['end']: raise ValueError("Unbalanced tags start: %(start)s, end: %(end)s" % group) base_span = '<span %s>%s</span>' if group['start'] == 'b': span = base_span % ('fontWeight="bold"', group['text']) elif group['start'] == 'i': span = base_span % ('fontStyle="italic"', group['text']) elif group['start'] == 'u': span = base_span % ('textDecoration="underline"', group['text']) return span register(SSAParser)
flags=[re.DOTALL], eager_parse=eager_parse) def _get_time(self, hour, min, sec, secfr): if secfr is None: secfr = '0' res = (int(hour) * 60 * 60 + int(min) * 60 + int(sec) + float('.' + secfr)) * 1000 if res >= utils.UNSYNCED_TIME_ONE_HOUR_DIGIT: res = None return res def _get_data(self, match): output = {} output['start'] = self._get_time(match['s_hour'], match['s_min'], match['s_sec'], match['s_secfr']) output['end'] = self._get_time(match['e_hour'], match['e_min'], match['e_sec'], match['e_secfr']) text = ('' if match['text'] is None else utils.escape_ampersands( match['text'])) # [br] are linebreaks text = text.replace("[br]", "<br/>") output['text'] = text return output register(SBVParser)
Mantis.NAME: self.name, Mantis.CREATOR: self.creator }, { Mantis.FDUALS: [], Mantis.FQ: [], Mantis.FDQ: [] }) def reset_data(self): logger.debug('resetting data in mantis') try: self.mantis.reset_data() except: crane.mantisStore.update_in_fields( { Mantis.NAME: self.name, Mantis.CREATOR: self.creator }, {Mantis.FDATA: {}}) def set_mantis_parameter(self, para, value): try: self.mantis.set_mantis_parameter(para, value) except: self.load_mantis() self.mantis.set_mantis_parameter(para, value) base.register(Panda) base.register(ExistPanda) base.register(RegexPanda) base.register(LinearPanda)
def generic(self):
    """Serialize to the generic dict form, including start/end times."""
    result = super(Engine, self).generic()
    result[self.FSTARTTIME] = self.starttime
    result[self.FENDTIME] = self.endtime
    return result

def clone(self, userName):
    ''' Engine can not be replicated '''
    return None

def is_active(self):
    """Check liveness; deactivates and persists status once idle too long."""
    if not self.status:
        return False
    currTime = time.mktime(datetime.datetime.now().timetuple())
    lastTime = time.mktime(self.lastModified.timetuple())
    logger.debug('currTime={}, lastTime={}, idleTime={}'.format(
        currTime, lastTime, self.IDLE_TIME))
    if currTime - lastTime > self.IDLE_TIME:
        # exceeded the idle window: record inactive state in the store too
        self.status = cons.STATUS_INACTIVE
        self.update_fields({self.FSTATUS: cons.STATUS_INACTIVE})
        return False
    return True

def add_user(self, userName):
    """Track userName locally and push the addition to the store."""
    self.users.append(userName)
    self.store.push_one_in_fields(self, {self.FUSERS: userName})

base.register(Engine)
self.midName = DEFAULT_EMPTY self.year = 1900 self.partition = -1 self.turtles = [] def __restore__(self): super(User, self).__restore__() def generic(self): result = super(User, self).generic() return result def clone(self, userName): ''' User can not be replicated ''' return None def age(self): thisyear = datetime.date.today().year return thisyear - self.year def set_partition(self, partition): self.partition = partition self.update_fields({self.PART: self.partition}) def add_turtle(self, turtleName): self.turtles.append(turtleName) self.store.push_one_in_fields(self, {self.FTURTLES: turtleName}) base.register(User)
def __contains__(self, key):
    # A key counts as present if it is either a feature or a raw value.
    return key in self._features or key in self._raws

def __setitem__(self, key, value):
    self._features[key] = value

def __getitem__(self, key):
    return self._features[key]

def get_raw(self, rawKey, default=0):
    """Return the raw value stored under rawKey, or default when absent."""
    if rawKey in self._raws:
        return self._raws[rawKey]
    else:
        return default

def set_raw(self, rawKey, rawValue):
    """Store rawValue under a sanitized form of rawKey (string keys only)."""
    # '.' and '$' are swapped for fullwidth lookalikes — presumably to
    # satisfy MongoDB key restrictions; confirm against the store backend.
    if isinstance(rawKey, basestring):
        self._raws[rawKey.replace('.', '\uff0e').replace('$', '\uff04')] = rawValue

def set_value(self, key, value):
    """Set a feature only if value is non-zero and key is new; returns value.

    Also pushes the new (key, value) pair to the entity store.
    """
    if value != 0 and key not in self._features:
        self._features[key] = value
        crane.entityStore.push_one_in_fields(self, {self.FEATURES: (key, value)})
    return value

base.register(Entity)
cm[target][predicted] = 1 else: cm[target][predicted] += 1 self.total += 1 def RMS(self): d = 0 total = 0 for t in self.confusionMatrix: for p in self.confusionMatrix[t]: d += self.confusionMatrix[t][p] * abs(t - p) total += self.confusionMatrix[t][p] return d / total class SelfTigress(Tigress): pass class SPNTigress(Tigress): pass class LexiconTigress(Tigress): pass class CoTigress(Tigress): pass base.register(Tigress) base.register(PatternTigress) base.register(MultiLabelTigress) base.register(SelfTigress) base.register(SPNTigress) base.register(LexiconTigress) base.register(CoTigress)