def setUp(self): """ Load test data :return: """ self.lbs = lbsociam.LBSociam() self.baserest = lbrest.BaseREST(rest_url=self.lbs.lbgenerator_rest_url, response_object=True) pass
def __init__(self): """ Construct for social networks data :return: """ LBSociam.__init__(self) self.baserest = lbrest.BaseREST(rest_url=self.lbgenerator_rest_url, response_object=True) self.documentrest = lbrest.DocumentREST( rest_url=self.lbgenerator_rest_url, base=self.lbbase, response_object=False)
def __init__(self, status_base=None, dictionary_base=None, debug=False, term=None):
    """
    Constructor
    :param status_base: status base to use, if any
    :param dictionary_base: dictionary base to use, if any
    :param debug: enable debug output
    :param term: search term
    """
    LBSociam.__init__(self)
    self.debug = debug
    self.term = term
    self.api = None
    self.hashtag = None
    self.baserest = lbrest.BaseREST(rest_url=self.lbgenerator_rest_url, response_object=True)
    if status_base is not None:
        self.status_base = status_base
    if dictionary_base is not None:
        self.dictionary_base = dictionary_base
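# Usage sketch, not part of the original source: assuming this constructor
# belongs to lbtwitter.Twitter (its debug/term parameters match the
# lbtwitter.Twitter(debug=False, term='crime') calls in the setUp methods
# below), a minimal call sequence would be:
#
#     lbt = lbtwitter.Twitter(debug=False, term='crime')
#     tw_status = lbt.search()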
def __init__(self, status_name=None, dic_name=None):
    """
    Building method for Status Base
    :param status_name: name of the status base
    :param dic_name: name of the status dictionary base
    :return:
    """
    LBSociam.__init__(self)
    if status_name is not None:
        self.status_base = status_name
    if dic_name is not None:
        self.dictionary_base = dic_name
    self.baserest = lbrest.BaseREST(rest_url=self.lbgenerator_rest_url, response_object=True)
    self.documentrest = lbrest.DocumentREST(
        rest_url=self.lbgenerator_rest_url,
        base=self.lbbase,
        response_object=False
    )
    self.crimes_base = crimes.crimes_base
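# Usage sketch, not part of the original source: the setUp methods below
# instantiate this class as lbstatus.StatusBase() and then call
# create_base(), so a minimal use of the constructor above would be:
#
#     status_base = lbstatus.StatusBase(status_name='status', dic_name='dictionary')
#     lbbase = status_base.create_base()
#
# The base names passed here are illustrative only.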
def __init__(self, status_base=None):
    """
    Constructor for social networks data
    :param status_base: status base to use; defaults to lbstatus.status_base
    :return:
    """
    LBSociam.__init__(self)
    self.baserest = lbrest.BaseREST(
        rest_url=self.lbgenerator_rest_url,
        response_object=True
    )
    self.documentrest = lbrest.DocumentREST(
        rest_url=self.lbgenerator_rest_url,
        base=self.lbbase,
        response_object=False
    )

    # Get status base in constructor
    if status_base is None:
        self.status_base = lbstatus.status_base
    else:
        self.status_base = status_base
def setUp(self): """ Load test data """ test_twitter_import.TwitterImportTestCase.setUp(self) self.lbs = lbsociam.LBSociam() self.baserest = lbrest.BaseREST(rest_url=self.lbs.lbgenerator_rest_url, response_object=True) self.lbt = lbtwitter.Twitter(debug=False, term='crime') self.status_base = lbstatus.StatusBase() self.tw_status = self.lbt.search() # Debug fd = open('/tmp/status_base.json', 'w+') fd.write(self.status_base.lbbase.json) fd.close() # Cria base self.lbbase = self.status_base.create_base() pass
def setUp(self): """ Load data from previous tests and setup test data :return: """ lbjson_test.TestJSON.setUp(self) # Start base definition field = Field(**self.field) field2 = Field(**self.field2) content_list = Content() content_list.append(field) content_list.append(field2) group_metadata = GroupMetadata(**self.group_metadata) group = Group( metadata=group_metadata, content=content_list, ) field3 = Field(**self.field3) content_list = Content() content_list.append(group) content_list.append(field3) base_metadata = BaseMetadata(**self.base_metadata) self.base = Base(metadata=base_metadata, content=content_list) # End Base definition #self.rest_url = "http://localhost/api" self.rest_url = "http://127.0.0.1/api" self.baserest = lbrest.BaseREST(rest_url=self.rest_url, response_object=True) pass
def setUp(self): """ Ajusta parâmetros iniciais """ self.lbs = lbsociam.LBSociam() self.baserest = lbrest.BaseREST(rest_url=self.lbs.lbgenerator_rest_url, response_object=True) self.lbt = lbtwitter.Twitter(debug=False, term='crime') self.status_base = lbstatus.StatusBase() self.tw_status = self.lbt.search() self.data_dir = os.path.join(lbs.lbsociam_data_dir, 'tests') if not os.path.isdir(self.data_dir): os.mkdir(self.data_dir) # Cria base self.lbbase = self.status_base.create_base() # Insere dois status self.status = list() tw_status_elm = [self.tw_status[0]] tw_status_json = self.lbt.status_to_json(tw_status_elm) status_dict = dict( origin='twitter', inclusion_date=datetime.datetime.now().strftime("%d/%m/%Y"), text=tw_status_elm[0].text, source=tw_status_json, search_term='crime') tokenized = srl.srl_tokenize(tw_status_elm[0].text) if tokenized.get('arg_structures'): status_dict['arg_structures'] = tokenized.get('arg_structures') if tokenized.get('tokens'): status_dict['tokens'] = tokenized.get('tokens') status = conv.dict2document(self.lbbase, status_dict) status_json = conv.document2json(self.lbbase, status) result = self.status_base.documentrest.create(status_json) self.status.append(status) # Segundo status tw_status_elm = [self.tw_status[1]] tw_status_json = self.lbt.status_to_json(tw_status_elm) status_dict = dict( origin='twitter', inclusion_date=datetime.datetime.now().strftime("%d/%m/%Y"), text=tw_status_elm[0].text, source=tw_status_json, search_term='crime') tokenized = srl.srl_tokenize(tw_status_elm[0].text) if tokenized.get('arg_structures'): status_dict['arg_structures'] = tokenized.get('arg_structures') if tokenized.get('tokens'): status_dict['tokens'] = tokenized.get('tokens') status = conv.dict2document(self.lbbase, status_dict) status_json = conv.document2json(self.lbbase, status) result = self.status_base.documentrest.create(status_json) self.status.append(status) # Base de dicionário self.dictionary_base = model_dict.DictionaryBase()