Example #1
    def setUp(self):
        super(Volume, self).setUp()
        self.primary_node = self.input["node"]
        self.util = Util(self.primary_node)
        self.data_load_flag = False
        self.number_of_cycles = 300  # TODO: scale it up later
        self.number_of_kv_pairs = 100000  # TODO: scale it up later
Example #2
    def infer(self, params):
        window = {'tweets':[], 'start':0} # storing tweets

        """ User distribution updating """
        for tweet in self.tweets.stream():
            if isinstance(tweet, dict) and 'timestamp' in tweet:
                current_time = Util.str_to_unixtime(Util.time_to_str(tweet['timestamp']))
                window['tweets'].append(tweet)
                if current_time - window['start'] > params['window_size']:
                    if params['tl']:
                        """ use tl-words """
                        tlwords = self.extract_local_words_(window['tweets'], params)
                    else:
                        """ dont use tl-words """
                        tlwords = Words()
                    self.update_user_distributions(window['tweets'], tlwords, params)
                    window = {'tweets':[], 'start':current_time}

        """ Location prediction using user distribution """
        for user in self.users.iter():
            if user['location_point'] is None:
                """ unlabeled user """
                if user['id'] in self.user_distributions and len(self.user_distributions[user['id']]) > 0:
                    inferred_city = self.predict(self.user_distributions[user['id']], params)
                    inferred_location = self.model.means_[inferred_city]
                    user['location_point'] = inferred_location
                else:
                    if params['default']:
                        """ no clues            """
                        """ predict using prior """
                        inferred_city = self.predict({}, params)
                        inferred_location = self.model.means_[inferred_city]
                        user['location_point'] = inferred_location
Example #3
    def importprojectdir(cls, dir_project, file_type):
        """Imports all descriptor files under a given folder

        This method is specific to the Srv6_net_prog project type.
        """

        project = {'srv6_net_prog': {}, 'positions': {}}

        for desc_type in project:
            cur_type_path = os.path.join(dir_project, desc_type.upper())
            log.debug(cur_type_path)
            if os.path.isdir(cur_type_path):
                for file in glob.glob(
                        os.path.join(cur_type_path, '*.' + file_type)):
                    if file_type == 'json' and os.path.basename(
                            file) != 'vertices.json':
                        project[desc_type][os.path.basename(file).split('.')
                                           [0]] = Util.loadjsonfile(file)
                    elif file_type == 'yaml':
                        project[desc_type][os.path.basename(file).split('.')
                                           [0]] = Util.loadyamlfile(file)

        for vertices_file in glob.glob(
                os.path.join(dir_project, "SRV6_NET_PROG", '*.json')):
            if os.path.basename(vertices_file) == 'vertices.json':
                project['positions']['vertices'] = Util.loadjsonfile(
                    vertices_file)

        return project
Example #4
def main():
    opt = parse_cmdline()
    flist = os.listdir(opt.yaml_dir)

    ts = TreeShape(fanout=opt.fanout, width=900, height=900)
    u = Util()
    i = 0
    for f in sorted(flist):
        ybuf = u.file_to_yaml(f"{opt.yaml_dir}/{f}")
        d = ts.process_yaml(ybuf)
        if d is not None:
            ts.add_tree_data(d)
        i += 1

    p, slider = ts.plot_graph(title="cN Tree Shape")
    if p is None:
        print("Empty List")
        return -1

    if slider:
        composite = [[p], [slider]]
    else:
        composite = [[p]]

    output_file(opt.html_file)
    plot = layout(composite)
    save(plot)
    print(f"Plot: {opt.html_file}")
Example #5
    def predict(self, user_id):
        min_d = 100000000
        min_p = None
        p = self.user_locations[user_id]
        for follower_id in self.graph.get_followers(user_id):
            follower = self.users.get(follower_id)
            if follower is not None:
                follower_p = follower['location_point']
                if follower_p is not None:
                    d = Util.hubeny_distance(follower_p, p)
                    if min_d > d:
                        min_d = d
                        min_p = follower_p
        for friend_id in self.graph.get_friends(user_id):
            friend = self.users.get(friend_id)
            if friend is not None:
                friend_p = friend['location_point']
                if friend_p is not None:
                    d = Util.hubeny_distance(friend_p, p)
                    if min_d > d:
                        min_d = d
                        min_p = friend_p
        for venue_name in self.venues.get_venues(user_id):
            venue_p = self.venues.get_point(venue_name)
            d = Util.hubeny_distance(venue_p, p)
            if min_d > d:
                min_d = d
                min_p = venue_p

        return min_p
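Util.hubeny_distance is not shown; a plausible standalone sketch using Hubeny's approximation on the WGS84 ellipsoid, assuming points are (latitude, longitude) pairs in degrees:

import math

def hubeny_distance(p1, p2):
    # Hubeny's approximation; accurate for the short distances compared above
    a, e2 = 6378137.0, 0.00669437999019758  # WGS84 semi-major axis, eccentricity squared
    lat1, lon1 = map(math.radians, p1)
    lat2, lon2 = map(math.radians, p2)
    mu = (lat1 + lat2) / 2.0
    w = math.sqrt(1 - e2 * math.sin(mu) ** 2)
    m = a * (1 - e2) / w ** 3      # meridian curvature radius
    n = a / w                      # prime vertical curvature radius
    dy = (lat1 - lat2) * m
    dx = (lon1 - lon2) * n * math.cos(mu)
    return math.sqrt(dx * dx + dy * dy)  # metres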
Example #6
    def __query_server(url, params=None):
        try:
            url_params = urllib.parse.urlencode(params).encode() if params else None

            if ETR.proxy:
                proxy_handler = urllib.request.ProxyHandler({'http': ETR.proxy})
                opener = urllib.request.build_opener(proxy_handler)
                urllib.request.install_opener(opener)

            ws = urllib.request.urlopen(url, url_params, timeout=ETR.timeout)
            response = ws.read()
            ws.close()

            return response

        except Exception as e:
            if hasattr(e, 'reason'):
                text = 'Error trying to open the host URL: ' + str(e.reason)
            else:
                text = 'Error trying to open the host URL: timed out'

            try:
                Util.write_error_log(getattr(params, 'linea'), getattr(params, 'parada'), text)
            except Exception:
                print('Error trying to save to the log.')
Example #7
def create_eth_if_in_service_profile(ucsm_ssh, param, eth_cnt):
    test_bed = str(param['test_bed_id'])
    chassis = str(param['chassis_id'])
    cartridge = str(param['cartridge_id'])
    server = str(param['server_id'])
    
    param['tag_service_profile_name'] = get_service_profile_name(chassis, cartridge, server)
    
    current_eth_cnt = 0
    data_vlan_start = int(test_bed) * 100 + 20 + 3
    data_vlan_end   = data_vlan_start + eth_cnt
    eth_vlan_list = list(range(data_vlan_start, data_vlan_end))
    eth_vlan_list.insert(0, VLAN_ISCSI)
    eth_vlan_list.insert(0, VLAN_MEDUSA)
    eth_vlan_list.insert(0, VLAN_PXE)
    for eth_vlan in eth_vlan_list: 
        eth_id = str(current_eth_cnt).zfill(2) 
        param['tag_mac_address'] = get_mac_address(test_bed, chassis, cartridge, server, eth_id)
        param["tag_eth_name"] = ''.join([param["eth_pxe_name_prefix"], str(eth_vlan)])
        param["tag_eth_vlan"] = 'vlan' + str(eth_vlan)
        param['tag_eth_order'] = str(int(current_eth_cnt) + 1)
        if current_eth_cnt % 2 == 0:
            param["tag_eth_fabric"] = 'a'
        else:
            param["tag_eth_fabric"] = 'b'
        #pprint.pprint(param)
        file_text_step = Define.PATH_SNIC_TEXT_UCSM + "service_profile_eth_vlan.txt"   
        Util.run_text_step(ucsm_ssh, file_text_step, param)
        current_eth_cnt += 1
        if current_eth_cnt >= eth_cnt: 
            break
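A quick sanity check of the VLAN numbering (the constant values here are hypothetical): for test_bed 1, data_vlan_start is 1 * 100 + 20 + 3 = 123, and since the loop breaks after eth_cnt vNICs, only the first eth_cnt entries of the list are consumed:

VLAN_PXE, VLAN_MEDUSA, VLAN_ISCSI = 10, 11, 12  # hypothetical values
test_bed, eth_cnt = 1, 4
data_vlan_start = test_bed * 100 + 20 + 3       # 123
eth_vlan_list = [VLAN_PXE, VLAN_MEDUSA, VLAN_ISCSI] + list(range(data_vlan_start, data_vlan_start + eth_cnt))
assert eth_vlan_list == [10, 11, 12, 123, 124, 125, 126]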
Example #8
    def __requestAuthToken(self):
        util = Util()
        config = util.getPushConfig()
        if config is None:
            return None

        requestTime = str(int(round(time.time() * 1000)))
        sign = config['AppKey'] + requestTime + config['MasterSecret']
        sh = hashlib.sha256()
        sh.update(sign.encode("utf8"))
        payload = {
            "sign": sh.hexdigest(),
            "timestamp": requestTime,
            "appkey": config['AppKey']
        }

        try:
            url = "https://restapi.getui.com/v1/" + config[
                'AppId'] + "/auth_sign"
            ret = util.httpsPost(url, params=payload)
            if ret['result'] == 'ok':
                self.__setAuthToken(ret)
            return ret['auth_token']
        except Exception as err:
            print(err)
            return None
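The sign field is SHA-256 over AppKey + timestamp + MasterSecret, as the code above shows; a standalone check with made-up credentials:

import hashlib
import time

app_key, master_secret = 'myAppKey', 'myMasterSecret'  # hypothetical credentials
timestamp = str(int(round(time.time() * 1000)))        # milliseconds, as above
sign = hashlib.sha256((app_key + timestamp + master_secret).encode('utf8')).hexdigest()
payload = {'sign': sign, 'timestamp': timestamp, 'appkey': app_key}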
Example #9
    def edit_descriptor(self, type_descriptor, descriptor_id, new_data,
                        data_type):
        try:

            ## FIXME: this part needs a complete rewrite; it has several gaps
            # log.info('editing ' + descriptor_id + ' ' + type_descriptor + ' ' + data_type)
            current_data = json.loads(self.data_project)
            new_descriptor = new_data
            if data_type == 'json':
                new_descriptor = json.loads(new_data)
            elif data_type == 'yaml':
                yaml_object = yaml.load(new_data)
                new_descriptor = json.loads(Util.yaml2json(yaml_object))
            if type_descriptor != 'click' and type_descriptor != 'oshi' and type_descriptor != 'cran':
                reference_schema = self.get_json_schema_by_type(
                    type_descriptor)
                Util.validate_json_schema(reference_schema, new_descriptor)
            current_data[type_descriptor][descriptor_id] = new_descriptor
            self.data_project = current_data
            self.update()
            result = True
        except Exception as e:
            log.debug(e)
            result = False
        return result
Example #10
    def importprojectdir(cls, dir_project, file_type):
        """Imports all files from NSD and VNFDs folders under a given folder

        This method is specific to the Etsi project type.
        """

        project = {'nsd': {}, 'vnfd': {}, 'positions': {}}

        # my_util = Util()
        NSD_PATH = dir_project + '/NSD'
        VNFD_PATH = dir_project + '/VNFD'

        # import network service description
        # in root directory, file name nsd.json / nsd.yaml
        for nsd_filename in glob.glob(os.path.join(NSD_PATH,
                                                   '*.' + file_type)):
            log.debug(nsd_filename)
            if file_type == 'yaml':
                nsd_object = Util.loadyamlfile(nsd_filename)
            else:
                nsd_object = Util.loadjsonfile(nsd_filename)
            project['nsd'][nsd_object['nsdIdentifier']] = nsd_object

        # import vnf descriptors
        # each file in root_path/VNFD/*.<file_type>
        for vnfd_filename in glob.glob(
                os.path.join(VNFD_PATH, '*.' + file_type)):
            log.debug(vnfd_filename)
            if file_type == 'yaml':
                vnfd_object = Util.loadyamlfile(vnfd_filename)
            else:
                vnfd_object = Util.loadjsonfile(vnfd_filename)
            project['vnfd'][vnfd_object['vnfdId']] = vnfd_object

        for vertices_file in glob.glob(os.path.join(dir_project, '*.json')):
            if os.path.basename(vertices_file) == 'vertices.json':
                project['positions']['vertices'] = Util.loadjsonfile(
                    vertices_file)

        return project
Example #11
def importprojectdir(dir_project, file_type):
    files = []
    for file_name in glob.glob(os.path.join(dir_project, '*.' + file_type)):
        files.append(Util().openfile(file_name))
    for file_name in glob.glob(os.path.join(dir_project, '*.json')):
        files.append(Util().openfile(file_name))

    return importprojectfile(files)
Example #12
def create_iscsi_in_service_profile(ucsm_ssh, param):
    test_bed = str(param['test_bed_id'])
    chassis = str(param['chassis_id'])
    cartridge = str(param['cartridge_id'])
    server = str(param['server_id'])
    param['tag_service_profile_name'] = get_service_profile_name(chassis, cartridge, server)
    file_text_step = Define.PATH_SNIC_TEXT_UCSM + "service_profile_iscsi.txt"   
    Util.run_text_step(ucsm_ssh, file_text_step, param)
Example #13
def execute_cmd(ucsm_ssh, param):
    chassis = str(param['chassis_id'])
    cartridge = str(param['cartridge_id'])
    server = str(param['server_id'])
    
    param['tag_service_profile_name'] = get_service_profile_name(chassis, cartridge, server)
    file_text_step = Define.PATH_SNIC_TEXT_UCSM + "service_profile_deletion.txt"   
    Util.run_text_step(ucsm_ssh, file_text_step, param)
Example #14
def reuse_local_lun(ucsm_ssh, param):
    chassis = str(param['chassis_id'])
    cartridge = str(param['cartridge_id'])
    server = str(param['server_id'])
    
    param['tag_service_profile_name'] = get_service_profile_name(chassis, cartridge, server)
    file_text_step = Define.PATH_SNIC_TEXT_UCSM + "local_lun_reuse.txt"   
    Util.run_text_step(ucsm_ssh, file_text_step, param)
Example #15
    def stop_medusa(self):
        file_json_step = Define.PATH_SNIC_JSON_LINUX + "medusa_stop.json"
        Util.run_step_list(self._ssh, file_json_step)
Example #16
    def start_medusa(self, lun_type):
        file_json_step = None
        if lun_type == 1:
            file_json_step = Define.PATH_SNIC_JSON_LINUX + "medusa_start_boot_lun.json"
        elif lun_type == 2:
            file_json_step = Define.PATH_SNIC_JSON_LINUX + "medusa_start_data_lun.json"
        elif lun_type == 3:
            file_json_step = Define.PATH_SNIC_JSON_LINUX + "medusa_start_all_lun.json"

        Util.run_step_list(self._ssh, file_json_step)
Example #17
    def load(self):
        print('load 1')
        filelist = Util().getFileList(self.folder, ".dep")
        for fn in filelist:
            # print('fn', fn, self.getOriginalName(fn))
            if Util().file_exists(self.folder, self.getOriginalName(fn)):
                print('found skip ', fn)
            else:
                # not found: remove from git
                self.append(fn)
        return self
Example #18
def power_cycle_service_profile(ucsm_ssh, param, wait=False):
    chassis = str(param['chassis_id'])
    cartridge = str(param['cartridge_id'])
    server = str(param['server_id'])
    
    param['tag_service_profile_name'] = get_service_profile_name(chassis, cartridge, server)
    param['tag_power_cycle_timing'] = 'immediate'
    if wait:
        param['tag_power_cycle_timing'] = 'wait'
    file_text_step = Define.PATH_SNIC_TEXT_UCSM + "service_profile_power_cycle.txt"   
    Util.run_text_step(ucsm_ssh, file_text_step, param)
Example #19
    def import_kubernetes_from_dir_project(cls, dir_project):
        result = {}
        for k8s_filename in glob.glob(
                os.path.join(dir_project, 'K8S', '*.yaml')):
            log.info(k8s_filename)
            yaml_object = Util().loadyamlfile(k8s_filename)
            json_object = Util.json_loads_byteified(
                Util.yaml2json(yaml_object))
            filename = os.path.splitext(os.path.basename(str(k8s_filename)))[0]
            result[filename] = json_object
        return result
Example #20
def create_ipmi_in_service_profile(ucsm_ssh, param):
    chassis = str(param['chassis_id'])
    cartridge = str(param['cartridge_id'])
    server = str(param['server_id'])
    
    server_full_list = [chassis, cartridge, server]
    param['tag_service_profile_name'] = get_service_profile_name(chassis, cartridge, server)
    param['tag_server_full_id'] = '/'.join(server_full_list)
    
    file_text_step = Define.PATH_SNIC_TEXT_UCSM + "service_profile_ipmi.txt"   
    Util.run_text_step(ucsm_ssh, file_text_step, param)
Example #21
def set_vnic_adapter_policy_in_service_profile(ucsm_ssh, param, adapter_policy_dict):
    test_bed = str(param['test_bed_id'])
    chassis = str(param['chassis_id'])
    cartridge = str(param['cartridge_id'])
    server = str(param['server_id'])
    param['tag_service_profile_name'] = get_service_profile_name(chassis, cartridge, server)
    
    for eth_name, adapter_policy in adapter_policy_dict.items():
        param['tag_eth_name'] = eth_name
        param['tag_adapter_policy'] = adapter_policy
        file_text_step = Define.PATH_SNIC_TEXT_UCSM + "service_profile_vnic_adapter_policy.txt"   
        Util.run_text_step(ucsm_ssh, file_text_step, param)
Example #22
def set_vnic_no_vlan_in_service_profile(ucsm_ssh, param, vlan_number_list):
    test_bed = str(param['test_bed_id'])
    chassis = str(param['chassis_id'])
    cartridge = str(param['cartridge_id'])
    server = str(param['server_id'])
    param['tag_service_profile_name'] = get_service_profile_name(chassis, cartridge, server)
    
    for vlan_number in vlan_number_list:
        param['tag_eth_name'] = 'eth' + str(vlan_number)
        param['tag_vlan_name'] = 'vlan' + str(vlan_number)
        file_text_step = Define.PATH_SNIC_TEXT_UCSM + "service_profile_vnic_no_vlan.txt"   
        Util.run_text_step(ucsm_ssh, file_text_step, param)
Example #23
def set_server_ext_mgmt_ip(ucsm_ssh, param):
    file_text_step = Define.PATH_SNIC_TEXT_UCSM + "server_ext_mgmt_ip.txt"   
    ip_list = get_ip_list(param['tag_kvm_ip_start'], 16)
    for chassis_id, chassis in config.items():
        if chassis_id != 1: continue
        for cartridge_id, cartridge in chassis.items():
            for server_id, server in cartridge.items():
                param['tag_server_id']   = '/'.join([str(chassis_id), str(cartridge_id), str(server_id)])
                param['tag_addr']        = ip_list.pop()
                param['tag_default_gw']  = param['tag_kvm_ip_gateway']
                param['tag_subnet']      = param['tag_kvm_ip_netmask']
                Util.run_text_step(ucsm_ssh, file_text_step, param)
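get_ip_list is not shown; a minimal sketch using the standard ipaddress module, assuming it returns count consecutive addresses starting at start_ip:

import ipaddress

def get_ip_list(start_ip, count):
    # count consecutive IPv4 addresses beginning at start_ip
    start = ipaddress.IPv4Address(start_ip)
    return [str(start + i) for i in range(count)]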
Example #24
    def calc_divergence(self, n, word_count, params):
        if params['divergence'] == 'l2':
            d = Util.l2dist_fast(self.regular_sum, self.model.weights_, n, word_count)
            return d
        elif params['divergence'] == 'kl':
            d = Util.kl_div_fast(self.model.weights_, n, word_count)
            return d
        elif params['divergence'] == 'dispersion':
            points = [self.model.means_[int(k)] for k, v in word_count.items() for i in range(0, v)]
            d = Util.calc_dispersion(points)
            return d
        else:
            print('invalid divergence')
            exit()
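Util.kl_div_fast is not shown; a plausible sketch, assuming it computes the KL divergence from a word's empirical city distribution (word_count, with n observations in total) to the model's mixture weights:

import math

def kl_div_fast(weights, n, word_count):
    # KL(P || Q): P is the empirical city distribution, Q the prior mixture weights
    d = 0.0
    for city, count in word_count.items():
        p = count / float(n)
        d += p * math.log(p / weights[int(city)])
    return d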
Example #25
def create_boot_policy(ucsm_ssh, param):
    
    param['tag_boot_policy'] = 'disk-pxe-legacy'
    param['tag_boot_mode'] = 'legacy'
    
    file_text_step = Define.PATH_SNIC_TEXT_UCSM + "boot_policy_order_disk_pxe.txt"   
    Util.run_text_step(ucsm_ssh, file_text_step, param)
    
    param['tag_boot_policy'] = 'disk-pxe-uefi'
    param['tag_boot_mode'] = 'uefi'
    
    file_text_step = Define.PATH_SNIC_TEXT_UCSM + "boot_policy_order_disk_pxe.txt"   
    Util.run_text_step(ucsm_ssh, file_text_step, param)
    
    param['tag_boot_policy'] = 'iscsi-pxe-legacy'
    param['tag_boot_mode'] = 'legacy'
    
    file_text_step = Define.PATH_SNIC_TEXT_UCSM + "boot_policy_order_iscsi_pxe.txt"   
    Util.run_text_step(ucsm_ssh, file_text_step, param)
    
    param['tag_boot_policy'] = 'iscsi-pxe-uefi'
    param['tag_boot_mode'] = 'uefi'
    
    file_text_step = Define.PATH_SNIC_TEXT_UCSM + "boot_policy_order_iscsi_pxe.txt"   
    Util.run_text_step(ucsm_ssh, file_text_step, param)
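The four blocks differ only in the policy name, boot mode, and step file, so an equivalent table-driven loop (a sketch with the same assumed behavior) keeps the combinations in one place:

def create_boot_policy(ucsm_ssh, param):
    policies = [
        ('disk-pxe-legacy', 'legacy', 'boot_policy_order_disk_pxe.txt'),
        ('disk-pxe-uefi', 'uefi', 'boot_policy_order_disk_pxe.txt'),
        ('iscsi-pxe-legacy', 'legacy', 'boot_policy_order_iscsi_pxe.txt'),
        ('iscsi-pxe-uefi', 'uefi', 'boot_policy_order_iscsi_pxe.txt'),
    ]
    for boot_policy, boot_mode, step_file in policies:
        param['tag_boot_policy'] = boot_policy
        param['tag_boot_mode'] = boot_mode
        Util.run_text_step(ucsm_ssh, Define.PATH_SNIC_TEXT_UCSM + step_file, param)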
Example #26
    def __requestAccessToken(self):
        util = Util()
        config = util.getConfigByAgentId(self.agentId)
        if config is None:
            print('No configuration for ' + str(self.agentId) + ' was found!')
            return None

        requestTime = str(int(time.time()))
        try:
            res = util.httpsGet('https://qyapi.weixin.qq.com/cgi-bin/gettoken?corpid=' + config['CorpId'] + '&corpsecret=' + config['Secret'])
            self.__setAccessToken({'expires_in': res['expires_in'], 'access_token': res['access_token'], 'request_time': requestTime})
            return res['access_token']
        except Exception as err:
            print(err)
            return None
Example #27
    def get_descriptor_template(cls, type_descriptor):
        """Returns a descriptor template for a given descriptor type"""

        try:
            # schema = Util.loadjsonfile(PATH_TO_DESCRIPTORS_TEMPLATES+type_descriptor+DESCRIPTOR_TEMPLATE_SUFFIX)
            # print 'type_descriptor : '+type_descriptor
            # FixMe: a template needs to be created
            yaml_object = Util().loadyamlfile(
                'toscaparser/extensions/nfv/tests/data/tosca_helloworld_nfv.yaml')
            toscajson = json.loads(Util.yaml2json(yaml_object))
            return toscajson
        except Exception as e:
            # log.error('Exception in get descriptor template') #TODO(stefano) add logging
            print('Exception in get descriptor template')
            return False
Example #28
def create_lun_in_service_profile(ucsm_ssh, param, lun):
    test_bed = str(param['test_bed_id'])
    chassis = str(param['chassis_id'])
    cartridge = str(param['cartridge_id'])
    server = str(param['server_id'])
    param['tag_service_profile_name'] = get_service_profile_name(chassis, cartridge, server)
    
    for lun_order, lun_detail in lun.items():
        param['tag_lun_order'] = str(lun_order)
        param['tag_disk_size'] = str(lun_detail['disk_size'])
        raid_level = lun_detail['raid_level']
        param['tag_disk_group_config_policy_name'] = raid_level_disk_group_config_policy_dict[raid_level]['policy_name']
        param['tag_local_lun'] = ''.join(['lun', chassis, cartridge, server]) + '_' + str(lun_order)    
        file_text_step = Define.PATH_SNIC_TEXT_UCSM + "service_profile_storage_lun.txt"   
        Util.run_text_step(ucsm_ssh, file_text_step, param)
Example #29
    def get_add_element(self, request):

        result = False

        group_id = request.POST.get('group_id')
        element_id = request.POST.get('element_id')
        element_type = request.POST.get('element_type')
        current_data = json.loads(self.data_project)
        tosca_nfv_definition = Util().loadyamlfile(PATH_TO_TOSCA_NFV_DEFINITION)
        node_types = {}
        node_types.update(tosca_nfv_definition['node_types'])
        new_element = {}
        new_element['type'] = element_type
        type_definition = node_types[element_type]
        while element_type in node_types:
            type_definition = node_types[element_type]
            if 'properties' in type_definition:
                for propriety in type_definition['properties']:
                    if 'required' not in type_definition['properties'][propriety] or \
                            type_definition['properties'][propriety]['required']:
                        if 'properties' not in new_element:
                            new_element['properties'] = {}
                        if propriety == 'version':
                            new_element['properties'][propriety] = 1.0
                        else:
                            new_element['properties'][propriety] = 'prova'
            element_type = type_definition['derived_from'] if 'derived_from' in type_definition else None
        if new_element['type'] == 'tosca.nodes.nfv.VNF':
            if 'imports' not in current_data['toscayaml'][group_id] or current_data['toscayaml'][group_id][
                'imports'] is None:
                current_data['toscayaml'][group_id]['imports'] = []
            current_data['toscayaml'][group_id]['imports'].append(element_id + '.yaml')
            vnf_template = Util().loadyamlfile(PATH_TO_DESCRIPTORS_TEMPLATES + 'vnf.yaml')
            vnf_template['topology_template']['subsititution_mappings'] = 'tosca.nodes.nfv.VNF.' + element_id
            vnf_template['topology_template']['node_templates'] = {}
            vnf_template['imports'] = []
            vnf_template['node_types']['tosca.nodes.nfv.VNF.' + element_id] = {}
            vnf_template['node_types']['tosca.nodes.nfv.VNF.' + element_id]['derived_from'] = 'tosca.nodes.nfv.VNF'
            current_data['toscayaml'][element_id] = vnf_template
        if 'node_templates' not in current_data['toscayaml'][group_id]['topology_template'] or current_data['toscayaml'][group_id]['topology_template']['node_templates'] is None:
            current_data['toscayaml'][group_id]['topology_template']['node_templates'] = {}
        current_data['toscayaml'][group_id]['topology_template']['node_templates'][element_id] = new_element

        self.data_project = current_data
        # self.validated = validate #TODO(stefano) not clear if this is the validation for the whole project
        self.update()
        result = True
        return result
Example #30
    def create_descriptor(self, descriptor_name, type_descriptor, new_data, data_type):
        """Creates a descriptor of a given type from a json or yaml representation

        Returns the descriptor id or False
        """
        try:
            current_data = json.loads(self.data_project)
            if data_type == 'json':
                new_descriptor = json.loads(new_data)
            elif data_type == 'yaml':
                yaml_object = yaml.load(new_data)
                new_descriptor = json.loads(Util.yaml2json(yaml_object))
            elif data_type == 'click':
                new_descriptor = new_data
            elif data_type == 'k8s':
                new_descriptor = new_data
            elif data_type == 'resource':
                new_descriptor = new_data
            else:
                log.debug('Create descriptor: Unknown data type')
                return False

            validate = False
            new_descriptor_id = descriptor_name
            if type_descriptor not in current_data:
                current_data[type_descriptor] = {}
            current_data[type_descriptor][new_descriptor_id] = new_descriptor
            self.data_project = current_data
            self.validated = validate
            self.update()
            result = new_descriptor_id
        except Exception as e:
            log.exception(e)
            result = False
        return result
Example #31
File: olim.py Project: eaglebh/olim
    def infer(self, qtree):
        self.ud = UserDistribution(self.params['N'])
        self.wd = WordDistribution(self.params['N'])
        self.window = Window(self.params['N'])
        self.kl = KL(self.params['N'], self.population)
        for tweet in self.tweets.stream():
            if isinstance(tweet, dict) and 'timestamp' in tweet:
                user = self.users.get(tweet['user_id'])
                if user is not None:
                    cl = user['location']  # user who posts this tweet
                    words = set(
                        Util.get_nouns(tweet['text'], self.params['lang'])
                    )  # words contained in this tweet
                    if cl is not None:
                        """ labeled user """
                        aid = qtree.get_area_id(cl)
                        if aid is not None:
                            self.updateKL(user, aid, words)
                    else:
                        """ unlabeled user """
                        self.updateUD(user, words, self.params['dmin'])
        """ Location prediction using user distribution """
        for user in self.users.iter():
            if user['location'] is None:
                """ unlabeled user """
                ud = self.ud.get(user['id'])
                if ud is not None:
                    """ at least one observation """
                    inferred_location_number = self.predict(ud)
                    inferred_location_coordinates = qtree.leaves[
                        inferred_location_number].center
                    user['location'] = inferred_location_coordinates
Example #32
    def extract_local_words_(self, tweets, params):
        lwords = {}
        word_counts = {}

        word_sets = {}
        for tweet in tweets:
            if tweet['user_id'] not in word_sets: word_sets[tweet['user_id']] = set([])
            words = Util.get_nouns(tweet['text'], params['lang'])
            word_sets[tweet['user_id']] |= set(words)

        for user_id in word_sets:
            user = self.users.get(user_id)
            if user is not None:
                location = user['location_point']
                if location is not None:
                    city = str(self.model.predict([location])[0])
                    for w in word_sets[user_id]:
                        if w not in word_counts: word_counts[w] = {}
                        if city not in word_counts[w]: word_counts[w][city] = 0
                        word_counts[w][city] += 1

        """ calculating divergences """
        for w in word_counts:
            N = float(sum(word_counts[w].values()))
            if N >= params['cmin']:
                d = self.calc_divergence(N, word_counts[w], params)
                if self.check_divergence(d, params):
                    lwords[w] = {'word': w, 'd': d, 'distribution': word_counts[w]}  # save as dict
        return Words(lwords)
Example #33
    def learn(self, params):
        tf = {} # term frequency in a location
        lf = {} # location frequency
        global_tf = {} # term frequency
        all_tf = 0.0

        for user in self.users.iter():
            location = user['location_point']
            if location is not None:
                tweets = self.tweets.get(user['id'])
                user_words = set([])
                location = tuple(location)

                if location not in tf: tf[location] = {}
                if location not in lf: lf[location] = 0

                for tweet in tweets:
                    user_words |= set(Util.get_nouns(tweet['text'], params['lang']))

                for w in user_words:
                    if w not in tf[location]: tf[location][w] = 0
                    if w not in global_tf: global_tf[w] = 0
                    tf[location][w] += 1
                    global_tf[w] += 1
                    lf[location] += 1
                    all_tf += 1

        for w in list(global_tf.keys()):
            if global_tf[w] < params['mincount']:
                del global_tf[w]
            else:
                global_tf[w] /= all_tf

        return {'tf': tf, 'global_tf': global_tf, 'lf': lf}
Example #34
    def update_user_distributions(self, tweets, tlwords, params):
        for tweet in tweets:
            neighbors = self.get_neighbors(tweet['user_id'], params)
            users = neighbors | set([tweet['user_id']])
            for user_id in users:
                user = self.users.get(user_id)
                if user is not None:
                    if user['location_point'] is None:
                        """ unlabeled users """
                        if user['id'] not in self.user_distributions:
                            self.user_distributions[user['id']] = self.init_user_distribution()
                        words = Util.get_words(tweet['text'])
                        for w in words:
                            if tlwords.contain(w):
                                """ update using temporally-local word """
                                tlword = tlwords.get(w)
                                self.user_distributions[user['id']] = self.add_distribution(self.user_distributions[user['id']], tlword['distribution'])
                            if self.lwords.contain(w):
                                """ update using local word """
                                lword = self.lwords.get(w)
                                if params['divergence'] in ['l2', 'kl']:
                                    if lword['d'] >= params['dmin']:
                                        self.user_distributions[user['id']] = self.add_distribution(self.user_distributions[user['id']], lword['distribution'])
                                else:
                                    if lword['d'] <= params['dmin']:
                                        self.user_distributions[user['id']] = self.add_distribution(self.user_distributions[user['id']], lword['distribution'])
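add_distribution is not shown; a plausible sketch, assuming user distributions are dicts mapping city ids to counts that are merged elementwise:

def add_distribution(dist, word_distribution):
    # merge a word's city-count distribution into the running user distribution
    for city, count in word_distribution.items():
        dist[city] = dist.get(city, 0) + count
    return dist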
Example #35
    def extract_local_words_batch(self, params):
        lwords = {}
        word_counts = {}

        for user in self.users.iter():
            location = user['location_point']
            if location is not None:
                city = str(self.model.predict([location])[0])
                tweets = self.tweets.get(user['id'])
                user_words = set([])
                for tweet in tweets:
                    user_words |= set(Util.get_words(tweet['text']))
                for w in user_words:
                    if not w in word_counts:
                        word_counts[w] = {city: 1}
                    elif not city in word_counts[w]:
                        word_counts[w][city] = 1
                    else:
                        word_counts[w][city] += 1

        """ calculating divergences """
        for w in word_counts:
            N = float(sum([v for v in word_counts[w].values()]))
            if N >= params['cmin']:
                d = self.calc_divergence(N, word_counts[w], params)
                if self.check_divergence(d, params) == True:
                        lwords[w] = {'word':w, 'd':d, 'distribution':word_counts[w]} # save as dict
        return Words(lwords)
Example #36
    def __init__(self, ripper, queue):
        Thread.__init__(self)

        self._end_of_track = Event()
        self._queue        = queue
        self._ripper       = ripper
        self._util         = Util(self._queue)
Example #37
    def _is_valid_padding(self, requested_url):  # TODO rename
        request_sent = False
        cnt = 0
        while not request_sent:
            try:
                self._logger.debug('Requesting: %s', requested_url)
                cnt = cnt + 1
                if cnt > Constants.MAX_RETRIES:
                    self._logger.error("Max retries exceeded. Stopping...")
                    sys.exit(-1)

                response = requests.get(requested_url, timeout=Constants.NETWORK_TIMEOUT, verify=False, allow_redirects=False)
                request_sent = True

            except (socket.error, requests.exceptions.RequestException):
                self._logger.exception('Retrying request in %.2f seconds...', Constants.DEFAULT_WAIT_TIME)
                time.sleep(Constants.DEFAULT_WAIT_TIME)
                continue

        content = response.text
        self._logger.debug("Response content: %s", content)

        padding_error = Util.has_padding_error(response=response,
                                               success_msg=self._padding_success_msg,
                                               fail_msg=self._padding_fail_msg, logger=self._logger)
        return not padding_error
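Util.has_padding_error is not shown; a minimal sketch, assuming the oracle is decided by searching the response body for the configured success/fail markers:

def has_padding_error(response, success_msg, fail_msg, logger):
    # heuristic padding-oracle check based on known response markers
    body = response.text
    if fail_msg and fail_msg in body:
        return True
    if success_msg and success_msg in body:
        return False
    logger.debug('no padding marker found; assuming no padding error')
    return False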
Example #38
    def create_descriptor(self, descriptor_name, type_descriptor, new_data,
                          data_type):
        """Creates a descriptor of a given type from a json or yaml representation

        Returns the descriptor id or False
        """
        try:
            current_data = json.loads(self.data_project)
            if data_type == 'json':
                new_descriptor = json.loads(new_data)
            elif data_type == 'yaml':
                yaml_object = yaml.load(new_data)
                new_descriptor = json.loads(Util.yaml2json(yaml_object))
            else:
                log.debug('Create descriptor: Unknown data type')
                return False

            # schema = cls.loadjsonfile("lib/cran/schemas/"+type_descriptor+".json")
            #reference_schema = self.get_json_schema_by_type(type_descriptor)
            # validate = Util.validate_json_schema(reference_schema, new_descriptor)
            validate = False
            new_descriptor_id = descriptor_name
            if type_descriptor not in current_data:
                current_data[type_descriptor] = {}
            current_data[type_descriptor][new_descriptor_id] = new_descriptor
            self.data_project = current_data
            self.validated = validate
            self.update()
            result = new_descriptor_id
        except Exception as e:
            log.exception(e)
            result = False
        return result
Example #39
    def extract_local_words(self, tweets, params):
        lwords = {}
        word_counts = {}

        """ making user sets """
        user_sets = {}
        for tweet in tweets:
            words = Util.get_words(tweet['text'])
            for w in words:
                if not w in user_sets: user_sets[w] = set([])
                user_sets[w].add(tweet['user_id'])

        """ making word distributions """
        for w in user_sets:
            for user_id in user_sets[w]:
                user = self.users.get(user_id)
                if user != None:
                    location = user['location_point']
                    if location != None:
                        """ labeled user """
                        if not w in word_counts: word_counts[w] = {}
                        city = str(self.model.predict([location])[0])
                        if not city in word_counts[w]: word_counts[w][city] = 0
                        word_counts[w][city] += 1

        """ calculating divergences """
        for w in word_counts:
            N = float(sum([v for v in word_counts[w].values()]))
            if N >= params['cmin']:
                d = self.calc_divergence(N, word_counts[w], params)
                if self.check_divergence(d, params) == True:
                        lwords[w] = {'word':w, 'd':d, 'distribution':word_counts[w]} # save as dict
        return Words(lwords)
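To make the filter concrete, a toy word_counts entry (numbers invented): a word concentrated in one city yields a skewed empirical distribution and survives the divergence check, while an evenly spread word is filtered out:

word_counts = {'ramen': {'0': 8, '1': 1}, 'hello': {'0': 5, '1': 5}}
for w, counts in word_counts.items():
    N = float(sum(counts.values()))
    p = {city: c / N for city, c in counts.items()}
    print(w, p)  # 'ramen' is skewed toward city 0; 'hello' is flat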
Example #40
    def extract_local_words_batch(self, params):
        lwords = {}
        word_counts = {}

        for user in self.users.iter():
            location = user['location_point']
            if location is not None:
                tweets = self.tweets.get(user['id'])
                user_words = set([])
                city = str(tuple(location))
                for tweet in tweets:
                    user_words |= set(Util.get_nouns(tweet['text'], params['lang']))
                for w in user_words:
                    if not w in word_counts:
                        word_counts[w] = {city: 1}
                    elif not city in word_counts[w]:
                        word_counts[w][city] = 1
                    else:
                        word_counts[w][city] += 1

        """ calculating divergences """
        for w in word_counts:
            N = float(sum([v for v in word_counts[w].values()]))
            if N >= params['cmin']:
                d = self.calc_dispersion(word_counts[w], params)
                if d < params['dmax']:
                        lwords[w] = {'word':w, 'd':d, 'distribution':word_counts[w]} # save as dict
        return Words(lwords)
Example #41
    def get_descriptor_template(cls, type_descriptor):
        """Returns a descriptor template for a given descriptor type"""

        try:
            # schema = Util.loadjsonfile(PATH_TO_DESCRIPTORS_TEMPLATES+type_descriptor+DESCRIPTOR_TEMPLATE_SUFFIX)
            # print 'type_descriptor : '+type_descriptor
            # FixMe: a template needs to be created
            yaml_object = Util().loadyamlfile(
                'usecases/TOSCA/One-Server-Three-Networks/YAML/tosca_one_server_three_networks.yaml'
            )
            toscajson = json.loads(Util.yaml2json(yaml_object))
            return toscajson
        except Exception as e:
            # log.error('Exception in get descriptor template') #TODO(stefano) add logging
            print('Exception in get descriptor template')
            return False
Example #42
File: olim.py Project: eaglebh/olim
    def infer(self, qtree):
        self.ud = UserDistribution(self.params['N'])
        self.wd = WordDistribution(self.params['N'])
        self.window = Window(self.params['N'])
        self.kl = KL(self.params['N'], self.population)
        for tweet in self.tweets.stream():
            if isinstance(tweet, dict) and 'timestamp' in tweet:
                user = self.users.get(tweet['user_id'])
                if user is not None:
                    cl = user['location']  # user who posts this tweet
                    words = set(Util.get_nouns(tweet['text'], self.params['lang']))  # words contained in this tweet
                    if cl is not None:
                        """ labeled user """
                        aid = qtree.get_area_id(cl)
                        if aid is not None:
                            self.updateKL(user, aid, words)
                    else:
                        """ unlabeled user """
                        self.updateUD(user, words, self.params['dmin'])

        """ Location prediction using user distribution """
        for user in self.users.iter():
            if user['location'] is None:
                """ unlabeled user """
                ud = self.ud.get(user['id'])
                if ud is not None:
                    """ at least one observation """
                    inferred_location_number = self.predict(ud)
                    inferred_location_coordinates = qtree.leaves[inferred_location_number].center
                    user['location'] = inferred_location_coordinates
Example #43
    def create_descriptor(self, descriptor_name, type_descriptor, new_data, data_type):
        """Creates a descriptor of a given type from a json or yaml representation

        Returns the descriptor id or False
        """
        try:
            # utility = Util()
            current_data = json.loads(self.data_project)
            if data_type == 'json':
                new_descriptor = json.loads(new_data)
            elif data_type == 'yaml':
                # utility = Util()
                yaml_object = yaml.load(new_data)
                new_descriptor = json.loads(Util.yaml2json(yaml_object))
            else:
                log.debug('Create descriptor: Unknown data type')
                return False

            # schema = cls.loadjsonfile("lib/etsi/schemas/"+type_descriptor+".json")
            reference_schema = self.get_json_schema_by_type(type_descriptor)
            # validate = Util.validate_json_schema(reference_schema, new_descriptor)
            validate = False
            new_descriptor_id = new_descriptor['vnfdId'] if type_descriptor != "nsd" else new_descriptor[
                'nsdIdentifier']
            if type_descriptor not in current_data:
                current_data[type_descriptor] = {}
            current_data[type_descriptor][new_descriptor_id] = new_descriptor
            self.data_project = current_data
            self.validated = validate  # TODO(stefano) not clear if this is the validation for the whole project
            self.update()
            result = new_descriptor_id
        except Exception as e:
            log.exception(e)
            result = False
        return result
Example #44
    def importprojectdir(cls, dir_project, file_type):
        """Imports all descriptor files under a given folder

        This method is specific to the Superfluidity project type.
        """

        project = {
            'nsd': {},
            'vnfd': {},
            'click': {},
            'k8s': {},
            'resource': {},
            'positions': {}
        }

        nfv_path = dir_project + "/NFV/"
        etsi_project = EtsiParser.importprojectdir(nfv_path + '/JSON', 'json')
        #print etsi_project
        project['nsd'] = etsi_project['nsd']
        project['vnfd'] = etsi_project['vnfd']
        project['click'] = click_parser.importprojectdir(
            dir_project + '/CLICK/', 'click')['click']
        # FIXME import k8s descriptors
        project['k8s'] = cls.import_kubernetes_from_dir_project(dir_project)

        for vertices_file in glob.glob(os.path.join(dir_project, '*.json')):
            if os.path.basename(vertices_file) == 'vertices.json':
                project['positions']['vertices'] = Util.loadjsonfile(
                    vertices_file)

        return project
Example #45
    def get_dataproject(self):
        """ Return the python dict representation of the project data

        """
        #current_data = json.loads(self.data_project)
        current_data = Util.json_loads_byteified(self.data_project)

        return current_data
Example #46
    def _send_delete(self, url, params=None, **kwargs):
        try:
            r = requests.delete(url, params=params, verify=False, **kwargs)
        except Exception as e:
            log.exception(e)
            print("Exception during send DELETE")
            return {'error': 'error during connection to agent'}
        return Util.json_loads_byteified(r.text)
Example #47
    def translate_push_ns_on_repository(self, translator, nsd_id, repository, **kwargs):
        ns_data = self.get_all_ns_descriptors(nsd_id)
        if translator == 'k8sansible':
            ansible_util = AnsibleUtility()
            playbooks_path = kwargs['repo_path'] + '/project_' + str(self.id) + '/' + nsd_id + '/'
            Util.writejsonfile('/tmp/testing', ns_data)
            ansible_util.generate_playbook(ns_data, nsd_id, playbooks_path)

        elif translator == 'sf2heat':
            hot_path = kwargs['repo_path'] + '/project_' + str(self.id) + '/' + nsd_id + '_hot'
            if not os.path.isdir(hot_path):
                os.makedirs(hot_path)
            nsd_translator = NSDTranslator(ns_data, hot_path, {'app_name': nsd_id, 'cloud_config_name': ns_data['nsd'][nsd_id]['nsdName']})
            nsd_translator.translate()
        commit_msg = kwargs['commit_msg'] if (
            'commit_msg' in kwargs and kwargs['commit_msg'] != '') else 'update project_' + str(self.id) + ' nsd:' + nsd_id
        push_result = repository.push_repository(msg=commit_msg)
        return push_result
Example #48
    def get_descriptor_template(cls, type_descriptor):
        """Returns a descriptor template for a given descriptor type"""

        try:
            schema = Util.loadjsonfile(os.path.join(PATH_TO_DESCRIPTORS_TEMPLATES, type_descriptor + DESCRIPTOR_TEMPLATE_SUFFIX))
            # print 'type_descriptor : '+type_descriptor
            return schema
        except Exception as e:
            log.exception(e)
            return False
Example #49
def create_service_profile(ucsm_ssh, param):
    test_bed    = str(param['test_bed_id'])
    chassis     = str(param['chassis_id'])
    cartridge   = str(param['cartridge_id'])
    server      = str(param['server_id'])
    
    chassis_id = str(chassis).zfill(2)
    cartridge_id = str(cartridge).zfill(2)
    server_id = str(server).zfill(2)
    
    server_full_id_list = [chassis_id, cartridge_id, server_id]
    server_full_id = ''.join(server_full_id_list)
    
    param['tag_service_profile_name'] = get_service_profile_name(chassis, cartridge, server)
    param['tag_uuid'] = ''.join([param['uuid_prefix'], server_full_id])
    
    # pprint.pprint(param)
    
    file_text_step = Define.PATH_SNIC_TEXT_UCSM + "service_profile.txt"   
    Util.run_text_step(ucsm_ssh, file_text_step, param)
Example #50
    def calc_error_distances(self, inferred_users):
        error_distances = []
        for test_user in self.test_users.iter():
            true_point = test_user['location_point']
            inferred_user = inferred_users.get(test_user['id'])
            if inferred_user is not None:
                inferred_point = inferred_user['location_point']
                if inferred_point is not None:
                    error_distance = Util.hubeny_distance(inferred_point, true_point)
                    error_distances.append(error_distance)
        return error_distances
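A typical way to summarize the result (a sketch; evaluator and inferred_users are assumed to exist):

import statistics

error_distances = evaluator.calc_error_distances(inferred_users)
if error_distances:
    print('mean error distance (m):', statistics.mean(error_distances))
    print('median error distance (m):', statistics.median(error_distances))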
Example #51
def start_pingpong_server(q, server_ip, server_usnic):
    server_node_name = Util.find_node_name_by_ip("bcnode", server_ip)
    print("\n------------> server: " + server_node_name)
    server_node = NodeCompute(server_node_name)
    ssh = server_node.get_ssh()
    if not ssh:
        print("Failed to ssh to server " + server_node_name)
        q.put(False)
    else:
        server_ret = server_node.start_pingpong_server(server_usnic)
        q.put(server_ret)
Example #52
def start_pingpong_client(client_ip, client_usnic, server_ip):
    client_node_name = Util.find_node_name_by_ip("bcnode", client_ip)
    print("\n------------> client: " + client_node_name)
    node = NodeCompute(client_node_name)
    ssh = node.get_ssh()
    if not ssh:
        print("Failed to ssh to client " + client_node_name)
        return False
    else:
        ret = node.start_pingpong_client(client_usnic, server_ip)
        return ret
Example #53
    def get_add_element(self, request):
        result = False

        group_id = request.POST.get('group_id')
        element_id = request.POST.get('element_id')
        element_type = request.POST.get('element_type')
        x = request.POST.get('x')
        y = request.POST.get('y')
        current_data = json.loads(self.data_project)
        tosca_definition = Util().loadyamlfile(PATH_TO_TOSCA_DEFINITION)
        node_types = {}
        node_types.update(tosca_definition['node_types'])
        new_element = {}
        new_element['type'] = element_type
        type_definition = node_types[element_type]
        while element_type in node_types:
            type_definition = node_types[element_type]
            if 'properties' in type_definition:
                for propriety in type_definition['properties']:
                    if 'required' not in type_definition['properties'][
                            propriety] or type_definition['properties'][
                                propriety]['required']:
                        if 'properties' not in new_element:
                            new_element['properties'] = {}
                        if propriety == 'version':
                            new_element['properties'][propriety] = 1.0
                        else:
                            if type_definition['properties'][propriety][
                                    'type'] == 'scalar-unit.size':
                                new_element['properties'][propriety] = '1 MB'
                            else:
                                new_element['properties'][propriety] = 'prova'
            element_type = type_definition[
                'derived_from'] if 'derived_from' in type_definition else None
        if 'node_templates' not in current_data['toscayaml'][group_id][
                'topology_template'] or current_data['toscayaml'][group_id][
                    'topology_template']['node_templates'] is None:
            current_data['toscayaml'][group_id]['topology_template'][
                'node_templates'] = {}
        current_data['toscayaml'][group_id]['topology_template'][
            'node_templates'][element_id] = new_element
        if 'positions' not in current_data:
            current_data['positions'] = {}
        if 'vertices' not in current_data['positions']:
            current_data['positions']['vertices'] = {}
        if element_id not in current_data['positions']['vertices']:
            current_data['positions']['vertices'][element_id] = {}
        current_data['positions']['vertices'][element_id]['x'] = x
        current_data['positions']['vertices'][element_id]['y'] = y
        self.data_project = current_data
        # self.validated = validate #TODO(stefano) not clear if this is the validation for the whole project
        self.update()
        result = True
        return result
Example #54
    def __init__(self, *a, **kw):
        Jukebox.__init__(self, *a, **kw)

        self._json_queue = JsonQueue()
        self._util       = Util(self._json_queue)

        self._ripper_thread = RipperThread(self, self._json_queue)
        self._end_of_track = self._ripper_thread.get_end_of_track()
        self.set_ui_thread(self._ripper_thread)

        self.session.set_preferred_bitrate(1) # 320 kbps (ostensibly)
Example #55
    def create_descriptor(self, descriptor_name, type_descriptor, new_data,
                          data_type):
        """Creates a descriptor of a given type from a json or yaml representation

        Returns the descriptor id or False
        """
        result = False
        try:
            print(type_descriptor, data_type)
            current_data = json.loads(self.data_project)
            if data_type == 'json':
                new_descriptor = json.loads(new_data)
            elif data_type == 'yaml':
                # utility = Util()
                yaml_object = yaml.load(new_data)
                new_descriptor = json.loads(Util.yaml2json(yaml_object))
            else:
                print('Unknown data type')
                return False

            if type_descriptor == 'toscayaml':

                if descriptor_name is None:
                    new_descriptor_id = Util.get_unique_id()
                else:
                    new_descriptor_id = descriptor_name
                if not type_descriptor in current_data:
                    current_data[type_descriptor] = {}
                current_data[type_descriptor][
                    new_descriptor_id] = new_descriptor
                self.data_project = current_data
                #self.validated = validate #TODO(stefano) not clear if this is the validation for the whole project
                self.update()
                result = new_descriptor_id

            else:
                return False

        except Exception as e:
            print('Exception in create descriptor', e)
        return result
Example #56
def importprojectfile(cfg_files):
    project = {'click': {}}

    for file in cfg_files:
        if os.path.basename(str(file.name)) == 'vertices.json':
            print('found vertices file:', str(file))
            project['positions'] = {}
            project['positions']['vertices'] = Util.loadjsonfile(file)
        else:
            project['click'][os.path.splitext(os.path.basename(
                str(file)))[0]] = file.read()
    return project
Example #57
def query_summoner(summoner_name):
    if request.method == "GET":
        return render_template("index.html", summoner=summoner_name)
    else:
        mastery_data = data_analyzer.get_summoner_mastery_info(summoner_name)
        main_roles = data_analyzer.get_main_role_analysis(mastery_data)
        current_game_data = data_analyzer.get_current_game_data(summoner_name)
        return Util.json_dump({
            "mastery": mastery_data,
            "main_role": main_roles,
            "current_game": current_game_data
        })
Example #58
    def node_info(self, deployment_id, node_id):
        log.debug("get node info cRAN")
        url = self.agent['base_url'] + "/deployments/" + str(
            deployment_id) + "/node/" + str(node_id)
        r = requests.get(url).json()
        # WARNING: expect openvim data
        if 'node_info' in r:
            splitted = r['node_info'].splitlines(True)[1:]
            joined = "".join(splitted)
            r['node_info'] = Util.yaml2json(yaml.load(joined))
        return r
Example #59
    def importprojectfiles(cls, file_dict):
        """Imports descriptors (extracted from the new project POST)

        The keys in the dictionary are the file types
        """
        project = {'nsd': {}, 'vnfd': {}, 'click': {}, 'k8s': {}}
        for desc_type in project:
            if desc_type in file_dict:
                files_desc_type = file_dict[desc_type]
                for file in files_desc_type:
                    if desc_type != 'k8s':
                        project[desc_type][os.path.splitext(
                            file.name)[0]] = json.loads(file.read())
                    else:
                        yaml_object = Util().loadyamlfile(file)
                        json_object = Util.json_loads_byteified(
                            Util.yaml2json(yaml_object))
                        filename = os.path.splitext(os.path.basename(
                            str(file)))[0]
                        project[desc_type][filename] = json_object

        return project
Example #60
    def get_graph_model(cls, file_path):
        """Returns the model of the graph of the project type as a yaml object

        Returns an empty dict if there is no file with the model
        """
        # file_path = GRAPH_MODEL_FULL_NAME
        graph_model = {}
        try:
            graph_model = Util.loadyamlfile(file_path)
        except Exception as e:
            log.exception(e)
        return graph_model
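Usage is then straightforward (a sketch; SomeParser stands in for the enclosing class, and GRAPH_MODEL_FULL_NAME is the constant hinted at in the commented-out line above):

graph_model = SomeParser.get_graph_model(GRAPH_MODEL_FULL_NAME)
if not graph_model:
    log.debug('no graph model available; using empty dict')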