def list():
    """Print the catalog of services from the v_services view.

    NOTE: this function shadows the builtin ``list``; the name is kept
    for backward compatibility with existing callers.
    """
    headers = ['Group', 'Type', 'Type Name', 'Service', 'Port', 'Description']
    keys = ['svc_group', 'svc_type', 'svc_type_name', 'service', 'port',
            'description']
    # Column names in SELECT order.  image_file and project_url are kept in
    # each row dict even though they are not in the displayed 'keys'.
    cols = ['svc_group', 'svc_type', 'svc_type_name', 'service',
            'port', 'image_file', 'project_url', 'description']
    sql = "SELECT svc_group, svc_type, svc_type_name, service, \n" + \
          "       port, image_file, project_url, description \n" + \
          "  FROM v_services"
    data = cloud.exec_sql_list(sql)
    # Build one dict per row; str() matches the original display behavior.
    l_svc = [{col: str(val) for col, val in zip(cols, d)} for d in data]
    util.print_list(headers, keys, l_svc)
    return
def list_regions(provider=None, country=None, metro=None):
    """Print rows from v_regions, optionally filtered.

    :param provider: restrict to one provider (exact match), or None
    :param country:  restrict to one country, or None
    :param metro:    restrict to one metro, or None
    """
    headers = ['Country', 'Area', 'Metro Name', 'Metro', 'Provider', 'Region']
    keys = ['country', 'area', 'metro_name', 'metro', 'provider', 'region']
    where = "1 = 1"
    # SECURITY NOTE: filter values are interpolated directly into the SQL
    # string (exec_sql_list exposes no parameter API here); do not pass
    # untrusted input.
    for col, val in (('provider', provider), ('country', country),
                     ('metro', metro)):
        if val:
            where += " AND " + col + " = '" + val + "'"
    sql = "SELECT country, area, metro_name, metro, provider, region" + \
          " FROM v_regions WHERE " + where
    data = exec_sql_list(sql)
    # 'keys' matches the SELECT column order, so zip builds each row dict.
    l_rgn = [{k: str(v) for k, v in zip(keys, d)} for d in data]
    util.print_list(headers, keys, l_rgn)
    return
def list_providers(status=None):
    """Print the providers table, optionally filtered by status.

    :param status: exact status value to filter on, or None for all rows
    """
    headers = ['Type', 'Provider', 'Short Name', 'Display Name', 'Status']
    keys = ['provider_type', 'provider', 'short_name', 'display_name',
            'status']
    if status is None:
        where = "1 = 1"
    else:
        # SECURITY NOTE: status is interpolated into the SQL string;
        # do not pass untrusted input.
        where = "status = '" + status + "'"
    sql = "SELECT provider, provider_type, sort_order, status, \n" + \
          "       short_name, disp_name \n" + \
          "  FROM providers WHERE " + where + " ORDER BY 2, 3"
    data = exec_sql_list(sql)
    # SELECT column order; sort_order (index 2) is fetched only for
    # ORDER BY and is not displayed, hence the None placeholder.
    cols = ('provider', 'provider_type', None, 'status', 'short_name',
            'display_name')
    l_prov = []
    for d in data:
        row = {col: str(val) for col, val in zip(cols, d) if col}
        l_prov.append(row)
    util.print_list(headers, keys, l_prov)
    return
def scrape_company_url(results, browser, leads_to_filter):
    """
    The Angarita automation
    :return:
    """
    for profile, row in results.iterrows():
        # Skip leads that have no company URL to visit.
        if not row[COMPANY_URL]:
            continue
        try:
            print(f'browser.get({row[COMPANY_URL]}), ...')
            browser.get('http://www.' + row[COMPANY_URL])
            html = util.get_html(browser)
            # Merge previously-recorded emails/phones with any found on the
            # company page, then de-duplicate/validate via the filters.
            known_emails = util.get_list_from_print(results.loc[profile, EMAILS])
            found_emails = util.get_patterns(util.EMAIL_REGEX, html)
            known_phones = util.get_list_from_print(results.loc[profile, PHONES])
            found_phones = util.get_patterns(util.PHONE_REGEX, html)
            results.loc[profile, EMAILS] = util.print_list(
                util.filter_emails(known_emails + found_emails))
            results.loc[profile, PHONES] = util.print_list(
                util.filter_phones(known_phones + found_phones))
        except WebDriverException:
            print(f'failed to load {row[COMPANY_URL]}, continuing...')
    save_leads_to_excel(results, leads_to_filter)
def describe(cloud_name, machine_id, print_list=True):
    """Describe one machine in the named cloud.

    Looks up the cloud's provider/region/credentials, fetches the machine's
    runtime attributes, and either prints them as a one-row table
    (print_list=True) or returns them as a dict.  Returns None when the
    machine has no state (i.e. was not found).
    """
    provider, xxx, region, default_ssh_key, cloud_keys = cloud.read(
        cloud_name, True)
    svr, name, size, state, location, private_ip, \
        public_ip, key_name, vcpus, volumes \
        = get_describe_data(provider, machine_id, region, cloud_keys)
    # 'is None' instead of '== None' (PEP 8); empty state means "not found".
    if state is None or state == '':
        return None
    headers = ['Name', 'Size', 'State', 'Location', 'PublicIp', 'Id']
    keys = ['name', 'size', 'state', 'location', 'public_ip', 'id']
    # Avoid shadowing the builtin 'dict'.
    info = {
        "name": name,
        "id": machine_id,
        "size": size,
        "state": state,
        "location": location,
        "private_ip": private_ip,
        "public_ip": public_ip,
        "key_name": key_name,
        "vcpus": str(vcpus),
        "volumes": volumes,
    }
    if print_list:
        util.print_list(headers, keys, [info])
        return
    return info
def list_images():
    """Print all rows from the v_images view."""
    headers = ['OS', 'Image Type', 'DispName', 'Provider', 'Region',
               'Platform', 'IsDefault', 'ImageID']
    keys = ['os', 'image_type', 'disp_name', 'provider', 'region',
            'platform', 'is_default', 'image_id']
    sql = "SELECT os, image_type, disp_name, provider, region, platform, is_default, image_id \n" + \
          "  FROM v_images"
    data = exec_sql_list(sql)
    # 'keys' matches the SELECT column order, so zip builds each row dict;
    # str() matches the original display behavior.
    l_img = [{k: str(v) for k, v in zip(keys, d)} for d in data]
    util.print_list(headers, keys, l_img)
    return
def list_sizes(cloud_name):
    """Print the machine sizes offered by the named cloud's driver.

    Sizes with no disk are skipped.  Size names of the form 'family.size'
    are split into the two displayed columns; other names get an empty
    family.  Hourly prices are shown as an approximate monthly figure
    (price * 720 hours).
    """
    driver = cloud.get_cloud_driver(cloud_name)
    if driver is None:
        return
    try:
        sizes = driver.list_sizes()
    except Exception as e:
        util.message(str(e), 'error')
        return
    headers = ['Family', 'Size', 'RAM (MB)', 'Disk (GB)', 'Bandwidth',
               'Price (USD/Mo)']
    keys = ['family', 'size', 'ram', 'disk', 'bandwidth', 'price']
    jsonList = []
    for size in sizes:
        if size.disk == 0:
            continue
        sz_split = size.name.split(".")
        if len(sz_split) < 2:
            family = ""
            szz = size.name
        else:
            # NOTE: anything after a second '.' is intentionally dropped,
            # matching the original display behavior.
            family = sz_split[0]
            szz = sz_split[1]
        sizeDict = {
            'family': family,
            'size': szz,
            'ram': str(size.ram),
            'disk': str(size.disk),
            # 720 ~= hours per month; blank when the provider reports 0.0.
            'price': "" if size.price == 0.0 else str(round(size.price * 720)),
            'bandwidth': "" if size.bandwidth is None else str(size.bandwidth),
        }
        jsonList.append(sizeDict)
    util.print_list(headers, keys, jsonList)
    return
def group_list(cloud_name, group_name=None, data_only=False):
    """List security groups for the named cloud.

    :param group_name: optional single group to list
    :param data_only:  when True, return the raw group list instead of
                       printing it
    """
    provider, xxx, region, default_ssh_key, cloud_keys = cloud.read(cloud_name, True)
    # AWS has its own API shape; everything else goes through OpenStack.
    if provider == "aws":
        gl = group_list_aws(region, cloud_keys, group_name)
    else:
        gl = group_list_openstack(region, cloud_keys, group_name)
    if data_only:
        return gl
    headers = ['ID', 'Cidr', 'Port', 'Name']
    keys = ['id', 'cidr', 'port', 'name']
    util.print_list(headers, keys, gl)
    return
def do_combine_landscape_process(self, dest_file_name="final.jpg"):
    """Combine the selected landscape images into one output file.

    Returns the path of the combined image, or "" when the chosen images'
    aspect difference is not acceptable.  On a combine failure the selected
    images are printed and the exception is re-raised.
    """
    output_path = os.path.join(self.destDirectory, dest_file_name)
    images, aspect = self.find_images_to_combine()
    self.substitute_data(images)
    # Bail out early when the aspect-ratio difference is too large.
    if not self.acceptable_difference(aspect):
        return ""
    try:
        self.combine_images(output_path)
    except Exception:
        print_list(self.selectedImages)
        raise
    self.write_image_statistics(os.path.join(self.destDirectory, "tempStat.txt"))
    return output_path
def process_packet(packet_json_str):
    """Decode one JSON packet string and render it to the console.

    Raises ExitIRCApp on a DISCONNECT packet.
    """
    # convert packet string to JSON
    packet = json.loads(packet_json_str)
    opcode = packet['opcode']
    if opcode == 'WELCOME':
        print(packet['data'])
    elif opcode == 'CREATE_ROOM_RES':
        print(f"#[{packet['roomname']}] room created!")
    elif opcode == 'JOIN_ROOM_RES':
        print(
            f"#[{packet['roomname']}] <{packet['username']}> joined the room.")
    elif opcode == 'LEAVE_ROOM_RES':
        print(f"#[{packet['roomname']}] <{packet['username']}> left the room.")
    elif opcode == 'LIST_USERS_RES':
        if not packet['data']:
            print(f"*** No users currently in room {packet['roomname']}! ***")
        else:
            print_list(title=f"#{packet['roomname']} users",
                       list_to_print=packet['data'])
    elif opcode == 'LIST_ROOMS_RES':
        if not packet['data']:
            print("*** No chatrooms created! ***")
        else:
            print_list(title="Chatrooms", list_to_print=packet['data'])
    elif opcode == 'TELL_MSG':
        print(
            f"#[{packet['roomname']}] <{packet['username']}>: {packet['data']}"
        )
    elif opcode == 'TELL_PVT_MSG':
        print(f"#[Private Msg] <{packet['username']}>: {packet['data']}")
    elif opcode == 'ERROR':
        print(f"*** {packet['data']} ***")
    elif opcode == 'DISCONNECT':
        raise ExitIRCApp()
def list_flavors(provider=None, family=None, flavor=None, size=None):
    """Print rows from the flavors table, optionally filtered.

    Each keyword argument, when given, adds an exact-match filter.
    """
    keys = ['provider', 'family', 'flavor', 'size', 'v_cpu', 'mem_gb',
            'das_gb', 'price_hr']
    headers = ['Provider', 'Family', 'Flavor', 'Size', 'vCPU', 'Mem GB',
               'DAS GB', 'Price/hr']
    where = "1 = 1"
    # SECURITY NOTE: filter values are interpolated directly into the SQL
    # string (exec_sql_list exposes no parameter API here); do not pass
    # untrusted input.
    for col, val in (('provider', provider), ('family', family),
                     ('flavor', flavor), ('size', size)):
        if val:
            where += " AND " + col + " = '" + val + "'"
    sql = "SELECT provider, family, flavor, size, v_cpu, mem_gb, das_gb, price_hr \n" + \
          "  FROM flavors WHERE " + where + " ORDER BY provider, v_cpu"
    data = exec_sql_list(sql)
    # 'keys' matches the SELECT column order; values are kept as returned
    # (no str() coercion), matching the original behavior.
    l_flv = [dict(zip(keys, d)) for d in data]
    util.print_list(headers, keys, l_flv)
    return
def list_cloud_keys(cloud_name):
    """Print the SSH key pairs registered with the named cloud."""
    try:
        driver = cloud.get_cloud_driver(cloud_name)
        kk = driver.list_key_pairs()
    except Exception as e:
        util.fatal_error(str(e))
        # Defensive: fatal_error is expected to exit, but if it ever
        # returns, the original code fell through and crashed on an
        # unbound 'kk'.
        return
    headers = ['Name']
    keys = ['name']
    jsonList = [{'name': key.name} for key in kk]
    util.print_list(headers, keys, jsonList)
    return
def list(cloud_name):
    """Print the nodes (VMs) in the named cloud.

    NOTE: this function shadows the builtin ``list``; the name is kept
    for backward compatibility with existing callers.
    """
    driver = cloud.get_cloud_driver(cloud_name)
    if driver is None:
        return
    try:
        nds = driver.list_nodes()
    except Exception as e:
        util.message(str(e), 'error')
        return
    headers = ['Name', 'ID', 'State', 'Public IP', 'Private IP']
    keys = ['name', 'id', 'state', 'public_ip', 'private_ip']
    jsonList = []
    for nd in nds:
        public_ip = str(nd.public_ips[0]) if nd.public_ips else ""
        # When a node has no private IP, fall back to its public IP
        # (matching the original behavior).
        private_ip = str(nd.private_ips[0]) if nd.private_ips else public_ip
        jsonList.append({
            'id': str(nd.id),
            'name': str(nd.name),
            'state': str(nd.state),
            'public_ip': public_ip,
            'private_ip': private_ip,
        })
    util.print_list(headers, keys, jsonList)
    return
def read(cloud_name=None, data_only=False):
    """Read rows from the clouds table.

    :param cloud_name: when given, restrict to this cloud
    :param data_only:  when True, return the FIRST matching row as the
                       tuple (provider, name, region, default_ssh_key, keys),
                       or None when no row matches; otherwise print all rows.
    """
    headers = ['Provider', 'Name', 'Region', 'Default SSH Key']
    keys = ['provider', 'name', 'region', 'default_ssh_key']
    where = ""
    if cloud_name:
        # SECURITY NOTE: cloud_name is interpolated into the SQL string;
        # do not pass untrusted input.
        where = "WHERE name = '" + cloud_name + "'"
    sql = "SELECT provider, name, region, default_ssh_key, keys \n" + \
          "  FROM clouds " + where + " ORDER BY 1, 2"
    data = exec_sql_list(sql)

    def _ssh_key(d):
        # NULL default_ssh_key is rendered as an empty string.
        return "" if d[3] is None else str(d[3])

    if data_only:
        if not data:
            return None
        d = data[0]
        return str(d[0]), str(d[1]), str(d[2]), _ssh_key(d), str(d[4])
    jsonList = []
    for d in data:
        jsonList.append({
            'provider': str(d[0]),
            'name': str(d[1]),
            'region': str(d[2]),
            'default_ssh_key': _ssh_key(d),
            'keys': str(d[4]),
        })
    util.print_list(headers, keys, jsonList)
    return
def list():
    """Print all rows from the keys table, ordered by name.

    NOTE: this function shadows the builtin ``list``; the name is kept
    for backward compatibility with existing callers.
    """
    headers = ['Name', 'UserName', 'PemFile', 'Updated UTC ']
    keys = ['name', 'username', 'pem_file', 'updated_utc']
    sql = "SELECT name, username, pem_file, updated_utc \n" + \
          "  FROM keys ORDER BY 1"
    data = cloud.exec_sql_list(sql)
    # 'keys' matches the SELECT column order, so zip builds each row dict.
    lst = [{k: str(v) for k, v in zip(keys, d)} for d in data]
    util.print_list(headers, keys, lst)
    return
def list_locations(provider=None, country=None, metro=None):
    """Print rows from v_locations, optionally filtered.

    :param provider: restrict to one provider (exact match), or None
    :param country:  restrict to one country, or None
    :param metro:    restrict to one metro, or None
    """
    headers = ['Country', 'Area', 'Metro', 'Provider', 'Region', 'Location',
               'Is Pref']
    keys = ['country', 'area', 'metro', 'provider', 'region', 'location',
            'is_preferred']
    where = "1 = 1"
    # SECURITY NOTE: filter values are interpolated directly into the SQL
    # string (exec_sql_list exposes no parameter API here); do not pass
    # untrusted input.
    for col, val in (('provider', provider), ('country', country),
                     ('metro', metro)):
        if val:
            where += " AND " + col + " = '" + val + "'"
    sql = "SELECT country, area, metro, provider, \n" + \
          "       region, location, is_preferred \n" + \
          "  FROM v_locations WHERE " + where
    data = exec_sql_list(sql)
    # 'keys' matches the SELECT column order, so zip builds each row dict.
    l_lcn = [{k: str(v) for k, v in zip(keys, d)} for d in data]
    util.print_list(headers, keys, l_lcn)
    return
import cpu
import mem
import net
import load
import util

if __name__ == '__main__':
    # Collect one sample from each pollster, then print them in order.
    samples = [
        cpu.CPUInfo(),
        cpu.CPUUsage(),
        mem.MemInfo(),
        net.NetStat(),
        load.LoadStat(),
    ]
    for sample in samples:
        util.print_list(sample)
# NOTE(review): Python 2 code (print statements); do not run under Python 3.
def eventlines_to_particlesdict(lines, \
        particles_naming_function = particles_pdg_naming):
    """Takes event lines from LHE file and returns dictionary of particles
    {particle_name: particle}.
    """
    particles_list = parse_event(lines)
    particles_dict = particles_list_to_dictionary(particles_list,\
        particles_naming_function)
    return particles_dict

if __name__ == "__main__":
    # Usage: script <lhe-file>; exits with -1 when no path is given.
    try:
        inpath = sys.argv[1]
    except:
        print "Input file path expected!"
        sys.exit(-1)
    lhe = LHELoader(open(inpath))
    # Stream events from the file and dump lines + parsed particles.
    for eventlines in lhe.yield_events():
        particles = eventlines_to_particlesdict(eventlines)
        util.print_list(eventlines, "LINES:")
        util.print_dict(particles, "PARTICLES:")
        print "--------------------------------------"
    print "header =", lhe.header
    print "footer =", lhe.footer
    print "events_counter =", lhe.events_counter
    # NOTE(review): 'events' is not defined anywhere in this scope --
    # presumably meant lhe.events or similar; these two lines will raise
    # NameError if reached.  Verify against LHELoader's API.
    print "len(events) =", len(events)
    print "events = ", events[:2], "...", events[-1]
from __future__ import absolute_import
from __future__ import unicode_literals

from delete_duplicate import deleteDups, deleteDupsNonbuffer
from util import make_linked_list, print_list, make_linked_list_number
from kth_last import printKthToLast, kthToLast, nthToLast
from delete_node import deleteNode
from partition import partition, partion_short_code
from adding import addLists, addlistsSecond
from palindrome import is_palindrome, isPalinedromeSecond
from find_intersection import find_inter_section

if __name__ == "__main__":
    # Demo driver for the linked-list exercises: each section builds a
    # fresh list (deleteDups mutates in place), runs one exercise, and
    # prints the result.
    linkedlist = make_linked_list()
    print_list(linkedlist)
    deleteDups(linkedlist)
    print('-----Delete Duplicate----')
    print_list(linkedlist)
    print('-----Retry----')
    # Rebuild the list, then de-duplicate without an auxiliary buffer.
    linkedlist = make_linked_list()
    deleteDupsNonbuffer(linkedlist)
    print('-----Delete Duplicate Non buffer----')
    print_list(linkedlist)
    print('-----Linked list last node----')
    # Kth-from-last lookups on a fresh list.
    linkedlist = make_linked_list()
    printKthToLast(linkedlist, 2)
    print(kthToLast(linkedlist, 2))
    print(kthToLast(linkedlist, 3))
        # NOTE(review): fragment -- this 'else' belongs to a unit-conversion
        # method whose beginning is outside this view; presumably it pairs
        # with a force_unit branch.  Python 2 code (print statement, long).
        else:
            # Walk the unit ladder and stop at the first unit that puts the
            # value into [0, 1024), or fall back to the last unit.
            for unit, rate in zip(unit_list, rate_list):
                tmp_value = float(value)/rate
                if (tmp_value >= 0 and tmp_value < 1024) or (unit_list.index(unit) == len(unit_list)-1):
                    return {'volume':round(tmp_value, 2), 'unit':unit}

    def getSample(self):
        # Parse /proc/meminfo into an ordered {field: {'volume', 'unit'}} map.
        mem_info = OrderedDict()
        try:
            if util.is_exist('/proc/meminfo'):
                #close file is unnecessary
                with open('/proc/meminfo') as f:
                    for line in f:
                        tmp = line.split(':')
                        if len(tmp) == 2:
                            vol_unit = tmp[1].strip().split(' ')
                            if len(vol_unit) == 2:
                                # "value kB" form: normalize to MB.
                                tmp_value = self._changeUnit(value=long(vol_unit[0]), force_unit='MB')
                            elif len(vol_unit) == 1:
                                # Bare counter (e.g. HugePages): no unit.
                                tmp_value = {'volume':long(long(vol_unit[0])), 'unit':''}
                            mem_info[tmp[0].strip()] = tmp_value
        except:
            print "Unexpected error:", sys.exc_info()[1]
        finally:
            return mem_info

if __name__=='__main__':
    mem = MemInfoPollster()
    util.print_list(mem.getSample())
            # NOTE(review): fragment -- these lines sit inside a per-device
            # loop of a getSample-style method whose beginning is outside
            # this view.  Python 2 code (dict.has_key).
            disk_usage[dev_short] = {}
            disk_usage[dev_short]['mnt'] = item['mnt']
            disk_usage[dev_short]['fstype'] = item['fstype']
            disk_usage[dev_short]['dev'] = item['dev']
            # Normalize byte counts to GB and accumulate fleet totals.
            disk_usage[dev_short]['available'] = self._changeUnit(value=usg['available'], force_unit='GB')
            total_available += disk_usage[dev_short]['available']['volume']
            disk_usage[dev_short]['used'] = round(usg['used'], 4)
            disk_usage[dev_short]['capacity'] = self._changeUnit(value=usg['capacity'], force_unit='GB')
            total_capacity += disk_usage[dev_short]['capacity']['volume']
            disk_usage[dev_short]['free'] = self._changeUnit(value=usg['free'], force_unit='GB')
            total_free += disk_usage[dev_short]['free']['volume']
            # Attach per-device IO stats when available.
            if disk_io.has_key(dev_short):
                disk_usage[dev_short]['io_stat'] = disk_io[dev_short]
        disk_usage['total_available'] = total_available
        disk_usage['total_capacity'] = total_capacity
        disk_usage['total_free'] = total_free
        return disk_usage

    def test(self):
        # Convenience hook: IO stats for all current partitions.
        disk_list = self._getDiskPartitions()
        return self._getDiskIO(disk_list)

if __name__=='__main__':
    disk = DiskUsagePollster()
    util.print_list(disk.getSample())
# NOTE(review): Python 2 code (print statement); do not run under Python 3.
class LoadStatPollster(Pollster):
    """Pollster that samples /proc/loadavg."""

    def __init__(self, name='load_stat'):
        super(LoadStatPollster, self).__init__(name=name)

    def getSample(self):
        # Parse the five whitespace-separated fields of /proc/loadavg:
        # 1/5/15-minute load, runnable/total threads, and last PID.
        load_stat = {}
        load_info = None
        f = None
        try:
            if util.is_exist('/proc/loadavg'):
                f = open('/proc/loadavg')
                load_info = f.read().split()
            if load_info and len(load_info) == 5:
                load_stat['load_1_min'] = load_info[0]
                load_stat['load_5_min'] = load_info[1]
                load_stat['load_15_min'] = load_info[2]
                load_stat['nr_thread'] = load_info[3]
                load_stat['last_pid'] = load_info[4]
        except:
            print "Unexpected error:", sys.exc_info()[1]
        finally:
            if f:
                f.close()
        # Empty dict when the file is missing or malformed.
        return load_stat

if __name__ == '__main__':
    load = LoadStatPollster(name='load')
    util.print_list(load.getSample())
def scrap_word(word, df, html, group_name, group_url):
    """
    Scrape posts mentioning *word* from a group's HTML and merge the
    resulting leads into *df*.

    :param word: string search term
    :param df: pandas Dataframe of leads, indexed by profile
    :param html: str html
    :param group_name: str
    :param group_url: str
    :return: df
    """
    # re.escape guards against regex metacharacters in the search word
    # (the original interpolated it raw, which broke on words like "c++").
    # Compile once and reuse for both split and findall.
    pattern = re.compile(rf'>[^>]*\s{re.escape(word)}\s[^<]*<')
    splits = pattern.split(html)[:-1]
    # found nothing
    if not splits:
        print(f'nothing found :( for word {word} on group {group_url}')
        return df
    posts = pattern.findall(html)
    for idx, split in enumerate(splits):
        profile = get_profile(split)
        if not profile:
            continue
        # Strip the surrounding tag brackets and cap post length at 2000.
        post = posts[idx].replace('>', '').replace('<', '')[:2000]
        if profile in list(df.index.values):
            # Known profile: bump the count on a repeat of the same post,
            # otherwise append the new post text.
            if post == df.loc[profile, 'post']:
                df.loc[profile, 'count'] += 1
            else:
                df.loc[profile, 'post'] += post
        else:
            phones = util.get_patterns(util.PHONE_REGEX, post)
            emails = util.get_patterns(util.EMAIL_REGEX, post)
            # Only record profiles with at least one contact channel.
            if emails or phones:
                if len(emails) > 0:
                    company_url = get_company_url_from_email(emails[0])
                else:
                    company_url = ''
                #name_text = scrape_name(browser, profile)
                name_text = ''
                # By default will assign It to all positions
                row = pd.Series(
                    {
                        'name': name_text,
                        'post': post,
                        'phones': util.print_list(phones),
                        'emails': util.print_list(emails),
                        COMPANY_URL: company_url,
                        'word': word,
                        'group_name': group_name,
                        'group_url': group_url,
                        'count': 1,
                        WORK_AREA_CODE: 'IT'
                    },
                    name=profile)
                # NOTE: DataFrame.append is deprecated in modern pandas;
                # kept for compatibility with the pandas version in use.
                df = df.append(row)
    return df
# NOTE(review): Python 2 code (print statements); near-duplicate of the
# other LHE driver in this collection.  Do not run under Python 3.
def eventlines_to_particlesdict(lines, \
        particles_naming_function = particles_pdg_naming):
    """Takes event lines from LHE file and returns dictionary of particles
    {particle_name: particle}.
    """
    particles_list = parse_event(lines)
    particles_dict = particles_list_to_dictionary(particles_list,\
        particles_naming_function)
    return particles_dict

if __name__=="__main__":
    # Usage: script <lhe-file>; exits with -1 when no path is given.
    try:
        inpath = sys.argv[1]
    except:
        print "Input file path expected!"; sys.exit(-1)
    lhe = LHELoader(open(inpath))
    # Stream events from the file and dump lines + parsed particles.
    for eventlines in lhe.yield_events():
        particles = eventlines_to_particlesdict(eventlines)
        util.print_list(eventlines, "LINES:")
        util.print_dict(particles, "PARTICLES:")
        print "--------------------------------------"
    print "header =", lhe.header
    print "footer =", lhe.footer
    print "events_counter =", lhe.events_counter
    # NOTE(review): 'events' is not defined anywhere in this scope --
    # presumably meant lhe.events or similar; these two lines will raise
    # NameError if reached.  Verify against LHELoader's API.
    print "len(events) =", len(events)
    print "events = ", events[:2], "...", events[-1]
import cpu
import mem
import net
import load
import util

if __name__=='__main__':
    # Instantiate every pollster, then print each sample in the same order.
    for info in (cpu.CPUInfo(), cpu.CPUUsage(), mem.MemInfo(),
                 net.NetStat(), load.LoadStat()):
        util.print_list(info)
'''
# NOTE(review): the ''' above appears to close a module docstring whose
# opening is outside this view.  Python 2 code (print statement).

class LoadStatPollster(Pollster):
    """Pollster that samples /proc/loadavg (numeric load fields)."""

    def __init__(self, name='load_stat'):
        super(LoadStatPollster, self).__init__(name=name)

    def getSample(self):
        # Parse the five whitespace-separated fields of /proc/loadavg.
        # Load averages are wrapped as {'volume': float, 'unit': ''} to
        # match the other pollsters' sample shape.
        load_stat = {}
        load_info = None
        f = None
        try:
            if util.is_exist('/proc/loadavg'):
                f = open('/proc/loadavg')
                load_info = f.read().split()
            if load_info and len(load_info) == 5:
                load_stat['load_1_min'] = {'volume':float(load_info[0]), 'unit':''}
                load_stat['load_5_min'] = {'volume':float(load_info[1]), 'unit':''}
                load_stat['load_15_min'] = {'volume':float(load_info[2]), 'unit':''}
                load_stat['nr_thread'] = load_info[3]
                load_stat['last_pid'] = load_info[4]
        except:
            print "Unexpected error:", sys.exc_info()[1]
        finally:
            if f:
                f.close()
        # Empty dict when the file is missing or malformed.
        return load_stat

if __name__=='__main__':
    load = LoadStatPollster(name='load')
    util.print_list(load.getSample())
                    # NOTE(review): fragment -- this code sits inside a
                    # /proc/net parsing loop whose beginning is outside this
                    # view; presumably parsing /proc/net/dev-style output
                    # where a two-row header names per-interface counters.
                    # Python 2 code (print statement, dict.items() indexing).
                    title[tmp[i].strip()] = []
                elif line.strip().startswith('face'):
                    # Second header row: the counter names under each group.
                    tmp = line.strip().split('|')
                    for i in range(1, len(tmp)):
                        title[title.items()[i-1][0]] = tmp[i].strip().split()
                        total_item += len(title.items()[i-1][1])
                else:
                    # Data row: "<iface>: v1 v2 ..." -- map each value back
                    # to its (group, counter) position from the header.
                    tmp = line.strip().split(':')
                    tmp_data = OrderedDict()
                    value = tmp[1].strip().split()
                    if len(value) == total_item:
                        cnt = 0
                        for t_item in title.items():
                            tmp_data[t_item[0]] = {}
                            for it in t_item[1]:
                                tmp_data[t_item[0]][it] = value[cnt]
                                cnt += 1
                    else:
                        print 'number of items error'
                    net_state[tmp[0]] = tmp_data
        except:
            print "Unexpected error:", sys.exc_info()[1]
        finally:
            return net_state

if __name__=='__main__':
    net_stat = NetStatPollster()
    util.print_list(net_stat.getSample())