def __init__(self, imdb_id):
    """Look up a TV series on TheTVDB by IMDb id and cache its XML record."""
    self.api_key = API_KEY
    self.imdb_id = imdb_id
    lookup_url = ('http://thetvdb.com/api/GetSeriesByRemoteID.php'
                  '?imdbid=%s&language=en' % self.imdb_id)
    self.info_result = get_url(lookup_url)
    # id() parses the remote-lookup result; fetch the full record if found.
    series_id = self.id()
    if series_id:
        full_url = 'http://thetvdb.com/api/%s/series/%s/all/en.xml' % (
            API_KEY, series_id)
        self.info_result = get_url(full_url)
    else:
        self.info_result = ''
def __init__(self, movie_name=None, imdb_id=None):
    """Resolve a movie on TMDb, directly by IMDb id or via a name search."""
    self.api_key = API_KEY
    if imdb_id:
        self.imdb_id = imdb_id
    else:
        # No id supplied: search by title and take the first <id> element.
        search_url = ('http://api.themoviedb.org/2.1/Movie.search/en/xml/'
                      + self.api_key + '/' + urllib2.quote(movie_name))
        self.imdb_id = regex_from_to(get_url(search_url), '<id>', '</id>')
    lookup_url = ('http://api.themoviedb.org/2.1/Movie.imdbLookup/en/xml/'
                  + self.api_key + '/' + self.imdb_id)
    self.info_result = get_url(lookup_url)
def _close_handwich_bridge():
    """Ask a running handwich bridge to shut down.

    Best-effort: if the bridge port is not open nothing is done, and any
    error from the exit request is logged and ignored (the bridge may die
    before it can reply).
    """
    c = HANDWICH_BRIDGE_CONFIG
    if is_open(c['ip'], c['port']):
        url = _make_handwich_base_url() + 'exit'
        # Fixed: compare to the None singleton with `is not`, not `!=`.
        if c['key'] is not None:
            url += '?key=' + c['key']
        # just send the exit command
        try:
            get_url(url, allow_cache=False, use_pool=False)
        except Exception as e:
            logging.error(e)  # ignore error
def __init__(self, imdb_id):
    """Fetch a TVDB series record given an IMDb id; caches the raw XML."""
    self.api_key = API_KEY
    self.imdb_id = imdb_id
    remote_lookup = ('http://thetvdb.com/api/GetSeriesByRemoteID.php'
                     '?imdbid=%s&language=en' % self.imdb_id)
    self.info_result = get_url(remote_lookup)
    # When the remote lookup yields a series id, replace the cached result
    # with the complete per-series record; otherwise leave it empty.
    tvdb_id = self.id()
    if tvdb_id:
        self.info_result = get_url(
            'http://thetvdb.com/api/%s/series/%s/all/en.xml'
            % (API_KEY, tvdb_id))
    else:
        self.info_result = ''
def run(log_path):
    """Poll the ultrasonic CAN endpoint once a second, 600 times, tallying
    good and bad reads into the standard test-result dict."""
    test_project = "can"
    common.write_log(log_path, test_project, "info",
                     test_project + " test start")
    url = common.get_url(
        "/gs-robot/real_time_data/ultrasonic_raw?frame_id=ultrasonic0")
    state = True
    count_num = 600
    pass_num = 0
    fail_num = 0
    for _ in range(count_num):
        data = common.getUrlData(url)
        # NOTE(review): a return value of 1 appears to be getUrlData's
        # failure sentinel — confirm against common.getUrlData.
        ok = data != 1
        common.write_log(log_path, test_project,
                         "info" if ok else "error", str(data))
        if ok:
            pass_num += 1
        else:
            state = False
            fail_num += 1
        time.sleep(1)
    return {
        test_project: {
            "state": state,
            "count_num": count_num,
            "pass_num": pass_num,
            "fail_num": fail_num
        }
    }
def get_slow_pairs(threshold=1, vo=None):
    """Return FTS3 transfer pairs whose current throughput is below
    *threshold*, optionally restricted to one virtual organisation."""
    raw = get_url('https://fts3-pilot.cern.ch:8449/fts3/ftsmon/overview',
                  vo=vo, page='all')
    overview = json.loads(raw)
    return [pair for pair in overview['items']
            if 'current' in pair and pair['current'] < threshold]
def run(log_path):
    """Exercise every digital-output combination via rosservice and verify
    that the detailed DI readback mirrors the commanded DO bit pattern.

    Returns the standard per-project result dict with pass/fail counts.
    """
    test_project = "input"
    log_content = test_project + " test start"
    common.write_log(log_path, test_project, "info", log_content)
    url = common.get_url("/gs-robot/data/device_status")
    state = True
    count_num = 10
    pass_num = 0
    fail_num = 0
    # Map output names do1..do8 to their bit values 1, 2, 4, ... 128.
    do_value_dict = {}
    for i in range(8):
        do_value = pow(2, i)
        do_value_key = "do" + str(i + 1)
        do_value_dict[do_value_key] = do_value
    # Every subset of outputs becomes one integer bitmask to drive.
    all_do_value_comb = combination_all(do_value_dict.keys())
    all_do_value_comb_list = []
    for i in all_do_value_comb:
        value = 0
        for j in do_value_dict.keys():
            if j in i:
                value += do_value_dict[j]
        all_do_value_comb_list.append(value)
    all_do_value_comb_list.append(0)  # also test the all-off pattern
    all_do_value_comb_list = sorted(all_do_value_comb_list)
    # Each mask is driven twice (outer j loop), so double the count.
    count_num = len(all_do_value_comb_list) * 2
    for j in range(2):
        for i in all_do_value_comb_list:
            # Shell out to rosservice to set the DO word to mask `i`.
            do_commond = 'rosservice call /device/operate_device \"operation:\n- key: \'do\'\n value: \'%d\'\"' % i
            common.write_log(log_path, test_project, "info", do_commond)
            res = os.popen(do_commond).read()
            # rosservice prints "True" on success; log accordingly.
            if "True" in res:
                common.write_log(log_path, test_project, "info", res)
            else:
                common.write_log(log_path, test_project, "error", res)
            time.sleep(1)
            data = common.getUrlData(url)
            # The DI word should echo the DO mask we just commanded.
            di_value = int(data["data"]["detailedDi"])
            if di_value != i:
                state = False
                common.write_log(log_path, test_project, "error", str(data))
                fail_num += 1
            else:
                common.write_log(log_path, test_project, "info", str(data))
                pass_num += 1
    return {
        test_project: {
            "state": state,
            "count_num": count_num,
            "pass_num": pass_num,
            "fail_num": fail_num
        }
    }
def title_search(params, start="1"):
    """Run an IMDb advanced title search.

    `params` carries the caller's filters; the module-level defaults
    (COUNT, HAS, VIEW, NUM_VOTES, USER_RATING) and the `start` offset are
    layered on top. Returns the raw response body.
    """
    # Fixed: work on a copy — the original wrote the defaults into the
    # caller's dict, a surprising side effect on repeat calls.
    query = dict(params)
    query["count"] = COUNT
    query["has"] = HAS
    query["view"] = VIEW
    query["num_votes"] = NUM_VOTES
    query["user_rating"] = USER_RATING
    query["start"] = start
    url = IMDB_TITLE_SEARCH
    for key in query:
        url += "%s=%s&" % (key, query[key])
    # Need to clear cache to allow filter changes
    body = get_url(url, cache=CACHE_PATH, cache_time=86400)
    return body
def get_playlist_tracks(playlist_file, open_playlists=False):
    """Return a list of track dicts for *playlist_file*.

    Video files (.avi/.mkv) yield a single track pointing at the direct
    download URL. Other files are treated as playlists and scraped only
    when *open_playlists* is true. Any failure yields an empty list.
    """
    tracks = []
    try:
        file_name = playlist_file.name
        # endswith accepts a tuple — one call instead of an `or` chain.
        if file_name.endswith(('.avi', '.mkv')):
            tracks = [{'name': file_name, 'location': playlist_file.url_dl}]
        elif open_playlists:
            playlist = get_url(playlist_file.url_pls)
            tracks = scrape_xspf(playlist)
    except Exception:
        # Fixed: narrowed the bare `except:` (which also swallowed
        # KeyboardInterrupt/SystemExit); still deliberately best-effort —
        # a missing attribute or failed fetch just means no tracks.
        pass
    return tracks
def check_core_loaded():
    """Return True when core `c_id` answers its 'about' call with a
    well-formed ['ret', ...] reply, False otherwise."""
    about_url = _make_call_core_url(c_id, 'about')
    raw = get_url(about_url, allow_cache=False, use_pool=False)
    logging.debug("core_about raw return:" + raw)
    reply = json.loads(raw)
    if reply[0] != 'ret':
        logging.debug('core not loaded, ' + str(reply))
        return False
    logging.debug('core ' + str(c_id) + ', ' + str(reply[1]))
    return True
def run(log_path):
    """Poll motor health once a second, 600 times; both motors must report
    healthy each time. Returns the standard per-project result dict."""
    test_project = "motor"
    log_content = test_project + " test start"
    common.write_log(log_path, test_project, "info", log_content)
    state = True
    count_num = 600
    pass_num = 0
    fail_num = 0
    url = common.get_url("/gs-robot/data/health_status")
    for i in range(count_num):
        common.write_log(log_path, test_project, "info", url)
        data = common.getUrlData(url)
        time.sleep(1)
        if data["rightMotor"] and data["leftMotor"]:
            common.write_log(log_path, test_project, "info", str(data))
            pass_num += 1
        else:
            common.write_log(log_path, test_project, "error", str(data))
            # Fixed: the original never set `state` to False here, so the
            # summary always reported True even with failures; flip it the
            # way every sibling test does.
            state = False
            fail_num += 1
    return {
        test_project: {
            "state": state,
            "count_num": count_num,
            "pass_num": pass_num,
            "fail_num": fail_num
        }
    }
#!/usr/bin/env python from common import create_client, link, wait_done, get_url client = create_client('http://localhost:8080') print 'Creating logstash' logstash = client.create_container(name='logstash', imageUuid='docker:cattle/logstash') print 'Creating Kibana' kibana = client.create_container(name='kibana', imageUuid='docker:cattle/kibana', environment=link(es=logstash)) print 'Creating MySQL' db = client.create_container(name='mysql', imageUuid='docker:cattle/mysql') print 'Creating Cattle' cattle = client.create_container(name='cattle', imageUuid='docker:cattle/cattle', environment=link(mysql=db, gelf=logstash)) cattle = wait_done(cattle) print 'phpMyAdmin running at {}/phpmyadmin'.format(get_url(db, '80/tcp')) print 'Kibana running at', get_url(kibana, '80/tcp') print 'Cattle running at', get_url(cattle, '8080/tcp')
import sys
sys.path.append('..')
from common import save_page, get_url, is_subtitle, has_pdf, namify, get_title, has_italic, unspace, is_centered, has_bold
from common import remove_parenthesised
import re
import csv
import spacy
from urllib.parse import urljoin
from bs4 import BeautifulSoup

# save webpage
# Derive the dataset name from this script's own filename,
# e.g. "scrape_<name>.py" -> "<name>".
FILE_NAME = sys.argv[0]
FILE_NAME = FILE_NAME.split("_")[1].replace(".py", "")
print(FILE_NAME)
URL = get_url(FILE_NAME)
save_page(URL, FILE_NAME)  # Uncomment the first time you run this script

# Parse the locally saved page.
with open(FILE_NAME, 'rb') as g:
    html = g.read()
bs = BeautifulSoup(html, "html.parser")
page_lines = bs.find_all(['p'])
nlp = spacy.load("en_core_web_sm")
# Running parse state while walking the page's paragraphs.
track = None
vol = None
with open(FILE_NAME + ".tsv", 'w') as f:
    writer = csv.writer(f, delimiter='\t', quotechar='"',
                        quoting=csv.QUOTE_MINIMAL)
def run(log_path):
    """Exercise the emergency-stop relay for 10 cycles.

    Each cycle drives the relay close -> open -> close and checks that the
    device-status emergency flag follows False -> True -> False. Returns
    the standard per-project result dict.
    """
    test_project = "emergency"
    log_content = test_project + " test start"
    common.write_log(log_path, test_project, "info", log_content)
    state = True
    count_num = 10
    pass_num = 0
    fail_num = 0
    # Fixed: the original built this URL as "http://" + host_ip + ":" +
    # host_ip + path — the IP repeated where the port belongs; use the
    # shared helper like the sibling tests.
    url = common.get_url("/gs-robot/cmd/operate_device")
    url_emerg = common.get_url("/gs-robot/data/device_status")
    open_value = str(int("01010101", 2))   # relay pattern that trips the e-stop
    close_value = str(int("01010110", 2))  # relay pattern that releases it
    # (relay value to command, expected emergency flag afterwards)
    steps = [(close_value, False), (open_value, True), (close_value, False)]
    for i in range(count_num):
        ok = True
        for relay_value, expect_emergency in steps:
            value = [{
                "name": "relay",
                "type": "int",
                "value": relay_value,
                "delayTime": 0
            }]
            common.write_log(log_path, test_project, "info", url)
            common.write_log(log_path, test_project, "info", value)
            data = common.postUrlData(url, value)
            time.sleep(1)
            if not data["successed"]:
                ok = False
                break
            common.write_log(log_path, test_project, "info", data)
            common.write_log(log_path, test_project, "info", url_emerg)
            # Fixed: the original called common.get_url(url_emerg) here,
            # which only builds a URL string; getUrlData() performs the
            # actual request.
            data = common.getUrlData(url_emerg)
            time.sleep(1)
            if data["data"]["emergency"] != expect_emergency:
                ok = False
                break
            common.write_log(log_path, test_project, "info", data)
        if ok:
            pass_num += 1
        else:
            # Also fixes the original's deepest check, which had no else
            # branch and silently dropped that failure from the counts.
            common.write_log(log_path, test_project, "error", "No Data...")
            state = False
            fail_num += 1
    return {
        test_project: {
            "state": state,
            "count_num": count_num,
            "pass_num": pass_num,
            "fail_num": fail_num
        }
    }
def extract2(html):
    """Parse a 51job-style job-posting page into a flat dict of fields.

    Python 2 code. Each field is scraped best-effort: a missing element
    falls back to an empty default via the surrounding try/except.
    """
    soup = BeautifulSoup(html, 'html.parser')
    try:
        job_name = soup.find('h1').get('title')
    except:
        job_name = ''
    try:
        job_year = soup.find('em', {'class': 'i1'}).next_element
    except:
        job_year = 0
    try:
        job_degree = soup.find('em', {'class': 'i2'}).next_element
    except:
        job_degree = ''
    try:
        job_date = soup.find('em', {'class': 'i4'}).next_element
    except:
        job_date = ''
    try:
        company_inurl = soup.find('p', {'class': 'cname'}).a.get('href')
        job_company = soup.find('p', {'class': 'cname'}).a.get('title')
    except:
        company_inurl = ''
        job_company = ''
    # Company id is the "co<digits>" token inside the company page URL.
    p = re.compile('co\d+')
    aa = re.search(p, company_inurl)
    if aa:
        company_id = aa.group()
    else:
        company_id = ''
    print job_year, job_name, job_degree
    # Salary text like "8-10k" -> first two numbers are the low/high bounds.
    try:
        job_salary = soup.find('div', {'class': 'cn'}).strong.get_text()
        job_l = re.findall(r'\d+', job_salary)
        job_low = job_l[0]
        job_high = job_l[1]
    except:
        job_low = ''
        job_high = ''
    print job_low, job_high
    try:
        job_addr = soup.find('div', {'class': 'bmsg inbox'}).p.contents[2].strip()
    except:
        job_addr = ''
    print job_addr
    # Description block; everything before the "job category" label
    # (the split marker below is the Chinese for "job category:").
    job_str = soup.find('div', {'class': 'bmsg job_msg inbox'}).contents[2]
    job_describe = job_str.get_text().split(u'职能类别:')[0]
    job_input = soup.find_all('div', {'class': 'tBorderTop_box'})
    # Pull contact details from the description first, then fall back to
    # the company-info block when the description has none.
    phone = common.get_phone(job_describe)
    email = common.get_email(job_describe)
    company_info = soup.find('div', {'class': 'tmsg inbox'}).get_text()
    company_url = common.get_url(job_describe)
    if len(phone) == 0:
        phone = common.get_phone(company_info)
    if len(email) == 0:
        email = common.get_email(company_info)
    if len(company_url) == 0:
        company_url = common.get_url(company_info)
    try:
        job_type = job_str.find('p', {'class': 'fp f2'}).find_all('span', {'class': 'el'})
        job_type = ','.join([i.get_text() for i in job_type])
    except:
        job_type = ''
    job_area = soup.find('span', {'class': 'lname'}).get_text()
    print job_area
    job_dict = {'job_name':job_name, 'job_addr': job_addr, 'job_low': job_low, 'job_high': job_high, 'job_des': job_describe, 'job_type': job_type, 'job_area': job_area, 'company_id': company_id, 'job_year': job_year, 'job_degree': job_degree, 'email': email, 'phone': phone, 'job_date': job_date, 'company_inurl': company_inurl, 'company_url': company_url, 'job_company': job_company}
    return job_dict
def run(log_path):
    """Drive the relay DO word through 0..3 for 75 cycles, checking that
    DI feedback bits 4 and 5 track the commanded pattern.

    Returns the standard per-project result dict.
    """
    test_project = "relay"
    log_content = test_project + " test start"
    common.write_log(log_path, test_project, "info", log_content)
    state = True
    count_num = 75
    pass_num = 0
    fail_num = 0
    url_relay = common.get_url("/gs-robot/cmd/operate_device")
    # Fixed: the original concatenated "http://" + host_ip + ":" + host_ip,
    # repeating the IP where the port belongs; use the shared helper like
    # the sibling tests.
    url_di = common.get_url("/gs-robot/data/device_status")
    # (DO value to command, expect DI bit 4 set, expect DI bit 5 set)
    steps = [("0", False, False),
             ("1", True, False),
             ("2", False, True),
             ("3", True, True)]
    for i in range(count_num):
        ok = True
        for do_value, want4, want5 in steps:
            value = [{"name": "do", "type": "int",
                      "value": do_value, "delayTime": 0}]
            common.write_log(log_path, test_project, "info", url_relay)
            common.write_log(log_path, test_project, "info", str(value))
            data = common.postUrlData(url_relay, value)
            time.sleep(1)
            if not data["successed"]:
                ok = False
                break
            common.write_log(log_path, test_project, "info", str(data))
            common.write_log(log_path, test_project, "info", url_di)
            data = common.getUrlData(url_di)
            time.sleep(1)
            di_value = int(data["data"]["detailedDi"])
            # check_value() decomposes the DI word into set-bit positions.
            one_list = []
            common.check_value(di_value, one_list)
            if (4 in one_list) != want4 or (5 in one_list) != want5:
                ok = False
                break
            common.write_log(log_path, test_project, "info", str(data))
        if ok:
            pass_num += 1
        else:
            common.write_log(log_path, test_project, "error", str(data))
            state = False
            fail_num += 1
        time.sleep(1)
    return {
        test_project: {
            "state": state,
            "count_num": count_num,
            "pass_num": pass_num,
            "fail_num": fail_num
        }
    }
from pprint import pprint

import pandas as pd

from common import (get_url, fetch_data)

# KOBIS (Korean Film Council) open-API REST endpoint: single-movie details.
url = ('http://www.kobis.or.kr/kobisopenapi/webservice/rest/movie/'
       'searchMovieInfo.json')

# "The Admiral: Roaring Currents" (명량): movie code 20129370
params = dict(movieCd=20129370)

url0 = get_url(url, params)
data = fetch_data(url0)
pprint(data)

'''
movieListResult/movieList
'''
import sys
sys.path.append('..')
from common import save_page, get_url, is_subtitle, has_pdf, namify, get_title, has_italic, unspace, is_centered, has_bold
from common import remove_parenthesised
import re
import csv
import spacy
from urllib.parse import urljoin
from bs4 import BeautifulSoup

# save webpage
# Derive the dataset name from this script's own filename,
# e.g. "scrape_<name>.py" -> "<name>".
FILE_NAME = sys.argv[0]
FILE_NAME = FILE_NAME.split("_")[1].replace(".py", "")
print(FILE_NAME)
# This variant targets the second conference page of the listing.
URL = get_url(FILE_NAME, conference_page=2)
save_page(URL, FILE_NAME)  # Uncomment the first time you run this script

# Parse the locally saved page.
with open(FILE_NAME, 'rb') as g:
    html = g.read()
bs = BeautifulSoup(html, "html.parser")
page_lines = bs.find_all(['p'])
nlp = spacy.load("en_core_web_sm")
# Running parse state while walking the page's paragraphs.
track = None
vol = None
with open(FILE_NAME + ".tsv", 'w') as f:
    writer = csv.writer(f, delimiter='\t', quotechar='"',
                        quoting=csv.QUOTE_MINIMAL)
def _init_handwich_bridge():
    """Start the handwich bridge if needed, wait for it to answer, then
    make sure the configured core is loaded into it.

    Raises Exception when the bridge never comes up or the core cannot be
    loaded/verified.
    """
    c = HANDWICH_BRIDGE_CONFIG
    ip = c['ip']
    port = c['port']
    key = c['key']
    # Launch the bridge process only when its port is not already serving.
    if not is_open(ip, port):
        argv = [
            _get_rel_path(BIN_ADL),
            _get_rel_path(HANDWICH_BRIDGE_BIN),
            '--'
        ]
        argv += ['--ip', str(ip), '--port', str(port)]
        # Fixed: compare to None with `is not`, not `!=` (three sites).
        if key is not None:
            argv += ['--key', str(key)]
        logging.debug(' start handwich_bridge --> ' + str(argv))
        subprocess.Popen(argv, shell=False, close_fds=True)
    # wait and check bridge started successfully (backoff: 1s, 2s, 3s)
    init_ok = False
    for i in range(3):
        if not is_open(ip, port):
            time.sleep(i + 1)
            continue
        url = _make_handwich_base_url() + 'version'
        if key is not None:
            url += '?key=' + str(key)
        try:
            info = get_url(url, allow_cache=False, use_pool=False)
            logging.debug('handwich_bridge version: ' + info)
            init_ok = True
            break
        except Exception as e:
            logging.warning(e)
            time.sleep(i + 1)
    if not init_ok:
        raise Exception('start handwich_bridge failed')
    # check core loaded and load core
    l = LOAD_CORE
    c_id = l['id']
    c_path = os.path.abspath(_get_rel_path(l['path']))

    def check_core_loaded():
        # True when the core answers its 'about' call with ['ret', ...].
        core_about_url = _make_call_core_url(c_id, 'about')
        text = get_url(core_about_url, allow_cache=False, use_pool=False)
        logging.debug("core_about raw return:" + text)
        info = json.loads(text)
        if info[0] != 'ret':
            logging.debug('core not loaded, ' + str(info))
            return False
        logging.debug('core ' + str(c_id) + ', ' + str(info[1]))
        return True

    if not check_core_loaded():
        load_core_url = _make_handwich_base_url() + 'load_core?id=' + str(c_id)
        if c['key'] is not None:
            load_core_url += '&key=' + str(c['key'])
        load_core_url += '&path=' + urllib.parse.quote(c_path)
        info = json.loads(
            get_url(load_core_url, allow_cache=False, use_pool=False))
        if info[0] == 'done':
            logging.debug('core loaded, ' + str(info))
        else:
            raise Exception('can not load core', info)
    # Final sanity check that the core really is loaded now.
    if not check_core_loaded():
        raise Exception('core not loaded')
def get_servers():
    """Fetch the FTS3 pilot monitoring server statistics, decoded from JSON."""
    raw = get_url(
        'https://fts3-pilot.cern.ch:8449/fts3/ftsmon/stats/servers')
    return json.loads(raw)
#!/usr/bin/env python from common import create_client, link, wait_done, get_url client = create_client('http://localhost:8080') print 'Creating logstash' logstash = client.create_container(name='logstash', imageUuid='docker:ibuildthecloud/logstash') print 'Creating Kibana' kibana = client.create_container(name='kibana', imageUuid='docker:ibuildthecloud/kibana', environment=link(es=logstash)) print 'Creating MySQL' db = client.create_container(name='mysql', imageUuid='docker:ibuildthecloud/mysql') print 'Creating dStack' dstack = client.create_container(name='dstack', imageUuid='docker:ibuildthecloud/dstack', environment=link(mysql=db, gelf=logstash)) dstack = wait_done(dstack) print 'phpMyAdmin running at {}/phpmyadmin'.format(get_url(db, '80/tcp')) print 'Kibana running at', get_url(kibana, '80/tcp') print 'dStack running at', get_url(dstack, '8080/tcp')
def run(log_path):
    """Cycle the charger stop -> start -> stop for 100 iterations, checking
    the DI feedback after each command.

    Expected DI feedback: bit 1 set (bit 2 clear) while stopped, bit 2 set
    (bit 1 clear) while charging, and a raw DI word of exactly 1 after the
    final stop. Returns the standard per-project result dict.
    """
    test_project = "charge"
    log_content = test_project + " test start"
    common.write_log(log_path, test_project, "info", log_content)
    state = True
    count_num = 100
    pass_num = 0
    fail_num = 0
    # Fixed: the original built this URL as "http://" + host_ip + ":" +
    # host_ip + path, repeating the IP where the port belongs; use the
    # shared helper like the other command URLs below.
    url_di = common.get_url("/gs-robot/data/device_status")
    url_auto = common.get_url("/gs-robot/cmd/start_charge")
    url_hand = common.get_url("/gs-robot/cmd/stop_charge")
    for i in range(count_num):
        common.write_log(log_path, test_project, "info", url_hand)
        data = common.getUrlData(url_hand)
        time.sleep(1)
        if data["successed"]:
            common.write_log(log_path, test_project, "info", url_di)
            data = common.getUrlData(url_di)
            time.sleep(1)
            di_value = int(data["data"]["detailedDi"])
            # check_value() decomposes the DI word into set-bit positions.
            one_list = []
            common.check_value(di_value, one_list)
            # Stopped (manual) state: bit 1 set, bit 2 clear.
            if 1 in one_list and 2 not in one_list:
                common.write_log(log_path, test_project, "info", str(data))
                common.write_log(log_path, test_project, "info", url_auto)
                data = common.getUrlData(url_auto)
                time.sleep(1)
                if data["successed"]:
                    common.write_log(log_path, test_project, "info", str(data))
                    common.write_log(log_path, test_project, "info", url_di)
                    data = common.getUrlData(url_di)
                    time.sleep(1)
                    di_value = int(data["data"]["detailedDi"])
                    one_list = []
                    common.check_value(di_value, one_list)
                    # Charging (auto) state: bit 2 set, bit 1 clear.
                    if 2 in one_list and 1 not in one_list:
                        common.write_log(log_path, test_project, "info", str(data))
                        common.write_log(log_path, test_project, "info", url_hand)
                        data = common.getUrlData(url_hand)
                        time.sleep(1)
                        if data["successed"]:
                            common.write_log(log_path, test_project, "info", str(data))
                            common.write_log(log_path, test_project, "info", url_di)
                            data = common.getUrlData(url_di)
                            time.sleep(1)
                            di_value = int(data["data"]["detailedDi"])
                            # Back to stopped: raw DI word must be exactly 1.
                            if di_value == 1:
                                common.write_log(log_path, test_project, "info", str(data))
                                pass_num += 1
                            else:
                                common.write_log(log_path, test_project, "error", str(data))
                                state = False
                                fail_num += 1
                        else:
                            common.write_log(log_path, test_project, "error", str(data))
                            state = False
                            fail_num += 1
                    else:
                        common.write_log(log_path, test_project, "error", str(data))
                        state = False
                        fail_num += 1
                else:
                    common.write_log(log_path, test_project, "error", str(data))
                    state = False
                    fail_num += 1
            else:
                common.write_log(log_path, test_project, "error", str(data))
                state = False
                fail_num += 1
        else:
            common.write_log(log_path, test_project, "error", str(data))
            state = False
            fail_num += 1
    return {
        test_project: {
            "state": state,
            "count_num": count_num,
            "pass_num": pass_num,
            "fail_num": fail_num
        }
    }