def test_emcc_multiprocess_cache_access(self):
    """Verify that several emcc invocations sharing one EM_CACHE build libc exactly once.

    Three builds are run with EM_CACHE pointing at a fresh directory; only the
    first should emit the 'generating system library' message, and the cache
    directory must contain the built libc afterwards.
    """
    restore_and_set_up()
    create_file('test.c', r'''
        #include <stdio.h>
        int main() {
          printf("hello, world!\n");
          return 0;
        }
      ''')
    cache_dir_name = self.in_dir('test_cache')
    libname = Cache.get_lib_name('libc.a')
    with env_modify({'EM_CACHE': cache_dir_name}):
        tasks = []
        num_times_libc_was_built = 0
        for i in range(3):
            # NOTE(review): run_process blocks until the child exits, so the three
            # builds actually run sequentially here despite the test's name —
            # confirm this still exercises the concurrent cache-locking path.
            p = self.run_process([EMCC, 'test.c', '-o', '%d.js' % i], stderr=STDOUT, stdout=PIPE)
            tasks += [p]
        for p in tasks:
            print('stdout:\n', p.stdout)
            if 'generating system library: ' + libname in p.stdout:
                num_times_libc_was_built += 1
    # The cache directory must exist after the build
    self.assertExists(cache_dir_name)
    # The cache directory must contain a built libc
    self.assertExists(os.path.join(cache_dir_name, libname))
    # Exactly one child process should have triggered libc build!
    self.assertEqual(num_times_libc_was_built, 1)
def test_emcc_cache_flag(self, use_response_files, relative):
    """Check that `--cache <dir>` creates and populates the cache directory.

    The flag is passed either directly on the command line or through an
    @response-file, and the cache path may be relative or absolute.
    """
    restore_and_set_up()
    cache_dir_name = 'emscripten_cache' if relative else self.in_dir('emscripten_cache')
    self.assertFalse(os.path.exists(cache_dir_name))
    create_file('test.c', r'''
        #include <stdio.h>
        int main() {
          printf("hello, world!\n");
          return 0;
        }
      ''')
    compile_args = ['--cache', cache_dir_name]
    if use_response_files:
        rsp = response_file.create_response_file(compile_args, shared.TEMP_DIR)
        compile_args = ['@' + rsp]
    self.run_process([EMCC, 'test.c'] + compile_args, stderr=PIPE)
    if use_response_files:
        os.remove(rsp)
    # A successful build must have created the cache dir and a sysroot inside it.
    self.assertTrue(os.path.exists(cache_dir_name))
    self.assertTrue(os.path.exists(os.path.join(cache_dir_name, 'sysroot')))
def configure():
    """Write docker HTTP(S) proxy drop-in unit files and restart the service.

    Reads proxy settings from ``apps['docker']``. When ``use_proxy`` is falsy
    the Environment lines are written commented out, so the files stay in
    place but the proxy is disabled.

    Returns:
        False when docker is not installed; otherwise None.
    """
    if not common.is_cmd_installed("docker"):
        common.msg("System", "docker is not installed", "warn")
        return False
    common.msg("Perform ", "docker config")
    proxy_dir = '/etc/systemd/system/docker.service.d/'
    http_proxy_file = 'http-proxy.conf'
    https_proxy_file = 'https-proxy.conf'
    proxy = apps['docker']
    # A leading '#' disables the Environment line without removing the file.
    comment = "" if proxy['use_proxy'] else "#"
    # Data
    http_proxy_content = """[Service]
{1}Environment="HTTP_PROXY={0}"
""".format(proxy['http_proxy_target'], comment)
    https_proxy_content = """[Service]
{1}Environment="HTTPS_PROXY={0}"
""".format(proxy['https_proxy_target'], comment)
    # action
    common.create_dir(proxy_dir)
    common.create_file(proxy_dir, http_proxy_file, http_proxy_content)
    common.create_file(proxy_dir, https_proxy_file, https_proxy_content)
    # BUG FIX: systemd must reload unit definitions *before* the service is
    # restarted, otherwise the restart still runs with the old (pre-drop-in)
    # configuration. The original called restart first, then daemon-reload.
    call(["systemctl", "daemon-reload"])
    call(["service", "docker", "restart"])
def test_sockets_echo_bigdata(self):
    """Echo a ~128 KiB message through compiled (TCP and UDP) and websockify echo servers."""
    sockets_include = '-I' + test_file('sockets')

    # generate a large string literal to use as our message
    # (''.join avoids the quadratic cost of repeated `message +=` concatenation)
    message = ''.join(chr(ord('a') + (i % 26)) for i in range(256 * 256 * 2))

    # re-write the client test with this literal (it's too big to pass via command line)
    src = read_file(test_file('sockets/test_sockets_echo_client.c'))
    create_file('test_sockets_echo_bigdata.c',
                src.replace('#define MESSAGE "pingtothepong"', '#define MESSAGE "%s"' % message))

    harnesses = [
        (CompiledServerHarness(test_file('sockets/test_sockets_echo_server.c'), ['-DTEST_DGRAM=0'], 49172), 0),
        (CompiledServerHarness(test_file('sockets/test_sockets_echo_server.c'), ['-DTEST_DGRAM=1'], 49173), 1),
    ]
    if not common.EMTEST_LACKS_NATIVE_CLANG:
        harnesses += [(WebsockifyServerHarness(test_file('sockets/test_sockets_echo_server.c'), [], 49171), 0)]

    for harness, datagram in harnesses:
        with harness:
            self.btest_exit('test_sockets_echo_bigdata.c',
                            args=[sockets_include,
                                  '-DSOCKK=%d' % harness.listen_port,
                                  '-DTEST_DGRAM=%d' % datagram])
def down_pic_include_child(down_path):
    """Download all images listed in local .txt URL files into down_path.

    Each .txt file (found by FileTool.get_file_list) contains one page URL per
    line; every line is resolved to a list of image URLs, which are downloaded
    into a per-title subdirectory. Processed .txt files are deleted.

    NOTE(review): relies on module-level globals (cur_dir, cur_month, pre_url)
    and changes the process working directory via os.chdir — confirm callers
    tolerate that side effect.
    """
    file_name_list = FileTool.get_file_list('txt')
    # down_path = down_param['down_file_path']
    for index, file_name in enumerate(file_name_list, 1):
        # print('down the %i file : %s' % (index, file_name))
        print('读取第 %i 个文件 : %s' % (index, file_name))
        # Open the text file listing one page URL per line
        with open(file_name) as file_obj:
            for num, value in enumerate(file_obj, 1):
                line = value.strip('\n')
                if line == '':
                    # skip blank lines
                    print('当前行为空:%i line' % num)
                    continue
                # print('the %i line: -%s- ' % (num, line), end=';')
                print('第 %i 行: -%s- ' % (num, line), end=';')
                # Resolve this page URL to (image-url list, page title)
                # child_img_url = get_img_child_url(line, pre_url)
                url_list = get_img_url_list(line)
                img_urls = url_list[0]
                # img_urls.extend(child_img_url)
                total = len(img_urls)
                # print('duplicate removal image num: %i ' % total)
                print('去重后图片数量: %i ' % total)
                new_title = url_list[1]
                # Record every processed line in the done-log (written in cur_dir)
                os.chdir(cur_dir)
                write_to_done_log(line, new_title)
                if len(img_urls) < 2:
                    # Too few images: treat as not-downloaded and log it instead
                    os.chdir(cur_dir)
                    save_not_down_url(line, new_title, num)
                else:
                    # One subdirectory per page title, grouped by month
                    path = down_path + cur_month + str(new_title.strip()) + '/'
                    common.create_file(path)
                    os.chdir(path)
                    for i in range(0, len(img_urls)):
                        file_url = img_urls[i].get('file')
                        # print(file_url)
                        # fileUrl = file_url.replace('http://pic.w26.rocks/', pre_url)
                        if not file_url.startswith('http'):
                            # Relative URL: prefix with the site base URL
                            # if not ('http://' in file_url or 'https://' in file_url):
                            print('in:' + file_url)
                            file_url = pre_url + file_url
                        image_name = file_url.split("/")[-1]
                        # print(file_url)
                        if not os.path.exists(image_name):
                            # Only download images not already on disk
                            # print('the %i line:the %i / %i ge: %s' % (num, i + 1, total, file_url), end=';')
                            print('第 %i 行:第 %i / %i 个: %s' % (num, i + 1, total, file_url), end=';')
                            common.down_img(file_url)
                        # else:
                        # print('the %i line:the %i / %i is exist: %s' % (num, i + 1, total, file_url))
                        # print('第 %i 行:第 %i / % i个已存在: %s' % (num, i + 1, total, file_url))
                # print("-----down over----------------")
                print('第 %i 行: %s 下载完毕,开始下载下一行文件 ' % (num, line))
        # All lines of this .txt processed: remove it so reruns skip it
        print('第 %i 个文件: %s 下载完毕,开始删除...'
              % (index, file_name))
        os.remove(file_name)
        print('第 %i 个文件: %s 删除成功,开始读取下一个文件' % (index, file_name))
    print("----------------所有文件下载完毕-------------------")
def work4(self, UURL):
    """Fetch today's futures-trading page at UURL and print each HTML table's first column header.

    Also saves the raw page source to margin.txt via common.create_file.
    """
    # NOTE(review): PhantomJS is deprecated in recent selenium releases —
    # consider a headless Chrome/Firefox driver.
    browser = webdriver.PhantomJS()
    try:
        browser.get(UURL)
        res = browser.page_source  # res is an HTML string
        common.create_file("margin.txt", res)
        # BUG FIX: the original called res.to_csv("TEST.CSV") — strings have no
        # to_csv, so the method raised AttributeError unconditionally.
        # BUG FIX: the original passed 0 as read_html's second positional arg,
        # which is `match`, not `header`; the inline comment showed header=0
        # was intended.
        dfs = pd.read_html(res, header=0)  # skiprows omitted (optional)
        for ii in range(len(dfs)):
            print("××××", ii, dfs[ii].columns[0])
    finally:
        # Always shut the browser down to avoid leaking the driver process.
        browser.quit()
def down_all_pic(down_param):
    """Download all images referenced by local .txt URL files into down_param/<month>/.

    Mirrors down_pic_include_child but creates the month directory up front and
    writes the done-log only after a line's downloads finish.

    NOTE(review): uses module-level globals (cur_dir, cur_month, pre_url) and
    os.chdir side effects — confirm callers tolerate the cwd changes.
    """
    path_ = down_param + cur_month
    if not (os.path.exists(path_)):
        os.makedirs(path_)
    file_name_list = FileTool.get_file_list('txt')
    for index, file_name in enumerate(file_name_list, 1):
        # print('down the %i file: %s' % (index, file_name))
        print('读取第 %i 个文件: %s' % (index, file_name))
        # Open the text file listing one page URL per line
        with open(file_name) as file_obj:
            for num, value in enumerate(file_obj, 1):
                line = value.strip('\n')
                if line == '':
                    # skip blank lines
                    print('当前行为空:%i line' % num)
                    continue
                # print('the %i line: -%s- ' % (num, line), end=' ;')
                print('第 %i 行: -%s- ' % (num, line), end=' ;')
                # Resolve this page URL to (image-url list, page title)
                url_list = get_img_url_list(line)
                img_urls = url_list[0]
                # print(' image num: %i ' % l)
                print(' 图片数量: %i ' % len(img_urls))
                new_title = url_list[1]
                if len(img_urls) < 2:
                    # Too few images: log as not-downloaded instead
                    os.chdir(cur_dir)
                    save_not_down_url(line, new_title, num)
                else:
                    # One subdirectory per page title under the month directory
                    path = path_ + str(new_title.strip()) + os.sep
                    common.create_file(path)
                    os.chdir(path)
                    for i in range(0, len(img_urls)):
                        file_url = img_urls[i].get('file')
                        # if not ('http://' in file_url or 'https://' in file_url):
                        if not file_url.startswith('http'):
                            # Relative URL: prefix with the site base URL
                            print('in:' + file_url)
                            file_url = pre_url + file_url
                        # fileUrl = file_url.replace('http://pic.w26.rocks/', pre_url)
                        image_name = file_url.split("/")[-1]
                        if not os.path.exists(image_name):
                            # Only download images not already on disk
                            # print('the %i line:the %i / %i ge : %s' % (num, i + 1, l, file_url), end=' ;')
                            print('第 %i 行:第 %i / %i 个 : %s' % (num, i + 1, len(img_urls), file_url), end=' ;')
                            common.down_img(file_url)
                # print("-----down over----------------")
                print('第 %i 行: %s 下载完毕 ' % (num, line))
                # Record the finished line in the done-log (written in cur_dir)
                os.chdir(cur_dir)
                write_to_done_log(line, new_title)
        # All lines of this .txt processed: remove it so reruns skip it
        print('第 %i 个文件: %s 下载完毕,开始删除...' % (index, file_name))
        os.remove(file_name)
        print('第 %i 个文件: %s 删除成功,开始读取下一个文件' % (index, file_name), end=";")
    # print("down all over----------------start delete old undown text-------------------")
    print("---------------- 所有文件下载完毕 -------------------")
def test_nodejs_sockets_echo(self):
    """Run the echo client under Node.js against compiled and websockified servers.

    Covers TCP and UDP compiled servers, an optional websockify server, and —
    when native clang is available — both compile-time (subprotocol flag) and
    runtime (Module.websocket pre-js) WebSocket configuration.
    """
    # This test checks that sockets work when the client code is run in Node.js
    if config.NODE_JS not in config.JS_ENGINES:
        self.skipTest('node is not present')
    harnesses = [
        (CompiledServerHarness(test_file('sockets/test_sockets_echo_server.c'), ['-DTEST_DGRAM=0'], 59162), 0),
        (CompiledServerHarness(test_file('sockets/test_sockets_echo_server.c'), ['-DTEST_DGRAM=1'], 59164), 1)
    ]
    if not common.EMTEST_LACKS_NATIVE_CLANG:
        harnesses += [(WebsockifyServerHarness(test_file('sockets/test_sockets_echo_server.c'), [], 59160), 0)]
    # Basic test of node client against both a Websockified and compiled echo server.
    for harness, datagram in harnesses:
        with harness:
            expected = 'do_msg_read: read 14 bytes'
            self.do_runf(test_file('sockets/test_sockets_echo_client.c'), expected,
                         emcc_args=['-DSOCKK=%d' % harness.listen_port, '-DTEST_DGRAM=%d' % datagram])
    if not common.EMTEST_LACKS_NATIVE_CLANG:
        # Test against a Websockified server with compile time configured WebSocket subprotocol. We use a Websockified
        # server because as long as the subprotocol list contains binary it will configure itself to accept binary
        # This test also checks that the connect url contains the correct subprotocols.
        print("\nTesting compile time WebSocket configuration.\n")
        with WebsockifyServerHarness(test_file('sockets/test_sockets_echo_server.c'), [], 59166):
            self.run_process([EMCC, '-Werror', test_file('sockets/test_sockets_echo_client.c'), '-o', 'client.js',
                              '-sSOCKET_DEBUG', '-sWEBSOCKET_SUBPROTOCOL="base64, binary"', '-DSOCKK=59166'])
            out = self.run_js('client.js')
            self.assertContained('do_msg_read: read 14 bytes', out)
            self.assertContained(['connect: ws://127.0.0.1:59166, base64,binary',
                                  'connect: ws://127.0.0.1:59166/, base64,binary'], out)
        # Test against a Websockified server with runtime WebSocket configuration. We specify both url and subprotocol.
        # In this test we have *deliberately* used the wrong port '-DSOCKK=12345' to configure the echo_client.c, so
        # the connection would fail without us specifying a valid WebSocket URL in the configuration.
        print("\nTesting runtime WebSocket configuration.\n")
        create_file('websocket_pre.js', '''
          var Module = {
            websocket: {
              url: 'ws://localhost:59168/testA/testB',
              subprotocol: 'text, base64, binary',
            }
          };
        ''')
        with WebsockifyServerHarness(test_file('sockets/test_sockets_echo_server.c'), [], 59168) as harness:
            self.run_process([EMCC, '-Werror', test_file('sockets/test_sockets_echo_client.c'), '-o', 'client.js',
                              '--pre-js=websocket_pre.js', '-sSOCKET_DEBUG', '-DSOCKK=12345'])
            out = self.run_js('client.js')
            self.assertContained('do_msg_read: read 14 bytes', out)
            self.assertContained('connect: ws://localhost:59168/testA/testB, text,base64,binary', out)
def test_em_config_env_var(self):
    """Inline config data in the EM_CONFIG env var must be rejected with a clear error."""
    # emcc should be configurable directly from EM_CONFIG without any config file
    restore_and_set_up()
    create_file('main.cpp', '''
      #include <stdio.h>
      int main() {
        printf("hello from emcc with no config file\\n");
        return 0;
      }
    ''')
    wipe()
    with env_modify({'EM_CONFIG': get_basic_config()}):
        stderr_out = self.expect_fail([EMCC, 'main.cpp', '-Wno-deprecated', '-o', 'a.out.js'])
    self.assertContained('error: Inline EM_CONFIG data no longer supported. Please use a config file.', stderr_out)
def test_nodejs_sockets_echo_subprotocol(self):
    """Check WebSocket subprotocol handling for the Node.js echo client.

    First with a compile-time subprotocol flag, then with runtime
    Module.websocket configuration overriding a deliberately wrong port.
    """
    # Test against a Websockified server with compile time configured WebSocket subprotocol. We use a Websockified
    # server because as long as the subprotocol list contains binary it will configure itself to accept binary
    # This test also checks that the connect url contains the correct subprotocols.
    with WebsockifyServerHarness(test_file('sockets/test_sockets_echo_server.c'), [], 59166):
        self.run_process([
            EMCC, '-Werror', test_file('sockets/test_sockets_echo_client.c'), '-o', 'client.js',
            '-sSOCKET_DEBUG', '-sWEBSOCKET_SUBPROTOCOL="base64, binary"', '-DSOCKK=59166'
        ])
        out = self.run_js('client.js')
        self.assertContained('do_msg_read: read 14 bytes', out)
        self.assertContained([
            'connect: ws://127.0.0.1:59166, base64,binary',
            'connect: ws://127.0.0.1:59166/, base64,binary'
        ], out)
    # Test against a Websockified server with runtime WebSocket configuration. We specify both url and subprotocol.
    # In this test we have *deliberately* used the wrong port '-DSOCKK=12345' to configure the echo_client.c, so
    # the connection would fail without us specifying a valid WebSocket URL in the configuration.
    print("\nTesting runtime WebSocket configuration.\n")
    create_file('websocket_pre.js', '''
      var Module = {
        websocket: {
          url: 'ws://localhost:59168/testA/testB',
          subprotocol: 'text, base64, binary',
        }
      };
    ''')
    with WebsockifyServerHarness(test_file('sockets/test_sockets_echo_server.c'), [], 59168):
        self.run_process([
            EMCC, '-Werror', test_file('sockets/test_sockets_echo_client.c'), '-o', 'client.js',
            '--pre-js=websocket_pre.js', '-sSOCKET_DEBUG', '-DSOCKK=12345'
        ])
        out = self.run_js('client.js')
        self.assertContained('do_msg_read: read 14 bytes', out)
        self.assertContained('connect: ws://localhost:59168/testA/testB, text,base64,binary', out)
def work3(self):
    """Scratch/debug routine for scraping today's futures data into the DB.

    NOTE(review): the unconditional exit() after printing test.txt makes the
    entire remainder of this method unreachable; if the tail is re-enabled,
    `res` is undefined at pd.read_html(res, ...) (the assignment is commented
    out) and exit(9) would stop execution before it anyway. Confirm intent
    before relying on anything past the first exit().
    """
    allLines = open("test.txt",encoding="utf-8").read()
    print(allLines)
    exit()
    # f = open("TEST.CSV", 'r')
    # dataReader = csv.reader(f)
    # for row in dataReader:
    #     print(row)
    # exit()
    # exit()
    # check for newly added rows
    # for row in dataReader:
    # today's futures trading information
    table_name = "futures"
    UURL = "https://www.traders.co.jp/domestic_stocks/invest_tool/futures/futures_top.asp"
    # res = common.Chorme_get(UURL)
    browser = webdriver.PhantomJS()
    # open the target page in the headless browser
    browser.get(UURL)
    common.create_file("test.txt", browser.page_source)
    exit(9)
    # unreachable below: parse the tables and locate the broker-activity one
    dfs = pd.read_html(res, header=0)
    for ii in range(len(dfs)):
        print("XXX",ii,dfs[ii].columns[0])
        if dfs[ii].columns[0] == "先物・手口情報":
            num = ii +1
            break
    dfs[num].to_csv("TEST.CSV")
    common.create_file("test.txt",dfs[num])
    # rename the columns to the DB schema
    CC = ['証券会社名', 'SELL_225', 'BUY_225', 'NET_225', '日付', 'SELL_TOPIX','BUY_TOPIX', 'NET_TOPIX', '更新日', 'SELL_225M', 'BUY_225M', 'NET_225M']
    col_name = {}
    col_name = {dfs[num].columns[c]: CC[c] for c in range(len(dfs[num].columns))}
    dfs[num] = dfs[num].rename(columns=col_name)
    # import each row into the DB
    for idx, row in dfs[num].iterrows():
        dict_w = {}
        for ii in range(len(row)):
            dict_w[dfs[num].columns[ii]] = row[ii]
        dict_w['更新日'] = common.env_time()[1]
        dict_w['日付'] = common.env_time()[0][:8]
        common.insertDB3(DB_INFO, table_name, dict_w)
def test_sockets_echo_bigdata(self, harness_class, port, args):
    """Parameterized big-message echo test against the given server harness."""
    if harness_class == WebsockifyServerHarness and common.EMTEST_LACKS_NATIVE_CLANG:
        self.skipTest('requires native clang')

    sockets_include = '-I' + test_file('sockets')

    # generate a large string literal to use as our message
    # (''.join avoids the quadratic cost of repeated `message +=` concatenation)
    message = ''.join(chr(ord('a') + (i % 26)) for i in range(256 * 256 * 2))

    # re-write the client test with this literal (it's too big to pass via command line)
    src = read_file(test_file('sockets/test_sockets_echo_client.c'))
    create_file('test_sockets_echo_bigdata.c',
                src.replace('#define MESSAGE "pingtothepong"', '#define MESSAGE "%s"' % message))

    with harness_class(test_file('sockets/test_sockets_echo_server.c'), args, port) as harness:
        self.btest_exit('test_sockets_echo_bigdata.c',
                        args=[sockets_include, '-DSOCKK=%d' % harness.listen_port] + args)
def test_sockets_echo_bigdata(self):
    """Echo a ~128 KiB message through compiled (TCP and UDP) and websockify echo servers."""
    sockets_include = '-I' + test_file('sockets')

    # generate a large string literal to use as our message
    # (''.join avoids the quadratic cost of repeated `message +=` concatenation)
    message = ''.join(chr(ord('a') + (i % 26)) for i in range(256 * 256 * 2))

    # re-write the client test with this literal (it's too big to pass via command line)
    # (local renamed from `input`, which shadowed the builtin)
    input_filename = test_file('sockets', 'test_sockets_echo_client.c')
    src = read_file(input_filename)
    create_file('test_sockets_echo_bigdata.c',
                src.replace('#define MESSAGE "pingtothepong"', '#define MESSAGE "%s"' % message))

    harnesses = [
        (CompiledServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'),
                               [sockets_include, '-DTEST_DGRAM=0'], 49172), 0),
        (CompiledServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'),
                               [sockets_include, '-DTEST_DGRAM=1'], 49173), 1),
    ]
    if not WINDOWS:
        # TODO: Python pickling bug causes WebsockifyServerHarness to not work on Windows.
        harnesses += [(WebsockifyServerHarness(os.path.join('sockets', 'test_sockets_echo_server.c'),
                                               [sockets_include], 49171), 0)]

    for harness, datagram in harnesses:
        with harness:
            self.btest('test_sockets_echo_bigdata.c', expected='0',
                       args=[sockets_include,
                             '-DSOCKK=%d' % harness.listen_port,
                             '-DTEST_DGRAM=%d' % datagram])
def wishlist():
    """Return the wishlist list file from DATA_PATH (created by create_file if absent)."""
    wishlist_file = create_file(DATA_PATH, "wishlist.list")
    return wishlist_file
def subs_imdb_file():
    """Return the IMDB subscriptions list file from DATA_PATH (created if absent)."""
    subs_file = create_file(DATA_PATH, "subs_imdb.list")
    return subs_file
def plugin_list():
    """Return the plugins list file from DATA_PATH (created if absent)."""
    plugins_file = create_file(DATA_PATH, "plugins.list")
    return plugins_file
def authenticate_user(self, username, password, cookiedir, cookiefile):
    """Authenticate against the ViPR REST API and persist the auth token cookie.

    Performs the redirect-based login dance (apisvc port 8443) or the simpler
    load-balancer flow (port 4443), writes the X-SDS-AUTH-TOKEN to a cookie
    file under cookiedir, and records that file's path under
    VIPR_CLI_INSTALL_DIR/cookie/.

    Returns: a success message string.
    Raises: SOSError on any HTTP, validation, or file-system failure.

    NOTE(review): this code targets Python 2 (cookielib, unicode).
    """
    SEC_REDIRECT = 302
    SEC_AUTHTOKEN_HEADER = 'X-SDS-AUTH-TOKEN'
    LB_API_PORT = 4443    # Port on which load-balancer/reverse-proxy listens to all incoming requests for ViPR REST APIs
    APISVC_PORT = 8443    # Port on which apisvc listens to incoming requests
    cookiejar=cookielib.LWPCookieJar()
    url = 'https://'+str(self.__ipAddr)+':'+str(self.__port)+self.URI_AUTHENTICATION
    try:
        if(self.__port == APISVC_PORT):
            # apisvc flow: initial request must redirect (302) to the auth service
            login_response = requests.get(url, headers=self.HEADERS, verify=False,
                                          auth=(username,password), cookies=cookiejar,
                                          allow_redirects=False, timeout=common.TIMEOUT_SEC)
            if(login_response.status_code == SEC_REDIRECT):
                location = login_response.headers['Location']
                if(not location):
                    raise SOSError(SOSError.HTTP_ERR, "The redirect location of the authentication service is not provided")
                # Make the second request: unauthenticated probe must get 401
                login_response = requests.get(location, headers=self.HEADERS, verify=False,
                                              cookies=cookiejar, allow_redirects=False,
                                              timeout=common.TIMEOUT_SEC)
                if(not login_response.status_code == requests.codes['unauthorized']):
                    raise SOSError(SOSError.HTTP_ERR, "The authentication service failed to reply with 401")
                # Now provide the credentials; success is another 302 back to the service
                login_response = requests.get(location, headers=self.HEADERS, auth=(username,password),
                                              verify=False, cookies=cookiejar, allow_redirects=False,
                                              timeout=common.TIMEOUT_SEC)
                if(not login_response.status_code == SEC_REDIRECT):
                    raise SOSError(SOSError.HTTP_ERR, "Access forbidden: Authentication required")
                location = login_response.headers['Location']
                if(not location):
                    raise SOSError(SOSError.HTTP_ERR, "The authentication service failed to provide the location of the service URI when redirecting back")
                authToken = login_response.headers[SEC_AUTHTOKEN_HEADER]
                if (not authToken):
                    raise SOSError(SOSError.HTTP_ERR, "The token is not generated by authentication service")
                # Make the final call to get the page with the token
                # NOTE(review): newHeaders aliases (does not copy) self.HEADERS, so
                # the token is written into the shared headers dict — confirm intended.
                newHeaders = self.HEADERS
                newHeaders[SEC_AUTHTOKEN_HEADER] = authToken
                login_response = requests.get(location, headers=newHeaders, verify=False,
                                              cookies=cookiejar, allow_redirects=False,
                                              timeout=common.TIMEOUT_SEC)
                if(login_response.status_code != requests.codes['ok']):
                    raise SOSError(SOSError.HTTP_ERR,
                                   "Login failure code: " + str(login_response.status_code) + " Error: " + login_response.text)
        elif(self.__port == LB_API_PORT):
            # load-balancer flow: probe, then retry with credentials on 401
            login_response = requests.get(url, headers=self.HEADERS, verify=False,
                                          cookies=cookiejar, allow_redirects=False)
            if(login_response.status_code == requests.codes['unauthorized']):
                # Now provide the credentials
                login_response = requests.get(url, headers=self.HEADERS, auth=(username,password),
                                              verify=False, cookies=cookiejar, allow_redirects=False)
                authToken = login_response.headers[SEC_AUTHTOKEN_HEADER]
        else:
            raise SOSError(SOSError.HTTP_ERR,
                           "Incorrect port number. Load balanced port is: " + str(LB_API_PORT) +
                           ", api service port is: " + str(APISVC_PORT) + ".")
        # NOTE(review): authToken may be unbound here (e.g. apisvc branch when the
        # first response is not a 302) — this would raise NameError, not SOSError.
        if (not authToken):
            raise SOSError(SOSError.HTTP_ERR, "The token is not generated by authentication service")
        if (login_response.status_code != requests.codes['ok']):
            # Map common HTTP failures to human-readable messages
            error_msg=None
            if(login_response.status_code == 401):
                error_msg = "Access forbidden: Authentication required"
            elif(login_response.status_code == 403):
                error_msg = "Access forbidden: You don't have sufficient privileges to perform this operation"
            elif(login_response.status_code == 500):
                error_msg="Bourne internal server error"
            elif(login_response.status_code == 404):
                error_msg="Requested resource is currently unavailable"
            elif(login_response.status_code == 405):
                error_msg = "GET method is not supported by resource: " + url
            elif(login_response.status_code == 503):
                error_msg = "Service temporarily unavailable: The server is temporarily unable to service your request"
            else:
                error_msg=login_response.text
            if isinstance(error_msg, unicode):
                error_msg = error_msg.encode('utf-8')
            raise SOSError(SOSError.HTTP_ERR,
                           "HTTP code: " + str(login_response.status_code) +
                           ", response: " + str(login_response.reason) +
                           " [" + str(error_msg) + "]")
    except (SSLError, socket.error, ConnectionError, Timeout) as e:
        raise SOSError(SOSError.HTTP_ERR, str(e))
    # Build the per-platform cookie file path and the install-dir pointer path
    form_cookiefile= None
    parentshellpid = None
    installdir_cookie = None
    if sys.platform.startswith('linux'):
        # On linux the pid is appended so concurrent shells get distinct cookies
        parentshellpid = os.getpid()
        if(cookiefile is None):
            if (parentshellpid is not None):
                cookiefile=str(username)+'cookie'+str(parentshellpid)
            else:
                cookiefile=str(username)+'cookie'
        form_cookiefile = cookiedir+'/'+cookiefile
        if (parentshellpid is not None):
            installdir_cookie = '/cookie/'+str(parentshellpid)
        else:
            installdir_cookie = '/cookie/cookiefile'
    elif sys.platform.startswith('win'):
        if (cookiefile is None):
            cookiefile=str(username)+'cookie'
        form_cookiefile = cookiedir+'\\'+cookiefile
        installdir_cookie = '\\cookie\\cookiefile'
    else:
        if (cookiefile is None):
            cookiefile=str(username)+'cookie'
        form_cookiefile = cookiedir+'/'+cookiefile
        installdir_cookie = '/cookie/cookiefile'
    # Write the raw auth token into the cookie file
    try:
        if(common.create_file(form_cookiefile)):
            tokenFile = open(form_cookiefile , "w")
            if(tokenFile):
                tokenFile.write(authToken)
                tokenFile.close()
            else:
                raise SOSError(SOSError.NOT_FOUND_ERR,
                               " Failed to save the cookie file path " + form_cookiefile)
    except (OSError) as e:
        raise SOSError(e.errno, cookiedir +" "+e.strerror)
    except IOError as e:
        raise SOSError(e.errno , e.strerror)
    # Record the cookie file's absolute path under the CLI install directory
    if (common.create_file(form_cookiefile)):
        #cookiejar.save(form_cookiefile, ignore_discard=True, ignore_expires=True);
        sos_cli_install_dir= common.getenv('VIPR_CLI_INSTALL_DIR')
        if (sos_cli_install_dir):
            if (not os.path.isdir(sos_cli_install_dir)):
                raise SOSError(SOSError.NOT_FOUND_ERR,
                               sos_cli_install_dir+" : Not a directory")
            config_file = sos_cli_install_dir+installdir_cookie
            if (common.create_file(config_file)):
                fd = open(config_file,'w+')
                if (fd):
                    fd_content=os.path.abspath(form_cookiefile)+'\n'
                    fd.write(fd_content)
                    fd.close()
                    ret_val=username+' : Authenticated Successfully\n'+ \
                        form_cookiefile+' : Cookie saved successfully'
                else:
                    raise SOSError(SOSError.NOT_FOUND_ERR,
                                   config_file+" : Failed to save the cookie file path " + form_cookiefile)
            else:
                raise SOSError(SOSError.NOT_FOUND_ERR,
                               config_file+" : Failed to create file")
        else:
            raise SOSError(SOSError.NOT_FOUND_ERR,
                           "VIPR_CLI_INSTALL_DIR is not set. Please check viprcli.profile")
    return ret_val
def test_webrtc( self ):
    """Browser test of WebRTC sockets via a host page, a peer page, and a node p2p broker.

    NOTE(review): the unconditional skipTest below means everything after it
    never executes until WebRTC support is re-enabled.
    """
    # XXX see src/settings.js, this is disabled pending investigation
    self.skipTest('WebRTC support is not up to date.')
    host_src = 'webrtc_host.c'
    peer_src = 'webrtc_peer.c'
    host_outfile = 'host.html'
    peer_outfile = 'peer.html'
    # Copy host and peer sources into the test working directory
    host_filepath = test_file('sockets', host_src)
    temp_host_filepath = os.path.join(self.get_dir(), os.path.basename(host_src))
    host_src = read_file(host_filepath)
    with open(temp_host_filepath, 'w') as f:
        f.write(host_src)
    peer_filepath = test_file('sockets', peer_src)
    temp_peer_filepath = os.path.join(self.get_dir(), os.path.basename(peer_src))
    peer_src = read_file(peer_filepath)
    with open(temp_peer_filepath, 'w') as f:
        f.write(peer_src)
    # Host pre-js: on peer arrival, open the peer page and listen
    create_file('host_pre.js', '''
      var Module = {
        webrtc: {
          broker: 'http://localhost:8182',
          session: undefined,
          onpeer: function(peer, route) {
            window.open('http://localhost:8888/peer.html?' + route);
            // iframe = document.createElement("IFRAME");
            // iframe.setAttribute("src", "http://localhost:8888/peer.html?" + route);
            // iframe.style.display = "none";
            // document.body.appendChild(iframe);
            peer.listen();
          },
          onconnect: function(peer) {
          },
          ondisconnect: function(peer) {
          },
          onerror: function(error) {
            console.error(error);
          }
        },
        setStatus: function(text) {
          console.log('status: ' + text);
        }
      };
    ''')
    # Peer pre-js: connect back to the session passed in the query string
    create_file('peer_pre.js', '''
      var Module = {
        webrtc: {
          broker: 'http://localhost:8182',
          session: window.location.toString().split('?')[1],
          onpeer: function(peer, route) {
            peer.connect(Module['webrtc']['session']);
          },
          onconnect: function(peer) {
          },
          ondisconnect: function(peer) {
            // Calling window.close() from this handler hangs my browser, so run it in the next turn
            setTimeout(window.close, 0);
          },
          onerror: function(error) {
            console.error(error);
          },
        },
        setStatus: function(text) {
          console.log('status: ' + text);
        }
      };
    ''')
    self.compile_btest(['-Werror', temp_host_filepath, '-o', host_outfile] +
                       ['-s', 'GL_TESTING', '--pre-js', 'host_pre.js',
                        '-s', 'SOCKET_WEBRTC', '-s', 'SOCKET_DEBUG'])
    self.compile_btest(['-Werror', temp_peer_filepath, '-o', peer_outfile] +
                       ['-s', 'GL_TESTING', '--pre-js', 'peer_pre.js',
                        '-s', 'SOCKET_WEBRTC', '-s', 'SOCKET_DEBUG'])
    # note: you may need to run this manually yourself, if npm is not in the path, or if you need a version that is not in the path
    self.run_process([NPM, 'install', test_file('sockets', 'p2p')])
    broker = Popen(config.NODE_JS + [test_file('sockets', 'p2p', 'broker', 'p2p-broker.js')])
    # NOTE(review): run_browser iterates `expected` — a string — character by
    # character; with the single-char '1' this yields one URL, as intended.
    expected = '1'
    self.run_browser(host_outfile, '.', ['/report_result?' + e for e in expected])
    broker.kill()
def cookie_jar():
    """Return the LWP cookie-jar file from DATA_PATH (created if absent)."""
    jar_file = create_file(DATA_PATH, "cookiejar.lwp")
    return jar_file
def subscription_file():
    """Return the subscription list file from DATA_PATH (created if absent)."""
    # NOTE(review): "subsciption.list" is misspelled, but the name is kept
    # byte-for-byte — renaming it would orphan users' existing data files.
    sub_file = create_file(DATA_PATH, "subsciption.list")
    return sub_file
def furk_search_file():
    """Return the Furk search list file from DATA_PATH (created if absent)."""
    search_file = create_file(DATA_PATH, "furk_search.list")
    return search_file
def wishlist_finished():
    """Return the finished-wishlist list file from DATA_PATH (created if absent)."""
    finished_file = create_file(DATA_PATH, "wishlist_finished.list")
    return finished_file
def people_list():
    """Return the people list file from DATA_PATH (created if absent)."""
    people_file = create_file(DATA_PATH, "people_list.list")
    return people_file
def imdb_search_file():
    """Return the IMDB search list file from DATA_PATH (created if absent)."""
    search_file = create_file(DATA_PATH, "imdb_search.list")
    return search_file
def downloads_file_tv():
    """Return the active TV downloads list file from DATA_PATH (created if absent)."""
    downloads_file = create_file(DATA_PATH, "active_downloads_tv.list")
    return downloads_file
def imdb_actor_file():
    """Return the IMDB actors list file from DATA_PATH (created if absent)."""
    actors_file = create_file(DATA_PATH, "imdb_actors.list")
    return actors_file