def create_restart_remove_delivery(create_delivery_url, create_failed_delivery_data,
                                   check_delivery_status_url, restart_url,
                                   remove_delivery_url, success):
    """Create a (failing) delivery, restart it, then remove it.

    Returns the updated ``success`` flag on the happy path; fails the
    test (and returns None implicitly) when no delivery id was created.
    """
    test.log("create_restart_remove_delivery()")
    test.log("create_delivery_url : " + create_delivery_url)
    test.log("check_delivery_status_url : " + check_delivery_status_url)
    test.log("restart_url : " + restart_url)
    test.log("remove_delivery_url : " + remove_delivery_url)
    source(findFile("scripts", "deliverywebservices.py"))
    delivery_id = create_save_delivery_using_post_url(create_delivery_url,
                                                      create_failed_delivery_data)
    squish.snooze(30)
    # The original tested `delivery_id or delivery_id != None`, which is
    # true for everything except None; this keeps those exact semantics.
    if delivery_id is not None:
        restarting_delivery(check_delivery_status_url, restart_url, delivery_id)
        source(findFile("scripts", "remove_delete_delivery.py"))
        squish.snooze(30)
        success = remove_delivery(remove_delivery_url, delivery_id, success)
        return success
    else:
        test.fail("delivery id is not created")
def add_attachable_aut(name="teest", port=11233):
    """Register an attachable AUT with the local squishserver.

    Runs ``squishserver --config addAttachableAUT <name> <port>`` with
    the platform-specific executable name.
    """
    test.log("Adding attachable aut to autlist")
    squish_module_helper.import_squish_symbols()
    # The two original branches differed only in the executable name.
    exe = 'squishserver.exe' if sys.platform.startswith("win") else 'squishserver'
    subprocess.Popen([os.path.join(os.environ["SQUISH_PREFIX"], 'bin', exe),
                      '--config', 'addAttachableAUT', name, str(port)])
def dump_rsvndump_incremental(id, stepsize, args, repos=None):
    """Dump a repository with rsvndump in steps of `stepsize` revisions.

    Loops until `run` raises (the revision range is exhausted) and
    returns the path of the produced dump file.
    """
    log(id, "\n*** dump_rsvndump_incremental (" + str(id) + ")\n")
    if not repos:
        repos = test.repo(id)
    dump = test.dumps(id) + "/rsvndump.dump"
    # Hoisted: the two original branches differed only in the binary path.
    if platform.system() == "Windows":
        binary = "../../bin/rsvndump.exe"
    else:
        binary = "../../src/rsvndump"
    start = 0
    end = stepsize
    while True:
        try:
            run(binary, uri("file://" + repos),
                "--incremental", "--no-incremental-header",
                "--revision", str(start) + ":" + str(end),
                extra_args=tuple(args), output=dump, error=test.log(id))
            start = end + 1
            end = start + stepsize
        except Exception:
            # `run` failing past HEAD terminates the loop. Narrowed from a
            # bare except so KeyboardInterrupt/SystemExit still propagate.
            break
    return dump
def get_position_item(itemObjectOrName):
    """Gets the position of the given object in the coordinates of the root object"""
    item = waitForObject(itemObjectOrName)
    # from https://kb.froglogic.com/display/KB/Getting+screen+coordinates+of+QGraphicsItem%2C+QGraphicsObject
    if className(item) != "QQuickItem":
        try:
            item = object.convertTo(item, "QQuickItem")
        except Exception as e:
            test.log(
                "Positioning object couldn't be converted to QQuickItem: " + str(e))
            return False, QPoint(0, 0)
    # Walk the parent chain iteratively, summing the relative offsets —
    # equivalent to the original recursive accumulation.
    total_x = 0
    total_y = 0
    node = item
    while node:
        total_x += node.x
        total_y += node.y
        node = object.parent(node)
    return True, QPoint(total_x, total_y)
def main():
    """tst_start_bulk_revert_delivery: bulk-revert a delivery and confirm via FTP."""
    test.log("Test Case Name: tst_start_bulk_revert_delivery")
    data = testData.dataset("s_list_of_webservices.tsv")[0]
    get_delivery_status_url = testData.field(data, "get_delivery_status_url")
    revert_all_deliveries_get_call = testData.field(data, "revert_all_deliveries_get_call")
    data = testData.dataset("delivery_ids_for_bulk_reverting.tsv")[0]
    parent_delivery_id = testData.field(data, "delivery_id")
    videoISRC = testData.field(data, "videoISRC")
    test.log(" get_delivery_status_url : " + get_delivery_status_url)
    test.log(" revert_all_deliveries_get_call : " + revert_all_deliveries_get_call)
    test.log(" delivery_id : " + parent_delivery_id)
    test.log(" videoISRC: " + videoISRC)
    data = testData.dataset("ftp_login_details.tsv")[0]
    ftp_vevo_host = testData.field(data, "ftp_vevo_host")
    ftp_vevo_user = testData.field(data, "ftp_vevo_user")
    ftp_vevo_password = testData.field(data, "ftp_vevo_password")
    manifest_parse_success_replace_success = findFile(
        "testdata", "vevo_status\\parse_success_replace_success\\status-manifest.xml")
    squish.snooze(40)
    latest_revert_id = bulk_revert_delivery(revert_all_deliveries_get_call,
                                            get_delivery_status_url,
                                            parent_delivery_id)
    # `x and x != None` in the original is just a truthiness test.
    if latest_revert_id:
        delivery_stage = check_delivery_status_until_waiting_on_confirmation(
            get_delivery_status_url, latest_revert_id)
        ftp_transfer(get_delivery_status_url, parent_delivery_id, latest_revert_id,
                     delivery_stage, ftp_vevo_host, ftp_vevo_user,
                     ftp_vevo_password, manifest_parse_success_replace_success)
    else:
        test.fail("latest revert id is null or doesn't exist")
def check_delivery_status(check_delivery_status_url, delivery_id):
    """Return the current delivery stage for `delivery_id`.

    NOTE(review): this wrapper has the same name as the function it
    calls. It only works because source() below re-defines the global
    `check_delivery_status` (from deliverywebservices.py) before the
    inner call resolves — confirm that script defines it, otherwise this
    recurses forever.
    """
    test.log("check delivery status")
    source(findFile("scripts", "deliverywebservices.py"))
    delivery_stage = check_delivery_status(check_delivery_status_url, delivery_id)
    test.log(delivery_stage)
    return delivery_stage
def export_study(study_name):
    """Search for `study_name` in the patient list, export it, and log the duration."""
    # status bar
    status = squish.waitForObject(cvi42Objects.statusBar)
    # Searches for study, and exports
    squish.waitForObject(cvi42Objects.patientlistEditBox).setText(study_name)
    squish.openContextMenu(
        squish.waitForObjectItem(cvi42Objects.studyTreeitem, study_name), 50, 5, 0)
    squish.activateItem(
        squish.waitForObjectItem(cvi42Objects.contextMenu, "Export Study"))
    # export dialog flow
    squish.mouseClick(
        squish.waitForObjectItem(":splitter.sidebar_QSidebar", "My Computer"),
        53, 13, 0, squish.Qt.LeftButton)
    squish.mouseClick(
        squish.waitForObjectItem(":stackedWidget.treeView_QTreeView", "DATA (D:)"),
        33, 14, 0, squish.Qt.LeftButton)
    squish.doubleClick(
        squish.waitForObjectItem(":stackedWidget.treeView_QTreeView", "DATA (D:)"),
        34, 11, 0, squish.Qt.LeftButton)
    squish.clickButton(squish.waitForObject(":dcmBrowser.Choose_QPushButton"))
    start = time.time()
    # Poll the status bar with a short sleep between checks instead of
    # busy-spinning a CPU core (the original looped with `pass`).
    while status.currentMessage() != "Export Images done":
        squish.snooze(0.1)
    end = time.time()
    test.log("Time to export study: %.2f" % (end - start))
def save_workspace(workspace_name):
    """Save the current workspace under `workspace_name` and log the duration."""
    # Grabs the status bar object to check current message
    status = squish.waitForObject(cvi42Objects.statusBar)
    # Saves workspace as name given
    squish.activateItem(
        squish.waitForObjectItem(cvi42Objects.menuBar, "Workspace"))
    squish.activateItem(
        squish.waitForObjectItem(cvi42Objects.workspaceButton, "Save Workspace As"))
    squish.waitForObject(
        cvi42Objects.workspaceWindowEdit).setText(workspace_name)
    squish.clickButton(
        squish.waitForObject(cvi42Objects.workspaceWindowOkButton))
    start = time.time()
    # Poll with a short sleep instead of a tight busy-wait (the original
    # spun with `pass` and pinned a CPU core).
    while status.currentMessage() != "Save workspace done.":
        squish.snooze(0.1)
    end = time.time()
    test.log("Saving workspace: %.2f" % (end - start))
    return
def load_series(window_given, series_num):
    """Drag series `series_num` (1-based) into the given viewer window.

    Scrolls the thumbnail strip when needed, drags the thumbnail onto the
    target window, confirms the optional "Load Volume" prompt, and logs
    the load time when it exceeds 10 seconds.
    """
    # Thumbnail object names are 0-based, hence series_num - 1.
    series = ":scrollArea.frame-%s_SeriesThumbPreview" % (series_num - 1)
    window, x, y = find_window(window_given)
    # if series needed is bigger than 8, check if scrolling is needed to reach the series
    if object.exists(cvi42Objects.series_scrollbar):
        # Reset the strip to the top, then scroll down when the target is
        # beyond the first 8 visible thumbnails.
        squish.scrollTo(squish.waitForObject(cvi42Objects.series_scrollbar), -505)
        if series_num > 8:
            squish.scrollTo(
                squish.waitForObject(cvi42Objects.series_scrollbar), 855)
    # Load series into window requested
    squish.mouseMove(squish.waitForObject(series), 5, 5)
    squish.mousePress(squish.waitForObject(series))
    # squish.snooze(1)
    squish.mouseRelease(squish.waitForObject(window))
    squish.snooze(0.2)
    if object.exists(":Load Volume.Yes_QPushButton"):
        squish.clickButton(
            squish.waitForObject(":Load Volume.Yes_QPushButton"))
    # Wait for progress bar
    # NOTE(review): `time` shadows the stdlib module name within this scope.
    time = loading_time()
    if time > 10:
        test.log("Time to load series: %.2f" % time)
    return
def get_delivery_config_get_url(url):
    """Fetch the delivery configuration and verify hubs and delivery targets exist."""
    test.log("get_delivery_config_get_url : " + url)
    # Renamed locals: the original shadowed the builtins `open` and `dict`.
    response = urllib2.urlopen(url)
    config = json.load(response)
    list_of_hubs = config['availableHubs']
    if len(list_of_hubs) > 0:
        for hub in list_of_hubs:
            test.log("hub :", hub)
            if hub == "hub-t1":
                test.log("hub exists", hub)
    else:
        test.fail("No Hubs available ")
    list_of_delivery_targets = config['deliveryTargets']
    if len(list_of_delivery_targets) > 0:
        for delivery_target in list_of_delivery_targets:
            test.log("delivery delivery_target : ", delivery_target)
            if delivery_target == "VEVO":
                test.log("delivery_target exists", delivery_target)
    else:
        test.fail("No Delivery targets available ")
def switch_to_app(app_name):
    """Switch context to the given app, to act in its process.

    Returns True on success (or trivially in single-process mode),
    False after failing the test when the app is unknown/unregistered.
    """
    # in case of multi process not needed
    if not settings.G_MULTI_PROCESS:
        return True
    # do this always upfront, because an app
    # might have been connected in the meanwhile
    # and must be updated before trying to possibly
    # change to it.
    update_all_contexts()
    good = False
    # Membership test directly on the dict (the original called .keys()),
    # and the handle no longer shadows this function's own name.
    if app_name in settings.G_APP_HANDLE:
        app_handle = settings.G_APP_HANDLE[app_name]
        if app_handle is not None:
            test.log("Trying to switch to registered app '" + app_name + "'!")
            squish.snooze(settings.G_WAIT_SWITCH_APP_CONTEXT)
            squish.setApplicationContext(app_handle)
            test.log("Switched to registered app '" + app_name + "'!")
            good = True
        else:
            test.fail("App '" + app_name + "' is known but yet not registered")
    else:
        test.fail("App '" + app_name + "' is not known!")
    return good
def repos_load(id, dumpfile):
    """Create a temporary repository, load `dumpfile` into it, and return its path."""
    log(id, "\n*** repos_load (" + str(id) + ")\n")
    target = test.mkdtemp(id)
    run("svnadmin", "create", target, output=test.log(id))
    run("svnadmin", "load", target, input=dumpfile, output=test.log(id))
    return target
def post_url_data(url, data):
    """POST `data` as JSON to `url` and return the decoded JSON response."""
    test.log("post_url_data")
    test.log("url : " + url)
    req = urllib2.Request(url)
    req.add_header('Content-Type', 'application/json')
    response = urllib2.urlopen(req, json.dumps(data))
    # Renamed: the original shadowed the builtin `dict`.
    payload = json.load(response)
    return payload
def repos_load(id, dumpfile):
    """Build a fresh scratch repository from `dumpfile` and return its path."""
    log(id, "\n*** repos_load (%s)\n" % id)
    scratch = test.mkdtemp(id)
    run("svnadmin", "create", scratch, output=test.log(id))
    run("svnadmin", "load", scratch, input=dumpfile, output=test.log(id))
    return scratch
def dump_reload(id, dumpfile):
    """Round-trip `dumpfile` through a scratch repository; return the new dump path."""
    log(id, "\n*** dump_reload (" + str(id) + ")\n")
    scratch = test.mkdtemp(id)
    run("svnadmin", "create", scratch, output=test.log(id))
    run("svnadmin", "load", scratch, input=dumpfile, output=test.log(id))
    validated = test.dumps(id) + "/validate.dump"
    run("svnadmin", "dump", scratch, output=validated, error=test.log(id))
    return validated
def parsexml(local_manifest_file_path):
    """Parse the manifest XML file and return the text of its VideoISRC element."""
    test.log("parsexml")
    test.log("local_manifest_file_path : " + local_manifest_file_path)
    # Context manager closes the handle; the original leaked it.
    with open(local_manifest_file_path, 'r') as xml_doc:
        xml_doc_data = xml_doc.read()
    root = etree.XML(xml_doc_data)
    videoISRC = root.find('VideoISRC')
    text = videoISRC.text
    print(videoISRC.text)
    return text
def get_latest_revert_id_with_complete_url(complete_url):
    """GET `complete_url` and return the 'latestRevertId' field of the JSON reply."""
    test.log("get_latest_revert_id_with_complete_url: " + complete_url)
    print(complete_url)
    # Renamed locals: the original shadowed the builtins `open` and `dict`.
    response = urllib2.urlopen(complete_url)
    payload = json.load(response)
    latest_revert_id = payload['latestRevertId']
    print(latest_revert_id)
    return latest_revert_id
def dump_reload(id, dumpfile):
    """Load `dumpfile` into a new repository, dump it again, and return that dump."""
    log(id, "\n*** dump_reload (%s)\n" % id)
    repo_path = test.mkdtemp(id)
    run("svnadmin", "create", repo_path, output=test.log(id))
    run("svnadmin", "load", repo_path, input=dumpfile, output=test.log(id))
    result = "%s/validate.dump" % test.dumps(id)
    run("svnadmin", "dump", repo_path, output=result, error=test.log(id))
    return result
def bspatch(id, file, patch):
    """Apply binary `patch` to `file` with bspatch; return True on success.

    Consistent with the sibling implementation elsewhere in the suite: a
    missing bspatch executable is an environment problem and is re-raised
    rather than reported as a failed patch.
    """
    log(id, "\n*** bspatch (" + file + ", " + patch + ")\n")
    tmp = mktemp(id)
    try:
        run("bspatch", file, tmp, patch, output=test.log(id), error=test.log(id))
        shutil.move(tmp, file)
    except OSError:
        sys.stderr.write("'bspatch' executable missing?\n")
        raise
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt still propagates.
        return False
    return True
def check_revert_status_with_complete_url(complete_url):
    """GET `complete_url` and return the 'revertStage' field of the JSON reply."""
    test.log("check_revert_status_with_complete_url: " + complete_url)
    test.log("list of revert stages : REVERTED,REVERTING,REVERT_FAILED,UNREVERTABLE,UNREVERTABLE_DUPLICATE,UNREVERTABLE_NO_SOURCE,UNREVERTED")
    print(complete_url)
    # Renamed locals: the original shadowed the builtins `open` and `dict`.
    response = urllib2.urlopen(complete_url)
    payload = json.load(response)
    revert_stage = payload['revertStage']
    print(revert_stage)
    return revert_stage
def dump_rsvndump_incremental_sub(id, path, stepsize, args, repos=None):
    """Incrementally dump subdirectory `path` with rsvndump, stepping through
    its history at least `stepsize` revisions at a time; return the dump path.
    """
    # Fixed log label: the original logged the non-_sub function name.
    log(id, "\n*** dump_rsvndump_incremental_sub (" + str(id) + ")\n")
    if not repos:
        repos = test.repo(id)
    dump = test.dumps(id) + "/rsvndump.dump"
    # Fetch history
    history = mktemp(id)
    run("svnlook", "history", repos, path, output=history, error=test.log(id))
    with open(history, "r") as f:  # context manager: original closed manually
        hist = f.readlines()
    hist.reverse()
    # Filter history down to entries touching `path`
    regex = re.compile(" *[0-9]* \/" + path + "$", re.IGNORECASE)
    hist = [h for h in hist if regex.search(h)]
    # Iterate over history
    regex = re.compile(" *([0-9]*)", re.IGNORECASE)
    # Platform-specific binary, consistent with the other rsvndump helpers
    # (the original omitted ".exe" on the Windows branch).
    if platform.system() == "Windows":
        binary = "../../bin/rsvndump.exe"
    else:
        binary = "../../src/rsvndump"
    start = 0
    end = 0
    for h in hist:
        match = regex.match(h)
        if not match:
            break
        end = int(match.group())
        # Accumulate revisions until the step size is reached.
        if end - start < stepsize:
            continue
        run(binary, uri("file://" + repos + "/" + path),
            "--incremental", "--no-incremental-header",
            "--revision", str(start) + ":" + str(end),
            extra_args=tuple(args), output=dump, error=test.log(id))
        start = end + 1
    return dump
def get_info(obj):
    """Log position and size of `obj`; fail the test if the attributes are missing."""
    try:
        geometry = (obj.x, obj.y, obj.width, obj.height)
    except Exception as e:
        test.fail("Fail due to: " + str(e))
    else:
        posx, posy, width, height = geometry
        test.log("(" + str(posx) + "," + str(posy) + ") size [" +
                 str(width) + "," + str(height) + "]")
def main():
    """tst_campaign_service_call: exercise the campaign service web call."""
    test.log("Test Case Name: tst_campaign_service_call")
    row = testData.dataset("s_list_of_webservices.tsv")[0]
    service_url = testData.field(row, "campaign_service_call_url")
    campaign_id = testData.field(row, "campaign_id")
    source(findFile("scripts", "campaignservicecall.py"))
    get_campaign_service_call(service_url, campaign_id)
def get_list_of_files_folders_from_parent_folder_in_hub(url, hub, json_data):
    """POST `json_data` to the hub's parent-folder listing endpoint and
    verify that it reports at least one file.
    """
    test.log("get_list_of_files_folders_from_parent_folder_in_hub")
    test.log("url : " + url)
    complete_url = url + hub + "/parent"
    print(complete_url)
    # Renamed: the original shadowed the builtin `dict`; also dropped the
    # unused `new_data_dict` local.
    payload = post_url_data(complete_url, json_data)
    files = payload['files']
    if len(files) > 0:
        print(len(files))
    else:
        test.fail("no files exist in parent folder including the child file")
def check_delivery_status_until_cancelled(check_delivery_status_url, delivery_id):
    """Poll the delivery status until it reaches CANCELLED (or FAILED).

    Returns the final stage; fails the test if the delivery ends FAILED.
    """
    test.log("check_delivery_status_until_cancelled")
    source(findFile("scripts", "deliverywebservices.py"))
    while True:
        delivery_stage = check_delivery_status(check_delivery_status_url, delivery_id)
        test.log(delivery_stage)
        if delivery_stage == "CANCELLED":
            return delivery_stage
        elif delivery_stage == "FAILED":
            test.fail("Delivery cancel failed")
            return delivery_stage
        # Back off between polls instead of hammering the service in a
        # tight loop (the original had no delay).
        squish.snooze(5)
def ml_button():
    """Trigger the ML contour-detection tool button and log the processing time."""
    button_name = ":mContourDetectionTools.mlButton_SwitchingToolButton"
    squish.clickButton(squish.waitForObject(button_name))
    # Wait for progress bar
    elapsed = studyFunctions.loading_time()
    test.log("biplanarLAX ML %.2f" % elapsed)
    return
def get_list_of_files_folders_from_parent_folder_in_hub(url, hub, json_data):
    """Request the parent-folder listing for `hub` and check it is non-empty."""
    test.log("get_list_of_files_folders_from_parent_folder_in_hub")
    test.log("url : " + url)
    complete_url = url + hub + "/parent"
    print(complete_url)
    # Renamed the response variable (original shadowed builtin `dict`)
    # and removed the never-used `new_data_dict` local.
    listing = post_url_data(complete_url, json_data)
    files = listing['files']
    if len(files) > 0:
        print(len(files))
    else:
        test.fail("no files exist in parent folder including the child file")
def stop_delivery(stop_cancel_delivery, delivery_id):
    """POST to the stop/cancel endpoint for `delivery_id` and log the outcome.

    NOTE(review): success is judged only by a non-empty response body,
    not by r.status_code — an HTML error page would also count as
    success; confirm against the service contract.
    """
    test.log("stop delivery : " + stop_cancel_delivery + delivery_id)
    r = requests.post(stop_cancel_delivery + delivery_id)
    print r.status_code
    print r.text
    if(r.text):
        test.log("stopped or cancelled the delivery successfully")
    else:
        test.fail("Failed to cancel or stop the delivery")
def dump_reload_rsvndump_sub(id, dumpfile, path, args):
    """Load `dumpfile` into a scratch repository, then dump subpath `path`
    of it with rsvndump and return the resulting dump file.
    """
    log(id, "\n*** dump_reload (" + str(id) + ")\n")
    scratch = test.mkdtemp(id)
    run("svnadmin", "create", scratch, output=test.log(id))
    run("svnadmin", "load", scratch, input=dumpfile, output=test.log(id))
    result = test.dumps(id) + "/validate.dump"
    source_uri = uri("file://" + scratch + "/" + path)
    # Inverted the platform condition; same two branches as the original.
    if platform.system() == "Windows":
        run("../../bin/rsvndump.exe", source_uri,
            extra_args=tuple(args), output=result, error=test.log(id))
    else:
        run("../../src/rsvndump", source_uri,
            extra_args=tuple(args), output=result, error=test.log(id))
    return result
def bspatch(id, file, patch):
    """Patch `file` in place via bspatch.

    Returns True on success, False when the patch step fails; a missing
    bspatch executable is reported on stderr and re-raised.
    """
    log(id, "\n*** bspatch (" + file + ", " + patch + ")\n")
    scratch = mktemp(id)
    try:
        run("bspatch", file, scratch, patch,
            output=test.log(id), error=test.log(id))
        shutil.move(scratch, file)
    except OSError:
        sys.stderr.write("'bspatch' executable missing?\n")
        raise
    except:
        return False
    return True
def stop_delivery(stop_cancel_delivery, delivery_id):
    """Request stop/cancel for the given delivery id and log the outcome."""
    test.log("stop delivery : " + stop_cancel_delivery + delivery_id)
    response = requests.post(stop_cancel_delivery + delivery_id)
    print(response.status_code)
    print(response.text)
    if response.text:
        test.log("stopped or cancelled the delivery successfully")
    else:
        test.fail("Failed to cancel or stop the delivery")
def get_midth_of_item(itemObjectOrName):
    """Returns (success, QPoint) of the center point of the given object"""
    item = waitForObject(itemObjectOrName)
    if className(item) != "QQuickItem":
        try:
            item = object.convertTo(item, "QQuickItem")
        except Exception as e:
            test.log(
                "Positioning object couldn't be converted to QQuickItem: " + str(e))
            return False, QPoint(0, 0)
    # Center = top-left corner plus half the extent on each axis.
    center_x = item.x + math.floor(item.width / 2)
    center_y = item.y + math.floor(item.height / 2)
    return True, QPoint(center_x, center_y)
def get_list_of_media_families(url, campaign_id):
    """Fetch the media families for `campaign_id` and return the id of the
    last one listed; fails the test when none exist.
    """
    test.log("get_list_of_media_families :" + url + campaign_id)
    complete_url = url + campaign_id
    print(complete_url)
    # Renamed locals: the original shadowed the builtins `open` and `dict`.
    response = urllib2.urlopen(complete_url)
    payload = json.load(response)
    squish.snooze(10)
    test.log("get_list_of_media_families : " + complete_url)
    number_of_results = payload['numberOfResults']
    media_family_id = None
    if number_of_results > 0:
        print(number_of_results)
        count = 0
        for i in range(number_of_results):
            count = count + 1
            result = payload['results'][i]
            media_family_id = result['id']
            print(media_family_id)
        test.log("successfully retrieved the no of results :")
        if number_of_results != count:
            test.fail("assets doesn't match the number of results retrieved")
    else:
        test.fail("No results exist for media family")
    # `x and x != None` in the original is a plain truthiness test.
    if media_family_id:
        test.log("media family id exists : ", media_family_id)
        test.passes("Media family exists")
        return media_family_id
    else:
        test.fail("Failed to retrieve media family id")
def check_delivery_status_until_cancelled(check_delivery_status_url, delivery_id):
    """Poll delivery status until CANCELLED or FAILED and return the stage."""
    test.log("check_delivery_status_until_cancelled")
    source(findFile("scripts", "deliverywebservices.py"))
    while True:
        delivery_stage = check_delivery_status(check_delivery_status_url, delivery_id)
        test.log(delivery_stage)
        if delivery_stage == "CANCELLED":
            return delivery_stage
        elif delivery_stage == "FAILED":
            test.fail("Delivery cancel failed")
            return delivery_stage
        # Sleep between polls; the original looped with no delay and
        # hammered the status service.
        squish.snooze(5)
def get_list_of_media_families(url, campaign_id):
    """Retrieve media families for a campaign; return the last listed id.

    Fails the test when no results exist or no id could be extracted.
    """
    test.log("get_list_of_media_families :" + url + campaign_id)
    complete_url = url + campaign_id
    print(complete_url)
    # `response`/`payload` replace the original `open`/`dict`, which
    # shadowed builtins.
    response = urllib2.urlopen(complete_url)
    payload = json.load(response)
    squish.snooze(10)
    test.log("get_list_of_media_families : " + complete_url)
    number_of_results = payload['numberOfResults']
    media_family_id = None
    if number_of_results > 0:
        print(number_of_results)
        count = 0
        for i in range(number_of_results):
            count = count + 1
            media_family_entry = payload['results'][i]
            media_family_id = media_family_entry['id']
            print(media_family_id)
        test.log("successfully retrieved the no of results :")
        if number_of_results != count:
            test.fail("assets doesn't match the number of results retrieved")
    else:
        test.fail("No results exist for media family")
    if media_family_id:  # truthiness, matching the original's redundant check
        test.log("media family id exists : ", media_family_id)
        test.passes("Media family exists")
        return media_family_id
    else:
        test.fail("Failed to retrieve media family id")
def check_contents_in_ftp_server_and_download_manifest(ftp_delivery_path, ftp_vevo_host, ftp_vevo_user, ftp_vevo_password):
    """Verify the FTP delivery folder holds exactly 3 files and download
    manifest.xml; return its absolute local path (None when not found).
    """
    test.log("check_contents_in_ftp_server_and_download_manifest")
    test.log("ftp_delivery_path : " + ftp_delivery_path)
    squish.snooze(60)
    files = []
    absolute_path = None
    squish.snooze(60)
    try:
        session = ftplib.FTP(ftp_vevo_host, ftp_vevo_user, ftp_vevo_password)
        print(session)
        session.cwd(ftp_delivery_path)  # Change working directory
        files = session.nlst()
        print(files)
        for filename in files:
            print(filename)
            # getting delivery.complete.lock file as soon as file transfer
            # has been done (look for that as well)
            if len(files) == 3 and filename == "manifest.xml":
                print(filename)
                testdata = testData.create("shared", filename)
                print(testdata)
                absolute_path = os.path.abspath(testdata)
                print(absolute_path)
                # Context manager closes the handle even if the transfer
                # raises (the original leaked it on error).
                with open(testdata, 'wb') as manifest_file:
                    session.retrbinary('RETR %s' % filename, manifest_file.write)
            elif len(files) > 3:
                test.fail("no of files in the FTP server are more than 3")
                test.log(len(files))
            elif len(files) < 3:
                test.fail("no of files in the FTP server are less than 3")
                test.log(len(files))
        session.quit()
        return absolute_path
    except Exception as e:
        test.fail("check_contents_in_ftp_server_and_download_manifest")
        print(e)
        s = str(e)
        print(sys.exc_info())
        test.log(s)
def bspatch(id, file, patch):
    """Apply binary `patch` to `file` in place; return True on success.

    Made consistent with the suite's other bspatch variant: a missing
    executable (OSError) is reported and re-raised instead of being
    silently reported as a patch failure.
    """
    log(id, "\n*** bspatch (" + file + ", " + patch + ")\n")
    tmp = mktemp(id)
    try:
        run("bspatch", file, tmp, patch, output=test.log(id), error=test.log(id))
        shutil.move(tmp, file)
    except OSError:
        sys.stderr.write("'bspatch' executable missing?\n")
        raise
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt still propagates.
        return False
    return True
def main():
    """tst_asset_service_call: list media families and assets for a campaign."""
    test.log("Test Case Name: tst_asset_service_call")
    row = testData.dataset("s_list_of_webservices.tsv")[0]
    source(findFile("scripts", "test_create_restart_remove.py"))
    families_url = testData.field(row, "asset_service_to_get_list_of_mediafamilies_url")
    assets_url = testData.field(row, "asset_service_to_get_list_of_assets_url")
    campaign_id = testData.field(row, "campaign_id")
    source(findFile("scripts", "assetservicecall.py"))
    family_id = get_list_of_media_families(families_url, campaign_id)
    get_list_of_assets(assets_url, family_id)
def remove_delivery(remove_delivery_url, delivery_id, success):
    """DELETE the delivery card; return True on a non-empty response body,
    otherwise the incoming `success` value after failing the test.
    """
    test.log("remove_delivery : " + remove_delivery_url + delivery_id)
    response = requests.delete(remove_delivery_url + delivery_id)
    # (status_code / text debug prints were commented out in the original)
    if response.text:
        test.log("removed the delivery card successfully")
        return True
    test.fail("Failed to remove the delivery card")
    return success
def remove_delivery(remove_delivery_url, delivery_id, success):
    """Issue the delete call for `delivery_id` and report the outcome."""
    target = remove_delivery_url + delivery_id
    test.log("remove_delivery : " + target)
    reply = requests.delete(target)
    # (the original kept commented-out prints of status_code / text here)
    if reply.text:
        test.log("removed the delivery card successfully")
        success = True
    else:
        test.fail("Failed to remove the delivery card")
    return success
def restart_delivery(restart_url, delivery_id):
    """PUT a restart request for `delivery_id`.

    Returns the delivery stage ("RESTARTED") on success; on any other
    stage the test is failed and None is returned implicitly.
    """
    target = restart_url + delivery_id
    test.log("restart_delivery : " + target)
    squish.snooze(10)
    reply = requests.put(target)
    print(reply.status_code)
    stage = json.loads(reply.text)['deliveryStage']
    if stage == "RESTARTED":
        test.log("restarted the server")
        return stage
    test.fail("Failed to restart the delivery")
def restart_delivery(restart_url, delivery_id):
    """Request a restart of the given delivery and return its stage on success."""
    test.log("restart_delivery : " + restart_url + delivery_id)
    squish.snooze(10)
    response = requests.put(restart_url + delivery_id)
    print(response.status_code)
    payload = json.loads(response.text)
    delivery_stage = payload['deliveryStage']
    if delivery_stage != "RESTARTED":
        test.fail("Failed to restart the delivery")
    else:
        test.log("restarted the server")
        return delivery_stage
def setup_repos(id, setup_fn):
    """Create/load the cached test repository for `id` using `setup_fn`."""
    log(id, "\n*** setup_repos (" + str(id) + ")\n")
    repository = test.repo(id)
    working_copy = test.wc(id)
    cache.load_repos(id, test.name(id), repository, working_copy,
                     setup_fn, test.log(id))
    return repository
def copy_to_ftp_server(local_manifest_file_path, ftp_delivery_path, ftp_vevo_host, ftp_vevo_user, ftp_vevo_password):
    """Upload the local manifest as status-manifest.xml into the FTP delivery folder."""
    test.log("copy_to_ftp_server started")
    test.log("local_manifest_file_path : " + local_manifest_file_path)
    test.log("ftp_delivery_path : " + ftp_delivery_path)
    squish.snooze(60)
    # Fixed the missing space before "to" in the original log message.
    test.log("copying : " + local_manifest_file_path + " to " + ftp_delivery_path)
    session = ftplib.FTP(ftp_vevo_host, ftp_vevo_user, ftp_vevo_password)
    # Context manager closes the file even if the transfer fails
    # (the original leaked the handle on error).
    with open(local_manifest_file_path, 'rb') as manifest_file:  # file to send
        files = session.nlst()
        print(files)
        session.cwd(ftp_delivery_path)  # Change working directory
        # send the file using STOR command
        session.storbinary('STOR status-manifest.xml', manifest_file)
    session.quit()
    test.log("copy_to_ftp_server finished")
def add_attachable_aut(name="teest", port=11233):
    """Register an attachable AUT named `name` on `port` with squishserver."""
    test.log("Adding attachable aut to autlist")
    squish_module_helper.import_squish_symbols()
    # The two platform branches differed only in the executable name.
    server = 'squishserver.exe' if sys.platform.startswith("win") else 'squishserver'
    subprocess.Popen([
        os.path.join(os.environ["SQUISH_PREFIX"], 'bin', server),
        '--config', 'addAttachableAUT', name, str(port)
    ])
def setup_repos(id, setup_fn):
    """Prepare the repository and working copy for test `id` via the cache."""
    log(id, "\n*** setup_repos (%s)\n" % id)
    repo_path = test.repo(id)
    wc_path = test.wc(id)
    cache.load_repos(id, test.name(id), repo_path, wc_path, setup_fn, test.log(id))
    return repo_path
def check_contents_in_ftp_server_and_download_manifest(ftp_delivery_path, ftp_vevo_host, ftp_vevo_user, ftp_vevo_password):
    """Check the FTP delivery folder has exactly 3 files; download manifest.xml.

    Returns the absolute local path of the downloaded manifest, or None.
    """
    test.log("check_contents_in_ftp_server_and_download_manifest")
    test.log("ftp_delivery_path : " + ftp_delivery_path)
    squish.snooze(60)
    files = []
    absolute_path = None
    squish.snooze(60)
    try:
        session = ftplib.FTP(ftp_vevo_host, ftp_vevo_user, ftp_vevo_password)
        print(session)
        session.cwd(ftp_delivery_path)  # Change working directory
        files = session.nlst()
        print(files)
        for filename in files:
            print(filename)
            # getting delivery.complete.lock file as soon as file transfer
            # has been done (look for that as well)
            if len(files) == 3 and filename == "manifest.xml":
                print(filename)
                testdata = testData.create("shared", filename)
                print(testdata)
                absolute_path = os.path.abspath(testdata)
                print(absolute_path)
                # with-statement replaces the original manual open/close,
                # which leaked the handle when retrbinary raised.
                with open(testdata, 'wb') as download:
                    session.retrbinary('RETR %s' % filename, download.write)
            elif len(files) > 3:
                test.fail("no of files in the FTP server are more than 3")
                test.log(len(files))
            elif len(files) < 3:
                test.fail("no of files in the FTP server are less than 3")
                test.log(len(files))
        session.quit()
        return absolute_path
    except Exception as e:
        test.fail("check_contents_in_ftp_server_and_download_manifest")
        print(e)
        s = str(e)
        print(sys.exc_info())
        test.log(s)
def patch(id, file, patch):
    """Apply `patch` to `file` with the patch(1) tool; return True on success."""
    log(id, "\n*** patch (" + file + ", " + patch + ")\n")
    try:
        run("patch", file, patch, output=test.log(id))
    except Exception:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit
        # still propagate.
        return False
    return True
def dump_original(id, repos=None):
    """Dump repository `id` (or `repos` if given) and return the dump path."""
    log(id, "\n*** dump_original (" + str(id) + ")\n")
    repository = repos if repos else test.repo(id)
    out = test.dumps(id) + "/original.dump"
    run("svnadmin", "dump", repository, output=out, error=test.log(id))
    return out
def diff(id, file1, file2):
    """Write a unified diff of the two files next to the test log.

    Returns True when the diff tool ran, False when invoking it raised.
    """
    log(id, "\n*** diff (" + file1 + ", " + file2 + ")\n")
    # Renamed the output variable: the original local `diff` shadowed
    # this function's own name.
    diff_path = test.log(id) + ".diff"
    try:
        run("diff", "-Naur", file1, file2, output=diff_path)
    except Exception:
        # Narrowed from a bare except.
        return False
    return True
def bulk_revert_delivery(bulk_revert_url, get_delivery_status_url, parent_delivery_id):
    """Kick off a bulk revert and return the latest revert id for the parent delivery."""
    test.log("bulk_revert_delivery : " + bulk_revert_url)
    # Renamed locals: the original shadowed the builtins `open` and `dict`.
    response = urllib2.urlopen(bulk_revert_url)
    response_code = response.getcode()
    payload = json.load(response)
    request_status = payload['requestStatus']
    if response_code == 200 and request_status == "OK":
        squish.snooze(30)
        test.passes("Bulk Revert initiated:")
    else:
        # str() fix: response_code is an int, so the original concatenation
        # raised TypeError instead of failing the test cleanly.
        test.fail("Bulk Revert initiation failed: " + str(response_code))
    squish.snooze(60)
    latest_revert_id = status_of_reverting_delivery(get_delivery_status_url,
                                                    parent_delivery_id)
    return latest_revert_id
def create_delivery_and_save_delivery_id_to_tsv(create_delivery_url, create_delivery_data, videoISRC):
    """Create a delivery and persist its id (plus videoISRC) to a shared TSV.

    Returns the delivery id; fails the test when creation yielded none.
    """
    source(findFile("scripts", "deliverywebservices.py"))
    # create the delivery
    delivery_id = create_save_delivery_using_post_url(create_delivery_url,
                                                      create_delivery_data)
    squish.snooze(40)
    # `x and x != None` in the original is a plain truthiness test.
    if delivery_id:
        test.log("delivery successfully created", delivery_id)
        testdata = testData.create("shared", "delivery_ids_for_bulk_reverting.tsv")
        # Context manager guarantees the TSV is flushed/closed on errors too.
        with open(testdata, "w") as data_file:
            data_file.write("delivery_id\tvideoISRC\n")
            data_file.write(delivery_id + "\t" + videoISRC)
        test.log("tsv created")
        return delivery_id
    else:
        test.fail("delivery id is none")
def get_campaign_service_call(url, campaign_id):
    """Query the campaign service and verify that open campaigns are returned."""
    test.log("get_campaign_service_call: campaignId" + url + campaign_id)
    # Renamed locals: the original shadowed the builtins `open` and `dict`.
    response = urllib2.urlopen(url)
    payload = json.load(response)
    total_number_of_results = payload['numberOfResults']
    if total_number_of_results > 0:
        test.log("total campaign ids : ")
    else:
        test.fail("No campaign found")
    list_of_results = payload['results']
    if len(list_of_results) > 0:
        test.passes("Campaign service call returned open campaigns")
    else:
        test.fail("No campaign available ")
def check_delivery_status_until_waiting_on_confirmation(get_delivery_status_url, latest_revert_id):
    """Poll the delivery status every 60s until WAITING_ON_CONFIRMATION
    (returned) or FAILED (test failed, stage still returned)."""
    test.log("check delivery status " + latest_revert_id)
    source(findFile("scripts", "deliverywebservices.py"))
    # Hoisted out of the loop: the URL never changes between polls
    # (the original rebuilt it every iteration).
    complete_url = get_delivery_status_url + latest_revert_id
    while True:
        squish.snooze(60)
        print(complete_url)
        # Renamed locals: the original shadowed builtins `open` and `dict`.
        response = urllib2.urlopen(complete_url)
        payload = json.load(response)
        delivery_stage = payload['deliveryStage']
        test.log("delivery_stage : " + delivery_stage)
        if delivery_stage == "WAITING_ON_CONFIRMATION":
            return delivery_stage
        elif delivery_stage == "FAILED":
            test.fail("delivery stage is failed")
            return delivery_stage