def run_all(args_obj, parser):
    """Run every check in sequence, propagating the fail setting to the link checker."""
    print("")
    print("Running all tests.")
    print(args_obj)
    link_checker.DO_FAIL = args_obj.fail
    check_main_web_functionality(args_obj, parser)
    check_links(args_obj, parser)
    check_mapping(args_obj, parser)
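# --- Hedged wiring sketch (not part of the original module) ---
# run_all() expects an argparse.Namespace with a `fail` attribute plus the
# parser itself, which it hands on to the individual checks. The flag name
# "--fail" and the entry point below are assumptions inferred from
# args_obj.fail above, not the project's actual CLI.
import argparse

def build_parser():
    parser = argparse.ArgumentParser(description="Run all documentation checks.")
    parser.add_argument("--fail", action="store_true",
                        help="Make the link checker fail the run on broken links.")
    return parser

if __name__ == "__main__":
    cli_parser = build_parser()
    run_all(cli_parser.parse_args(), cli_parser)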
def main():
    """Query codebeamer, run the enabled checks and mail the combined result."""
    print("[INFO]: ############################################################")
    print("[INFO]: # Starting integrity checker")
    print("[INFO]: ############################################################\n")

    print("[INFO]: Sending request ...")
    answer = requests.get(base_url + request, headers=headers)
    if answer.status_code == 200:
        print("[INFO]: Request successful ...")
    else:
        print("[ERR]: Request failed! Received {0} status code from codebeamer".format(answer.status_code))
        sys.exit(1)

    content = json.loads(answer.content)
    items = parse_items_to_named_dict(content['items'])

    if TABLE_DISABLED == 'true':
        print("[INFO]: Skipping table generation ...")
    else:
        generate_table(items)

    message = ""
    if LINK_CHECKER_DISABLED == 'true':
        print("[INFO]: Skipping link checker ...")
    else:
        message += check_links(items)
    if WIKI_CHECKER_DISABLED == 'true':
        print("[INFO]: Skipping wiki checker ...")
    else:
        message += check_wiki_text(items)

    print("[INFO]: Displaying message to be sent ...")
    print(message)
    print("[INFO]: End of message ...")
    send_mail(TABLE_DISABLED, message)

    print("[INFO]: ############################################################")
    print("[INFO]: # Integrity checker finished")
    print("[INFO]: ############################################################\n")
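# --- Hedged configuration sketch (not part of the original module) ---
# main() relies on module-level names (base_url, request, headers and the
# *_DISABLED string flags) defined elsewhere in the integrity checker. One
# plausible way to populate them is from environment variables; the variable
# names, the default URL and the bearer-token header below are assumptions,
# not the project's actual configuration.
import os

base_url = os.environ.get("CB_BASE_URL", "https://codebeamer.example.com")
request = os.environ.get("CB_REQUEST", "/rest/v3/items/query")
headers = {"Authorization": "Bearer " + os.environ.get("CB_TOKEN", "")}

TABLE_DISABLED = os.environ.get("TABLE_DISABLED", "false")
LINK_CHECKER_DISABLED = os.environ.get("LINK_CHECKER_DISABLED", "false")
WIKI_CHECKER_DISABLED = os.environ.get("WIKI_CHECKER_DISABLED", "false")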
def get_posts(self, num_blocks, link_checker):
    """Fetch the newest time blocks from the block server and return their
    entries (newest first), plus link-check results if requested."""
    host = IP
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    port = 58443
    # Connect to the block server and ask for the file list for num_blocks.
    s.connect((host, port))
    # Receive no more than 1024 bytes
    # tm = s.recv(1024)
    entry_bytes = pickle.dumps(num_blocks)
    s.sendall(entry_bytes)
    #s.close()
    data = s.recv(1024)
    file_list = pickle.loads(data)
    new_file_list = []
    for items in file_list:
        new_file_list.append(items.decode())
    #print(file_list)
    #print(new_file_list)

    timeblocks = []
    test_entry_links = []
    if link_checker:
        from link_checker import check_links
        test_entry_links = check_links(new_file_list)
    # Reverse the link-check results so they match the newest-first entry order
    # (stop at -1 so the first element is included).
    reversed_test_entry_links = []
    test_entry_links_length = len(test_entry_links)
    for items in range(test_entry_links_length - 1, -1, -1):
        reversed_test_entry_links.append(test_entry_links[items])

    # Download each pickled time block from the static file server.
    for items in new_file_list:
        filename, filesize = items.split(SEPARATOR)
        #filename = "./static/"+filename
        url = 'http://13.82.102.90:58442/static/' + filename
        response = requests.get(url=url)
        time_block = pickle.loads(response.content)
        timeblocks.append(time_block)
    print('Obtained data.')

    # Legacy socket-based file transfer, kept for reference:
    '''
    for items in new_file_list:
        filename, filesize = items.split(SEPARATOR)
        print(filesize)
        filename = "./static/"+filename
        with open(filename, 'wb') as file:
            bytes_read = s.recv(5000)
            file.write(bytes_read)
            print('size of bytes read:', sys.getsizeof(bytes_read))
            #if not bytes_read:
            #    break
            #bytes_read = pickle.loads(bytes_read)
            #file.write(bytes_read)
        print('wrote file')
        s.send(b'1')
    for items in new_file_list:
        filename, filesize = items.split(SEPARATOR)
        #filename = "./static/"+filename
        print(filename)
        print('file size:', os.path.getsize(filename))
        file = open(filename, 'rb')
        time_block = TimeBlock(1)
        time_block = pickle.loads(file.read())
        #timeblock = pickle.load(file.read())
        timeblocks.append(time_block)
        #file.close()
    '''

    # Flatten the entries of every time block into a single list.
    entries = []
    #print('number of timeblocks:', len(timeblocks))
    for items in timeblocks:
        new_list = items.get_entries()
        print(items.get_entries())
        for entry in new_list:
            entries.append(entry)
    #for items in entries:
    #    print(items.get_ipfs_id())

    # Reverse so the newest entry comes first (stop at -1 to include index 0).
    reversed_entries = []
    length_entries = len(entries)
    for items in range(length_entries - 1, -1, -1):
        reversed_entries.append(entries[items])
    #posts = [Entry(1, 'r'), Entry(1,'r'),Entry(1, 'r')]
    print(entries, reversed_entries)
    return reversed_entries, reversed_test_entry_links
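# --- Hedged usage sketch (not part of the original project) ---
# get_posts() is written as a method, so it needs an enclosing class; the
# Feed class below is hypothetical and only illustrates how the two returned
# lists (newest-first entries and their link-check results) might be
# consumed. It assumes the block server at IP:58443 and the static file
# server are reachable.
class Feed:
    get_posts = get_posts  # reuse the function defined above as a method

def demo_feed():
    feed = Feed()
    entries, link_results = feed.get_posts(num_blocks=3, link_checker=False)
    for entry in entries:
        print(entry)  # Entry objects; the original comments hint at entry.get_ipfs_id()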