class FilterLargeFiles:
    """Delete JSON files larger than MAX_FILE_SIZE bytes from the master json directory."""

    # Size threshold in bytes; files strictly larger than this are removed.
    # (Was an inline magic number 500000 in the loop.)
    MAX_FILE_SIZE = 500000

    def __init__(self):
        self.file_paths = "file_paths.json"  # config file listing directory layout
        self.json_path = None  # master json dir, filled in by load_file_paths()
        self.log_writer = LogWriter()
        self.load_file_paths()

    def load_file_paths(self):
        """Read the directory-layout config and remember the master json dir."""
        # 'with' guarantees the handle is closed even if json.load raises;
        # the original open()/close() pair leaked the handle on a parse error.
        with open(self.file_paths) as file_paths_config_file:
            file_paths = json.load(file_paths_config_file)
        self.json_path = file_paths["Linux"]["master_json_dir"]

    def filter_files(self):
        """Walk one level of subdirectories under json_path and delete oversized files."""
        for directory in os.listdir(self.json_path):
            dir_path = self.json_path + "/" + directory
            if not os.path.isdir(dir_path):
                continue
            for file in os.listdir(dir_path):
                file_path = dir_path + "/" + file
                if os.path.getsize(file_path) > self.MAX_FILE_SIZE:
                    self.delete_and_record(file_path)

    def delete_and_record(self, file_path):
        """Log and remove a single file."""
        self.log_writer.write_info_log("Deleting " + file_path)
        os.remove(file_path)
def __init__(self, h_params: HParams, seed: int = None):
    """Set up training state; model, loaders and optimizer are filled in later."""
    self.h_params = h_params

    # Components created by later setup steps.
    self.model = None
    self.train_data_loader = None
    self.valid_data_loader = None
    self.test_data_loader = None
    self.criteria = None
    self.optimizer = None

    # Fall back to the CUDA default seed when none is supplied, then seed torch.
    self.seed = torch.cuda.initial_seed() if seed is None else seed
    torch.manual_seed(self.seed)

    # Progress / bookkeeping counters.
    self.check_point_num = 0  # binary
    self.current_epoch = 0
    self.total_epoch = self.h_params.train.epoch
    self.best_valid_metric = None
    self.best_valid_epoch = 0
    self.global_step = 0
    self.local_step = 0

    self.log_writer = LogWriter(self.h_params)
def to_json_str(fci_object):
    """Serialize an FCI object to a JSON string with sorted keys."""
    if not isinstance(fci_object, FormattedCodeInterface):
        # Anything else is a programming error: log it and stop the program.
        lw = LogWriter()
        lw.write_error_log("Method 'to_json' in FCIConverter requires an FCI type of object as parameter!")
        exit()
    return json.dumps(fci_object.to_dictionary(), sort_keys=True)
def to_dic(file_name):
    """Load a JSON file and return its contents as a dictionary.

    Returns None (after logging an error) when the file does not exist.
    """
    if not os.path.exists(file_name):
        lw = LogWriter()
        lw.write_error_log("File " + file_name + " doesn't exist!")
        return None  # explicit: the original fell through and returned None implicitly
    # 'with' closes the handle even if json.load raises; the original never
    # closed the file at all.
    with open(file_name, 'r', encoding="utf-8") as f:
        return json.load(f)
def __init__(self):
    """Fresh crawler: no account configured, search cursor reset."""
    super(GithubCrawler, self).__init__()
    self.log_writer = LogWriter()
    # Change Account ?
    # No account yet, and no pending search page / stars upper bound.
    self.g_account = self.next_search_page = self.next_search_stars_ub = None
    self.download_record_filepath = ""
    self.output_path = "python"
def __init__(self, connection):
    """Remember the transfer connection and reset all per-run state."""
    self.local_path = "../Hester'sWorkSpace/files"
    # Remote directory paths, filled in from the config file later.
    self.clean_projects_path = self.unclean_projects_path = self.remote_json_path = None
    self.json_data = None
    self.files_in_project = []
    # Keys: project name + containing directory; values: that project's json data.
    self.project_info = {}
    self.log_writer = LogWriter()
    self.connection = connection
def __init__(self):
    """Crawler with an (unconfigured) GitHub account and empty output paths."""
    super(GithubCrawler, self).__init__()
    self.log_writer = LogWriter()
    # Credentials are intentionally blank; configure a real account before use.
    self.g = GitHub("", "")
    self.project_filename = "download_test"
    self.output_path = ""
def __init__(self):
    """Open an SSH/SFTP connection to the configured Linux server."""
    self.log_writer = LogWriter()
    self.ssh_client = paramiko.SSHClient()
    self.sftp = None  # populated by open_linux_connection()
    # Credentials are masked in source control; replace before connecting.
    self.linux_server_details = dict(hostname='123.206.77.77',
                                     username='******',
                                     password='******')
    self.open_linux_connection()
    # NOTE: parent __init__ runs after the connection is opened (kept as-is).
    super().__init__()
def __init__(self):
    """All paths and data start unset; they are populated by a later load step."""
    # Directory paths read from configuration later.
    self.clean_projects_path = self.unclean_projects_path = None
    self.json_files_path = self.master_json_path = None
    self.so_questions = self.json_data = None
    # project name + containing directory -> that project's json data
    self.project_info = {}
    self.connection = None
    self.log_writer = LogWriter()
def __init__(self, connection):
    """Hold local/remote paths plus the server connection used for transfers."""
    # An absolute path is required when launched via main.py (a relative one
    # such as "../../Hester'sWorkSpace/files" only works when run standalone).
    self.local_path = "C:/Users/CeXGalway/PycharmProjects/Final-Year-Project/Hester'sWorkSpace/files"
    self.remote_path = "/home/ubuntu/test_files/json_files"
    self.clean_projects_path = "/home/ubuntu/test_files/clean"
    self.unclean_projects_path = "/home/ubuntu/test_files/unclean"
    self.json_data = None
    self.files_in_project = []
    self.project_info = {}  # project name -> corresponding json data
    self.log_writer = LogWriter()
    self.connection = connection
def update_current_widget_by_name(self, widget_name):
    """Switch the stacked widget to *widget_name* and let it (re)load itself."""
    self.setCurrentIndex(self.widget_dict[widget_name])
    LogWriter().write_log(
        "update_current_widget_by_name '{}'".format(widget_name))
    widget = self.currentWidget()
    # Returning to the index page is a good moment to reclaim memory.
    if widget_name == "index":
        gc.collect()
    if hasattr(widget, "load"):
        widget.load()
from gensim import corpora, models, similarities import os import pickle import time import operator import json import FCIConverter from LogWriter import LogWriter from NLTK.NLTKFormatter import NLTKFormatter # from nltk.stem.lancaster import LancasterStemmer from six import iteritems lw = LogWriter() # Start Time startTime = time.time() # import io # import sys # sys.stdout = io.TextIOWrapper(sys.stdout.buffer,encoding='utf8') # Read all files from the root path def ReadFilesDeep(rootDir): filePath = [] fileName = [] list_dirs = os.walk(rootDir) for root, dirs, files in list_dirs:
import time import asyncio import threading import sys from LogWriter import LogWriter from api_exceptions import * from flask import Flask, request, jsonify from Led_Board import Led_Board from Led_Character_Map import char_map from Led_Config import Led_Config #global vars q = asyncio.Queue() EndQueue = False t = None log = LogWriter('logs', 'led_log', 2) app = Flask(__name__) #format given exception as a string def GetInnerException(): exc_type, exc_value, exc_traceback = sys.exc_info() lines = traceback.format_exception(exc_type, exc_value, exc_traceback) return ''.join('!! ' + line for line in lines) #error handler for bad requests @app.errorhandler(BadRequest) def handle_bad_request(error): response = jsonify(error.to_dict())
class FileDetailsToJson: def __init__(self, connection): self.local_path = "../Hester'sWorkSpace/files" self.clean_projects_path = None self.unclean_projects_path = None self.remote_json_path = None self.json_data = None self.files_in_project = [] self.project_info = { } # Dictionary with project names and containing directory as key and corresponding json data as value self.log_writer = LogWriter() self.connection = connection def load_file_paths(self): file_paths_config_file = open("../file_paths.json") file_paths = json.load(file_paths_config_file) self.clean_projects_path = file_paths["Linux"]["clean_dir"] self.unclean_projects_path = file_paths["Linux"]["unclean_dir"] self.remote_json_path = file_paths["Linux"]["json_dir"] # For each json file from Kirk find the corresponding clean project # For each file within that project crete an fci object with the details of that file def run(self): self.load_file_paths() self.find_all_json_files() for project_name in self.project_info: self.json_data = self.project_info[project_name] self.create_fci_objects(self.clean_projects_path + project_name) #self.save_fci_objects_to_json_files() # Goes through each unclean folder and searches for all json files from Kirk # When a file is found it saves it to a directory with the folder and file name as a key # and the json data as the element def find_all_json_files(self): for directory in self.connection.listdir(self.unclean_projects_path): if self.connection.isdir(self.unclean_projects_path + "/" + directory): self.log_writer.write_info_log("Reading jsons from " + directory) for file in self.connection.listdir( self.unclean_projects_path + "/" + directory): if file.endswith(".json"): json_path = "/" + directory + "/" + file json_file = self.connection.open_file( self.unclean_projects_path + json_path) self.project_info[json_path[:-5]] = json.load( json_file) # Goes through all files in a cleaned project and creates an fci object for each def create_fci_objects(self, 
parent_directory): f = '' try: for file_name in self.connection.listdir(parent_directory): file_path = parent_directory + '/' + file_name f = file_path if file_name.endswith(".py"): self.save_file_details_to_fci_object(file_path, file_name) elif self.connection.isdir(file_path): self.create_fci_objects(file_path) '''else: if self.connection.isdir(file_path): self.create_fci_objects(file_path) else: # Just an extra check to make sure no other files are left self.log_writer.write_warning_log(file_path + " not deleted")''' except Exception as e: exc_type, exc_obj, exc_tb = sys.exc_info() self.log_writer.write_error_log("%s at line %d" % (str(e), exc_tb.tb_lineno)) # Saves the details of an individual file to an fci object def save_file_details_to_fci_object(self, file_path, file_name): fci_object = FormattedCodeInterface() fci_object.set_file_name(file_name) fci_object.set_save_path(file_path) self.set_content(file_path, fci_object) self.set_project_details(fci_object) #self.files_in_project.append(fci_object) self.save_fci_objects_to_json_files(fci_object) self.log_writer.write_info_log(file_path + " documented.") # Save the content, code, and comments of an individual file to an fci object def set_content(self, file_path, fci_object): file = self.connection.open_file(file_path) content = '' comments_list = [] comments = '' python_comments = [ '\"\"\"((.|\n)*)\"\"\"', '\'\'\'((.|\n)*)\'\'\'', '(?<!(\"|\'))#.*(?=\n)' ] # Content for line in file.readlines(): content += line fci_object.set_content(content) if file_path.endswith("red_test.py"): print() # Code code = content for comment_pattern in python_comments: for match in re.finditer(comment_pattern, code): #comments_list += match.group(0) code = re.sub(match.group(0), '', code) comments_list.append(self.format_comments(match.group(0))) fci_object.set_code(code) # Comments '''for recorded_comment in comments_list: if type(recorded_comment) is tuple: for comment in recorded_comment: comments += comment + '\n' else: 
comments += recorded_comment + '\n''' comments = ' '.join(comments_list) fci_object.set_comments(comments) def format_comments(self, comment): formatted_comment = '' alnum_pattern = r'[^(a-zA-Z0-9)]' stopwords = set(nltk.corpus.stopwords.words('english')) comment = re.sub(alnum_pattern, ' ', comment) for word in comment.split(' '): if word not in stopwords: formatted_comment += str(word) + ' ' return formatted_comment.lower() '''def format_comments(self, comments_list): filtered_comments_list = [] stopwords = set(nltk.corpus.stopwords.words('english')) for comment in comments_list: if type(comment) is tuple: print() for word in list(comment).split(' '): if word.startswith('#'): word = word[1:] if word.endswith('.') or word.endswith(','): word = word[:-1] if (word not in stopwords) and (word is not '#') and (word is not ''): filtered_comments_list.append(word) comments = ' '.join(filtered_comments_list) return comments.lower()''' # Saves the details of the current project to an fci object def set_project_details(self, fci_object): fci_object.set_author(self.json_data["owner_name"]) fci_object.set_description(self.json_data["description"]) fci_object.set_language(self.json_data["language"]) fci_object.set_project_name(self.json_data["name"]) # fci.set_quality(data["items"][0]["owner"]) # fci.set_save_time() fci_object.set_update_at(self.json_data["updated_at"]) fci_object.set_url(self.json_data["html_url"]) fci_object.set_wiki(self.json_data["has_wiki"]) # Converts fci objects to json files and saves them remotely def save_fci_objects_to_json_files(self, fci_object): self.log_writer.write_info_log("Saving Json files") FCI.FCIConverter.to_remote_json_file(self.remote_json_path, fci_object, self.connection) '''for fci_object in self.files_in_project: FCI.FCIConverter.to_remote_json_file(self.remote_json_path, fci_object, self.connection)''' self.log_writer.write_info_log( "Json files saved to remote machine at " + self.remote_json_path) '''
from pathlib import Path

import Agent.agent as agnt
from Agent import RemoteAPIServer as ras
from LogRecorder import LogRecorder
from LogWriter import LogWriter

# Wire the agent up with its two log recorders, expose start/stop over the
# RPC server, then block listening for remote calls.
current_dir = Path().absolute()
log_recorder = LogRecorder(current_dir, "c:/tmp", 2, '*.log')
log_writer = LogWriter(current_dir, 2, "Applog.log")
recorders = [log_writer, log_recorder]
agent = agnt.Agent(recorders)

api_server = ras.RemoteAPIServer('127.0.0.1', 9000)
for remote_callable in (agent.start, agent.stop):
    api_server.register_function(remote_callable)
api_server.start_listen()
class LinuxConnection:
    """SSH/SFTP connection wrapper for the project's Linux server.

    Opens the connection eagerly in __init__ and exposes thin wrappers over
    paramiko's SFTP/SSH operations.
    """

    def __init__(self):
        # Parent
        self.log_writer = LogWriter()
        self.ssh_client = paramiko.SSHClient()
        # SFTP session; set by open_linux_connection() on success.
        self.sftp = None
        # Credentials are masked in source control; fill in before use.
        self.linux_server_details = {
            'hostname': '123.206.77.77',
            'username': '******',
            'password': '******'
        }
        #self.connect_to_server()
        self.open_linux_connection()
        # NOTE(review): parent __init__ is deliberately called last — confirm
        # the base class does not need to run before the connection opens.
        super().__init__()

    ### Parent ###
    def open_file(self, file, mode='r'):
        """Open a remote file via SFTP and return the file handle."""
        return self.sftp.open(file, mode)

    def copy_file_to_server(self, local_path, remote_path):
        """Upload a local file to the remote path via SFTP."""
        self.sftp.put(local_path, remote_path)

    def isdir(self, file):
        """Return True if *file* can be listed as a directory.

        Relies on listdir() raising FileNotFoundError for non-directories.
        """
        try:
            self.listdir(file)
            return True
        except FileNotFoundError:
            return False

    def mkdir(self, dir_path):
        """Create a remote directory."""
        self.sftp.mkdir(dir_path)

    def listdir(self, path):
        """List a remote directory, or None on IsADirectoryError.

        NOTE(review): paramiko's SFTPClient.listdir typically raises IOError/
        FileNotFoundError rather than IsADirectoryError — confirm this except
        clause is ever hit.
        """
        try:
            return self.sftp.listdir(path)
        except IsADirectoryError:
            return None

    def exec_command(self, command):
        """Run a shell command on the remote host; returns (stdin, stdout, stderr)."""
        return self.ssh_client.exec_command(command)

    def close_connection(self):
        """Close the underlying SSH transport (and with it the SFTP session)."""
        self.ssh_client.close()
    ### Parent ###

    def connect_to_server(self):
        """Return the SFTP session, reopening the connection on failure.

        NOTE(review): get_transport().is_active() returns a bool and does not
        raise ConnectionError; the except branch may be dead code — confirm.
        """
        try:
            self.ssh_client.get_transport().is_active()
            return self.sftp
        except ConnectionError:
            self.open_linux_connection()
            return self.sftp

    def open_linux_connection(self):
        """Connect the SSH client and open an SFTP session; logs on failure."""
        hostname = self.linux_server_details.get('hostname')
        username = self.linux_server_details.get('username')
        password = self.linux_server_details.get('password')
        ssh_client = self.ssh_client
        # Paramiko rejects unknown hosts by default; AutoAddPolicy() trusts
        # any host key, which is convenient here but insecure in general.
        ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        self.log_writer.write_info_log("Connecting to server " + hostname)
        try:
            ssh_client.connect(hostname=hostname,
                               username=username,
                               password=password)
            self.sftp = self.ssh_client.open_sftp(
            )  # Open an SFTP session on the SSH server.
        except Exception as connection_error:
            self.log_writer.write_error_log("Could not connect to server: " +
                                            str(connection_error))
class GithubCrawler(object):
    """Crawls GitHub's repository search, downloads repo zipballs, and keeps a
    JSON record of what has been downloaded so unchanged repos are skipped."""

    def __init__(self):
        super(GithubCrawler, self).__init__()
        self.log_writer = LogWriter()
        # Change Account ?
        self.g_account = None             # HTTPBasicAuth set via config_github_account()
        self.next_search_page = None      # next search-result page URL, or "END" when done
        self.next_search_stars_ub = None  # stars upper bound for the next search window
        self.download_record_filepath = ""
        self.output_path = "python"

    def config_github_account(self, username, password):
        """Store the credentials used for every GitHub API request."""
        self.g_account = HTTPBasicAuth(username, password)

    def call_api_search(self, language, stars_ub=""):
        """Fetch one page (100 repos) of the GitHub repository search.

        Pagination is carried across calls in self.next_search_page; once a
        window is exhausted, the search restarts below the star count of the
        last repo seen. Returns the decoded JSON response, or None when the
        rate-limit check fails.
        """
        # Check Github Account Rate Limit
        if self.check_api_ratelimit():
            if self.next_search_page is None:
                self.log_writer.write_info_log("Call Github API Search")
                # First page of a (possibly stars-bounded) search window.
                if stars_ub == "":
                    payload = {
                        "q": "language:" + language,
                        "sort": "stars",
                        "order": "desc",
                        "per_page": "100"
                    }
                else:
                    payload = {
                        "q": "language:" + language + " " + "stars:<=" + str(stars_ub),
                        "sort": "stars",
                        "order": "desc",
                        "per_page": "100"
                    }
                response = requests.get(
                    "https://api.github.com/search/repositories",
                    params=payload,
                    auth=self.g_account)
                print(response.url)
                self.next_search_page = response.links.get("next").get("url")
                print(response.links.get("next"))
                self.log_writer.write_info_log("Call Github API Search is done")
                return response.json()
            else:
                # Continue from the recorded pagination URL.
                response = requests.get(self.next_search_page,
                                        auth=self.g_account)
                if response.links.get("next") is not None:
                    self.next_search_page = response.links.get("next").get("url")
                    print(response.links.get("next"))
                else:
                    # Last page of this window: restart pagination below the
                    # star count of the last repo returned.
                    self.next_search_page = None
                    next_tmp = response.json().get("items")[-1].get("stargazers_count")
                    if next_tmp != self.next_search_stars_ub:
                        self.next_search_stars_ub = next_tmp
                    else:
                        # The bound stopped moving: the Search API result
                        # limit is reached, flag the crawl as finished.
                        print(self.next_search_stars_ub)
                        print("Reach the limitation")
                        self.next_search_page = "END"
                return response.json()
        else:
            self.log_writer.write_error_log("call_api_search failed")

    def check_api_ratelimit(self):
        """Return True once a Search-API call is available, sleeping until the
        rate-limit window resets when the quota is exhausted."""
        api_rate_limit = "https://api.github.com/rate_limit"
        try:
            response = requests.get(url=api_rate_limit,
                                    timeout=15.5,
                                    auth=self.g_account)
        except requests.exceptions.Timeout:
            # BUG FIX: the original logged an undefined 'download_url' here
            # (NameError) and dropped the recursive result (returned None).
            self.log_writer.write_error_log(
                "Rate limit request ---- TIMEOUT" + " url: " + api_rate_limit)
            # TO DO: Reconnect
            return self.check_api_ratelimit()
        except requests.exceptions.HTTPError:
            # BUG FIX: 'response' and 'download_url' were both unbound here.
            self.log_writer.write_error_log(
                "Rate limit request ---- BAD REQUEST" + " url: " + api_rate_limit)
            # TO DO: Reconnect
            return self.check_api_ratelimit()
        else:
            print(response.status_code)
            search_limits = response.json().get("resources").get("search")
            print(search_limits)
            # Remaining Search-API calls and the reset time (Unix seconds).
            remaining_times = search_limits.get("remaining")
            reset_time = search_limits.get("reset")
            if remaining_times > 0:
                return True
            # Quota exhausted: sleep until the window resets, then re-check.
            current_time = int(time.time())
            wait_time = reset_time - current_time
            self.log_writer.write_info_log(
                "Wait for API rate limit reset. Thread sleep for " +
                str(wait_time) + " seconds")
            # Test
            print("Wait for API reset" + str(wait_time) + " seconds")
            time.sleep(wait_time)
            # BUG FIX: the original recursed here without returning, so the
            # caller saw None (falsy) even after a successful reset.
            return self.check_api_ratelimit()

    def save_repo_info_to_json(self, api_json):
        """Extract the fields we care about from a search-result item and save
        them to '<output_path>/<owner>-<repo>.json'."""
        repo_json = {
            "name": api_json.get("name"),
            "full_name": api_json.get("full_name"),
            "owner_name": api_json.get("owner").get("login"),
            "html_url": api_json.get("html_url"),
            "description": api_json.get("description"),
            "api_url": api_json.get("url"),
            "created_at": api_json.get("created_at"),
            "updated_at": api_json.get("updated_at"),
            "language": api_json.get("language"),
            "has_wiki": api_json.get("has_wiki"),
            "source": "github",
        }
        # '/' in "owner/repo" would create a sub-directory, so flatten it.
        repo_name = repo_json["full_name"].replace("/", "-")
        with open(os.path.join(self.output_path, repo_name) + ".json", "w") as file:
            file.write(json.dumps(repo_json, indent=2))

    def download(self, download_url, download_filename):
        """Stream '<download_url>/zipball' into '<output_path>/<name>.zip',
        showing a progress bar, and verify the zip's integrity.

        Downloading does not consume Search-API quota. Network failures are
        logged and the download is retried recursively.
        """
        try:
            headers = {
                'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.162 Safari/537.36',
                'Accept-Encoding': 'identity'
            }
            response = requests.get(download_url + "/zipball",
                                    headers=headers,
                                    timeout=15.5,
                                    stream=True,
                                    auth=self.g_account)
        except requests.exceptions.Timeout:
            self.log_writer.write_error_log("Download request ---- TIMEOUT" +
                                            " url: " + download_url)
            print("")
            # TO DO: Reconnect
            self.download(download_url, download_filename)
        except requests.exceptions.HTTPError:
            # BUG FIX: 'response' is unbound when the request itself raised,
            # so the original status-code log line was a NameError.
            self.log_writer.write_error_log(
                "Download request ---- BAD REQUEST" + " url: " + download_url)
            print("")
            # TO DO: Reconnect
            self.download(download_url, download_filename)
        except requests.exceptions.ConnectionError:
            self.log_writer.write_error_log(
                "Download request ---- Connection broken due to bad Internet condition"
                + " url: " + download_url)
            print("")
            # TO DO: Reconnect
            self.download(download_url, download_filename)
        else:
            # Content length is absent for chunked transfers.
            file_size = response.headers.get("content-length")
            total = int(file_size) if file_size is not None else None
            pbar = tqdm(total=total,
                        ncols=80,
                        ascii=False,
                        unit='b',
                        unit_scale=True,
                        desc=download_filename)
            try:
                zip_path = os.path.join(self.output_path, download_filename) + ".zip"
                with open(zip_path, "wb") as f:
                    for chunk in response.iter_content(chunk_size=1024):
                        if chunk:
                            f.write(chunk)
                            pbar.update(1024)
                # A truncated download shows up as a corrupt zip file.
                with zipfile.ZipFile(zip_path, 'r') as zipfile_test:
                    zipfile_test.testzip()
                self.log_writer.write_info_log(
                    "Download request ---- Download Complete! Zip file is fine!"
                    + " url: " + download_url)
            except (requests.exceptions.ConnectionError,
                    requests.exceptions.ChunkedEncodingError):
                self.log_writer.write_error_log(
                    "Download request ---- Connection broken due to bad Internet condition"
                    + " url: " + download_url)
                pbar.close()
                response.close()
                print("")
                # TO DO: Reconnect
                self.download(download_url, download_filename)
            except zipfile.BadZipFile:
                self.log_writer.write_error_log(
                    "Download request ---- Zip file broken" + " url: " +
                    download_url)
                pbar.close()
                response.close()
                print("")
                # TO DO: Reconnect
                self.download(download_url, download_filename)
            finally:
                pbar.close()
                response.close()
                print("")

    def check_repo_records(self, api_json):
        """Return True when the repo should be (re)downloaded: the record file
        doesn't exist yet, the repo is new, or its 'updated_at' changed."""
        repo_id = str(api_json.get("id"))
        # First run: create an empty record file and download everything.
        if not (os.path.exists(self.download_record_filepath)
                and os.path.isfile(self.download_record_filepath)):
            self.log_writer.write_error_log(
                "Download_Record Json File doesn't exist")
            with open(self.download_record_filepath, 'w') as f:
                pass
            return True
        with open(self.download_record_filepath, 'r') as f:
            content = f.read()
        if content == "":
            return True
        # BUG FIX: the original called json.load(f) after f.read() had already
        # consumed the file, which always raised; parse the text instead.
        download_info_json = json.loads(content)
        record = download_info_json.get(repo_id)
        if record is None:
            # New repo — needs downloading.
            self.log_writer.write_info_log(
                "Check repo records ---- Repo is new! Need to be downloaded."
                + " url: " + api_json.get("url"))
            return True
        old_updated = record.get("updated_at")
        print(old_updated)
        new_updated = api_json.get("updated_at")
        print(new_updated)
        if old_updated != new_updated:
            print("Repo Updated")
            # Persist the new timestamp so the next run sees it as current.
            download_info_json[repo_id]["updated_at"] = new_updated
            with open(self.download_record_filepath, 'w') as f:
                f.write(json.dumps(download_info_json, indent=2))
            self.log_writer.write_info_log(
                "Check repo records ---- Repo need to update." + " url: " +
                api_json.get("url"))
            return True
        self.log_writer.write_info_log(
            "Check repo records ---- Repo does not need to update." + " url: "
            + api_json.get("url"))
        return False

    def save_download_info_to_json(self, api_json):
        """Record id -> {updated_at, stars} in the download-record file so
        later runs can skip unchanged repos."""
        # BUG FIX: the original left 'download_record_json' undefined when the
        # record file was missing/empty (NameError on the next line), and
        # wrote the result to a hardcoded "download_records_test.json" instead
        # of the configured record file read by check_repo_records().
        download_record_json = {}
        if (os.path.exists(self.download_record_filepath)
                and os.path.isfile(self.download_record_filepath)):
            with open(self.download_record_filepath, 'r') as f:
                content = f.read()
            if content != "":
                download_record_json = json.loads(content)
        repo_info = {
            "updated_at": api_json.get("updated_at"),
            "stars": api_json.get("stargazers_count"),
        }
        download_record_json[api_json.get("id")] = repo_info
        with open(self.download_record_filepath, "w") as f:
            f.write(json.dumps(download_record_json, indent=2))
def __init__(self):
    """Prepare an SSH client; the SFTP session is opened later."""
    self.log_writer = LogWriter()
    self.ssh_client = paramiko.SSHClient()
    self.sftp = None  # set once a connection is actually opened
def __init__(self, connection):
    """Keep the transfer connection; project paths are configured later."""
    self.log_writer = LogWriter()
    # Filled in from the path-config file before use.
    self.unclean_projects_path = self.clean_projects_path = None
    self.connection = connection
class CleanZippedProjects:
    """Unzips downloaded project archives from the 'unclean' directory into
    the 'clean' directory and removes unwanted files afterwards."""

    def __init__(self):
        self.log_writer = LogWriter()
        # Config file naming the clean/unclean directories.
        self.file_paths = "file_paths.json"
        self.unclean_projects_path = None
        self.clean_projects_path = None

    def load_file_paths(self):
        """Read the clean/unclean directory paths from the config file."""
        file_paths_config_file = open(self.file_paths)
        file_paths = json.load(file_paths_config_file)
        self.clean_projects_path = file_paths["Linux"]["clean_dir"]
        self.unclean_projects_path = file_paths["Linux"]["unclean_dir"]
        file_paths_config_file.close()

    # Unzips all zipped folders in the directories in unclean to corresponding directories in clean
    def unzip(self):
        """Extract every .zip one level below unclean_projects_path into a
        matching directory under clean_projects_path; already-extracted
        projects (target dir exists) are skipped."""
        self.log_writer.write_info_log("Unzipping files")
        try:
            for directory in os.listdir(self.unclean_projects_path):
                if os.path.isdir(self.unclean_projects_path + "/" + directory):
                    for file in os.listdir(self.unclean_projects_path + "/" +
                                           directory):
                        file_path = self.unclean_projects_path + "/" + directory + "/" + file
                        if file.endswith(".zip"):
                            # Target: clean/<dir>/<zip name without .zip>
                            unzip_path = self.clean_projects_path + "/" + directory + "/" + file[:-4]
                            if not os.path.isdir(unzip_path):
                                os.makedirs(unzip_path)
                            else:
                                # Already extracted on a previous run.
                                continue
                            # NOTE(review): shell command built from file
                            # names — a name with spaces or shell metachars
                            # breaks or injects; consider zipfile/subprocess.
                            unzip_command = "unzip " + file_path + " -d " + unzip_path
                            os.system(unzip_command)
                            self.log_writer.write_info_log(directory + "/" +
                                                           file + " unzipped")
            self.log_writer.write_info_log("Files unzipped")
        except Exception as command_error:
            self.log_writer.write_error_log("Could not unzip files: " +
                                            str(command_error))

    def compare_projects(self):
        """Log every unclean .zip that has no matching extracted directory
        under the clean tree (i.e. projects that failed to unzip)."""
        unclean_projects = []
        clean_projects = []
        for directory in os.listdir(self.unclean_projects_path):
            for project in os.listdir(self.unclean_projects_path + "/" +
                                      directory):
                if project.endswith(".zip"):
                    unclean_projects.append(project[:-4])
        for directory in os.listdir(self.clean_projects_path):
            if os.path.isdir(self.clean_projects_path + "/" + directory):
                for project in os.listdir(self.clean_projects_path + "/" +
                                          directory):
                    if os.path.isdir(self.clean_projects_path + "/" +
                                     directory + "/" + project):
                        clean_projects.append(project)
        for not_unzipped in set(unclean_projects).difference(clean_projects):
            self.log_writer.write_error_log("Could not unzip: " + not_unzipped)

    # Uses the delete_files_script to delete all files in a folder
    # who's extensions aren't in the language configuration file
    def delete_files(self):
        """Run the external cleanup script over the clean directory.

        NOTE(review): os.system does not raise on a non-zero exit status, so
        the except branch only catches Python-level errors — the success log
        line prints even when the script fails.
        """
        self.log_writer.write_info_log("Deleting unwanted files")
        try:
            os.system("cd /home/ubuntu/test_files/clean; python3 delete_unwanted_files.py")
            self.log_writer.write_info_log("Unwanted files deleted")
        except Exception as command_error:
            self.log_writer.write_error_log("Could not delete files: " +
                                            str(command_error))

    def run(self):
        """Entry point: load paths from config, then unzip everything."""
        self.load_file_paths()
        self.unzip()
class Ui_MainWindow(object):
    """Main PyQt5 window for a roller-blind ("roll") device dashboard.

    Builds a stacked-widget UI with pages: 0 = device grid, 1 = settings,
    2 = add device, 3 = graphs, 4 = manual control. State lives on the
    instance: self.devices (list of Device), self.currentDevice, self.lang
    (Language for UI strings), self.log (LogWriter).
    """

    def setupLog(self):
        """Create the log writer and clear any previous log contents."""
        self.log = LogWriter()
        self.log.resetLog()

    #sets up basic ui with buttons: manual, graphs, settings and info
    def setupUi(self, mainWindow):
        """Build the whole UI into `mainWindow`: left button bar, top status
        bar (logo + sky/temperature labels), and the stacked page area."""
        stylesheetFile = "Stylesheet.css" #styling
        # NOTE(review): fh is never closed — harmless for a one-shot UI setup,
        # but a `with` block would be tidier.
        fh = open(stylesheetFile)
        qstr = str(fh.read())
        self.MainWindow = mainWindow #assign mainwindow
        self.MainWindow.setStyleSheet(qstr)
        self.MainWindow.setObjectName("MainWindow")
        self.MainWindow.resize(1000, 650)
        #setup basic variables
        self.devices = []
        self.currentDevice = None
        self.setupLog()
        self.mainQueue = Queue()
        self.lang = Language(0)  # 0 = English (see retranslateUi)
        #fill mainwindow
        self.centralwidget = QtWidgets.QWidget(self.MainWindow)
        self.centralwidget.setObjectName("centralwidget")
        # Left-hand vertical button bar (90px wide, full height).
        self.verticalLayoutWidget = QtWidgets.QWidget(self.centralwidget)
        self.verticalLayoutWidget.setGeometry(QtCore.QRect(0, 0, 90, 650))
        self.verticalLayoutWidget.setObjectName("verticalLayoutWidget")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.verticalLayoutWidget)
        self.verticalLayout.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout.setObjectName("buttonBar")
        spacerItem = QtWidgets.QSpacerItem(20, 50, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
        self.verticalLayout.addItem(spacerItem)
        self.addADevice = QtWidgets.QPushButton(self.verticalLayoutWidget)
        self.addADevice.setObjectName("addADevice")
        self.addADevice.setFixedSize(90, 90)
        self.verticalLayout.addWidget(self.addADevice)
        self.Manual = QtWidgets.QPushButton(self.verticalLayoutWidget)
        self.Manual.setObjectName("Manual")
        self.Manual.setFixedSize(90, 90)
        self.verticalLayout.addWidget(self.Manual)
        self.Graphs = QtWidgets.QPushButton(self.verticalLayoutWidget)
        self.Graphs.setObjectName("Graphs")
        self.Graphs.setFixedSize(90, 90)
        self.verticalLayout.addWidget(self.Graphs)
        self.Settings = QtWidgets.QPushButton(self.verticalLayoutWidget)
        self.Settings.setObjectName("Settings")
        self.Settings.setFixedSize(90, 90)
        self.verticalLayout.addWidget(self.Settings)
        spacerItem1 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
        self.verticalLayout.addItem(spacerItem1)
        self.Info = QtWidgets.QPushButton(self.verticalLayoutWidget)
        self.Info.setObjectName("Info")
        self.Info.setFixedSize(90, 90)
        self.verticalLayout.addWidget(self.Info)
        spacerItem2 = QtWidgets.QSpacerItem(20, 20, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
        self.verticalLayout.addItem(spacerItem2)
        # Top horizontal status bar (logo + sky/temperature readout).
        self.horizontalLayoutWidget = QtWidgets.QWidget(self.centralwidget)
        self.horizontalLayoutWidget.setGeometry(QtCore.QRect(90, 0, 910, 50))
        self.horizontalLayoutWidget.setObjectName("horizontalLayoutWidget")
        self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.horizontalLayoutWidget)
        self.horizontalLayout_2.setContentsMargins(0, 0, 0, 0)
        self.horizontalLayout_2.setObjectName("horizontalLayout_2")
        self.Logo = QtWidgets.QLabel(self.horizontalLayoutWidget)
        self.Logo.setEnabled(True)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.Logo.sizePolicy().hasHeightForWidth())
        self.Logo.setSizePolicy(sizePolicy)
        self.Logo.setMinimumSize(QtCore.QSize(0, 0))
        self.Logo.setMaximumSize(QtCore.QSize(250, 50))
        font = QtGui.QFont()
        font.setFamily("Calibri")
        font.setPointSize(16)
        font.setBold(True)
        font.setWeight(75)
        self.Logo.setFont(font)
        self.Logo.setAutoFillBackground(True)
        self.Logo.setFrameShape(QtWidgets.QFrame.Box)
        self.Logo.setFrameShadow(QtWidgets.QFrame.Raised)
        self.Logo.setObjectName("Logo")
        pic = QPixmap('rsz_1aerosdev')
        self.Logo.setPixmap(pic)
        self.horizontalLayout_2.addWidget(self.Logo)
        spacerItem3 = QtWidgets.QSpacerItem(20, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.horizontalLayout_2.addItem(spacerItem3)
        # Frame holding the "Sky"/"TempUp" labels.
        self.fSkyTemp = QtWidgets.QFrame(self.horizontalLayoutWidget)
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Expanding)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.fSkyTemp.sizePolicy().hasHeightForWidth())
        self.fSkyTemp.setSizePolicy(sizePolicy)
        self.fSkyTemp.setMinimumSize(QtCore.QSize(180, 100))
        self.fSkyTemp.setFrameShape(QtWidgets.QFrame.StyledPanel)
        self.fSkyTemp.setFrameShadow(QtWidgets.QFrame.Raised)
        self.fSkyTemp.setObjectName("fSkyTemp")
        self.Sky = QtWidgets.QLabel(self.fSkyTemp)
        self.Sky.setGeometry(QtCore.QRect(10, 20, 75, 13))
        self.Sky.setObjectName("Sky")
        self.TempUp = QtWidgets.QLabel(self.fSkyTemp)
        self.TempUp.setGeometry(QtCore.QRect(100, 20, 75, 13))
        sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.TempUp.sizePolicy().hasHeightForWidth())
        self.TempUp.setSizePolicy(sizePolicy)
        self.TempUp.setMinimumSize(QtCore.QSize(60, 0))
        self.TempUp.setObjectName("TempUp")
        self.horizontalLayout_2.addWidget(self.fSkyTemp)
        spacerItem4 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Fixed, QtWidgets.QSizePolicy.Minimum)
        self.horizontalLayout_2.addItem(spacerItem4)
        # Stacked widget hosting all pages below the status bar.
        self.stackedWidget = QtWidgets.QStackedWidget(self.centralwidget)
        self.stackedWidget.setGeometry(QtCore.QRect(90, 50, 910, 600))
        # self.stackedWidget.setMinimumSize(QtCore.QSize(600, 600)) #400, 400
        # self.stackedWidget.move(100,100)
        # self.stackedWidget.setStyleSheet("background-color: black")
        # sets up maingrid and adds it to stacked widget
        self.page0 = QtWidgets.QWidget(self.MainWindow)
        self.mainGrid = MainGrid(self.page0, self.devices)
        self.stackedWidget.addWidget(self.mainGrid.page0)
        #sets up pages
        self.setupSettingsWindow()
        self.setupEnterDevice()
        self.setupGraphsWindow()
        self.setupManual()
        #sets starting page
        self.stackedWidget.setCurrentIndex(0)
        #binds functions to mainwindow buttons
        self.addADevice.clicked.connect(lambda: self.setIndex(2))
        self.Manual.clicked.connect(lambda: self.setIndex(4))
        self.Graphs.clicked.connect(lambda: self.setIndex(3))
        self.Settings.clicked.connect(lambda: self.setIndex(1))
        self.Info.clicked.connect(self.showInfo)
        QtCore.QMetaObject.connectSlotsByName(self.MainWindow)
        self.MainWindow.setCentralWidget(self.centralwidget)
        self.retranslateUi(0)

    def setIndex(self, index):
        """Switch the stacked widget to `index` and refresh the device
        combo boxes on the settings/graphs/manual pages."""
        try:
            self.stackedWidget.setCurrentIndex(index)
            # update devices
            # empty devicesBox
            self.devicesBox.clear()
            self.devicesBoxGraphs.clear()
            self.devicesBoxManual.clear()
            # fill devicesBox
            for device in self.devices:
                self.devicesBox.addItem(device.name)
                self.devicesBoxGraphs.addItem(device.name)
                self.devicesBoxManual.addItem(device.name)
            #assign function to startRoll button if a device exist
            if len(self.devices) > 0:
                self.startRoll.setDisabled(False)
                # NOTE(review): `lambda: self.rollOut` returns the bound method
                # without CALLING it, so clicking does nothing — should be
                # `self.rollOut` (or `lambda: self.rollOut()`). Also, connect()
                # here adds a NEW connection on every page switch (they stack).
                if self.currentDevice.status == 1:
                    self.startRoll.clicked.connect(lambda: self.rollOut)
                    self.startRoll.setText(self.lang.but_StartRollOut)
                elif self.currentDevice.status == 0:
                    self.startRoll.clicked.connect(lambda: self.rollUp)
                    self.startRoll.setText(self.lang.but_startRollUp)
        except Exception as e:
            print(e)

    #set the selected sensortype
    def setSensorType(self, type):
        self.sensorType = type

    #change the minimum value of the current device
    def changeMinVal(self, minVal):
        """Validate `minVal` (digits only) and apply it to the current device;
        otherwise show an error popup. Raises→popup if no device is attached."""
        try:
            if self.checkStringForNumber(minVal):
                self.currentDevice.minVal = int(minVal)
                self.log.writeInLog("i", "Minimum value from " + self.currentDevice.name + " changed to " + minVal)
            else:
                if minVal == "aeros development":
                    self.showPopup("e", self.lang.pop_TitleEasterEgg, self.lang.pop_TextEasterEgg)
                    self.log.writeInLog("i", "EASTER EGG FOUND!!!")
                # NOTE(review): reached even in the easter-egg case — a missing
                # `return` above? Confirm intended behavior.
                self.showPopup("e", self.lang.pop_TitleNotValidNumber, self.lang.pop_TextNotValidNumber)
        except:
            self.showPopup("e", self.lang.pop_TitleDevNotAttached, self.lang.pop_TextDevNotAttached)

    def checkStringForNumber(self, string):
        """Return True iff every character of `string` is an ASCII digit
        (an empty string returns True)."""
        numbers = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]
        chrs = list(string)
        if len([chr for chr in chrs if chr not in numbers]) > 0:
            return False
        return True

    # sets up settingswidget that shows the settings
    def setupSettingsWindow(self):
        """Build page 1: min-value editor, device selector, language selector."""
        try:
            self.page1 = QtWidgets.QWidget()
            self.settingsWindowWidget = QtWidgets.QWidget(self.page1)
            self.settingsWindowWidget.setMinimumSize(QtCore.QSize(400, 160))
            self.settingsWindowWidget.setMaximumSize(QtCore.QSize(400, 160))
            layout = QtWidgets.QFormLayout(self.settingsWindowWidget)
            # self.minLight = QtWidgets.QLineEdit(self.settingsWindowWidget)
            # self.minTemp = QtWidgets.QLineEdit(self.settingsWindowWidget)
            self.minVal = QtWidgets.QLineEdit(self.settingsWindowWidget)
            self.minVal.setText("0")
            self.chgMinVal = QtWidgets.QPushButton(self.settingsWindowWidget)
            self.chgMinVal.setText(self.lang.but_ChgMinVal)
            self.chgMinVal.clicked.connect(lambda: self.changeMinVal(self.minVal.text()))
            self.goBack = QtWidgets.QPushButton(self.settingsWindowWidget)
            self.goBack.setText(self.lang.but_Ok)
            self.goBack.clicked.connect(lambda: self.stackedWidget.setCurrentIndex(0))
            self.devicesBox = QtWidgets.QComboBox(self.settingsWindowWidget)
            for device in self.devices:
                self.devicesBox.addItem(device.name)
            self.devicesBox.activated[str].connect(self.setCurrentDevice)
            self.languageBox = QtWidgets.QComboBox(self.settingsWindowWidget)
            self.languageBox.addItem("English")
            self.languageBox.addItem("Nederlands")
            #could add more languages
            self.languageBox.activated[str].connect(self.changeLanguage)
            layout.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.minVal)
            # layout.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.minTemp)
            layout.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.chgMinVal)
            # layout.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.chgMinTemp)
            layout.setWidget(2, QtWidgets.QFormLayout.FieldRole, self.goBack)
            layout.setWidget(2, QtWidgets.QFormLayout.LabelRole, self.devicesBox)
            layout.setWidget(3, QtWidgets.QFormLayout.FieldRole, self.languageBox)
            # self.stackedWidget.insertWidget(1,self.page_1)
            self.stackedWidget.addWidget(self.page1)
            self.log.writeInLog("i", "Page 1: settings window created")
        except Exception as e:
            print(e)
            self.log.writeInLog("w", "Could not create page 1: settings window")

    #change the language,
    def changeLanguage(self, lang):
        """Retranslate the UI: 0 = English, 1 = Dutch."""
        if lang == "English":
            self.retranslateUi(0)
        elif lang == "Nederlands":
            self.retranslateUi(1)

    #setup page3 containing the graph
    def setupGraphsWindow(self):
        """Build page 3: matplotlib canvas plus update/back buttons."""
        try:
            self.page3 = QtWidgets.QWidget()
            self.graphWidget = QtWidgets.QWidget(self.page3)
            self.graphWidget.setGeometry(QtCore.QRect(50, 50, 400, 500))
            self.graphWidget.setMinimumSize(QtCore.QSize(600, 600))
            self.canvas = PlotCanvas(self.graphWidget)
            self.goBack2 = QtWidgets.QPushButton(self.graphWidget)
            self.goBack2.setText(self.lang.but_Ok)
            self.goBack2.clicked.connect(lambda: self.stackedWidget.setCurrentIndex(0))
            self.goBack2.move(450, 400)
            self.update = QtWidgets.QPushButton(self.graphWidget)
            self.update.setText(self.lang.but_Update)
            self.update.clicked.connect(self.fillGraph)
            self.update.move(450, 375)
            self.devicesBoxGraphs = QtWidgets.QComboBox(self.graphWidget)
            for device in self.devices:
                self.devicesBoxGraphs.addItem(device.name)
            self.devicesBoxGraphs.move(450, 0)
            self.devicesBoxGraphs.activated[str].connect(self.setCurrentDevice)
            self.stackedWidget.addWidget(self.page3)
            self.log.writeInLog("i", "Page 3: graphs window created")
        except:
            self.log.writeInLog("w", "Could not create page 3: graphs window")

    #fill graph
    def fillGraph(self):
        """Collect 10 samples for the current device and plot them.

        NOTE(review): the real queue read (`q.get`) is commented out, so
        `transmis` stays None and the data plotted is random dummy data.
        """
        dataList = []
        try:
            q = self.currentDevice.getQueue()
        except:# Exception as e:
            #print(e)
            self.showPopup("e", self.lang.pop_TitleDevNotAttached, self.lang.pop_TextDevNotAttached)
            return
        # fill graph
        #print("test1")
        #print("test2")
        #print(self.currentDevice.queue.get())
        transmis = None
        try:
            for i in range(10):
                #transmis = q.get(True, 2)
                if transmis == None:
                    if self.currentDevice.sensorType == "Light":
                        dataList.append(random.uniform(50,100))
                    elif self.currentDevice.sensorType == "Temperature":
                        dataList.append(random.uniform(20,25))
                else:
                    self.log.writeInLog("i", "Data from " + self.currentDevice.name + " received: " + str(transmis))
                    dataList.append(transmis)
                #time.sleep(1)
        except:
            pass
        try:
            #print("test6")
            self.canvas.plot(dataList, self.currentDevice.sensorType)
            #time.sleep(1)
        except Exception as e:
            print(e)

    #setup page2 containing the settingswindow
    def setupEnterDevice(self):
        """Build page 2: the 'add a device' form (name, min value, roll
        length, COM port, sensor type)."""
        try:
            self.page2 = QtWidgets.QWidget()
            self.sensorType = ""
            self.enterDeviceWidget = QtWidgets.QWidget(self.page2)
            self.enterDeviceWidget.setMinimumSize(QtCore.QSize(400, 300))
            self.enterDeviceWidget.setMaximumSize(QtCore.QSize(400, 300))
            layout = QtWidgets.QFormLayout(self.enterDeviceWidget)
            namelabel = QLabel(self.lang.lab_Name)
            # lightlabel = QLabel("Min light")
            # templabel = QLabel("Min temp")
            valuelabel = QLabel(self.lang.lab_MinVal)
            maxRollLengthLabel = QLabel(self.lang.lab_MaxRollLength)
            portlabel = QLabel(self.lang.lab_PortNum)
            sensorlabel = QLabel(self.lang.lab_SensorType)
            self.name = QtWidgets.QLineEdit(self.enterDeviceWidget) # .setText("")
            # self.light = QtWidgets.QLineEdit(self.enterDeviceWidget)#.setText("0")
            # self.temp = QtWidgets.QLineEdit(self.enterDeviceWidget)#.setText("0")
            self.port = QtWidgets.QLineEdit(self.enterDeviceWidget) # .setText("COM0")
            self.value = QtWidgets.QLineEdit(self.enterDeviceWidget)
            self.maxRollLength = QtWidgets.QLineEdit(self.enterDeviceWidget)
            self.name.setText("")
            # self.light.setText("0")
            # self.temp.setText("0")
            self.maxRollLength.setText("0")
            self.value.setText("0")
            self.port.setText("COM0")
            self.name.setMaximumSize(QtCore.QSize(100, 200))
            # self.light.setMaximumSize(QtCore.QSize(100,200))
            # self.temp.setMaximumSize(QtCore.QSize(100,200))
            self.value.setMaximumSize(QtCore.QSize(100, 200))
            self.maxRollLength.setMaximumSize(QtCore.QSize(100, 200))
            self.port.setMaximumSize(QtCore.QSize(100, 200))
            self.sensor = QtWidgets.QComboBox(self.enterDeviceWidget)
            self.sensor.addItem(self.lang.selBox_light)
            self.sensor.addItem(self.lang.selBox_temp)
            self.sensor.setMaximumSize(QtCore.QSize(100, 200))
            self.sensor.activated[str].connect(self.setSensorType)
            self.addDevice = QtWidgets.QPushButton(self.enterDeviceWidget)
            self.addDevice.setText(self.lang.but_AddDevice)
            self.addDevice.setMaximumSize(QtCore.QSize(120, 300))
            self.addDevice.clicked.connect(self.addDeviceNoPar)
            self.goBack3 = QtWidgets.QPushButton(self.enterDeviceWidget)
            self.goBack3.setText(self.lang.but_Ok)
            self.goBack3.setMaximumSize(QtCore.QSize(100, 200))
            self.goBack3.clicked.connect(lambda: self.setIndex(0))
            layout.addRow(namelabel, self.name)
            # layout.addRow(lightlabel, self.light)
            # layout.addRow(templabel, self.temp)
            layout.addRow(valuelabel, self.value)
            layout.addRow(maxRollLengthLabel, self.maxRollLength)
            layout.addRow(portlabel, self.port)
            layout.addRow(sensorlabel, self.sensor)
            layout.addRow(self.addDevice, self.goBack3)
            # self.stackedWidget.insertWidget(2,self.page_2)
            self.setSensorType("Light")
            self.stackedWidget.addWidget(self.page2)
            self.log.writeInLog("i", "Page 2: enter device window created")
        except:
            self.log.writeInLog("w", "Could not create Page 2: enter device window")

    # makes inputdialog in which you can enter a percentage
    #setup page4 containing the manual mode window
    def setupManual(self):
        """Build page 4: manual roll up/down control for a selected device."""
        try:
            self.page4 = QtWidgets.QWidget()
            self.manualWidget = QtWidgets.QWidget(self.page4)
            layout = QtWidgets.QFormLayout(self.manualWidget)
            self.startRoll = QtWidgets.QPushButton(self.manualWidget)
            self.startRoll.setText(self.lang.but_StartRoll)
            # Disabled until at least one device exists (see setIndex).
            self.startRoll.setDisabled(True)
            self.devicesBoxManual = QtWidgets.QComboBox(self.manualWidget)
            for device in self.devices:
                self.devicesBoxManual.addItem(device.name)
            self.devicesBoxManual.activated[str].connect(self.setCurrentDevice)
            self.ok = QtWidgets.QPushButton(self.manualWidget)
            self.ok.setText(self.lang.but_Ok)
            self.ok.setMaximumSize(QtCore.QSize(100, 200))
            self.ok.clicked.connect(lambda: self.setIndex(0))
            layout.addRow(self.devicesBoxManual, self.startRoll)
            layout.addRow(self.ok, self.ok)
            self.manualWidget.setLayout(layout)
            self.stackedWidget.addWidget(self.page4)
            self.log.writeInLog("i", "Page 4: manual window created")
        except Exception as e:
            print(e)
            self.log.writeInLog("w", "Could not create Page 4: manual window")

    #roll out the selected device
    def rollOut(self):
        """Roll the current device down (status -> 0) and refresh the grid."""
        self.showPopup("i", self.lang.pop_TitleRollOut, self.currentDevice.name + self.lang.pop_TextRollOut)
        self.log.writeInLog("i", self.currentDevice.name + " rolled out")
        self.currentDevice.rollDown()
        self.currentDevice.status = 0
        self.updateMaingrid(self.MainWindow)

    #roll up the selected device
    def rollUp(self):
        """Roll the current device up (status -> 1) and refresh the grid."""
        self.showPopup("i", self.lang.pop_TitleRollUp, self.currentDevice.name + self.lang.pop_TextRollUp)
        self.log.writeInLog("i", self.currentDevice.name + " rolled up")
        self.currentDevice.rollUp()
        self.currentDevice.status = 1
        self.updateMaingrid(self.MainWindow)

    #connect to the device and add it to the dashboard
    def addDeviceNoPar(self):
        """Validate the 'add device' form, create the Device, start its
        receive thread, and refresh the main grid. Shows a popup and aborts
        on any invalid field or duplicate name."""
        # A valid name must contain at least one non-digit character.
        if not self.checkStringForNumber(self.name.text()):
            nameRes = self.name.text()
        else:
            self.showPopup("e", self.lang.pop_TitleNotValidName, self.lang.pop_TextNotValidName)
            self.name.setText("")
            return
        if "COM" in self.port.text():
            portRes = self.port.text()
        else:
            self.showPopup("e", self.lang.pop_TitleNotValidPort, self.lang.pop_TextNotValidPort)
            self.port.setText("COM0")
            return
        if self.checkStringForNumber(self.value.text()):
            valRes = int(self.value.text())
        else:
            self.showPopup("e", self.lang.pop_TitleNotValidNumber, self.lang.pop_TextNotValidNumber)
            self.value.setText("0")
            return
        try:
            maxRollRes = float(self.maxRollLength.text())
        except:
            self.showPopup("e", self.lang.pop_TitleNotValidNumber, self.lang.pop_TextNotValidNumber)
            self.maxRollLength.setText("0")
            return
        # Reset the form now that all values have been captured.
        self.name.setText("")
        self.port.setText("COM0")
        self.value.setText("0")
        self.maxRollLength.setText("0")
        if nameRes == "":
            self.showPopup("e", self.lang.pop_TitleNoName, self.lang.pop_TextNoName)
            return
        for device in self.devices:
            if device.name == nameRes:
                self.showPopup("e", self.lang.pop_TitleDupNames, self.lang.pop_TextDupNames)
                self.name.setText("")
                return
        try:
            newDevice = Device(nameRes, portRes, self.sensorType, valRes, maxRollRes, self.mainQueue) # lightRes, tempRes)
            self.devices.append(newDevice)
            # NOTE(review): always selects devices[0], not the device just added.
            self.setCurrentDevice(self.devices[0].name)
            try:
                # Daemon thread: dies with the application.
                receiving = Thread(target=self.currentDevice.receive)
                receiving.daemon = True
                receiving.start()
            except Exception as e:
                print(e)
            self.log.writeInLog("i", "New device added: name: " + nameRes + " | Port: " + portRes + " | Sensor type: " + self.sensorType + " | Minimum value: " + str(
                valRes) + " | Max roll length: " + str(maxRollRes))
            self.showPopup("i", self.lang.pop_TitleNewDevice, self.lang.pop_TextNewDevice_1 + nameRes + self.lang.pop_TextNewDevice_2)
            try:
                self.updateMaingrid(self.MainWindow)
            except Exception as e:
                print(e)
        except Exception as e:
            print(e)
            self.log.writeInLog("w", "Could not add device: " + nameRes)
            self.showPopup("e", self.lang.pop_TitleNoNewDevice, self.lang.pop_TextNoNewDevice)
            newDevice = None

    #show a popup, can be error or info
    def showPopup(self, type, popupText, popupIText):
        """Show a modal popup; `type` is "e" (error) or "i" (info)."""
        popup = QMessageBox()
        if type == "e":
            popup.setIcon(QMessageBox.Critical)
            popup.setWindowTitle("Error")
            self.log.writeInLog("w", "Error popup shown: " + popupText + " | " + popupIText)
        elif type == "i":
            popup.setIcon(QMessageBox.Information)
            popup.setWindowTitle("Info")
            self.log.writeInLog("i", "Information popup shown: " + popupText + " | " + popupIText)
        popup.setText(popupText)
        popup.setInformativeText(popupIText)
        popup.exec()

    #set the current selected device
    def setCurrentDevice(self, name):
        for device in self.devices:
            if device.name == name:
                self.currentDevice = device

    # sets te text in the given language
    def retranslateUi(self, type):
        """Set every widget's text from the Language table; `type` selects
        the language (0 = English, 1 = Dutch)."""
        # choose language
        self.lang.setLang(type)
        _translate = QtCore.QCoreApplication.translate
        self.MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
        self.addADevice.setText(_translate("MainWindow", self.lang.but_AddADevice))
        self.Manual.setText(_translate("MainWindow", self.lang.but_Manual))
        self.Graphs.setText(_translate("MainWindow", self.lang.but_Graphs))
        self.Settings.setText(_translate("MainWindow", self.lang.but_Settings))
        self.Info.setText(_translate("MainWindow", self.lang.but_Info))
        self.Sky.setText(_translate("MainWindow", self.lang.lab_Sky))
        self.TempUp.setText(_translate("MainWindow", self.lang.lab_Temp))
        # NOTE(review): the next two setText calls are dead — both are
        # overwritten by but_StartRoll a few lines below.
        self.startRoll.setText("Roll out")
        self.startRoll.setText("Roll up")
        self.goBack.setText(self.lang.but_Ok)
        self.chgMinVal.setText(self.lang.but_ChgMinVal)
        self.goBack2.setText(self.lang.but_Ok)
        self.update.setText(self.lang.but_Update)
        self.addDevice.setText(self.lang.but_AddDevice)
        self.sensor.clear()
        self.sensor.addItem(self.lang.selBox_light)
        self.sensor.addItem(self.lang.selBox_temp)
        self.goBack3.setText(self.lang.but_Ok)
        self.startRoll.setText(self.lang.but_StartRoll)
        self.ok.setText(self.lang.but_Ok)

    # Makes popup with info
    def showInfo(self):
        self.showPopup("i", self.lang.pop_TitleInfo, self.lang.pop_TextInfo)

    #update the maingrid
    def updateMaingrid(self, MainWindow):
        """Rebuild page 0 (the device grid) after the device list changed."""
        self.page0.setParent(None)
        self.page0 = QtWidgets.QWidget(MainWindow)
        self.mainGrid = MainGrid(self.page0, self.devices)
        self.stackedWidget.insertWidget(0, self.mainGrid.page0)
        # this changed right
def __init__(self): self.file_paths = "file_paths.json" self.json_path = None self.log_writer = LogWriter() self.load_file_paths()
def PlayMessage(volume, prefix_file): MediaPlayer = MediaPlayerClass(DFPlayerMiniCommander, 4, [200], 1) MediaPlayer.PlayingFolder = 0 MediaPlayer.SetVolume(volume) while True: MediaPlayer.PlayingFile = random()%10 + prefix_file MediaPlayer.PlayFile() sleep(0.1) if(MediaPlayer.BusyPin.value() == 0): break while MediaPlayer.BusyPin.value() == 0: pass DFPlayerMiniCommander = MicroPlayer() NTPTime = NTPTimeClass() Log = LogWriter("/logs/log.txt") Config = ConfigReader("/configs/config.txt") if(len(Config.Errors) != 0): Log.Write("Ошибка(-и) загрузки конфига! :-(\n - ", "!!!ERROR!!!") for i in Config.Errors: Log.Write(i, "!!!ERROR!!!") MediaPlayer = MediaPlayerClass(DFPlayerMiniCommander, 4, [200], 1) PlayMessage(Config.Volume[7 -1], 109) raise ValueError("Ошибка загрузки конфига! :-(") else: Log.Write("Конфиг загружен! =-)", "CONFIG", TimeWithMyTimeZone(NTPTime.GetNTPTime(), Config.TimeZone)) MediaPlayer = MediaPlayerClass(DFPlayerMiniCommander, 4, [200], 1) wifi = network.WLAN(network.STA_IF) wifi.active(True) wifi.ifconfig((Config.WLAN_static_ip, Config.WLAN_netmask, Config.WLAN_gateway, Config.WLAN_DNS_server))
class GithubCrawler(object):
    """Crawls GitHub's search API for repositories, saves their metadata to
    JSON files, and downloads repository zipballs.

    Requires a configured GitHub account (see config_github_account);
    search requests are throttled against the API rate limit.
    """

    def __init__(self):
        super(GithubCrawler, self).__init__()
        # self.arg = arg
        self.log_writer = LogWriter()
        # Change Account ?
        # self.g = GitHub("SoapKe", "BBC19951228Soap")
        # Config Github Account -- empty credentials until configured.
        self.g = GitHub("", "")
        self.project_filename = "download_test"
        self.output_path = ""

    def config_github_account(self, username, password):
        """Replace the GitHub client with one authenticated as username/password."""
        self.g = GitHub(username, password)
        # return g

    def call_api_search(self, account, language, page, per_page):
        """Run one repository search (by language, sorted by stars desc).

        Returns (status, data) on success; returns None implicitly if the
        rate-limit check fails.
        """
        # Check Github Account Rate Limit
        if self.check_api_ratelimit(account):
            g = account
            status, data = g.search.repositories.get(
                q = "language:" + language,
                sort = "stars",
                order = "desc",
                page = page,
                per_page = per_page
            )
            return status, data
        else:
            # self.log_writer.write_error_log("call_api_search failed")
            pass

    def check_api_ratelimit(self, account):
        """Return True once the account has Search-API quota remaining,
        sleeping until the rate-limit window resets if necessary."""
        # Config Github Account
        g = account
        status, data = g.rate_limit.get()
        print(status)
        print(json.dumps(data, indent = 2))
        # Get remaning number of times for Search API Request
        remaining_times = data.get("resources").get("search").get("remaining")
        # Get API rate limit reset time -- in Unix Time
        reset_time = data.get("resources").get("search").get("reset")
        if remaining_times > 0:
            return True
        else:
            # Get current sys time in Unix Time
            current_time = int(time.time())
            # Get wait time
            wait_time = reset_time - current_time
            # Log
            self.log_writer.write_info_log("Wait for API rate limit reset. Thread sleep for " + str(wait_time) + " seconds")
            # Test
            print("Wait for API reset" + str(wait_time) + " seconds")
            # Hold the theard
            time.sleep(wait_time)
            # Check again.
            # BUG FIX: the original recursed as self.check_api_ratelimit() —
            # missing the required `account` argument (TypeError) — and
            # discarded the result, so callers always saw None after a wait.
            return self.check_api_ratelimit(account)

    def save_project_info_to_json(self, api_json):
        """Extract the fields we keep from a repository search result and
        write them to '<owner>-<repo>.json' in the current directory."""
        # Create JSON
        project_json = {}
        # Store Field -- Repo Name
        project_json["name"] = api_json.get("name")
        # Store Field -- Repo Full Name
        project_json["full_name"] = api_json.get("full_name")
        # Store Field -- Repo Owner Name
        project_json["owner_name"] = api_json.get("owner").get("login")
        # Store Field -- Repo HTML URL
        project_json["html_url"] = api_json.get("html_url")
        # Store Field -- Repo Description
        project_json["description"] = api_json.get("description")
        # Store Field -- Repo Api URL for downloading
        project_json["api_url"] = api_json.get("url")
        # Store Field -- Repo Created Time
        project_json["created_at"] = api_json.get("created_at")
        # Store Field -- Repo Updated Time
        project_json["updated_at"] = api_json.get("updated_at")
        # Store Field -- Repo Programming Language
        project_json["language"] = api_json.get("language")
        # Store Field -- Repo Has Wiki
        project_json["has_wiki"] = api_json.get("has_wiki")
        # Store Field -- Source
        project_json["source"] = "github"
        # Save to JSON file; "/" in the full name is not a valid filename char.
        project_name = project_json["full_name"].replace("/", "-")
        with open(project_name + ".json", "w") as file:
            file.write(json.dumps(project_json, indent = 2))

    def download(self, download_url, download_filename):
        """Stream the repository zipball at download_url into
        '<download_filename>.zip', with a tqdm progress bar."""
        ### Download does not cost the API Times ###
        # Authentication?
        # Header -- if need?
        # SECURITY NOTE(review): credentials are hardcoded here instead of
        # using the account configured via config_github_account — they should
        # come from configuration, not source control.
        response = requests.get(download_url + "/zipball", stream = True, auth = HTTPBasicAuth('SoapKe', 'BBC19951228Soap'))
        print(response.status_code)
        if(response.status_code == 200):
            # Content length -- Chunked ##### TO DO #####
            file_size = response.headers.get("content-length")
            print(file_size)
            file_name = response.headers.get("Content-Disposition")
            print(file_name)
            # print(response.headers)
            if file_size != None:
                # Unit in byte
                pbar = tqdm(total = int(file_size), ncols = 80, ascii = False, unit = 'b', unit_scale = True, desc = "download_test")
            else:
                # Unit in byte -- size unknown (chunked transfer)
                pbar = tqdm(total = None, ncols = 80, ascii = False, unit = 'b', unit_scale = True, desc = "download_test")
            # Have a chance to fail, when the Internet Connection is bad
            # Retry? How to catch breakconnection when "stream = True"
            with open(download_filename + ".zip", "wb") as f:
                # Chunk size unit in byte
                for chunk in response.iter_content(chunk_size = 1024):
                    if chunk:
                        f.write(chunk)
                        pbar.update(1024)
            pbar.close()
            response.close()
        else:
            print("Download Connection Error")
            response.close()
        print("")

    def run(self):
        pass
def __init__(self, daemon_uuid, log_path): LogWriter.__init__(self, daemon_uuid) self.log_path = log_path
class Trainer(ABC):
    """Abstract training loop: epochs of train/validate, best-model tracking,
    checkpointing, and a final test pass.

    Subclasses implement run_step, metric_init, save_best_model and
    log_metric. Model, data loaders, criteria and optimizer are attached
    after construction (set_data_loader etc.).
    """

    def __init__(self,h_params:HParams,seed: int = None):
        self.h_params = h_params
        self.model = None
        self.train_data_loader = None
        self.valid_data_loader = None
        self.test_data_loader = None
        self.criteria = None
        self.optimizer = None
        # Seed RNG: reuse CUDA's initial seed when none given so the run is
        # reproducible from the logged seed.
        if seed is None:
            self.seed = torch.cuda.initial_seed()
            torch.manual_seed(self.seed)
        else:
            self.seed = seed
            torch.manual_seed(self.seed)
        self.check_point_num = 0 #binary: alternates 0/1 between checkpoints
        self.current_epoch = 0
        self.total_epoch = self.h_params.train.epoch
        self.best_valid_metric = None
        self.best_valid_epoch = 0
        self.global_step = 0
        self.local_step = 0
        self.log_writer = LogWriter(self.h_params)

    def set_data_loader(self,train,valid,test):
        """Attach the train/valid/test DataLoaders."""
        self.train_data_loader = train
        self.valid_data_loader = valid
        self.test_data_loader = test

    def fit(self,use_val_metric=True):
        """Run the full training loop from current_epoch to total_epoch,
        validating each epoch, then evaluate the best model on the test set."""
        for _ in range(self.current_epoch, self.total_epoch):
            self.log_writer.print_and_log(f'----------------------- Start epoch : {self.current_epoch} / {self.h_params.train.epoch} -----------------------',self.global_step)
            self.log_writer.print_and_log(f'current best epoch: {self.best_valid_epoch}',self.global_step)
            self.log_writer.print_and_log(f'-------------------------------------------------------------------------------------------------------',self.global_step)
            #Train
            self.log_writer.print_and_log('train_start',self.global_step)
            train_metric = self.run_epoch(self.train_data_loader,TrainState.TRAIN)
            #Valid
            self.log_writer.print_and_log('valid_start',self.global_step)
            with torch.no_grad():
                valid_metric = self.run_epoch(self.valid_data_loader,TrainState.VALIDATE)
            self.best_valid_metric = self.save_best_model(self.best_valid_metric, valid_metric)
            self.current_epoch += 1
        #Test
        self.log_writer.print_and_log(f'test_best_epoch: {self.best_valid_epoch}',self.global_step)
        # NOTE(review): load_module() is called without the required `name`
        # argument declared below — confirm the name the subclass's
        # save_best_model uses and pass it here.
        self.load_module()
        with torch.no_grad():
            test_metric = self.run_epoch(self.test_data_loader,TrainState.TEST)
        self.final_report(test_metric)
        print("Training complete")

    def run_epoch(self, dataloader: DataLoader, train_state:TrainState):
        """Iterate one epoch over `dataloader`; backprop only when training.
        Returns the accumulated metric."""
        if train_state == TrainState.TRAIN:
            self.model.train()
        else:
            self.model.eval()
        dataset_size = len(dataloader)
        metric = self.metric_init()
        for step,data in enumerate(dataloader):
            self.local_step = step
            loss,metric = self.run_step(data,metric)
            if train_state == TrainState.TRAIN:
                self.optimizer.zero_grad()
                loss.backward()
                self.optimizer.step()
                # Periodic in-epoch logging (training only).
                if self.local_step % self.h_params.log.log_every_local_step == 0:
                    self.log_metric(metrics=metric,data_size=dataset_size)
                self.global_step += 1
        if train_state == TrainState.VALIDATE or train_state == TrainState.TEST:
            self.log_metric(metrics=metric,data_size=dataset_size,train_state=train_state)
        if train_state == TrainState.TRAIN:
            # Alternate between two checkpoint slots so one is always intact.
            self.save_checkpoint(self.check_point_num)
            self.check_point_num = int((self.check_point_num+1)%2)
        return metric

    def save_module(self,name,prefix=''):
        """Save the model's state_dict to '<prefix>_<name>.pth'."""
        path = os.path.join(self.h_params.log.model_save_path,f'{prefix}_{name}.pth')
        torch.save(self.model.state_dict(), path)

    def load_module(self,name,prefix=''):
        """Load the model's state_dict from '<prefix>_<name>.pth'."""
        path = os.path.join(self.h_params.log.model_save_path,f'{prefix}_{name}.pth')
        best_model_load = torch.load(path)
        self.model.load_state_dict(best_model_load)

    def save_checkpoint(self,prefix=""):
        """Write a full resumable training state (model, optimizer, counters)."""
        train_state = {
            'epoch': self.current_epoch,
            'step': self.global_step,
            'seed': self.seed,
            'models': self.model.state_dict(),
            'optimizers': self.optimizer.state_dict(),
            'best_metric': self.best_valid_metric,
            'best_model_epoch' : self.best_valid_epoch,
        }
        path = os.path.join(self.h_params.log.model_save_path,f'{self.model.__class__.__name__}_checkpoint{prefix}.pth')
        torch.save(train_state,path)

    def resume(self,filename:str):
        """Restore training state saved by save_checkpoint."""
        cpt = torch.load(filename)
        self.seed = cpt['seed']
        torch.manual_seed(self.seed)
        self.current_epoch = cpt['epoch']
        self.global_step = cpt['step']
        self.model.load_state_dict(cpt['models'])
        self.optimizer.load_state_dict(cpt['optimizers'])
        # BUG FIX: originally restored into self.best_valid_result, an
        # attribute nothing else reads — the rest of the class (fit,
        # save_checkpoint) uses best_valid_metric.
        self.best_valid_metric = cpt['best_metric']
        self.best_valid_epoch = cpt['best_model_epoch']

    @abstractmethod
    def run_step(self,data,metric):
        """
        run 1 step
        return loss,metric
        """
        raise NotImplementedError

    @abstractmethod
    def metric_init(self):
        """
        return np array of chosen metric form
        """
        raise NotImplementedError

    @abstractmethod
    def save_best_model(self,prev_best_metric, current_metric):
        """
        compare what is the best metric
        If current_metric is better,
        1.save best model
        2. self.best_valid_epoch = self.current_epoch
        Return better metric
        """
        raise NotImplementedError

    @abstractmethod
    def log_metric(self, metrics ,data_size: int,train_state=TrainState.TRAIN):
        """
        log and tensorboard log
        """
        raise NotImplementedError
def setupLog(self): self.log = LogWriter() self.log.resetLog()
# coding=utf-8 """ Created on 14/03/2018 Author: Ciarán """ from LogWriter import LogWriter LogWriter().write_info_log("Test info log") LogWriter().write_warning_log("Test warning log") LogWriter().write_error_log("Test error log")
def __init__(self): self.log_writer = LogWriter() self.file_paths = "file_paths.json" self.unclean_projects_path = None self.clean_projects_path = None
class TestGetInfo:
    """Build FCI metadata objects for every Python file in cleaned projects.

    Matches each JSON descriptor found under the unclean projects directory
    with its cleaned project, wraps every .py file in a
    FormattedCodeInterface object, and saves the results locally and on the
    remote machine via `connection` (a project SFTP-style wrapper providing
    listdir/isdir/open_file/copy_file_to_server).
    """

    def __init__(self, connection):
        # A relative path works when I run this code on its own but
        # when I run from main.py I need an absolute path for some reason
        # self.local_path = "../../Hester'sWorkSpace/files"
        self.local_path = "C:/Users/CeXGalway/PycharmProjects/Final-Year-Project/Hester'sWorkSpace/files"
        self.remote_path = "/home/ubuntu/test_files/json_files"
        self.clean_projects_path = "/home/ubuntu/test_files/clean"
        self.unclean_projects_path = "/home/ubuntu/test_files/unclean"
        self.json_data = None
        self.files_in_project = []
        # Dictionary with project name as key and corresponding json data as value
        self.project_info = {}
        self.log_writer = LogWriter()
        self.connection = connection

    # For each json file from Kirk find the corresponding clean project.
    # For each file within that project create an fci object with the
    # details of that file.
    def run(self):
        self.find_all_json_files()
        for project_name in self.project_info:
            self.json_data = self.project_info[project_name]
            self.create_fci_objects(self.clean_projects_path + project_name)
        self.save_fci_objects_to_json_files()

    # Goes through each unclean folder and searches for all json files from
    # Kirk. When a file is found it is stored with "/<folder>/<file>" (minus
    # the .json suffix) as key and the parsed json data as value.
    def find_all_json_files(self):
        for directory in self.connection.listdir(self.unclean_projects_path):
            for file in self.connection.listdir(self.unclean_projects_path + "/" + directory):
                if file.endswith(".json"):
                    json_path = "/" + directory + "/" + file
                    json_file = self.connection.open_file(self.unclean_projects_path + json_path)
                    try:
                        # [:-5] strips the ".json" suffix from the key.
                        self.project_info[json_path[:-5]] = json.load(json_file)
                    finally:
                        # BUG FIX: remote file handle was never closed.
                        json_file.close()

    # Goes through all files in a cleaned project (recursing into
    # subdirectories) and creates an fci object for each .py file.
    def create_fci_objects(self, parent_directory):
        try:
            for file_name in self.connection.listdir(parent_directory):
                file_path = parent_directory + '/' + file_name
                if file_name.endswith(".py"):
                    self.save_file_details_to_fci_object(file_path, file_name)
                else:
                    if self.connection.isdir(file_path):
                        self.create_fci_objects(file_path)
                    else:
                        # Just an extra check to make sure no other files are left.
                        # BUG FIX: .lineno is an int; "str + int" raised TypeError.
                        self.log_writer.write_warning_log(
                            file_path + " not deleted:" + "\n"
                            + str(getframeinfo(currentframe()).lineno))
        except Exception as e:
            # BUG FIX: same str + int TypeError here would have replaced the
            # real error with a TypeError raised inside the handler.
            self.log_writer.write_error_log(
                str(e) + "\n" + str(getframeinfo(currentframe()).lineno))

    # Saves the details of an individual file to an fci object.
    def save_file_details_to_fci_object(self, file_path, file_name):
        fci = FormattedCodeInterface()
        fci.set_file_name(file_name)
        fci.set_save_path(file_path)
        self.set_content(file_path, fci)
        self.set_project_details(fci)
        self.files_in_project.append(fci)
        self.log_writer.write_info_log(file_path + " documented.")

    # Save the content, code, and comments of an individual file to an fci
    # object. Comments are extracted with the patterns below; note the
    # triple-quote patterns are greedy, so they capture from the first to the
    # last triple quote in the file (preserved original behaviour).
    def set_content(self, file_path, fci):
        file = self.connection.open_file(file_path)
        try:
            # BUG FIX: read once instead of quadratic += over readlines(),
            # and close the handle when done.
            content = file.read()
        finally:
            file.close()
        comments_list = []
        comments = ''
        python_comments = ['\"\"\"((.|\n)*)\"\"\"', '\'\'\'((.|\n)*)\'\'\'', '#.*']
        # Content
        fci.set_content(content)
        # Code: strip everything the comment patterns match.
        code = content
        for comment_pattern in python_comments:
            comments_list += re.findall(comment_pattern, code)
            code = re.sub(comment_pattern, '', code)
        fci.set_code(code)
        # Comments: findall returns tuples when a pattern has several groups.
        for recorded_comment in comments_list:
            if type(recorded_comment) is tuple:
                for comment in recorded_comment:
                    comments += comment + '\n'
            else:
                comments += recorded_comment + '\n'
        fci.set_comments(comments)

    # Saves the details of the current project (self.json_data) to an fci object.
    def set_project_details(self, fci):
        fci.set_author(self.json_data["owner_name"])
        fci.set_description(self.json_data["description"])
        fci.set_language(self.json_data["language"])
        fci.set_project_name(self.json_data["name"])
        # fci.set_quality(data["items"][0]["owner"])
        # fci.set_save_time()
        fci.set_update_at(self.json_data["updated_at"])
        fci.set_url(self.json_data["html_url"])
        fci.set_wiki(self.json_data["has_wiki"])

    # Save fci objects to local and remote json files.
    def save_fci_objects_to_json_files(self):
        self.log_writer.write_info_log("Saving Json files")
        self.save_to_local_directory()
        self.save_to_remote_directory()

    # Converts fci objects to json files and saves them locally.
    def save_to_local_directory(self):
        for fci_object in self.files_in_project:
            FCI.FCIConverter.to_json_file(self.local_path, fci_object)
        self.log_writer.write_info_log("Saved to local machine at " + self.local_path)

    # Save json files from the local directory to the remote directory.
    def save_to_remote_directory(self):
        for file in os.listdir(self.local_path):
            local_path = self.local_path + "/" + file
            remote_path = self.remote_path + "/" + file
            self.connection.copy_file_to_server(local_path, remote_path)
        self.log_writer.write_info_log("Saved to remote machine at " + self.remote_path)