def write_upload_map(upload_map: dict, filename='upload_map.yml'):
    """ Writes upload map to a YAML file.

    Arguments:
        upload_map: custom upload map dictionary
        filename: path of the YAML file to write

    Returns:
        True if the file was written, False otherwise.
    """

    try:
        with open(filename, 'w') as file:
            yaml.dump(upload_map, file, sort_keys=True)
    except Exception:
        # Report the actual filename instead of the old literal "(unknown)".
        logger.error(f'Tried to write {filename} but could not.')
        # FIXME Can this be improved?
        return False

    return True
def iGEM_upload_page(browser, contents: str, url: str) -> bool:
    """ Uploads source code to the iGEM server.

    Parameters:
        browser: mechanicalsoup.Browser instance
        contents: source code to be uploaded
        url: the page where source code will uploaded

    Returns:
        True if successful, False otherwise.
    """

    def _fail(message):
        # Full traceback at debug level, short message at error level.
        logger.debug(message, exc_info=True)
        logger.error(message)
        return False

    # Open the iGEM upload page.
    try:
        browser.open(url)  # TODO: Check this
    except Exception:
        return _fail("Lost connection to iGEM. Please check your internet connection.")

    # Select the form where source code has to be submitted.
    # This might fail if the source code of the page changes.
    try:
        browser.select_form('form')
    except Exception:
        return _fail(f"Couldn't find the form at {url}. Has the page changed?")

    # Fill in and submit the form.
    browser['wpTextbox1'] = contents
    try:
        browser.submit_selected()
    except Exception:
        return _fail(f"Couldn't upload to {url}.")

    logger.info(f'Uploaded to {url}.')
    return True
def iGEM_login(browser, credentials: dict, config: dict) -> bool:
    """ Logs into the iGEM server.

    Arguments:
        browser: mechanicalsoup.Browser instance
        credentials: dictionary containing 'username' and 'password'
        config: custom configuration dictionary (uses 'team' and 'year')

    Returns:
        True if login is successful.
        False along with an error message otherwise.
    """

    # Check if we're already logged in
    if check_login(browser, config['team'], config['year']):
        logger.info("Already logged in.")
        return True

    # Try opening the login page
    url = "https://igem.org/Login2"
    try:
        response = browser.open(url)
    except Exception:
        message = f"Couldn't connect to {url}."
        logger.debug(message, exc_info=True)
        logger.critical(message)
        return False

    # Check if login was successful
    if response.status_code != 200:
        message = f"Failed to login. {url} was not found."
        # No exception is in flight here, so exc_info=True would only
        # log a spurious "NoneType: None" traceback.
        logger.debug(message)
        logger.error(message)
        return False

    # Select the form we have to fill.
    # This might fail if the page changes.
    try:
        browser.select_form('form[method="post"]')
    except Exception:
        message = f"Couldn't find the login form at {url}. " + \
            "Has the login page changed?"
        logger.debug(message, exc_info=True)
        logger.error(message)
        return False

    # Fill the form
    browser["username"] = credentials['username']
    browser["password"] = credentials['password']

    # Try submitting the form
    try:
        response = browser.submit_selected()
    except Exception:
        message = "Lost connection to iGEM servers."
        logger.debug(message, exc_info=True)
        logger.error(message)
        return False

    soup = BeautifulSoup(response.text, 'html5lib')

    # Successful
    if "successfully logged in" in soup.text:
        logger.info(f"Successfully logged in as {credentials['username']}.")
        return True
    # Invalid username
    elif "That username is not valid" in soup.text:
        message = "This iGEM username is invalid."
        logger.error(message)
    # Invalid password
    elif "That username is valid, but the password is not" in soup.text:
        message = "This iGEM username is valid but the password is not."
        logger.error(message)
    # Unknown error
    else:
        message = "An unknown error occurred while trying to login."
        logger.error(message)

    return False
def iGEM_upload_file(browser, file_object, year):
    """ Upload a file to iGEM servers.

    iGEM allows files only 100MB large.
    That check is performed in wikisync.run(), not here.

    Parameters:
        browser: mechanicalsoup.Browser instance
        file_object: igem_wikisync.files.OtherFile object
        year: competition year, used to build the public link URL

    Returns:
        True if uploaded, False otherwise.
    """

    # Try opening the iGEM upload page
    url = file_object.upload_URL
    try:
        browser.open(url)  # TODO: Check this
    except Exception:
        message = "Lost connection to iGEM. Please check your internet connection."
        logger.debug(message, exc_info=True)
        logger.error(message)
        return False

    # Select the form where the file has to be uploaded.
    # This might fail if the page changes.
    try:
        browser.select_form('form')
    except Exception:
        message = f"Couldn't find the form at {url}. Has the page changed?"
        logger.debug(message, exc_info=True)
        logger.error(message)
        return False

    browser['wpUploadFile'] = str(file_object.src_path)
    browser['wpUploadDescription'] = 'Uploaded using WikiSync'
    browser['wpDestFile'] = file_object.upload_filename

    # * Ignore all warnings
    # We keep track of already uploaded files internally
    browser['wpIgnoreWarning'] = "1"

    # Submit the form
    try:
        browser.submit_selected()
    except Exception:
        message = "Lost connection to iGEM servers."
        logger.debug(message, exc_info=True)
        logger.error(message)
        return False

    # Check whether there were any errors while uploading.
    # Staying on the upload URL (no redirect) means the upload was rejected.
    return_url = browser.get_url()
    if return_url == file_object.upload_URL:
        message = "The following error occurred while uploading " + file_object.upload_filename + ': '
        # Guard against the error element being absent or renamed,
        # which would otherwise raise AttributeError and mask the failure.
        error_element = browser.get_current_page().find(class_='error')
        if error_element is not None:
            message += error_element.text
        # No exception in flight, so no exc_info here.
        logger.debug(message)
        logger.error(message)
        return False

    # TODO: Write test for this using KillSwitch svg from Ameya

    # Extract relative link from response.
    # (Stray debug print() removed here.)
    relative_link = browser.get_current_page().find(
        class_='fullMedia').find('a')['href']
    file_object.set_link_URL('https://' + year + '.igem.org' + relative_link)

    logger.info(
        f'Uploaded {file_object.upload_filename} to {file_object.link_URL}.')

    return True
def build_and_upload(files, browser, config, upload_map):
    """ Replaces URLs in files and uploads changed files.

    Arguments:
        files: Custom file cache
        browser: mechanicalsoup.StatefulBrowser instance
        config: Configuration for this run
        upload_map: custom upload map

    Returns:
        Dictionary with no. of 'html', 'css' and 'js' files uploaded
    """

    counter = {
        'html': 0,
        'css': 0,
        'js': 0,
    }

    for file_dictionary in [files['html'], files['css'], files['js']]:
        for path in file_dictionary.keys():
            file_object = file_dictionary[path]
            path_str = str(file_object.path)
            ext = file_object.extension

            # open file
            try:
                with open(file_object.src_path, 'r') as file:
                    contents = file.read()
            except Exception:
                message = f'Could not open/read {file_object.path}. Skipping.'
                logger.error(message)
                continue  # FIXME Can this be improved?

            processed = None  # just so the linter doesn't freak out
            # parse and modify contents
            if ext == 'html':
                processed = HTMLparser(config, file_object.path, contents, upload_map)
            elif ext == 'css':
                processed = CSSparser(config, file_object.path, contents, upload_map)
            elif ext == 'js':
                processed = JSparser(contents)

            # calculate md5 hash of the modified contents
            build_hash = md5(processed.encode('utf-8')).hexdigest()
            if upload_map[ext][path_str]['md5'] == build_hash:
                message = f'Contents of {file_object.path} have been uploaded previously. Skipping.'
                logger.info(message)
            else:
                build_path = file_object.build_path
                try:
                    # create directory if doesn't exist
                    os.makedirs(build_path.parent, exist_ok=True)
                    # and write the processed contents
                    with open(build_path, 'w') as file:
                        file.write(processed)
                except Exception:
                    message = f"Could not write {str(file_object.build_path)}. Skipping."
                    logger.error(message)
                    continue  # FIXME Can this be improved?

                # upload
                successful = iGEM_upload_page(browser, processed, file_object.upload_URL)
                if not successful:
                    message = f'Could not upload {str(file_object.path)}. Skipping.'
                    logger.error(message)
                    continue  # FIXME Can this be improved?
                else:
                    # Record the hash only AFTER a successful upload, so a
                    # failed upload is retried on the next run instead of
                    # being skipped as "uploaded previously".
                    upload_map[ext][path_str]['md5'] = build_hash
                    counter[ext] += 1

    return counter
def cache_files(upload_map, config):
    """ Loads filenames into memory, along with setting up
    appropriate objects to generate URLs and hashes as required.

    Arguments:
        upload_map: custom upload map
        config: configuration for this run

    Returns:
        cache: dictionary with html, css, js and other file objects

    Raises:
        Exception: in poster mode, when a file's URL does not start
            with /Poster after the team name.
    """

    cache = {'html': {}, 'css': {}, 'js': {}, 'other': {}}

    # for each file in src_dir
    for root, _, files in os.walk(config['src_dir']):
        for filename in files:
            # Store path and extension
            infile = (Path(root) / Path(filename)).relative_to(
                config['src_dir'])
            extension = infile.suffix[1:].lower()

            # create appropriate file object
            # file objects contain corresponding paths and
            if extension in ['html', 'css', 'js']:
                file_object = None
                if extension == 'html':
                    file_object = HTMLfile(infile, config)
                elif extension == 'css':
                    file_object = CSSfile(infile, config)
                elif extension == 'js':
                    file_object = JSfile(infile, config)

                # In poster mode, make sure URL starts with /Poster after team
                if config['poster_mode']:
                    link_URL = file_object.link_URL
                    after_team = link_URL.split(config['team'])[1]
                    if len(after_team) < 7 or after_team[0:7] != "/Poster":
                        message = 'All files must start with /Poster in poster mode.'
                        # No exception in flight yet, so no exc_info here.
                        logger.debug(message)
                        logger.critical(message)
                        # Carry the message on the exception so callers
                        # see why caching failed, not a bare Exception.
                        raise Exception(message)

                cache[extension][file_object.path] = file_object

            # `extension` was already lowercased above, so no .lower() needed
            elif extension in [
                'png', 'gif', 'jpg', 'jpeg', 'pdf', 'ppt', 'txt',
                'zip', 'mp3', 'mp4', 'webm', 'mov', 'swf', 'xls',
                'xlsx', 'docx', 'pptx', 'csv', 'm', 'ogg', 'gb',
                'tif', 'tiff', 'fcs', 'otf', 'eot', 'ttf', 'woff',
                'svg'
            ]:
                # make sure file path start with 'assets'
                if len(str(infile)) < 7 or infile.parts[0] != 'assets':
                    logger.error(
                        f'{infile} is an {extension} file outside the "assets" folder. Skipping.'
                    )
                    continue

                # make sure file size is within limits
                elif (config['src_dir'] / infile).stat().st_size >= 100000000:
                    logger.error(
                        f'{infile} is larger than the 100MB file limit. Skipping.'
                    )
                    continue

                # create OtherFile
                else:
                    file_object = OtherFile(infile, config)
                    if len(file_object.upload_filename) < 240:
                        cache['other'][file_object.path] = file_object
                    else:
                        logger.error(
                            f'{infile}: Upload filename too large. Skipping.')
                        logger.error(
                            'Please do not nest assets too deep and take a look at our docs to see how WikiSync renames files.'
                        )
                        continue
            else:
                logger.error(
                    f'{infile} has an unsupported file extension. Skipping.')
                continue

            # Register new html/css/js files in the upload map with an
            # empty md5 so they are uploaded on the next build pass.
            if extension in ['html', 'css', 'js']:
                if str(file_object.path) not in upload_map[extension].keys():
                    upload_map[extension][str(file_object.path)] = {
                        'md5': '',
                        'link_URL': file_object.link_URL
                    }

    return cache