def main():
    """Entry point: quantize the input image, extract a color histogram with
    ImageMagick, and save a palette image built from the most frequent colors.
    """
    arguments = parse_args()
    filename, extension = arguments.input.rsplit('.', 1)
    # Intermediate quantized image written next to the input.
    # NOTE(review): original literal was a '(unknown)' templating artifact;
    # reconstructed as the input base name — confirm against quantitize_image.
    temp_file = f'{filename}_temp.png'
    try:
        quantitize_image(filename, extension, arguments.colors)
        imagick_output = generate_histogram(temp_file)
        delete_file(temp_file)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate.
        print(
            'Something went wrong with ImageMagick, is it installed correctly? Its version needs to be at least 7.0.9'
        )
    else:
        output_lines = imagick_output.split('\n')
        parsed_colors = [parse_line(line) for line in output_lines]
        # Most frequent colors first.
        sorted_colors = sorted(parsed_colors,
                               key=lambda c: int(c['frequency']),
                               reverse=True)
        image = generate_palette(sorted_colors, arguments.values,
                                 arguments.percentage)
        try:
            output, extension = arguments.output.rsplit('.', 1)
            image.save(f'{output}.{extension}')
        except OSError as oserror:
            print(f'Couldn\'t save the image for some reason\n{oserror.strerror}')
def task(self, args):
    """Worker task body.

    For each log file: uncompress it into the work directory, turn each
    matching text file into CSV field lines, broadcast the lines to the
    Redis queue (caching any failed lines to a local file), then archive
    or delete the source log file and clean the work directory.

    args: indexable of (logfiles, dist_path, lock).
    """
    logfiles, dist_path, lock = args[0], args[1], args[2]
    for logfile in logfiles:
        self.logger.info("start work task.")
        self.uncompress_file(logfile, dist_path)
        text_logfiles = helper.scan_files(
            directory=dist_path, match=self.log_alert_file_name_match)
        for text_logfile in text_logfiles:
            ret = self.create_fields_csv(text_logfile, logfile)
            for line in ret:
                try:
                    self.broadcast_to_redis_queue(
                        self.logbee_ealsticsearch_index, line)
                except Exception as e:
                    self.logger.warning("broadcast to redis fail.%s" % e)
                    # fileio or stringio
                    # self.to_send_redis_fail_srtingio(line, lock)
                    # Failed lines are cached locally for a later retry.
                    self.to_send_redis_fail_fileio(line, lock)
        # Truthiness instead of `== True` — idiomatic and equivalent here.
        if self.log_keep_source_file:
            self.logger.info("upload log file '%s' to :%s" %
                             (os.path.basename(logfile),
                              self.log_sample_file_storage_path))
            helper.upload_with_local(
                logfile,
                self.log_sample_file_storage_path + os.path.basename(logfile))
        else:
            self.logger.info("delete log file '%s'" % logfile)
            helper.delete_file(logfile)
        # self.logger.info(helper.scan_files(dist_path))
        self.logger.info("clean %s" % dist_path)
        for filename in os.listdir(dist_path):
            helper.delete_file(dist_path + filename)
def delete(self):
    """Remove the requested file or directory, chosen by the `kind` parameter."""
    target = self.request.get('name')
    kind = self.request.get('kind')
    if kind == 'file':
        helper.delete_file(target)
    elif kind == 'directory':
        helper.delete_directory(target)
def runworker(logbee_config, work_config):
    """Set up per-work logging, (re)create the work directory and its
    failure-cache file, then build and run the worker executor."""
    # Normalize configured paths so each ends with exactly one "/".
    workdir = re.split(r'(/){1,}$', logbee_config.workdir)[0] + "/"
    logdir = re.split(r'(/){1,}$', logbee_config.logdir)[0] + "/"
    # SIGUSR1 signals the running job to finish up.
    signal.signal(signal.SIGUSR1, end_of_job)
    work_name = work_config.work_name
    logfilename = logdir + "logbee-" + work_name + ".log"
    # Drop stale handlers before applying this work's logging config.
    if logger.handlers:
        logger.handlers = []
    work_logging_config = settings.get_logging_config()
    work_logging_config["handlers"]["file"]["filename"] = logfilename
    dictConfigClass(work_logging_config).configure()
    logger.debug("check work directory.")
    work_dir = workdir + work_name
    if not os.path.exists(work_dir):
        logger.info("Try to create %s work directory:%s" % (work_name, work_dir))
        os.makedirs(work_dir)
    else:
        logger.warning(
            "Work directory '%s' is already exist. now recreate it." % (work_dir))
        try:
            helper.delete_file(work_dir)
            os.makedirs(work_dir)
        except Exception as e:
            logger.error("recreate work directory fail.\n%s" % e)
            sys.exit(1)
    work_cache_file = workdir + work_name + "_fail_content.log"
    logger.info("start create cache file: %s" % work_cache_file)
    try:
        helper.make_file(work_cache_file)
    except Exception as e:
        logger.error("can't create cache file: %s" % work_cache_file)
        logger.error(e)
        sys.exit(1)
    logger.info("Start run logbee work: %s" % work_name)
    global work_job
    work_job = __worker(logbee_config, work_config)
    work_job.run_excutor()
def test_gnupg_pubkey_encryption():
    "should be able to encrypt with GnuPG.pubkey_encrypt, and decrypt with GnuPGP.pubkey_decrypt with valid pubkey and seckey"
    # (removed unused `passphrase` local — pubkey crypto needs no passphrase here)
    plaintext = helper.random_string(256)
    # make file with random data; `with` closes the handle promptly instead
    # of leaking it until GC
    with open(plaintext_path, 'w') as f:
        f.write(plaintext)
    # encrypt
    gpg.pubkey_encrypt(plaintext_path, '6F6467FDF4462C38FE597CD0CA6C5413CF7BCA9E')
    helper.delete_file(plaintext_path)
    # decrypt
    gpg.pubkey_decrypt(ciphertext_path)
    with open(plaintext_path, 'r') as f:
        new_plaintext = f.read()
    # round trip must be lossless
    assert plaintext == new_plaintext
def test_gnupg_pubkey_encryption_missing_seckey():
    "when encrypting with GnuPG.pubkey_encrypt, should fail to decrypt with GnuPGP.pubkey_decrypt if seckey is missing"
    # (removed unused `passphrase` local — pubkey crypto needs no passphrase here)
    plaintext = helper.random_string(256)
    # make file with random data; `with` closes the handle promptly instead
    # of leaking it until GC
    with open(plaintext_path, 'w') as f:
        f.write(plaintext)
    # encrypt with a key whose secret half is not in the keyring
    gpg.pubkey_encrypt(plaintext_path, '77D4E195BE81A10047B06E4747AA62EF2712261B')
    helper.delete_file(plaintext_path)
    # decrypt must raise MissingSeckey
    try:
        gpg.pubkey_decrypt(ciphertext_path)
        with open(plaintext_path, 'r') as f:
            f.read()
    except MissingSeckey:
        assert True
    else:
        assert False
def clear_directories(self, base_path, delete_all=False):
    """Delete files from every data directory under base_path.

    Each directory's "base" files (per the dataloader) are preserved
    unless delete_all is True, in which case everything is removed.
    """
    # delete all files from a directory
    print("Clearing directories...")
    dir_names = self.dataloader.get_all_directory_namelist()
    base_path = os.path.join(self.f_prefix, base_path)
    for dir_ in dir_names:
        dir_path = os.path.join(base_path, dir_)
        file_names = get_all_file_names(dir_path)
        if delete_all:
            base_file_names = []
        else:
            base_file_names = self.dataloader.get_base_file_name(dir_)
        # Plain loop: the original list comprehension was used purely for
        # its side effects and built a throwaway list of Nones.
        for file_name in file_names:
            if file_name not in base_file_names:
                delete_file(dir_path, [file_name])
def uncompress_file(self, logfile, dist_path):
    """Copy (when no compression is configured) or uncompress `logfile`
    into `dist_path`.

    On any copy/uncompress failure the source file is deleted and a
    warning is logged; an unsupported compression method only logs an
    error.
    """
    self.logger.info("Start uncommpress file '%s' to '%s' ." %
                     (logfile, dist_path))
    # `is None` instead of `== None` — identity test for the singleton.
    if self.log_compression_method is None:
        try:
            self.logger.debug("copy logfile to %s" % dist_path)
            helper.copy_file(logfile, dist_path)
        except Exception as e:
            helper.delete_file(logfile)
            # common.upload_with_local(logfile, self.logbee_workdir+self.log_work_name+"_error/"+os.path.basename(logfile))
            self.logger.warning("Copy file fail.%s" % e)
    # Membership test on the dict directly; `.keys()` was redundant.
    elif self.log_compression_method in settings.UNCOMPRESSION_FUNCTION:
        try:
            settings.UNCOMPRESSION_FUNCTION[self.log_compression_method](
                logfile, dist_path)
        except Exception as e:
            helper.delete_file(logfile)
            # common.upload_with_local(logfile, self.logbee_workdir+self.log_work_name+"_error/"+os.path.basename(logfile))
            self.logger.warning("uncompress logfile fail .%s" % e)
    else:
        self.logger.error("Unsupported commpression method: %s" %
                          self.log_compression_method)
async def rm(ctx, arg=None):
    """Discord command: delete the caller's most recently uploaded file.

    Deletes the stored file, marks the DB record as removed, drops the
    stage entry, acknowledges with a reaction, and removes the original
    upload message.  Failure modes are reported to the author via DM.
    """
    try:
        # Command is restricted to one channel.
        if ctx.message.channel.name != CHANNEL_NAME:
            raise WrongChannel
        uploaded_doc = DB.table(LAST_UPLOADED_TABLE).get(
            User.userId == ctx.author.id)
        # No record at all -> the user never uploaded; KeyError is reused
        # below as the "nothing to delete" path.
        if uploaded_doc is None:
            raise KeyError
        # Record exists but was already removed by a previous `rm`.
        if uploaded_doc['removed']:
            raise AlreadyDeleted
        delete_file(service, uploaded_doc['file_id'])
        print("Deleted latest file!")
        # Keep the record, flagged removed, so a repeated `rm` is rejected.
        DB.table(LAST_UPLOADED_TABLE).upsert({'removed': True},
                                             User.userId == ctx.author.id)
        DB.table(UPLOADED_STAGES).remove(
            where('file_id') == uploaded_doc['file_id'])
        await ctx.message.add_reaction('👍')
        await ctx.send(
            f"Deleted file, original message was: {uploaded_doc['message']}")
        # Also delete the original Discord message that carried the upload.
        message = await ctx.message.channel.fetch_message(
            uploaded_doc['message_id'])
        if message is not None:
            await message.delete()
    except KeyError:
        await ctx.message.add_reaction('👎')
        await ctx.author.send(
            "You haven't uploaded a screenshot yet to delete!")
    except AlreadyDeleted:
        await ctx.message.add_reaction('👎')
        await ctx.author.send(
            "You already used this command to delete latest uploaded SS.")
    except WrongChannel:
        await ctx.message.add_reaction('👎')
        await ctx.author.send(
            f"Can't use command in this channel, only in {CHANNEL_NAME}.")
def content_check(check_url, strategy='mobile'):
    """
    Checks the Pagespeed Insights with Google
    In addition to the 'mobile' strategy there is also
    'desktop' aimed at the desktop user's preferences
    Returns a dictionary of the results.

    attributes: check_url, strategy
    """
    check_url = check_url.strip()
    return_dict = {}
    try:
        get_content = helper.httpRequestGetContent(check_url)
        soup = BeautifulSoup(get_content, "html.parser")
        # soup = soup.encode("ascii")
        pagetitle = soup.title.string
        return_dict['pagetitle'] = '"{0}"'.format(pagetitle)
        return_dict['pagetitle_length'] = len(pagetitle)
        return_dict['num_links'] = len(soup.find_all('a'))

        # checking images
        images = soup.find_all('img')
        num_images = len(images)
        return_dict['num_images'] = num_images
        num_with_alt = 0
        for image in images:
            if image.get('alt') is not None:
                num_with_alt += 1
                # print(image.get('alt'))  # for debugging
        return_dict['num_images_without_alt'] = num_images - num_with_alt

        try:
            meta_desc = soup.findAll(
                attrs={"name": "description"})[0]['content']
            return_dict['meta_desc'] = '"{0}"'.format(meta_desc)
            return_dict['meta_desc_length'] = len(meta_desc)
        except IndexError:
            # Page has no meta description at all.
            return_dict['meta_desc'] = ''
            return_dict['meta_desc_length'] = 0
        except Exception:
            # Narrowed from a bare `except:`; still best-effort.
            print('Meta desc check for URL \'{0}\' failed, reason: {1}'.format(
                check_url, sys.exc_info()[0]))

        # checking readability: strip non-visible elements first
        # (plain loop: the comprehension was used only for side effects)
        for tag in soup(['style', 'script', '[document]', 'head', 'title']):
            tag.extract()
        # Replaces `if 1 is 1:` — `is` on int literals is a SyntaxWarning
        # and implementation-defined.  Set False to score only <main>
        # (adjust tag/class below for your markup).
        use_whole_page = True
        if use_whole_page:
            visible_text = soup.getText()
        else:
            visible_text = soup.find("main", class_="main-wrapper").getText()
        # One sentence per line so `readability` counts sentences correctly.
        visible_text = "?\n".join(visible_text.split("?"))
        visible_text = "!\n".join(visible_text.split("!"))
        visible_text = ".\n".join(visible_text.split("."))

        file_name = 'tmp/{0}_{1}_{2}.txt'.format(
            str(datetime.today())[:10], 'contentCheck', helper.getUniqueId())
        helper.writeFile(file_name, visible_text)
        # readability = os.system('readability {0}'.format(file_name))
        readability = subprocess.check_output(['readability', file_name])
        readability = readability.decode("utf-8")
        helper.delete_file(
            file_name
        )  # uncomment if you'd like to see the text files that are used
        # helper.writeFile('tmp/readability-output.txt', readability)  # uncomment if you'd like to see the readability output

        # Parse "key: value" lines; skip lines without a colon explicitly
        # instead of via try/except/pass.
        for line in readability.split('\n'):
            parts = line.split(':')
            if len(parts) > 1:
                return_dict[parts[0].strip()] = parts[1].strip()
        # print(meta_desc)
    except Exception:
        # breaking and hoping for more luck with the next URL
        # (narrowed from a bare `except:` so Ctrl-C still works)
        print(
            'Error! Unfortunately the request for URL "{0}" failed, message:\n{1}'
            .format(check_url, sys.exc_info()[0]))
    return return_dict
def setup():
    """Start from a clean slate: drop the tmp output directory and file."""
    shutil.rmtree(output_dir, ignore_errors=True)
    helper.delete_file(output_file)
def setup_clean_crypto_files():
    """Remove plaintext/ciphertext artifacts left behind by earlier tests."""
    for leftover in (plaintext_path, ciphertext_path):
        helper.delete_file(leftover)
def do_POST(self):
    """Receives pings from client devices and respond with any updated information"""
    if DEBUG:
        print("POST Received", flush=True)
        print(f" Active threads: {threading.active_count()} ", end="\r", flush=True)
    # CORS headers first; a client that hung up mid-response is ignored.
    try:
        self.send_response(200, "OK")
        self.send_header("Access-Control-Allow-Origin", "*")
        self.send_header('Access-Control-Allow-Headers', 'Content-Type,Authorization')
        self.send_header('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE,OPTIONS')
        self.send_header('Access-Control-Allow-Credentials', 'true')
        self.end_headers()
    except BrokenPipeError:
        pass
    # Get the data from the request
    length = int(self.headers['Content-length'])
    data_str = self.rfile.read(length).decode("utf-8")
    # Unpack the data
    try:
        # JSON
        data = json.loads(data_str)
    except json.decoder.JSONDecodeError:
        # not JSON — fall back to form-encoded "k=v&k=v" parsing
        data = {}
        split = data_str.split("&")
        for seg in split:
            split2 = seg.split("=")
            if len(split2) > 1:
                data[split2[0]] = split2[1]
    if "action" in data:
        if DEBUG:
            print(f' {data["action"]}')
        # Dispatch on the requested action.
        if data["action"] == "getDefaults":
            config_to_send = dict(config.defaults_dict.items())
            if config.dictionary_object is not None:
                config_to_send["dictionary"] = dict(config.dictionary_object.items("CURRENT"))
            json_string = json.dumps(config_to_send)
            try:
                self.wfile.write(bytes(json_string, encoding="UTF-8"))
            except BrokenPipeError:
                pass
        elif data["action"] == "updateDefaults":
            if DEBUG:
                print(" waiting for defaultWriteLock")
            # Serialize writers to the defaults store.
            with defaultWriteLock:
                helper.update_defaults(data)
            if DEBUG:
                print(" defaultWriteLock released")
        elif data["action"] == "deleteFile":
            if "file" in data:
                # NOTE(review): data["file"] is joined under /home/sos/sosrc
                # without sanitization — a "../" component could escape it.
                helper.delete_file(os.path.join("/", "home", "sos", "sosrc", data["file"]), absolute=True)
                response = {"success": True}
            else:
                response = {"success": False, "reason": "Request missing field 'file'"}
            json_string = json.dumps(response)
            try:
                self.wfile.write(bytes(json_string, encoding="UTF-8"))
            except BrokenPipeError:
                pass
        elif data["action"] == "restart":
            helper.reboot()
        elif data["action"] in ["shutdown", "power_off"]:
            helper.shutdown()
        elif data["action"] == "SOS_getCurrentClipName":
            current_clip = send_SOS_command("get_clip_number")
            dataset = send_SOS_command("get_clip_info " + current_clip)
            try:
                self.wfile.write(bytes(dataset, encoding="UTF-8"))
            except BrokenPipeError:
                pass
        elif data["action"] == "SOS_getClipList":
            # First, get a list of clips
            reply = send_SOS_command("get_clip_info *", multiline=True)
            split = reply.split('\r\n')
            clip_list = []
            for segment in split:
                # Drop the leading clip-number token; keep the name.
                split2 = segment.split(" ")
                clip_list.append(" ".join(split2[1:]))
            # Then, get other improtant info
            clip_dict_list = []
            counter = 1
            for clip in clip_list:
                if clip != '':
                    temp = {'name': clip, 'clipNumber': counter}
                    path = send_SOS_command(f"get_clip_info {counter} clip_filename")
                    split = path.split('/')
                    try:
                        if split[-2] == "playlist":
                            icon_root = '/'.join(split[:-2])
                        else:
                            icon_root = '/'.join(split[:-1])
                    except IndexError:
                        # NOTE(review): on this path `icon_root` keeps the
                        # previous iteration's value (or is unbound on the
                        # first clip) — confirm intended behavior.
                        print(f"Clip path error: {path}")
                    icon_path = icon_root + '/media/thumbnail_big.jpg'
                    filename = ''.join(e for e in clip if e.isalnum()) + ".jpg"
                    temp["icon"] = filename
                    # Cache the icon locally for use by the app.
                    # NOTE(review): "(unknown)" looks like a templating
                    # artifact — presumably the destination should be
                    # {filename}; verify against version control.
                    os.system(f'cp "{icon_path}" ./thumbnails/(unknown)')
                    clip_dict_list.append(temp)
                    counter += 1
            json_string = json.dumps(clip_dict_list)
            try:
                self.wfile.write(bytes(json_string, encoding="UTF-8"))
            except BrokenPipeError:
                pass
        elif data["action"] == "SOS_getPlaylistName":
            reply = send_SOS_command("get_playlist_name")
            # Reply is a path; keep only the basename.
            playlist = reply.split("/")[-1]
            try:
                self.wfile.write(bytes(playlist, encoding="UTF-8"))
            except BrokenPipeError:
                pass
        elif data["action"] == "SOS_openPlaylist":
            if "name" in data:
                SOS_open_playlist(data["name"])
        elif data["action"] == "SOS_getState":
            reply = send_SOS_command("get_state 0")
            # Parse the response (with nested braces) and build a dictionary
            state_dict = {}
            segment_list = []
            for char in reply:
                if char == '{':
                    # Opening brace starts a new segment buffer.
                    segment_list.append([])
                elif char == '}':
                    if len(segment_list) == 1:
                        # Key-value are separated by a space
                        segment = ''.join(segment_list.pop())
                        split = segment.split(" ")
                        state_dict[split[0]] = split[1]
                    elif len(segment_list) == 2:
                        # Key-value are separated into two lists
                        key = ''.join(segment_list[0])
                        value = ''.join(segment_list[1])
                        state_dict[key] = value
                        segment_list = []
                    elif len(segment_list) > 2:
                        print("Error parsing state: too many nested braces")
                else:
                    # Regular character: append to the innermost open segment.
                    if len(segment_list) > 0:
                        segment_list[-1].append(char)
            json_string = json.dumps(state_dict)
            try:
                self.wfile.write(bytes(json_string, encoding="UTF-8"))
            except BrokenPipeError:
                pass
        elif data["action"] == "SOS_gotoClip":
            if "clipNumber" in data:
                send_SOS_command("play " + data["clipNumber"])
        elif data["action"] == "SOS_moveSphere":
            if ("dLat" in data) and ("dLon" in data):
                # Current tilt is "x y z" floats; nudge y/z by half the deltas.
                tilt = send_SOS_command("get_tilt")
                split = tilt.split(' ')
                tilt_x = float(split[0])
                tilt_y = float(split[1])
                tilt_z = float(split[2])
                dLat = float(data["dLat"])
                dLon = float(data["dLon"])
                send_SOS_command(f"set_tilt {tilt_x} {tilt_y + dLat/2} {tilt_z + dLon/2}")
        elif data["action"] == "SOS_rotateX":
            if "increment" in data:
                tilt = send_SOS_command("get_tilt")
                split = tilt.split(' ')
                tilt_x = float(split[0])
                tilt_y = float(split[1])
                tilt_z = float(split[2])
                dX = float(data['increment'])
                send_SOS_command(f"set_tilt {tilt_x + dX} {tilt_y} {tilt_z}")
        elif data["action"] == "SOS_rotateY":
            if "increment" in data:
                tilt = send_SOS_command("get_tilt")
                split = tilt.split(' ')
                tilt_x = float(split[0])
                tilt_y = float(split[1])
                tilt_z = float(split[2])
                dY = float(data['increment'])
                send_SOS_command(f"set_tilt {tilt_x} {tilt_y + dY} {tilt_z}")
        elif data["action"] == "SOS_rotateZ":
            if "increment" in data:
                tilt = send_SOS_command("get_tilt")
                split = tilt.split(' ')
                tilt_x = float(split[0])
                tilt_y = float(split[1])
                tilt_z = float(split[2])
                dZ = float(data['increment'])
                send_SOS_command(f"set_tilt {tilt_x} {tilt_y} {tilt_z + dZ}")
        elif data["action"] == "SOS_startAutorun":
            send_SOS_command("set_auto_presentation_mode 1")
        elif data["action"] == "SOS_stopAutorun":
            send_SOS_command("set_auto_presentation_mode 0")
        elif data["action"] == "SOS_readPlaylist":
            if "playlistName" in data:
                reply = send_SOS_command(f"playlist_read {data['playlistName']}", multiline=True)
                try:
                    self.wfile.write(bytes(reply, encoding="UTF-8"))
                except BrokenPipeError:
                    pass
        elif data["action"] == 'getAvailableContent':
            # Active content list comes from the defaults; all content is
            # every *.sos file under the sosrc tree (case-insensitive).
            active_content = \
                [s.strip() for s in config.defaults_dict.get("content", "").split(",")]
            all_content = list(Path("/home/sos/sosrc/").rglob("*.[sS][oO][sS]"))
            response = {"all_exhibits": [str(os.path.relpath(x, '/home/sos/sosrc/')) for x in all_content],
                        "active_content": active_content,
                        "system_stats": helper.getSystemStats()}
            json_string = json.dumps(response)
            try:
                self.wfile.write(bytes(json_string, encoding="UTF-8"))
            except BrokenPipeError:
                pass
        else:
            print(f"Warning: action {data['action']} not recognized!")
    if DEBUG:
        print("POST complete")