def upgrade():
    """Update or repair every installed plugin.

    Loads the package index and the installed-plugins list, then for each
    installed plugin: installs the newer version when the index advertises
    one, re-verifies the on-disk file against its index hash, and offers to
    reinstall damaged or missing plugins.  Prints a summary when done.

    Returns "error" when a plugin has an invalid type, otherwise None.
    """
    store = config["paths"]["userPath"] + "/.pluginstore"
    # Read index, if available
    index = configparser.ConfigParser()
    try:
        index.read(store + "/index.ini")
    except configparser.Error:
        # Index unreadable, suggest running pm.update()
        print("Could not find packages list, maybe run pm.update()")
    # Check if installed list exists; if not, create an empty one first
    if not os.path.exists(store + "/installed.ini"):
        with open(store + "/installed.ini", "w+"):
            pass
    installed = configparser.ConfigParser()
    installed.read(store + "/installed.ini")

    updates = 0
    reinstall = 0
    # Iterate through installed plugins
    for plugin in installed.sections():
        # Re-read both lists so state written by install() on a previous
        # iteration is picked up.
        installed.read(store + "/installed.ini")
        index.read(store + "/index.ini")
        if index[plugin]["type"] == "plugins":
            location = config["paths"]["userPath"] + "/plugins/"
        elif index[plugin]["type"] == "themes":
            location = config["paths"]["userPath"] + "/themes/"
        else:
            print("Error installing plugin: Invalid type")
            return "error"
        # Make sure plugin's file exists.
        # NOTE(review): existence is checked with installed[...]["filename"]
        # while the hash below uses index[...]["filename"] — preserved from
        # the original; confirm the two filenames always agree.
        if os.path.exists(location + "/" + installed[plugin]["filename"]):
            # Check plugin against the hash recorded in the index
            if not hs.fileChecksum(location + "/" + index[plugin]["filename"],
                                   "sha256") == index[plugin]["hash"]:
                installed[plugin]["verified"] = "false"
                with open(store + "/installed.ini", "w+") as f:
                    installed.write(f)
            # Check plugin update time against the latest version
            if float(index[plugin]["lastUpdate"]) > float(installed[plugin]["lastUpdate"]):
                install(plugin)
                updates = updates + 1
            # Plugin is marked as unverified, offer to reinstall it
            elif installed[plugin]["verified"] == "false":
                print("Hash: " + hs.fileChecksum(
                    location + "/" + index[plugin]["filename"], "sha256"))
                print("Expected: " + index[plugin]["hash"])
                if input(plugin + " appears to be damaged, would you like to reinstall it? (Y/n) ").lower() != "n":
                    install(plugin)
                    reinstall = reinstall + 1
        else:
            # Plugin file is missing, offer to reinstall it
            print("File not found: " + location + "/" + installed[plugin]["filename"])
            if input(plugin + " appears to be damaged, would you like to reinstall it? (Y/n) ").lower() != "n":
                install(plugin)
                reinstall = reinstall + 1
    print("Done:")
    print(str(updates) + " packages updated")
    print(str(reinstall) + " damaged packages reinstalled")
def checksum(target_id):
    """Return True when ./input/<id> and ./output/<id> have identical
    SHA-256 checksums, False otherwise.

    :param target_id: integer id used to build both file paths
    """
    src_file = './input/%d' % target_id
    output_file = './output/%d' % target_id
    # Direct comparison replaces the original if/else returning booleans.
    return hs.fileChecksum(src_file, 'sha256') == hs.fileChecksum(output_file, 'sha256')
def test_put_file_object_store(monkeypatch, s3_setup, tmpdir):
    """Upload LICENSE through the bootstrapper, fetch it back, and verify
    the checksum matches the original file."""
    bucket_name = "test-bucket"
    file_to_put = "LICENSE"
    current_directory = os.getcwd() + '/'
    monkeypatch.setattr(bootstrapper, "get_object_storage_filename",
                        lambda x: file_to_put)
    bootstrapper.put_file_to_object_storage(s3_setup, bucket_name, file_to_put)
    with tmpdir.as_cwd():
        s3_setup.fget_object(bucket_name, file_to_put, file_to_put)
        assert os.path.isfile(file_to_put)
        assert hs.fileChecksum(file_to_put, "sha256") == hs.fileChecksum(
            current_directory + file_to_put, "sha256")
def test_put_file_object_store(monkeypatch, s3_setup, tmpdir):
    """Upload LICENSE via an operation instance, fetch it back, and verify
    the checksum matches the original file."""
    bucket_name = "test-bucket"
    file_to_put = "LICENSE"
    current_directory = os.getcwd() + '/'
    op = _get_operation_instance(monkeypatch, s3_setup)
    op.put_file_to_object_storage(file_to_upload=file_to_put)
    with tmpdir.as_cwd():
        s3_setup.fget_object(bucket_name, file_to_put, file_to_put)
        assert os.path.isfile(file_to_put)
        assert hs.fileChecksum(file_to_put, "sha256") == hs.fileChecksum(
            current_directory + file_to_put, "sha256")
def test_get_file_object_store(monkeypatch, s3_setup, tmpdir):
    """Seed README.md into the bucket, download it via the bootstrapper,
    and verify the checksum matches the local copy."""
    file_to_get = "README.md"
    current_directory = os.getcwd() + '/'
    bucket_name = "test-bucket"
    monkeypatch.setattr(bootstrapper, "get_object_storage_filename",
                        lambda x: file_to_get)
    s3_setup.fput_object(bucket_name=bucket_name, object_name=file_to_get,
                         file_path=file_to_get)
    with tmpdir.as_cwd():
        bootstrapper.get_file_from_object_storage(s3_setup, bucket_name,
                                                  file_to_get)
        assert os.path.isfile(file_to_get)
        assert hs.fileChecksum(file_to_get, "sha256") == hs.fileChecksum(
            current_directory + file_to_get, "sha256")
def verify(plugin):
    """Verify an installed plugin against the package index.

    :param plugin: section name of the plugin in the index
    :returns: False when the file hash does not match the index or the file
        is missing; True when both checks pass; "error" on an invalid type.
    """
    store = config["paths"]["userPath"] + "/.pluginstore"
    # Load index, if available
    index = configparser.ConfigParser()
    try:
        index.read(store + "/index.ini")
    except configparser.Error:
        # If not, suggest running pm.update()
        print("\nCould not find package list, maybe run pm.update()")
    if index[plugin]["type"] == "plugins":
        location = config["paths"]["userPath"] + "/plugins/"
    elif index[plugin]["type"] == "themes":
        location = config["paths"]["userPath"] + "/themes/"
    else:
        print("Error installing plugin: Invalid type")
        return "error"
    # Load installed list if available; if not, create it first.
    # (The parsed object is not used below — preserved for its side effect
    # of creating the file on first run.)
    if not os.path.exists(store + "/installed.ini"):
        with open(store + "/installed.ini", "w+"):
            pass
    installed = configparser.ConfigParser()
    installed.read(store + "/installed.ini")
    # Hash must match the index entry
    if hs.fileChecksum(location + "/" + index[plugin]["filename"],
                       "sha256") != index[plugin]["hash"]:
        return False
    # File must exist under the directory implied by its type
    return os.path.exists(config["paths"]["userPath"] + "/" +
                          index[plugin]["type"] + "/" +
                          index[plugin]["filename"])
def generateStoreInfo(plugin):
    """Interactively collect metadata for *plugin* and print a ready-to-paste
    store index listing.

    :param plugin: plugin filename; prints an error when it does not exist
    """
    if not os.path.exists(plugin):
        print("File not found: plugins/" + plugin)
        return
    name = input("Plugin name (No spaces): ")
    plugin_type = input("Plugin Type (plugin/theme): ").lower()
    description = input("Plugin description: ")
    version = input("Plugin version: ")
    maintainer = input("Maintainer email address: ")
    link = input(
        "Direct download link (Please use GitHub or GitLab for hosting): ")
    summary = input("Description summary: ")
    lastUpdate = time.time()
    # NOTE(review): existence was checked on `plugin` directly but the hash
    # is taken from "<type>s/<plugin>" — preserved from the original.
    file_hash = hs.fileChecksum(plugin_type + "s/" + plugin, "sha256")
    print()
    print("Plugin listing information:")
    print()
    print("[" + name + "]")
    print("description = " + description)
    print("maintainer = " + maintainer)
    print("version = " + version)
    print("download = " + link)
    print("hash = " + file_hash)
    # Bug fix: print the timestamp captured above instead of calling
    # time.time() a second time (the two values could differ).
    print("lastupdate = " + str(lastUpdate))
    print("summary = " + summary)
    print("filename = " + plugin)
    print("rating = 5")
    print("ratings = 0")
def Check(File, Full_File, API_Data, Backup_File):
    """Search the chain for a transaction matching *Backup_File*/*File*.

    Returns 2 when the recorded data hash matches the local file's SHA-256,
    1 when it does not, and 0 when no matching transaction exists.  On any
    error the exception is printed and None is returned.
    """
    try:
        state = 0
        host = f"{API_Data['host']}:{str(int(API_Data['port']))}/chain"
        response = json.loads(
            requests.get(host, verify=API_Data['verify_ssl']).text)
        if 'chain' in response:
            for block in response['chain']:
                transactions = block.get('transactions')
                if not transactions:
                    continue
                for txn in transactions:
                    if txn['backup'] == Backup_File and txn['log_file'] == File:
                        if txn['data_hash'] == hs.fileChecksum(Full_File,
                                                               "sha256"):
                            return 2
                        return 1
        return state
    except Exception as e:
        print(e)
def get_design(w3):
    """Look up a design by the hash of a local file and print its full
    version history from the contract database."""
    design_db = create_contract_object(w3)
    file_path = input("Enter filepath: ")
    try:
        file_hash = hs.fileChecksum(file_path, 'sha256', False)
    except TypeError:
        print("File does not exist")
        return
    print("Hash of the given file: ", file_hash)
    idx = design_db.functions.findDesign(file_hash).call()
    if idx == -1:
        print("Design with given hash does not exist in database")
        return
    design = design_db.functions.getDesign(idx).call()
    print("Designer:", design[0])
    print("Filename:", design[1])
    print(
        "Version history \n================================================================="
    )
    print("Version:", design[3])
    print("Filehash:", design[2])
    print("Timestamp:", design[4])
    print("=================================================================")
    history_len = design_db.functions.getHistoryLength(idx).call()
    for i in range(history_len):
        entry = design_db.functions.getHistory(idx, i).call()
        print("Version:", entry[0])
        print("FileHash:", entry[1])
        print("Timestamp:", entry[2])
        print(
            "================================================================="
        )
def upload_design(w3):
    """Hash a local design file and register it in the on-chain database,
    mining until the transaction is confirmed."""
    design_db = create_contract_object(w3)
    file_path = input("Enter filepath: ")
    try:
        file_hash = hs.fileChecksum(file_path, 'sha256', False)
    except TypeError:
        print("File does not exist")
        return
    print("Hash of the given file: ", file_hash)
    file_name = input("Enter filename: ")
    version = input("Enter version: ")
    passphrase = getpass.getpass("Enter passphrase: ")
    w3.personal.unlockAccount(w3.eth.accounts[0], passphrase)
    try:
        tx_hash = design_db.functions.addDesign(file_name, file_hash,
                                                version).transact()
    except ValueError:
        # The contract rejects duplicate file hashes.
        print("File with same hash exists")
        return
    w3.miner.start(4)
    print("Waiting for transaction to be mined...")
    w3.eth.waitForTransactionReceipt(tx_hash)
    w3.miner.stop()
    print("Design uploaded successfully. TX Hash: ", Web3.toHex(tx_hash))
def activate_announcer():
    """Broadcast the local shareable file list and a presence announcement
    over UDP roughly once per second.

    Every 10 seconds the known-peer and known-file caches are cleared so
    stale entries expire.  Runs forever.
    """
    start = time.time()
    # Bug fix: the original created a brand-new socket on every loop
    # iteration and never closed any of them (file-descriptor leak).
    # One broadcast socket serves the whole loop.
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
    s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
    host = '<broadcast>'
    port = 12345
    d = "./torrent_share"
    while True:
        count = 0
        data_string = str(ip_address) + ".0*announcetorrent*["
        for path in os.listdir(d):
            full_path = os.path.join(d, path)
            if os.path.isfile(full_path):
                # Flush a datagram after every 3 files to bound packet size
                if count == 3:
                    s.sendto((data_string + "]").encode("utf-8"), (host, port))
                    data_string = str(ip_address) + ".0*announcetorrent*["
                    count = 0
                stats = os.stat(full_path)
                data_string += str(os.path.basename(full_path)) + "*" + full_path + "*" + str(
                    hs.fileChecksum(full_path, "sha256")) + "*" + str(stats.st_size) + "|"
                count += 1
        s.sendto((data_string + "]").encode("utf-8"), (host, port))
        data_string = str(ip_address) + ".0*announce*[" + user_name + "," + str(ip_address) + ",announce]"
        if time.time() - start >= 10:
            # Reset peer/file caches every 10 seconds
            start = time.time()
            active_users.clear()
            torrentable_files.clear()
        s.sendto(data_string.encode("utf-8"), (host, port))
        time.sleep(1)
def test_get_file_object_store(monkeypatch, s3_setup, tmpdir):
    """Seed README.md into the bucket, fetch it via an operation instance,
    and verify the checksum matches the local copy."""
    file_to_get = "README.md"
    current_directory = os.getcwd() + '/'
    bucket_name = "test-bucket"
    s3_setup.fput_object(bucket_name=bucket_name, object_name=file_to_get,
                         file_path=file_to_get)
    with tmpdir.as_cwd():
        op = _get_operation_instance(monkeypatch, s3_setup)
        op.get_file_from_object_storage(file_to_get)
        assert os.path.isfile(file_to_get)
        assert hs.fileChecksum(file_to_get, "sha256") == hs.fileChecksum(
            current_directory + file_to_get, "sha256")
def Transfer_File(Source_Directory, Real_Source_Directory, File, API_Data,
                  Backup_File):
    """Hash a log file and record the digest on the SIEM chain ledger,
    logging the real path that was added."""
    full_file = Source_Directory + "/" + File
    real_full_file = Real_Source_Directory + "/" + File
    file_hash = hs.fileChecksum(full_file, "sha256")
    Add_to_SIEMChain_Ledger(file_hash, API_Data, File, Backup_File)
    logging.info(f"{str(Date())} Added {real_full_file} to chain.")
def test_convert_notebook_to_html(tmpdir):
    """Convert the sample notebook to HTML and pin the output's checksum."""
    notebook_file = os.getcwd() + "/etc/tests/resources/test-notebookA.ipynb"
    notebook_output_html_file = "test-notebookA.html"
    html_sha256 = '7b375914a055f15791f0f68a3f336a12d73adca6893e45081920bd28dda894c3'
    with tmpdir.as_cwd():
        bootstrapper.convert_notebook_to_html(notebook_file,
                                              notebook_output_html_file)
        assert os.path.isfile(notebook_output_html_file)
        # Bug fix: the assertion referenced HTML_SHA256 (undefined or a
        # stale global) while the expected hash was bound to the unused
        # local html_sha256 — compare against the local value.
        assert hs.fileChecksum(notebook_output_html_file, "sha256") == html_sha256
def test_convert_notebook_to_html(tmpdir):
    """Convert the sample notebook to HTML and pin the output's sha256."""
    notebook_file = os.getcwd() + "/etc/tests/resources/test-notebookA.ipynb"
    notebook_output_html_file = "test-notebookA.html"
    html_sha256 = 'dfff0325b8551b75a76fb3357bee60694a0e71b8fc8438c6382ce06777b14498'
    with tmpdir.as_cwd():
        bootstrapper.convert_notebook_to_html(notebook_file,
                                              notebook_output_html_file)
        assert os.path.isfile(notebook_output_html_file)
        assert hs.fileChecksum(notebook_output_html_file, "sha256") == html_sha256
def test_convert_notebook_to_html(tmpdir):
    """Convert the sample notebook via NotebookFileOp and check the output
    against the module-level HTML_SHA256 expected checksum."""
    notebook_file = os.getcwd() + "/etc/tests/resources/test-notebookA.ipynb"
    notebook_output_html_file = "test-notebookA.html"
    with tmpdir.as_cwd():
        bootstrapper.NotebookFileOp.convert_notebook_to_html(
            notebook_file, notebook_output_html_file)
        assert os.path.isfile(notebook_output_html_file)
        assert hs.fileChecksum(notebook_output_html_file, "sha256") == HTML_SHA256
def update_design(w3):
    """Replace one of the caller's designs with a new file version.

    Finds the caller's design by filename, records the new file's hash and
    version via modifyDesign, and mines until the transaction confirms.
    """
    designDb = create_contract_object(w3)
    fileName = input("Enter filename: ")
    filesLength = designDb.functions.getFilesLength().call()
    index = -1
    designerAddress = w3.eth.accounts[0]
    design = []
    # Locate the caller's design with a matching filename
    for i in range(filesLength):
        design = designDb.functions.getDesign(i).call()
        if design[0] == designerAddress and design[1] == fileName:
            index = i
            break
    if index == -1:
        print("No design file uploaded by you matches with the name given")
        return
    newFilePath = input("Enter new filepath: ")
    try:
        newFileHash = hs.fileChecksum(newFilePath, 'sha256', False)
    except TypeError:
        print("File does not exist")
        return
    print("Hash of the given file: ", newFileHash)
    i = designDb.functions.findDesign(newFileHash).call()
    if i != -1:
        # Bug fix: original message was garbled
        # ("Design not update(design with same hash exists)").
        print("Design not updated (design with same hash exists)")
        return
    version = input("Enter version: ")
    passphrase = getpass.getpass("Enter passphrase: ")
    w3.personal.unlockAccount(w3.eth.accounts[0], passphrase)
    try:
        tx_hash = designDb.functions.modifyDesign(index, newFileHash,
                                                  version).transact()
    except ValueError:
        print("Design can only be updated by designer")
        return
    w3.miner.start(4)
    print("Waiting for transaction to be mined...")
    w3.eth.waitForTransactionReceipt(tx_hash)
    w3.miner.stop()
    print("Design updated successfully. TX Hash: ", Web3.toHex(tx_hash))
    # Removed a trailing read-only getDesign() call whose result was unused.
#######################################################################
# This python code generates md5, sha1, sha256 hash values for an input
# file.
# example:
#   python md5-sha1-sha256-hash-generate.py
#   Enter the filename or path: file.exe
#######################################################################
from py_essentials import hashing as hs

# Bug fix: the original used Python 2-only raw_input() and print
# statements, inconsistent with the rest of this (Python 3) codebase.
inputFile = input("Enter the filename or path: ")
hashmd5sum = hs.fileChecksum(inputFile, "md5")
hashsha1 = hs.fileChecksum(inputFile, "sha1")
hash256 = hs.fileChecksum(inputFile, "sha256")
print("\n")
print("MD5 for the file is: %s" % hashmd5sum)
print("\n")
# Bug fix: the SHA1 and SHA256 results were both labelled "MD5".
print("SHA1 for the file is: %s" % hashsha1)
print("\n")
print("SHA256 for the file is: %s" % hash256)
""" Aung Phyo 06/10/2018 following coding can scan the file for threat check using the metadefender API from https://portal.opswat.com/ """ import json import requests import sys from py_essentials import hashing as hs #calculating hash of the file hash256 = hs.fileChecksum(sys.argv[1], "sha256") hash1 = hs.fileChecksum(sys.argv[1], "sha1") hashMD5 = hs.fileChecksum(sys.argv[1], "md5") #using the hash to create the url url = "https://api.metadefender.com/v2/hash/" + str(hash1) headers = {'apikey': "API_KEY", 'file-metadata': "1"} #request the url and create the json object response = requests.get(url, headers=headers).json() #call the key and get the value for r in response: dataid = response[r]
# generating the hash of the certificate
import sys
import os

from py_essentials import hashing as hs

args = sys.argv

# Index PDF: first command-line argument is the certificate file
fileName = args[1]
hash = hs.fileChecksum(fileName, "sha256")
print(hash)
def file_to_address(file_path, algorithm):
    """Checksum *file_path* with *algorithm* and store the digest as a
    message in an address; returns whatever the store call returns."""
    digest = hs.fileChecksum(file_path, algorithm)
    return save_message_in_address(digest)
def _pluginstore_path():
    """Path of the plugin-store state directory."""
    return config["paths"]["userPath"] + "/.pluginstore"


def _plugin_location(index, plugin):
    """Return the install directory for *plugin* from its index type, or
    None when the type is neither "plugins" nor "themes"."""
    if index[plugin]["type"] == "plugins":
        return config["paths"]["userPath"] + "/plugins/"
    if index[plugin]["type"] == "themes":
        return config["paths"]["userPath"] + "/themes/"
    return None


def _install_dependencies(index, installed, plugin):
    """Install every dependency listed for *plugin*.

    Dependencies come from the index ("depends", comma-separated); entries
    prefixed "pypi:" are installed with pip.  Returns False when any
    dependency cannot be satisfied, True otherwise.
    """
    print("Installing dependencies...")
    for dependency in index[plugin]["depends"].split(","):
        # Install from index if available
        if index.has_section(dependency):
            # Persist current state, recurse, then reload what install()
            # wrote for the dependency.
            with open(_pluginstore_path() + "/installed.ini", "w+") as f:
                installed.write(f)
            install(dependency)
            installed.read(_pluginstore_path() + "/installed.ini")
        # Dependency already installed, nothing to do
        elif installed.has_section(dependency):
            print("Dependancy already satisfied")
        elif dependency.startswith("pypi:"):
            try:
                subprocess.check_call([sys.executable, "-m", "pip",
                                       "install", "-q", dependency[5:]])
            except (subprocess.CalledProcessError, OSError):
                print("Dependency not unsatisfyable: " + dependency)
                return False
        elif dependency != "none":
            print("Dependency unsatisfyable: " + dependency)
            return False
    return True


def _download_and_record(index, installed, plugin, location):
    """Download the plugin file and record it as installed from the index."""
    download(index[plugin]["download"],
             location + "/" + index[plugin]["filename"])
    installed[plugin] = index[plugin]
    installed[plugin]["source"] = "index"
    with open(_pluginstore_path() + "/installed.ini", "w+") as f:
        installed.write(f)


def install(plugin):
    """Install, update, or repair *plugin* from the package index.

    Four cases: already installed with a newer index version (update);
    installed but unverified (redownload); not installed (fresh install);
    unknown plugin (message only).  Returns "error" on an invalid plugin
    type, otherwise None.

    Refactor notes: the dependency-install logic that was duplicated three
    times is extracted into _install_dependencies(); the invalid-type check
    now happens *before* dependencies are installed (fail fast, no side
    effects on a bad index entry); bare `except:` clauses narrowed.
    """
    store = _pluginstore_path()
    # Load index, if available
    index = configparser.ConfigParser()
    try:
        index.read(store + "/index.ini")
    except configparser.Error:
        # If not, suggest running pm.update()
        print("Could not find package list, maybe run pm.update()")
    # Load installed list if available; create it first when missing
    if not os.path.exists(store + "/installed.ini"):
        with open(store + "/installed.ini", "w+"):
            pass
    installed = configparser.ConfigParser()
    installed.read(store + "/installed.ini")
    # Default the verification state when not recorded yet
    try:
        verified = installed[plugin]["verified"]
    except KeyError:
        verified = "none"
    # Default the dependency list when the index has none for this plugin
    try:
        index[plugin]["depends"]
    except KeyError:
        index[plugin]["depends"] = "none"

    # Plugin is already installed and verified
    if installed.has_section(plugin) and not verified == "false":
        # Newer version available, update
        if float(index[plugin]["lastUpdate"]) > float(installed[plugin]["lastUpdate"]):
            print("Updating " + plugin + "...")
            location = _plugin_location(index, plugin)
            if location is None:
                print("Error installing plugin: Invalid type")
                return "error"
            try:
                if not _install_dependencies(index, installed, plugin):
                    return
                _download_and_record(index, installed, plugin, location)
            except Exception as e:
                print("Could not download file: " + str(e))
            # Verify plugin against hash in index
            print("Verifying...")
            if not hs.fileChecksum(location + "/" + index[plugin]["filename"],
                                   "sha256") == index[plugin]["hash"]:
                print("Package verification failed, the package should be reinstalled.")
                installed[plugin]["verified"] = "false"
            else:
                print("Package verification passed")
                installed[plugin]["verified"] = "true"
            with open(store + "/installed.ini", "w+") as f:
                installed.write(f)
        # No updates available, nothing to do
        else:
            print(plugin + " is already installed and has no update available")
    # Plugin has failed verification, reinstall it
    elif verified != "true" and installed.has_section(plugin):
        print("Redownloading damaged package " + plugin + "...")
        location = _plugin_location(index, plugin)
        if location is None:
            print("Error installing plugin: Invalid type")
            return "error"
        try:
            if not _install_dependencies(index, installed, plugin):
                return
            _download_and_record(index, installed, plugin, location)
        except Exception as e:
            print("Could not download file: " + str(e))
        # Verify plugin against hash stored in index
        print("Verifying...")
        if not hs.fileChecksum(location + "/" + index[plugin]["filename"],
                               "sha256") == index[plugin]["hash"]:
            print("File hash: " + hs.fileChecksum(
                location + "/" + index[plugin]["filename"], "sha256"))
            print("Expected: " + index[plugin]["hash"])
            print("Package verification failed, the plugin should be reinstalled.")
            installed[plugin]["verified"] = "false"
        else:
            print("Package verification passed")
            installed[plugin]["verified"] = "true"
    # Plugin is not installed, install it
    elif index.has_section(plugin):
        print("Downloading " + plugin + "...")
        location = _plugin_location(index, plugin)
        if location is None:
            print("Error installing plugin: Invalid type")
            return "error"
        try:
            if not _install_dependencies(index, installed, plugin):
                return
            _download_and_record(index, installed, plugin, location)
        except Exception as e:
            print("Could not download file: " + str(e))
        # Check plugin against hash in index
        print("Verifying...")
        if not hs.fileChecksum(location + "/" + index[plugin]["filename"],
                               "sha256") == index[plugin]["hash"]:
            print("File hash: " + hs.fileChecksum(
                location + "/" + index[plugin]["filename"], "sha256"))
            print("Expected: " + index[plugin]["hash"])
            print("Packages verification failed, the plugin should be reinstalled.")
            installed[plugin]["verified"] = "false"
        else:
            print("Package verification passed")
            installed[plugin]["verified"] = "true"
    # Plugin could not be found
    else:
        print("Packages " + plugin + " not found.")
    # Persist final state
    with open(store + "/installed.ini", "w+") as f:
        installed.write(f)
# NOTE(review): the original chunk opened with a bare sys.exit(...) call
# that is clearly the body of a usage helper whose `def` line was lost;
# reconstructed here since exit_message() is called below — confirm
# against the original file.
def exit_message():
    """Print command-line usage and terminate."""
    sys.exit(
        "Command line syntax: checksum.py [filelistname] [starting directory]")


if (len(sys.argv) == 1):
    exit_message()

# settings
filelist = sys.argv[1]
if (len(sys.argv) == 3):
    rootDir = sys.argv[2]
else:
    rootDir = '.'
files = []

# walking script
for dirName, subdirList, fileList in os.walk(rootDir):
    for fname in fileList:
        # creating correct formatting for folder path with pathlib
        data_folder = Path(dirName)
        # correct path format for fileChecksum
        current_file = ((data_folder / fname).resolve()).as_posix()
        # generating checksum with sha256
        try:
            h = hasher.fileChecksum(str(current_file), "sha256")
        except Exception:
            # Bug fix: on a checksum failure the original fell through and
            # appended the *previous* file's hash (or raised NameError on
            # the very first file); skip the unreadable file instead.
            print('Checksum counting error')
            continue
        # adding new values to array
        files.append((str((data_folder.resolve()).as_posix()), fname, h))
        # putting results on screen
        print(str((data_folder.resolve()).as_posix()) + ' ' + fname + ' ' + h)

# dumping data with pickle
# Bug fix: the original passed an open() result directly to pickle.dump,
# never closing the file; a context manager guarantees the flush/close.
with open(filelist, "wb") as out:
    pickle.dump(files, out)
def guiStoreInfo():
    """Dialog-driven variant of generateStoreInfo: pick an installed plugin,
    collect its metadata, and print a store index listing."""
    d = Dialog()
    d.add_persistent_args(["--title", "Generate Store Info"])
    # Get plugin
    choices = []
    pluginlist = plugins(False, True)
    for i in range(len(pluginlist)):
        choices.append((pluginlist[i], ""))
    if len(choices) == 0:
        choices = [("No Plugins Are Installed", "")]
    resp = d.menu("Choose plugin", choices=choices)
    if resp[0] != d.OK:
        clear()
        return
    if resp[1] == "No Plugins Are Installed":
        clear()
        return
    # Continue asking
    name = ""
    while name == "":
        name = d.inputbox("Plugin Name (No Spaces)")[1].replace(" ", "_")
    # Bug fix: plugin_type (originally `type`, shadowing the builtin) was
    # unbound when the filename matched neither suffix, raising NameError
    # later.  NOTE(review): "" still produces an odd checksum path for
    # unknown extensions — confirm the intended fallback.
    plugin_type = ""
    if resp[1].endswith(".iitheme"):
        plugin_type = "themes"
    elif resp[1].endswith(".py"):
        plugin_type = "plugins"
    description = "\n"
    while description == "\n":
        description = d.editbox_str("", title="Plugin Description")[1].rstrip()
    version = ""
    while version == "":
        version = d.inputbox("Plugin Version")[1]
    maintainer = ""
    while maintainer == "":
        maintainer = d.inputbox("Maintainer Email Address")[1]
    link = ""
    while link == "":
        link = d.inputbox(
            "Direct Download Link (Please use GitHub or GitLab for hosting)"
        )[1]
    summary = ""
    while summary == "":
        summary = d.inputbox("Plugin Summary")[1]
    # Bug fix: `depends` was only assigned in the "plugins" branch but read
    # unconditionally below, raising NameError for themes.
    depends = ""
    if plugin_type == "plugins":
        reqs = getReqs(resp[1])
        depends = d.editbox_str(
            reqs,
            title=
            "Dependancies separated by line breaks. Start PiPI dependancies with \'pipy:\'"
        )[1]
        depends = depends.replace("\n", ",")
        depends = depends.rstrip(",")
    lastUpdate = time.time()
    file_hash = hs.fileChecksum(plugin_type + "/" + resp[1], "sha256")
    clear()
    print("[" + name + "]")
    print("description = " + description)
    print("maintainer = " + maintainer)
    print("version = " + version)
    print("download = " + link)
    print("hash = " + file_hash)
    # Bug fix: print the captured timestamp instead of a second time.time()
    print("lastupdate = " + str(lastUpdate))
    print("summary = " + summary)
    print("filename = " + resp[1])
    if not depends == "":
        print("depends = " + depends)
    print("rating = 5")
    print("ratings = 0")
    print("type = " + plugin_type)
def main():
    """Emit similarity "links" between Google Drive files, local Desktop files
    and e-mail attachments, one caret-separated record per line on stdout.

    Link categories printed (first field of each record):
      - 'location': parent-folder hierarchy of Drive items and local roots
      - 'content' : identical md5 checksums, or MinHash/LSH text similarity
      - 'name'    : Jaro / Levenshtein filename similarity
      - 'time'    : KMeans clustering of modification times

    Side effects: OAuth flow via token.pickle/credentials.json, Drive API
    calls, reads './email_metadata', creates and removes the temporary
    directory 'DriveTextFiles'.
    """
    path = '../../../Desktop' # Directory in local computer
    creds = None
    # The file token.pickle stores the user's access and refresh tokens, and is
    # created automatically when the authorization flow completes for the first
    # time.
    if os.path.exists('token.pickle'):
        with open('token.pickle', 'rb') as token:
            creds = pickle.load(token)
    # If there are no (valid) credentials available, let the user log in.
    if not creds or not creds.valid:
        if creds and creds.expired and creds.refresh_token:
            creds.refresh(Request())
        else:
            flow = InstalledAppFlow.from_client_secrets_file(
                'credentials.json', SCOPES)
            creds = flow.run_local_server()
        # Save the credentials for the next run
        with open('token.pickle', 'wb') as token:
            pickle.dump(creds, token)
    service = build('drive', 'v3', credentials=creds)
    # Call the Drive v3 API; one page of up to 1000 files with the metadata
    # fields used below (no pagination beyond the first page).
    results = service.files().list(
        pageSize=1000,
        fields="nextPageToken, files(parents, name, id, modifiedTime, mimeType, md5Checksum)").execute()
    items = results.get('files', [])
    # Current wall-clock time, and the same instant truncated to midnight;
    # used to convert each file's modification *date* to an epoch-like value.
    currentTimesinceEpoc = time.time()
    currentday = time.strftime('%d', time.localtime(currentTimesinceEpoc))
    currentmonth = time.strftime('%m', time.localtime(currentTimesinceEpoc))
    currentyear = time.strftime('%Y', time.localtime(currentTimesinceEpoc))
    currentdate = dt.datetime(int(currentyear), int(currentmonth),
                              int(currentday), 0, 0, 0)
    # parentid -> newest 'modificationTimesinceEpoc' of any child (as str)
    folderModifTimes = {}
    if not items:
        print('No files found.')
    else:
        '''location hierarchy for Drive files'''
        for item in items:
            # 'modifiedTime' is RFC3339, e.g. 2020-01-31T12:00:00.000Z;
            # only the date part is used (day resolution).
            modifyear, modifmonth, modifday = item['modifiedTime'].split('-')
            modifday = modifday.split('T')[0]
            modifdate = dt.datetime(int(modifyear), int(modifmonth),
                                    int(modifday), 0, 0, 0)
            # Day-resolution approximation of the file's mtime as seconds
            # since the epoch, stored as a string like the local-file mtimes.
            modificationTimesinceEpoc = str(currentTimesinceEpoc -
                                            (currentdate - modifdate).total_seconds())
            item['modificationTimesinceEpoc'] = modificationTimesinceEpoc
            # More than 1 parent is not handled; only logged.
            if item.get('parents') != None and len(item.get('parents')) > 1:
                print("many parents")
            if item.get('parents') == None:
                # Parentless folders hang off a synthetic 'Drive' root,
                # parentless non-folders off 'Parent-less Files'.
                if item['mimeType'] == 'application/vnd.google-apps.folder':
                    parentname = 'Drive'
                    parentid = 'drive'
                else:
                    parentname = 'Parent-less Files'
                    parentid = 'parentlessfiles'
            else:
                # One extra API call per item to resolve the parent's name.
                parentname = (service.files().get(
                    fileId=item.get('parents')[0]).execute())['name']
                parentid = item.get('parents')[0]
            # Track the newest child mtime per parent folder.
            if folderModifTimes.get(parentid) == None or \
                    float(folderModifTimes[parentid]) < float(modificationTimesinceEpoc):
                folderModifTimes[parentid] = modificationTimesinceEpoc
            item['parentid'] = parentid
            item['parentname'] = parentname
        new_items = []
        # Synthetic roots: Root -> Desktop, Root -> Drive, Drive -> Parent-less.
        print('location' + '^' + 'dir' + 'root' + '^' + 'Root' + '^' +
              str(currentTimesinceEpoc) + '^' + 'dir' + os.path.abspath(path) +
              '^' + 'Desktop' + '^' + str(currentTimesinceEpoc))
        print('location' + '^' + 'dir' + 'root' + '^' + 'Root' + '^' +
              str(currentTimesinceEpoc) + '^' + 'dir' +
              'https://drive.google.com/open?id=' + 'drive' + '^' + 'Drive' +
              '^' + str(currentTimesinceEpoc))
        # NOTE(review): this raises KeyError if no parent-less file was seen
        # above ('parentlessfiles' never entered folderModifTimes) — confirm.
        print('location' + '^' + 'dir' + 'https://drive.google.com/open?id=' +
              'drive' + '^' + 'Drive' + '^' + str(currentTimesinceEpoc) + '^' +
              'dir' + 'https://drive.google.com/open?id=' + 'parentlessfiles' +
              '^' + 'Parent-less Files' + '^' + folderModifTimes['parentlessfiles'])
        for item in items:
            if item['mimeType'] == 'application/vnd.google-apps.folder':
                # Folder: link parent dir -> this dir; use the folder's own
                # newest-child time when known, else its own mtime.
                if folderModifTimes.get(item['id']) == None:
                    print('location' + '^' + 'dir' +
                          'https://drive.google.com/open?id=' + item['parentid'] +
                          '^' + item['parentname'] + '^' +
                          folderModifTimes[item['parentid']] + '^' + 'dir' +
                          'https://drive.google.com/open?id=' + item['id'] + '^' +
                          item['name'] + '^' + item['modificationTimesinceEpoc'])
                else:
                    print('location' + '^' + 'dir' +
                          'https://drive.google.com/open?id=' + item['parentid'] +
                          '^' + item['parentname'] + '^' +
                          folderModifTimes[item['parentid']] + '^' + 'dir' +
                          'https://drive.google.com/open?id=' + item['id'] + '^' +
                          item['name'] + '^' + folderModifTimes[item['id']])
            else:
                # Not a folder: keep for the similarity passes below;
                # further links would only be between files.
                new_items.append(item)
                print('location' + '^' + 'dir' +
                      'https://drive.google.com/open?id=' + item['parentid'] +
                      '^' + item['parentname'] + '^' +
                      folderModifTimes[item['parentid']] + '^' +
                      'https://drive.google.com/open?id=' + item['id'] + '^' +
                      item['name'] + '^' + item['modificationTimesinceEpoc'])
        # From here on, only non-folder Drive files take part.
        items = new_items
    # Scan the local directory tree, skipping hidden/underscore names.
    for r, d, f in os.walk(path):
        f = [file for file in f if not (file[0] == '.' or file[0] == '_')]
        d[:] = [dr for dr in d if not (dr[0] == '.' or dr[0] == '_')]
        for file in f:
            filepath = os.path.join(r, file)
            filepath2 = filepath.split('/')
            filename = filepath2[-1]
            filemtime = str(os.path.getmtime(os.path.abspath(filepath)))
            # libmagic mime sniffing; md5 for the duplicate-content pass.
            mime = magic.Magic(mime=True)
            mimeType = mime.from_file(os.path.abspath(filepath))
            md5Checksum = hs.fileChecksum(os.path.abspath(filepath), "md5")
            item = {'id': os.path.abspath(filepath), 'name': filename,
                    'modificationTimesinceEpoc': filemtime,
                    'mimeType': mimeType, 'md5Checksum': md5Checksum}
            items.append(item)
    # E-mail attachments: one caret-separated record per line; a line that
    # does not split into fields terminates the list.
    with open('email_metadata', 'r') as f:
        attachments = f.read().split('\n')
    for attachment in attachments:
        if len(attachment.split("^")) == 1:
            break
        attachment_id, attachment_name, attachment_mimeType, \
            attachment_md5Checksum, attachment_mtime = attachment.split("^")
        item = {'id': attachment_id + '~' + attachment_name,
                'name': attachment_name,
                'modificationTimesinceEpoc': attachment_mtime,
                'mimeType': attachment_mimeType,
                'md5Checksum': attachment_md5Checksum}
        items.append(item)
    ''' same hash: exact-duplicate links (O(n^2) pairwise md5 compare;
    each pair is printed twice, once in each direction)'''
    for f1 in items:
        for f2 in items:
            if f1['id'] != f2['id'] and f1.get('md5Checksum') != None and \
                    f2.get('md5Checksum') != None and \
                    f1.get('md5Checksum') == f2.get('md5Checksum'):
                f1mtime = f1['modificationTimesinceEpoc']
                f2mtime = f2['modificationTimesinceEpoc']
                print('content' + '^' + f1['id'] + '^' + f1['name'] + '^' +
                      f1mtime + '^' + f2['id'] + '^' + f2['name'] + '^' + f2mtime)
    '''content similarity'''
    text_files = []
    '''select text-like files (mime major type 'text' or Google Docs)'''
    for f in items:
        filepath = f['id']
        file_type = f['mimeType']
        major_minor = file_type.split('/')
        if major_minor[0] == 'text' or \
                file_type == 'application/vnd.google-apps.document':
            text_files.append(f)
    documents = []
    os.mkdir('DriveTextFiles')  # To temporarily store drive text files
    for f in text_files:
        try:
            # Local files open directly by their path (== 'id').
            documents.append((open(f['id']).read(), f))
        except FileNotFoundError:
            # Drive files: download (Docs exported as text/plain) into the
            # temporary directory, then read back.
            try:
                file_id = f['id']
                if f['mimeType'] == 'application/vnd.google-apps.document':
                    request = service.files().export_media(
                        fileId=file_id, mimeType='text/plain')
                else:
                    request = service.files().get_media(fileId=file_id)
                fh = io.FileIO(os.path.join('DriveTextFiles', f['id']), 'wb')
                downloader = googleapiclient.http.MediaIoBaseDownload(fh, request)
                done = False
                while done is False:
                    status, done = downloader.next_chunk()
                documents.append(
                    (open(os.path.join('DriveTextFiles', f['id'])).read(), f))
            except (googleapiclient.errors.HttpError, FileNotFoundError):
                # Best effort: undownloadable files are silently skipped.
                pass
    # MinHash each document's word set and index it in an LSH structure;
    # the LSH key packs id^name^mtime so the query result is self-describing.
    lsh = MinHashLSH(threshold=0.3, num_perm=128)
    for f in documents:
        setdoc = set(f[0].split())
        m = MinHash(num_perm=128)
        for d in setdoc:
            m.update(d.encode('utf8'))
        lsh.insert(f[1]['id'] + '^' + f[1]['name'] + '^' +
                   f[1]['modificationTimesinceEpoc'], m)
    results = []
    # Query every document against the index (each also matches itself).
    for doc in documents:
        setdoc = set(doc[0].split())
        m = MinHash(num_perm=128)
        for d in setdoc:
            m.update(d.encode('utf8'))
        result = lsh.query(m)
        results.append((doc[1]['id'] + '^' + doc[1]['name'] + '^' +
                        doc[1]['modificationTimesinceEpoc'], result))
    '''forming links between files with similar content (self-matches skipped)'''
    for result in results:
        f2mtime = result[0].split('^')[2]
        for r in result[1]:
            if r != result[0]:
                f1mtime = r.split('^')[2]
                print('content' + '^' + r.split('^')[0] + '^' +
                      r.split('^')[1] + '^' + f1mtime + '^' +
                      result[0].split('^')[0] + '^' +
                      result[0].split('^')[1] + '^' + f2mtime)
    shutil.rmtree('DriveTextFiles')
    '''name similarity: Jaro distance plus a Levenshtein cap scaled by the
    shorter name (capped at 8 chars) — thresholds are empirical'''
    for f1 in items:
        for f2 in items:
            distance1 = textdistance.jaro.distance(f1['name'], f2['name'])
            distance2 = textdistance.levenshtein.distance(f1['name'], f2['name'])
            if ((distance1 <= 0.30 and f1['id'] != f2['id'] and
                 distance2 < 0.75 * min(len(f1['name']), len(f2['name']), 8)) or
                    distance1 <= 0.15 or
                    distance2 <= 0.25 * min(len(f1['name']), len(f2['name']), 8)) \
                    and f1['id'] != f2['id']:
                f1mtime = f1['modificationTimesinceEpoc']
                f2mtime = f2['modificationTimesinceEpoc']
                print('name' + '^' + f1['id'] + '^' + f1['name'] + '^' +
                      f1mtime + '^' + f2['id'] + '^' + f2['name'] + '^' + f2mtime)
    '''time similarity: KMeans over 1-D mtimes, ~file_threshhold files
    per cluster; all pairs within a cluster are linked'''
    file_threshhold = 10
    filetimes = []
    i = 0
    for item in items:
        filetimes.append([float(item['modificationTimesinceEpoc'])])
        i = i + 1
    kmeans = KMeans(n_clusters=int(i / file_threshhold) + 1,
                    random_state=0).fit(filetimes)
    labels = kmeans.labels_
    # Iterating through all clusters.
    # NOTE(review): the inner loop reuses `i` (the file count) as an index;
    # harmless here because the range above was already evaluated.
    for j in range(int(i / file_threshhold) + 1):
        idx = []
        for i in range(0, len(labels)):
            if labels[i] == j:
                idx.append(i)
        filesj = [items[i] for i in idx]  # all the files in a cluster
        # Forming similar-time links (both directions per pair).
        for f1 in filesj:
            for f2 in filesj:
                if f1['id'] != f2['id']:
                    f1mtime = f1['modificationTimesinceEpoc']
                    f2mtime = f2['modificationTimesinceEpoc']
                    print('time' + '^' + f1['id'] + '^' + f1['name'] + '^' +
                          f1mtime + '^' + f2['id'] + '^' + f2['name'] + '^' + f2mtime)
def installFromFile(file):
    """Install every plugin section of a local icpk package file.

    Copies *file* into the plugin store as installer.ini, resolves each
    section's dependencies (from the downloaded index or the installed
    list), downloads each plugin to the plugins/ or themes/ directory,
    records it in installed.ini and verifies it against the package's
    sha256 hash.

    Returns "error" for an invalid plugin type, None otherwise.
    """
    #Copy the file to an ini file so configparser doesn't get mad
    copyfile(file, config["paths"]["userPath"] + "/.pluginstore/installer.ini")
    #Read index
    try:
        index = configparser.ConfigParser()
        index.read(config["paths"]["userPath"] + "/.pluginstore/index.ini")
    #Index not found, suggest running pm.update()
    except Exception:
        print("Could not find package list, maybe run pm.update()")
        return
    #Load local package file
    icpk = configparser.ConfigParser()
    icpk.read(config["paths"]["userPath"] + "/.pluginstore/installer.ini")
    #Default "depends" to "none" for every section that does not declare it.
    #BUGFIX: the old code did this once using the name `plugin` before the
    #loop below defined it, so the bare except itself raised NameError.
    for plugin in icpk.sections():
        if not icpk.has_option(plugin, "depends"):
            icpk[plugin]["depends"] = "none"
    #Check if installed list exists, if so, load it
    if os.path.exists(config["paths"]["userPath"] +
                      "/.pluginstore/installed.ini"):
        installed = configparser.ConfigParser()
        installed.read(config["paths"]["userPath"] +
                       "/.pluginstore/installed.ini")
    #If not, create it
    else:
        with open(config["paths"]["userPath"] + "/.pluginstore/installed.ini",
                  "w+") as installedFile:
            installedFile.close()
        installed = configparser.ConfigParser()
        installed.read(config["paths"]["userPath"] +
                       "/.pluginstore/installed.ini")
    #Iterate through all plugins in the file
    for plugin in icpk.sections():
        print("Installing dependencies...")
        #Split dependencies into a list
        dependencies = icpk[plugin]["depends"].split(",")
        for dependency in dependencies:
            #Install dependency from index if available
            if index.has_section(dependency):
                install(dependency)
            #Dependency already installed, do nothing
            elif installed.has_section(dependency):
                print("Dependancy already satisfied")
            #Dependency could not be found, abort
            elif dependency != "none":
                print("Dependency unsatisfyable: " + dependency)
                return
            else:
                pass
        print("Installing " + plugin + "...")
        try:
            #Resolve the install location from the plugin type
            if icpk[plugin]["type"] == "plugins":
                location = config["paths"]["userPath"] + "/plugins/"
            elif icpk[plugin]["type"] == "themes":
                location = config["paths"]["userPath"] + "/themes/"
            else:
                print("Error installing plugin: Invalid type")
                return "error"
            download(icpk[plugin]["download"],
                     location + "/" + icpk[plugin]["filename"],
                     pbarEnable=True)
            #Record the plugin as installed from an icpk file
            installed[plugin] = icpk[plugin]
            installed[plugin]["source"] = "icpk"
            print("Verifying...")
            #Compare the downloaded file's sha256 with the package's hash,
            #then persist the verified flag either way.
            if not hs.fileChecksum(location + "/" + icpk[plugin]["filename"],
                                   "sha256") == icpk[plugin]["hash"]:
                installed[plugin]["verified"] = "false"
            else:
                installed[plugin]["verified"] = "true"
            with open(config["paths"]["userPath"] +
                      "/.pluginstore/installed.ini", "w+") as f:
                installed.write(f)
        except Exception as e:
            print("Unable to download " + plugin + ": " + str(e))
def install(plugin, prompt=False):
    """Install, update, or repair a single plugin from the downloaded index.

    Branches on the plugin's current state in installed.ini:
      1. installed and verified  -> update if the index has a newer lastUpdate
      2. installed but unverified -> redownload the damaged package
      3. not installed            -> fresh install
      4. unknown name             -> report and bail out

    Each path checks the calculator-version requirement, installs
    dependencies (index entries, already-installed entries, or "pypi:"
    packages via pip; recursing into install() for index entries),
    downloads the file, and verifies it against the index sha256 hash,
    persisting the result in installed.ini.

    plugin -- section name in the index
    prompt -- when True, ask for confirmation before acting
    Returns "error" for an invalid plugin type, False when the user
    declines a version mismatch, otherwise None.
    """
    #Load index, if available
    try:
        index = configparser.ConfigParser()
        index.read(config["paths"]["userPath"] + "/.pluginstore/index.ini")
    #If not, suggest running pm.update()
    except:
        print("Could not find package list, maybe run pm.update()")
    #Load installed list if available
    if os.path.exists(config["paths"]["userPath"] +
                      "/.pluginstore/installed.ini"):
        installed = configparser.ConfigParser()
        installed.read(config["paths"]["userPath"] +
                       "/.pluginstore/installed.ini")
    #If not, create it
    else:
        with open(config["paths"]["userPath"] + "/.pluginstore/installed.ini",
                  "w+") as installedFile:
            installedFile.close()
        installed = configparser.ConfigParser()
        installed.read(config["paths"]["userPath"] +
                       "/.pluginstore/installed.ini")
    #Set verified to none if it is not set in the installed list
    try:
        verified = installed[plugin]["verified"]
    except:
        verified = "none"
    #Set dependencies to none if it is not set in the index entry
    try:
        dependencies = index[plugin]["depends"]
    except:
        index[plugin]["depends"] = "none"
    #Branch 1: plugin is already installed and not marked damaged
    if installed.has_section(plugin) and not verified == "false":
        #Newer version available, update
        if float(index[plugin]["lastUpdate"]) > float(
                installed[plugin]["lastUpdate"]):
            if prompt == True:
                if input(plugin + " has an update available. Update it? [y/N] "
                         ).lower() != "y":
                    return
            print("Updating " + plugin + "...")
            # Calculator version testing for plugin compatibilities
            #Parse calcversion to find the operator and version
            calcversion = index[plugin]["calcversion"]
            #Get current calculator version
            with open(config["paths"]["systemPath"] + "/version.txt") as f:
                currentversion = f.read().strip()
            #check to see if the current version of the calculator satisfys
            #plugin required version (pkg_resources Requirement containment)
            if not currentversion in Requirement.parse("iicalc" + calcversion):
                if input(
                        "The plugin " + plugin + " is meant for version " +
                        calcversion + " but you\'re using version " +
                        currentversion +
                        " of the calculator so it may misbehave. Download anyway? [Y/n] "
                ).lower() == "n":
                    return False
            try:
                print("Installing dependencies...")
                dependencies = index[plugin]["depends"].split(",")
                #Iterate through dependencies, flushing installed.ini to disk
                #and re-reading it around each one so the recursive install()
                #below sees (and this parser picks up) the latest state
                for dependency in dependencies:
                    with open(
                            config["paths"]["userPath"] +
                            "/.pluginstore/installed.ini", "w+") as f:
                        installed.write(f)
                    installed.read(config["paths"]["userPath"] +
                                   "/.pluginstore/installed.ini")
                    #Install from index if available
                    if index.has_section(dependency):
                        with open(
                                config["paths"]["userPath"] +
                                "/.pluginstore/installed.ini", "w+") as f:
                            installed.write(f)
                        install(dependency)
                        installed.read(config["paths"]["userPath"] +
                                       "/.pluginstore/installed.ini")
                    #Dependency already installed, do nothing
                    elif installed.has_section(dependency):
                        print("Dependancy already satisfied")
                    #"pypi:" prefix delegates to pip in the current interpreter
                    elif dependency.startswith("pypi:"):
                        try:
                            subprocess.check_call([
                                sys.executable, "-m", "pip", "install", "-q",
                                dependency[5:]
                            ])
                        except:
                            print("Dependency unsatisfyable: " + dependency)
                            return
                    #Dependency could not be found, abort
                    elif dependency != "none":
                        print("Dependency unsatisfyable: " + dependency)
                        return
                    else:
                        pass
                installed.read(config["paths"]["userPath"] +
                               "/.pluginstore/installed.ini")
                #Resolve the install location from the plugin type
                if index[plugin]["type"] == "plugins":
                    location = config["paths"]["userPath"] + "/plugins/"
                elif index[plugin]["type"] == "themes":
                    location = config["paths"]["userPath"] + "/themes/"
                else:
                    print("Error installing plugin: Invalid type")
                    return "error"
                download(index[plugin]["download"],
                         location + "/" + index[plugin]["filename"],
                         pbarEnable=True)
                #Record the plugin as installed from the index
                installed[plugin] = index[plugin]
                installed[plugin]["source"] = "index"
                with open(
                        config["paths"]["userPath"] +
                        "/.pluginstore/installed.ini", "w+") as f:
                    installed.write(f)
            except Exception as e:
                print("Could not download file: " + str(e))
                pass
            #Verify plugin against hash in index
            #NOTE(review): if the download failed, `location` may be unbound
            #here and this raises NameError — confirm intended behavior.
            print("Verifying...")
            if not hs.fileChecksum(location + "/" + index[plugin]["filename"],
                                   "sha256") == index[plugin]["hash"]:
                print(
                    "Package verification failed, the package should be reinstalled."
                )
                installed[plugin]["verified"] = "false"
                with open(
                        config["paths"]["userPath"] +
                        "/.pluginstore/installed.ini", "w+") as f:
                    installed.write(f)
            else:
                print("Package verification passed")
                installed[plugin]["verified"] = "true"
                with open(
                        config["paths"]["userPath"] +
                        "/.pluginstore/installed.ini", "w+") as f:
                    installed.write(f)
        #No updates available, nothing to do
        else:
            print(plugin + " is already installed and has no update available")
    #Branch 2: plugin has failed verification, reinstall it
    elif verified != "true" and installed.has_section(plugin):
        if prompt == True:
            if input(
                    plugin +
                    " is damaged and should be reinstalled. Install it? [y/N] "
            ).lower() != "y":
                return
        print("Redownloading damaged package " + plugin + "...")
        # Calculator version testing for plugin compatibilities
        #Parse calcversion to find the operator and version
        calcversion = index[plugin]["calcversion"]
        #Get current calculator version
        with open(config["paths"]["systemPath"] + "/version.txt") as f:
            currentversion = f.read().strip()
        #check to see if the current version of the calculator satisfys
        #plugin required version
        if not currentversion in Requirement.parse("iicalc" + calcversion):
            if input(
                    "The plugin " + plugin + " is meant for version " +
                    calcversion + " but you\'re using version " +
                    currentversion +
                    " of the calculator so it may misbehave. Download anyway? [Y/n] "
            ).lower() == "n":
                return False
        try:
            print("Installing dependencies...")
            dependencies = index[plugin]["depends"].split(",")
            #Iterate through dependencies
            for dependency in dependencies:
                #Install from index if available
                if index.has_section(dependency):
                    with open(
                            config["paths"]["userPath"] +
                            "/.pluginstore/installed.ini", "w+") as f:
                        installed.write(f)
                    install(dependency)
                    installed.read(config["paths"]["userPath"] +
                                   "/.pluginstore/installed.ini")
                #Dependency already installed, do nothing
                elif installed.has_section(dependency):
                    print("Dependancy already satisfied")
                #"pypi:" prefix delegates to pip
                elif dependency.startswith("pypi:"):
                    try:
                        subprocess.check_call([
                            sys.executable, "-m", "pip", "install", "-q",
                            dependency[5:]
                        ])
                    except:
                        #NOTE(review): double negative in this message
                        #("not unsatisfyable") — likely meant "unsatisfyable"
                        print("Dependency not unsatisfyable: " + dependency)
                        return
                #Dependency could not be found, abort
                elif dependency != "none":
                    print("Dependency unsatisfyable: " + dependency)
                    return
                else:
                    pass
            #Download plugin
            if index[plugin]["type"] == "plugins":
                location = config["paths"]["userPath"] + "/plugins/"
            elif index[plugin]["type"] == "themes":
                location = config["paths"]["userPath"] + "/themes/"
            else:
                print("Error installing plugin: Invalid type")
                return "error"
            download(index[plugin]["download"],
                     location + "/" + index[plugin]["filename"],
                     pbarEnable=True)
            #Mark plugin as installed from index
            installed[plugin] = index[plugin]
            installed[plugin]["source"] = "index"
            with open(
                    config["paths"]["userPath"] +
                    "/.pluginstore/installed.ini", "w+") as f:
                installed.write(f)
        except Exception as e:
            print("Could not download file: " + str(e))
            pass
        #Verify plugin against hash stored in index
        print("Verifying...")
        if not hs.fileChecksum(location + "/" + index[plugin]["filename"],
                               "sha256") == index[plugin]["hash"]:
            print("File hash: " +
                  hs.fileChecksum(location + "/" + index[plugin]["filename"],
                                  "sha256"))
            print("Expected: " + index[plugin]["hash"])
            print(
                "Package verification failed, the plugin should be reinstalled."
            )
            installed[plugin]["verified"] = "false"
            with open(
                    config["paths"]["userPath"] +
                    "/.pluginstore/installed.ini", "w+") as f:
                installed.write(f)
        else:
            print("Package verification passed")
            installed[plugin]["verified"] = "true"
    #Branch 3: plugin is not installed, install it
    elif index.has_section(plugin):
        if prompt == True:
            if input("Install " + plugin + "? [y/N] ").lower() != "y":
                return
        print("Downloading " + plugin + "...")
        # Calculator version testing for plugin compatibilities
        #Parse calcversion to find the operator and version
        calcversion = index[plugin]["calcversion"]
        #Get current calculator version
        with open(config["paths"]["systemPath"] + "/version.txt") as f:
            currentversion = f.read().strip()
        #check to see if the current version of the calculator satisfys
        #plugin required version
        if not currentversion in Requirement.parse("iicalc" + calcversion):
            if input(
                    "The plugin " + plugin + " is meant for version " +
                    calcversion + " but you\'re using version " +
                    currentversion +
                    " of the calculator so it may misbehave. Download anyway? [Y/n] "
            ).lower() == "n":
                return False
        try:
            print("Installing dependencies...")
            dependencies = index[plugin]["depends"].split(",")
            #Iterate through dependencies
            for dependency in dependencies:
                #Install dependency from index if available
                if index.has_section(dependency):
                    with open(
                            config["paths"]["userPath"] +
                            "/.pluginstore/installed.ini", "w+") as f:
                        installed.write(f)
                    install(dependency)
                    installed.read(config["paths"]["userPath"] +
                                   "/.pluginstore/installed.ini")
                #Dependency already installed, do nothing
                elif installed.has_section(dependency):
                    print("Dependancy already satisfied")
                #"pypi:" prefix delegates to pip
                elif dependency.startswith("pypi:"):
                    try:
                        subprocess.check_call([
                            sys.executable, "-m", "pip", "install", "-q",
                            dependency[5:]
                        ])
                    except:
                        #NOTE(review): double negative in this message too
                        print("Dependency not unsatisfyable: " + dependency)
                        return
                #Dependency not satisfyable, abort
                elif dependency != "none":
                    print("Dependency unsatisfyable: " + dependency)
                    return
                else:
                    pass
            #Download plugin
            if index[plugin]["type"] == "plugins":
                location = config["paths"]["userPath"] + "/plugins/"
            elif index[plugin]["type"] == "themes":
                location = config["paths"]["userPath"] + "/themes/"
            else:
                print("Error installing plugin: Invalid type")
                return "error"
            download(index[plugin]["download"],
                     location + "/" + index[plugin]["filename"],
                     pbarEnable=True)
            #Mark plugin as installed
            installed[plugin] = index[plugin]
            installed[plugin]["source"] = "index"
            with open(
                    config["paths"]["userPath"] +
                    "/.pluginstore/installed.ini", "w+") as f:
                installed.write(f)
        except Exception as e:
            print("Could not download file: " + str(e))
            pass
        #Check plugin against hash in index
        print("Verifying...")
        if not hs.fileChecksum(location + "/" + index[plugin]["filename"],
                               "sha256") == index[plugin]["hash"]:
            print("File hash: " +
                  hs.fileChecksum(location + "/" + index[plugin]["filename"],
                                  "sha256"))
            print("Expected: " + index[plugin]["hash"])
            #NOTE(review): message says "Packages" — likely meant "Package"
            print(
                "Packages verification failed, the plugin should be reinstalled."
            )
            installed[plugin]["verified"] = "false"
            with open(
                    config["paths"]["userPath"] +
                    "/.pluginstore/installed.ini", "w+") as f:
                installed.write(f)
        else:
            print("Package verification passed")
            installed[plugin]["verified"] = "true"
            with open(
                    config["paths"]["userPath"] +
                    "/.pluginstore/installed.ini", "w+") as f:
                installed.write(f)
    #Branch 4: plugin could not be found anywhere
    else:
        print("Packages " + plugin + " not found.")
    #Final flush of the installed list to disk
    with open(config["paths"]["userPath"] + "/.pluginstore/installed.ini",
              "w+") as f:
        installed.write(f)
'WORKER') # or '0x748e091bf16048cb5103E0E10F9D5a8b7fBDd860' keyfile = os.environ.get('KEYFILE') or '/app/priv_key' if not taskid: raise ValueError('Missing TASKID') if not w3.isAddress(worker): raise ValueError('Invalid worker address') worker = w3.toChecksumAddress(worker) print("Genrating result and consensus.iexec in /iexec ...") shutil.copy("/app/result.txt", "/iexec/result.txt") shutil.copy("/app/result.txt", "/iexec/consensus.iexec") with open(keyfile) as f: private = f.read().splitlines()[0] digest = "0x" + fileChecksum("/iexec/consensus.iexec", "sha256") # hexstring hash = w3.soliditySha3(['bytes32', 'bytes32'], [taskid, digest]) seal = w3.soliditySha3(['address', 'bytes32', 'bytes32'], [worker, taskid, digest]) contrib = w3.soliditySha3(['bytes32', 'bytes32'], [hash, seal]) message = w3.soliditySha3(['bytes'], [b'\x19Ethereum Signed Message:\n32' + contrib]) signature = w3.eth.account.signHash(message, private) with open("/iexec/enclaveSig.iexec", 'w') as f: json.dump( { 'digest': digest, 'hash': w3.toHex(hash), 'seal': w3.toHex(seal), 'sign': {
#For checking if the index file uploaded is correct.
#Usage: python <script> <uploadedFile>
#Compares the sha256 of the uploaded file with the reference hash stored
#in finalHash.txt and prints the verdict.
import sys

from py_essentials import hashing as hs

args = sys.argv
uploadedFile = args[1]
correctIndexFile = "/opt/lampp/htdocs/examples/server/Hash/finalHash.txt"

#sha256 of the uploaded file
hashUploadedFile = hs.fileChecksum(uploadedFile, "sha256")

#Check if the hash is the same as the string in the file.
#Use a context manager so the file is closed even on error, and strip
#surrounding whitespace so a trailing newline in finalHash.txt does not
#make the comparison fail spuriously.
with open(correctIndexFile, "r") as hashFile:
    finalHash = hashFile.read().strip()

if hashUploadedFile.strip() == finalHash:
    print("The uploaded index File is correct")
else:
    print("The uploaded index File is incorrect")
def update(silent=False, theme=theme):
    """Refresh the plugin index and report available updates and damage.

    Downloads the remote index into .pluginstore/index.ini (with a loading
    spinner on a background thread, stopped via the module-level `done`
    flag), then walks every installed plugin: counts available updates,
    re-verifies files against the index sha256 hashes, and flags missing or
    damaged plugins in installed.ini.

    silent -- suppress all per-plugin and summary output
    theme  -- theme dict used for styled error output (default bound at
              definition time from the module-level `theme`)
    Returns "error" for an invalid plugin type, otherwise None.
    """
    global done
    done = False
    #Spinner runs until `done` is set back to True
    t = threading.Thread(target=loading, args=("Updating package list...", ))
    t.start()
    #Create the plugin store directory on first run
    if not os.path.isdir(config["paths"]["userPath"] + "/.pluginstore"):
        os.makedirs(config["paths"]["userPath"] + "/.pluginstore")
    try:
        download("https://turbowafflz.azurewebsites.net/iicalc/plugins/index",
                 config["paths"]["userPath"] + "/.pluginstore/index.ini",
                 pbarEnable=True)
    except KeyboardInterrupt:
        done = True
        return
    #Detect the Azure placeholder page instead of a real index
    with open(config["paths"]["userPath"] + "/.pluginstore/index.ini") as f:
        tmp = f.readlines()
    if "The service is unavailable." in tmp:
        print(
            theme["styles"]["error"] +
            "\nThe index is currently unavailable due to a temporary Microsoft Azure outage. Please try again later."
        )
        done = True
        return
    #Load index, if available
    try:
        index = configparser.ConfigParser()
        index.read(config["paths"]["userPath"] + "/.pluginstore/index.ini")
    #If not, suggest running pm.update()
    except:
        print("\nCould not find package list, maybe run pm.update()")
    #Load installed list if available
    if os.path.exists(config["paths"]["userPath"] +
                      "/.pluginstore/installed.ini"):
        installed = configparser.ConfigParser()
        installed.read(config["paths"]["userPath"] +
                       "/.pluginstore/installed.ini")
    #If not, create it
    else:
        with open(config["paths"]["userPath"] + "/.pluginstore/installed.ini",
                  "w+") as installedFile:
            installedFile.close()
        installed = configparser.ConfigParser()
        installed.read(config["paths"]["userPath"] +
                       "/.pluginstore/installed.ini")
    updates = 0
    reinstall = 0
    #Iterate through installed plugins
    #NOTE(review): index[plugin] raises KeyError if an installed plugin was
    #removed from the remote index — confirm whether that can happen.
    for plugin in installed.sections():
        if index[plugin]["type"] == "plugins":
            location = config["paths"]["userPath"] + "/plugins/"
        elif index[plugin]["type"] == "themes":
            location = config["paths"]["userPath"] + "/themes/"
        else:
            print("Error installing plugin: Invalid type")
            return "error"
        #Make sure plugin file exists
        if os.path.exists(location + "/" + installed[plugin]["filename"]):
            #Check if an update is available
            if float(index[plugin]["lastUpdate"]) > float(
                    installed[plugin]["lastUpdate"]) and not silent:
                updates = updates + 1
                print("\nAn update is available for " + plugin)
            #Verify plugin against the hash stored in the index
            elif not hs.fileChecksum(
                    location + "/" + index[plugin]["filename"],
                    "sha256") == index[plugin]["hash"]:
                installed[plugin]["verified"] = "false"
                with open(
                        config["paths"]["userPath"] +
                        "/.pluginstore/installed.ini", "w+") as f:
                    installed.write(f)
            #Warn if plugin is marked as damaged
            if installed[plugin]["verified"] != "true" and not silent:
                reinstall = reinstall + 1
                print("\n" + plugin + " is damaged and should be reinstalled")
        #Plugin missing, mark as unverified if not deliberately disabled
        elif not os.path.exists(location + "/" +
                                installed[plugin]["filename"] + ".disabled"):
            print("File not found: " + location + "/" +
                  installed[plugin]["filename"])
            print("\n" + plugin + " is missing and needs to be reinstalled")
            reinstall = reinstall + 1
            installed[plugin]["verified"] = "false"
            with open(
                    config["paths"]["userPath"] +
                    "/.pluginstore/installed.ini", "w+") as f:
                installed.write(f)
    #Print summary for user
    if not silent:
        print("")
        print("\n" + str(updates) + " packages have updates available")
        print(
            str(reinstall) + " packages are damaged and should be reinstalled")
    #BUGFIX: parenthesize the `or` — the old expression parsed as
    #`updates > 0 or (reinstall > 0 and not silent)`, so the upgrade hint
    #leaked through even in silent mode whenever updates were found.
    if (updates > 0 or reinstall > 0) and not silent:
        print("Run 'pm.upgrade()' to apply these changes")
    done = True