def placeArtifact(artifact_file, repo_dirname, org, module, revision,
                  status="release", meta={}, deps=[], supplied_ivy_file=None,
                  scala=None, override=None, override_dir_only=False):
    if scala is not None:
        module = module + "_%s" % scala
    jarmodule = module
    if override is not None:
        org, module = override
        if not override_dir_only:
            jarmodule = module
    repo_dir = realpath(repo_dirname)
    artifact_dir = pathjoin(*[repo_dir] + [org] + [module, revision])
    ivyxml_path = pathjoin(artifact_dir, "ivy.xml")
    artifact_repo_path = pathjoin(artifact_dir, "%s-%s.jar" % (jarmodule, revision))
    if not pathexists(artifact_dir):
        makedirs(artifact_dir)
    if supplied_ivy_file is None:
        # Generate an ivy.xml from the supplied metadata and dependencies
        with open(ivyxml_path, "w") as ivyxml_file:
            writeIvyXml(org, module, revision, status, ivyxml_file, meta=meta, deps=deps)
    else:
        copyfile(supplied_ivy_file, ivyxml_path)
    # Replace any existing artifact link with one pointing at the new jar
    if pathexists(artifact_repo_path):
        rmfile(artifact_repo_path)
    symlink(artifact_file, artifact_repo_path)
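# A minimal sketch of the import aliases placeArtifact() appears to rely on.
# The exact aliasing is an assumption (only the function body is shown above),
# but it mirrors the explicit "from os import remove as rmfile" used elsewhere
# in this collection. writeIvyXml() is a project-specific helper and is not
# reproduced here.
from os import makedirs, symlink
from os import remove as rmfile
from os.path import exists as pathexists
from os.path import join as pathjoin
from os.path import realpath
from shutil import copyfile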
def gen_svn_info(self, cmd):
    """Run 'svn <cmd>' and store its 'key: value' output in self.svn_info[cmd]."""
    tmp_file = '.tmp_file'
    subprocess.call('svn ' + cmd + ' > ' + tmp_file, shell=True)
    with open(tmp_file, 'r') as f:
        svndata = f.read().split('\n')
    if not self.svn_info:
        self.svn_info = {}
    self.svn_info[cmd] = {}
    for line in svndata:
        line = line.strip()
        if ':' in line:
            key = line[:line.index(':')].replace(' ', '_')
            value = line[line.index(':') + 2:].replace(' ', '_')
            self.svn_info[cmd][key] = value
    os.remove(tmp_file)
    if not self.svn_info[cmd]:
        del self.svn_info[cmd]
        print('"svn {}" did not yield any information'.format(cmd))
def checkfile(filename):
    # Verify that 'filename' can be created: open it in exclusive-create mode,
    # then remove the empty file again. OSError is raised if the file already
    # exists or the path is invalid.
    try:
        with open(filename, 'x'):
            pass
        rmfile(filename)
        return True
    except OSError:
        return False
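# A hedged usage sketch of checkfile(): main() further below probes its output
# paths this way before doing any work. 'out.bin' is an illustrative name, not
# taken from the original code.
if not checkfile('out.bin'):
    print('The output file path is invalid')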
def _update_ig_config(resource_dicts, ig_resource_dict, add=True, rm_file=False):
    """
    Helper for update_ig_config
    """
    # Collect resource ids from the input set of resources
    resource_set = {
        f'{r["content"]["resourceType"]}/{r["content"]["id"]}'
        for r in resource_dicts
    }
    # Reformat IG resource list into a dict so it's easier to update
    ig_resource = ig_resource_dict["content"]
    resources_dict = {}
    for r in ig_resource["definition"]["resource"]:
        # Only include resources from IG config that have corresponding files.
        # Old IG entries will be discarded
        key = r["reference"]["reference"]
        if key in resource_set:
            resources_dict[key] = r
        else:
            logger.info(f"🔥 Removing old entry {key} from IG")

    for rd in resource_dicts:
        if rd["resource_type"] == "ImplementationGuide":
            continue
        # Create the config entry
        entry = _create_resource_config(rd, ig_resource.get("publisher"))
        # Add/remove configuration entries
        if add:
            resources_dict[entry["reference"]["reference"]] = entry
        else:
            del resources_dict[entry["reference"]["reference"]]
            if rm_file:
                os.remove(rd["filepath"])
                logger.info(f'🗑 Deleted resource file {rd["filepath"]}')
        logger.info(f'☑️ Added IG configuration for {rd["filename"]}')

    # Format resource dict back to original list
    ig_resource["definition"]["resource"] = [
        resources_dict[k] for k in resources_dict
    ]
    write_json(ig_resource_dict["content"], ig_resource_dict["filepath"], indent=2)
def remove(f):
    # Delete a file or a directory tree; return True on success, False otherwise.
    try:
        if exists(f):
            if isdir(f):
                rmdir(f, ignore_errors=True)
            else:
                rmfile(f)
            return True
    except Exception as err:
        log('remove', f, err)
    return False
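# The bare names used above (exists, isdir, rmdir, rmfile) are almost certainly
# import aliases rather than builtins. A minimal sketch of the likely aliasing,
# based on the explicit "from os import remove as rmfile" in get_options() below;
# the rmtree-as-rmdir pairing is an assumption inferred from the ignore_errors
# argument, which shutil.rmtree accepts and os.rmdir does not.
from os import remove as rmfile
from os.path import exists, isdir
from shutil import rmtree as rmdir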
def get_options():
    '''
    Subroutine to capture the optional inputs.
    '''
    from os import remove as rmfile
    from optparse import OptionParser, OptionGroup
    # Note: `exists` (os.path) and `UTCDateTime` (obspy) are expected to be
    # imported at module level by the surrounding script.

    parser = OptionParser(
        usage="Usage: %prog [options] <station list filename>",
        description="Program to query a datacenter using the obspy fdsn client. "
        "All stations returned by this query are saved into both a csv format file "
        "as well as a stationdb (stdb.StDbElement) pickled dictionary. The input "
        "argument, <station file name>, is the prefix for the output files, which are by "
        "default <station file name>.csv and <station file name>.pkl.")

    # General Settings
    parser.add_option(
        "-D", "--debug",
        action="store_true", dest="debug", default=False,
        help="Debug mode. After the client query is complete (and successful), instead of "
             "parsing the inventory, it is instead pickled to <station file name>_query_debug.pkl, "
             "which can be loaded in ipython to examine manually.")
    parser.add_option(
        "--long-keys",
        action="store_true", dest="lkey", default=False,
        help="Specify key format. Default is Net.Stn. Long keys are Net.Stn.Chn.")
    parser.add_option(
        "-a", "--ascii",
        action="store_false", dest="use_binary", default=True,
        help="Specify to write ascii Pickle files instead of binary. Ascii files are larger, "
             "but more likely to be system independent.")

    # Server Settings
    ServerGroup = OptionGroup(
        parser, title="Server Settings",
        description="Settings associated with which datacenter to log into.")
    ServerGroup.add_option(
        "--Server",
        action="store", type=str, dest="Server", default="IRIS",
        help="Specify the server to connect to. Options include: BGR, ETH, GEONET, GFZ, INGV, "
             "IPGP, IRIS, KOERI, LMU, NCEDC, NEIP, NERIES, ODC, ORFEUS, RESIF, SCEDC, USGS, USP. "
             "[Default IRIS]")
    ServerGroup.add_option(
        "--User-Auth",
        action="store", type=str, dest="UserAuth", default="",
        help="Enter your IRIS Authentication Username and Password "
             "(--User-Auth='username:authpassword') to access and download restricted data. "
             "[Default no user and password]")

    # Selection Settings
    SelectGroup = OptionGroup(
        parser, title="Channel Priority/Selection Settings",
        description="Settings associated with selecting the channels to retain.")
    SelectGroup.add_option(
        "--channel-rank",
        action="store", type=str, dest="chnrank", default="HH,BH,LH",
        help="If requesting more than one type of channel, specify a comma separated list of "
             "the first two letters of the desired components to retain. "
             "Default is HH > BH > LH : ['HH,BH,LH']")

    # Channel Settings
    ChannelGroup = OptionGroup(
        parser, title="Station-Channel Settings",
        description="Options to narrow down the specific channels based on network, station, etc.")
    ChannelGroup.add_option(
        "-N", "--networks",
        action="store", type=str, dest="nets", default="*",
        help="Specify a comma separated list of network codes to search for. [Default *]")
    ChannelGroup.add_option(
        "-S", "--stations",
        action="store", type=str, dest="stns", default="*",
        help="Specify a comma separated list of station names. If you want wildcards, "
             "enclose in quotes. [Default *]")
    ChannelGroup.add_option(
        "-L", "--locations",
        action="store", type=str, dest="locs", default="*",
        help="Specify a comma separated list of location codes. If you want wildcards, "
             "enclose in quotes. [Default *]")
    ChannelGroup.add_option(
        "-C", "--channels",
        action="store", type=str, dest="chns", default="HH*,BH*,LH*",
        help="Specify a comma separated, wildcarded list of channel names. "
             "[Default HH*,BH*,LH*]")

    # Geographic Settings
    BoxGroup = OptionGroup(
        parser, title="Geographic Lat/Lon Box Search",
        description="Define the coordinates of a lat/lon box in which to select stations. "
                    "If filled out, takes precedence over values for Radius Search (below).")
    BoxGroup.add_option(
        "--minlat", "--min-latitude",
        action="store", type="float", dest="minlat", default=None,
        help="Specify minimum latitude to search (must specify all of minlat, maxlat, minlon, maxlon).")
    BoxGroup.add_option(
        "--maxlat", "--max-latitude",
        action="store", type="float", dest="maxlat", default=None,
        help="Specify maximum latitude to search (must specify all of minlat, maxlat, minlon, maxlon).")
    BoxGroup.add_option(
        "--minlon", "--min-longitude",
        action="store", type="float", dest="minlon", default=None,
        help="Specify minimum longitude to search (must specify all of minlat, maxlat, minlon, maxlon).")
    BoxGroup.add_option(
        "--maxlon", "--max-longitude",
        action="store", type="float", dest="maxlon", default=None,
        help="Specify maximum longitude to search (must specify all of minlat, maxlat, minlon, maxlon).")

    RadGroup = OptionGroup(
        parser, title="Geographic Radius Search",
        description="Central point and min/max radius search settings. "
                    "Box Search settings take precedence over radius search.")
    RadGroup.add_option(
        "--lat", "--latitude",
        action="store", type="float", dest="lat", default=None,
        help="Specify a latitude (if any of --lon, --min-radius and --max-radius are empty, "
             "an error will prompt).")
    RadGroup.add_option(
        "--lon", "--longitude",
        action="store", type="float", dest="lon", default=None,
        help="Specify a longitude (if any of --lat, --min-radius and --max-radius are empty, "
             "an error will prompt).")
    RadGroup.add_option(
        "--minr", "--min-radius",
        action="store", type="float", dest="minr", default=0.,
        help="Specify a minimum search radius (in degrees) around the point defined by --lat and "
             "--lon (if any of --lat, --lon and --max-radius are empty, an error will prompt). "
             "[Default 0. degrees]")
    RadGroup.add_option(
        "--maxr", "--max-radius",
        action="store", type="float", dest="maxr", default=None,
        help="Specify a maximum search radius (in degrees) around the point defined by --lat and "
             "--lon (if any of --lat, --lon and --min-radius are empty, an error will prompt).")

    # Temporal Settings
    FixedRangeGroup = OptionGroup(
        parser, title="Fixed Time Range Settings",
        description="Find all stations operating within the start and end date/time. "
                    "If either is filled out, it takes precedence over the Non-Specific "
                    "time range search (below).")
    FixedRangeGroup.add_option(
        "--start", "--start-date",
        action="store", type=None, dest="stdate", default=None,
        help="Specify the Start Date/Time in a UTCDateTime compatible string "
             "(i.e., 2010-01-15 15:15:45.2). [Default Blank]")
    FixedRangeGroup.add_option(
        "--end", "--end-date",
        action="store", type=None, dest="enddate", default=None,
        help="Specify the End Date/Time in a UTCDateTime compatible string "
             "(i.e., 2010-01-15 15:15:45.2). [Default Blank]")

    VarRangeGroup = OptionGroup(
        parser, title="Non-Specific Time Range Settings",
        description="Time settings with less specificity. Ensure that those you specify do not "
                    "interfere with each other. If the Fixed Range values above are set, they "
                    "take precedence over these values.")
    VarRangeGroup.add_option(
        "--start-before",
        action="store", type=None, dest="stbefore", default=None,
        help="Specify a Date/Time which stations must start before (must be a UTCDateTime "
             "compatible string, i.e. 2010-01-15 15:15:45.2). [Default empty]")
    VarRangeGroup.add_option(
        "--start-after",
        action="store", type=None, dest="stafter", default=None,
        help="Specify a Date/Time which stations must start after (must be a UTCDateTime "
             "compatible string, i.e. 2010-01-15 15:15:45.2). [Default empty]")
    VarRangeGroup.add_option(
        "--end-before",
        action="store", type=None, dest="endbefore", default=None,
        help="Specify a Date/Time which stations must end before (must be a UTCDateTime "
             "compatible string, i.e. 2010-01-15 15:15:45.2). [Default empty]")
    VarRangeGroup.add_option(
        "--end-after",
        action="store", type=None, dest="endafter", default=None,
        help="Specify a Date/Time which stations must end after (must be a UTCDateTime "
             "compatible string, i.e. 2010-01-15 15:15:45.2). [Default empty]")

    # Add All Groups
    parser.add_option_group(ServerGroup)
    parser.add_option_group(SelectGroup)
    parser.add_option_group(ChannelGroup)
    parser.add_option_group(BoxGroup)
    parser.add_option_group(RadGroup)
    parser.add_option_group(FixedRangeGroup)
    parser.add_option_group(VarRangeGroup)

    # Run Parser
    (opts, args) = parser.parse_args()

    # Check output file name
    if len(args) != 1:
        parser.error("Need station database file")
    outpref = args[0]

    # Remove any pre-existing output files
    if not opts.debug:
        if exists(outpref + ".csv") and exists(outpref + ".pkl"):
            print("Warning: Output Files " + outpref + ".csv and " + outpref +
                  ".pkl already exist. These will be overwritten...")
            rmfile(outpref + ".pkl")
            rmfile(outpref + ".csv")
        elif exists(outpref + ".csv"):
            print("Warning: Output File " + outpref + ".csv already exists. It will be overwritten...")
            rmfile(outpref + ".csv")
        elif exists(outpref + ".pkl"):
            print("Warning: Output File " + outpref + ".pkl already exists. It will be overwritten...")
            rmfile(outpref + ".pkl")

    # Parse User Authentication
    if not len(opts.UserAuth) == 0:
        tt = opts.UserAuth.split(':')
        if not len(tt) == 2:
            parser.error("Error: Incorrect Username and Password Strings for User Authentication")
        else:
            opts.UserAuth = tt
    else:
        opts.UserAuth = []

    # Parse Channel Rank to List
    opts.chnrank = opts.chnrank.split(',')

    # Check Geographic Settings
    if opts.minlat is not None or opts.maxlat is not None or \
            opts.minlon is not None or opts.maxlon is not None:
        if opts.minlat is None or opts.maxlat is None or \
                opts.minlon is None or opts.maxlon is None:
            # Not all values set
            opts.minlat = None
            opts.maxlat = None
            opts.minlon = None
            opts.maxlon = None
            print("Warning: one of minlat, maxlat, minlon, maxlon was not set. "
                  "All values reset to None.")
            print("")
        else:
            # Ensure proper min/max ordering
            tempminlat = min([opts.minlat, opts.maxlat])
            tempmaxlat = max([opts.minlat, opts.maxlat])
            opts.minlat = tempminlat
            opts.maxlat = tempmaxlat
            tempminlon = min([opts.minlon, opts.maxlon])
            tempmaxlon = max([opts.minlon, opts.maxlon])
            opts.minlon = tempminlon
            opts.maxlon = tempmaxlon
            print("Performing Geographic Box Search:")
            print("   LL: {0:9.4f}, {1:8.4f}".format(opts.minlat, opts.minlon))
            print("   UR: {0:9.4f}, {1:8.4f}".format(opts.maxlat, opts.maxlon))
            print(" ")
            # Set all radius-search parameters to None
            opts.minr = None
            opts.maxr = None
            opts.lat = None
            opts.lon = None
    elif opts.lat is not None or opts.lon is not None or \
            opts.minr is not None or opts.maxr is not None:
        if opts.lat is None or opts.lon is None or opts.minr is None or opts.maxr is None:
            opts.lat = None
            opts.lon = None
            opts.minr = None
            opts.maxr = None
            print("Warning: one of lat, lon, minr, maxr was not set. All values reset to None.")
            print(" ")
        else:
            # Ensure minr/maxr ordering (use temporaries so the second
            # comparison still sees the original values)
            tempminr = min([opts.minr, opts.maxr])
            tempmaxr = max([opts.minr, opts.maxr])
            opts.minr = tempminr
            opts.maxr = tempmaxr
            print("Performing Geographic Radius Search:")
            print("   Centre Point: {0:9.4f}, {1:8.4f}".format(opts.lon, opts.lat))
            print("   Radius: {0:6.2f} to {1:6.2f} degrees".format(opts.minr, opts.maxr))
            print(" ")

    # Check Time Settings
    if opts.stdate is not None or opts.enddate is not None:
        # Use the Fixed Range; ignore the non-specific settings
        opts.stbefore = None
        opts.stafter = None
        opts.endbefore = None
        opts.endafter = None
        # Fix End Date
        if opts.enddate is None:
            opts.enddate = UTCDateTime("2599-12-31 23:59:59.9")
        else:
            opts.enddate = UTCDateTime(opts.enddate)
        # Assign stdate as UTCDateTime
        if opts.stdate is not None:
            opts.stdate = UTCDateTime(opts.stdate)
        print("Performing Fixed Time Range Search:")
        print("   Start: " + opts.stdate.strftime("%Y-%m-%d %H:%M:%S"))
        print("   End:   " + opts.enddate.strftime("%Y-%m-%d %H:%M:%S"))
        print(" ")
    else:
        # No Fixed Range set. Are other values set?
        if opts.stbefore is not None or opts.stafter is not None or \
                opts.endbefore is not None or opts.endafter is not None:
            print("Performing Non-Specific Time Search:")
            if opts.stbefore is not None:
                opts.stbefore = UTCDateTime(opts.stbefore)
                print("   Start Before: " + opts.stbefore.strftime("%Y-%m-%d %H:%M:%S"))
            if opts.stafter is not None:
                opts.stafter = UTCDateTime(opts.stafter)
                print("   Start After: " + opts.stafter.strftime("%Y-%m-%d %H:%M:%S"))
            if opts.endbefore is not None:
                opts.endbefore = UTCDateTime(opts.endbefore)
                print("   End Before: " + opts.endbefore.strftime("%Y-%m-%d %H:%M:%S"))
            if opts.endafter is not None:
                opts.endafter = UTCDateTime(opts.endafter)
                print("   End After: " + opts.endafter.strftime("%Y-%m-%d %H:%M:%S"))
            print(" ")
        else:
            print("Warning: No Time Range Specified for Search")
            print(" ")

    # Station/Channel Search Parameters
    print("Station/Channel Search Parameters:")
    print("   Network:      {0:s}".format(opts.nets))
    print("   Stations:     {0:s}".format(opts.stns))
    print("   Locations:    {0:s}".format(opts.locs))
    print("   Channels:     {0:s}".format(opts.chns))
    print("   Channel Rank: {0:s}".format(",".join(opts.chnrank)))
    print(" ")
    if opts.debug:
        print("Output Files: {0:s}_query_debug.pkl and {0:s}_query_debug.csv".format(outpref))
    else:
        print("Output Files: {0:s}.csv and {0:s}.pkl".format(outpref))
    print(" ")

    # Return Values
    return opts, outpref
# Fragment: the enclosing "for i in ..." loop and its "try:" are in the
# surrounding script; only the download/append/cleanup body and the HTTP
# error handler are shown here.
        # Try to download file
        urlretrieve(
            "https://virusshare.com/hashes/VirusShare_{0}.md5".format(str(i).zfill(5)),
            "newhashes.txt")
        print("Download success!")
        print("Appending...")
        with open("newhashes.txt", "r") as ff:
            for line in ff:
                if not line.startswith("#"):
                    dbInsert(line)
        print("DB load Complete!")
        print("Removing temporary file...")
        # Remove temporary file
        rmfile("newhashes.txt")
        print("Operation for file " + str(i).zfill(5) + " complete.", end="\n")
    # Catch HTTP response code
    except HTTPError as e:
        # Check if code is 404
        if e.code == 404:
            print("File " + str(i).zfill(5) + " not found.")
            print("Stopping...")
            break
        # Otherwise report the error
        else:
            print("An error has occurred: Received URL response code " + str(e.code))
def package(self, output=None, keep_box_file=False):
    if not output:
        output = "buildserver.box"
        logger.debug('no output name set for packaging \'%s\', '
                     'defaulting to %s', self.srvname, output)
    storagePool = self.conn.storagePoolLookupByName('default')
    domainInfo = self.conn.lookupByName(self.srvname).info()
    if storagePool:
        if isfile('metadata.json'):
            rmfile('metadata.json')
        if isfile('Vagrantfile'):
            rmfile('Vagrantfile')
        if isfile('box.img'):
            rmfile('box.img')

        logger.debug('preparing box.img for box %s', output)
        vol = storagePool.storageVolLookupByName(self.srvname + '.img')
        imagepath = vol.path()
        # TODO use a libvirt storage pool to ensure the img file is readable
        _check_call(['sudo', '/bin/chmod', '-R', 'a+rX', '/var/lib/libvirt/images'])
        shutil.copy2(imagepath, 'box.img')
        _check_call(['qemu-img', 'rebase', '-p', '-b', '', 'box.img'])
        img_info_raw = _check_output(['qemu-img', 'info', '--output=json', 'box.img'])
        img_info = json.loads(img_info_raw.decode('utf-8'))
        metadata = {
            "provider": "libvirt",
            "format": img_info['format'],
            "virtual_size": math.ceil(img_info['virtual-size'] / (1024. ** 3)),
        }

        logger.debug('preparing metadata.json for box %s', output)
        with open('metadata.json', 'w') as fp:
            fp.write(json.dumps(metadata))

        logger.debug('preparing Vagrantfile for box %s', output)
        vagrantfile = textwrap.dedent("""\
            Vagrant.configure("2") do |config|
              config.ssh.username = "******"
              config.ssh.password = "******"

              config.vm.provider :libvirt do |libvirt|
                libvirt.driver = "kvm"
                libvirt.host = ""
                libvirt.connect_via_ssh = false
                libvirt.storage_pool_name = "default"
                libvirt.cpus = {cpus}
                libvirt.memory = {memory}
              end
            end""".format_map({
                'memory': str(int(domainInfo[1] / 1024)),
                'cpus': str(domainInfo[3])
            }))
        with open('Vagrantfile', 'w') as fp:
            fp.write(vagrantfile)

        with tarfile.open(output, 'w:gz') as tar:
            logger.debug('adding metadata.json to box %s ...', output)
            tar.add('metadata.json')
            logger.debug('adding Vagrantfile to box %s ...', output)
            tar.add('Vagrantfile')
            logger.debug('adding box.img to box %s ...', output)
            tar.add('box.img')

        if not keep_box_file:
            logger.debug('box packaging complete, removing temporary files.')
            rmfile('metadata.json')
            rmfile('Vagrantfile')
            rmfile('box.img')
    else:
        logger.warn('could not connect to storage-pool \'default\', '
                    'skipping packaging buildserver box')
print("\n-----Server Connected----\n") logging.info("Server connected") path = "/home/aysenazezgi/Desktop/myServer/" buf = sys.stdin.readline().strip() retval = -1 control1 = True control2 = True if len(buf): for f in listdir(path): logging.info("Checking if there is%s", buf) fullpath = path + f if isfile(fullpath): if f == buf: rmfile(fullpath) retval = '\nFile is Deleted\n' print retval control1 = False break #elif exists(fullpath): #print ("\n***Exists but this is not a file ***\n") #control2=False #break if retval != 0: retval = -1 if control1 and control2: logging.error("File does not exists or some error occured") print '\n***File does not exists or some error occured***\n' print("\n-----Server Disconnected------\n")
# Checks
in_path = check_in_path(args.i)
out_path = create_out_path(args.o)
resize_value = check_resize_value(int(args.r))
resize_v = int(args.r)
if args.w:
    watermark = Image(filename=args.w)

# Run
logging.info("Resizing all files from {0}. Output to {1}".format(in_path, out_path))
for f in listdir(in_path):
    filename = join(in_path, f)
    if isfile(filename):
        logging.info(filename)
        try:
            with Image(filename=filename) as img:
                logging.debug("Size before {0}.".format(img.size))
                if args.r:
                    img.transform(resize=resize_value)
                if args.w:
                    watermark_position(img, watermark)
                img.save(filename=out_path + "/" + f)
                logging.debug("Size after {0}.".format(img.size))
            rmfile(in_path + "/" + f)
        except Exception as e:
            logging.error("Image processing error: %s", e)

""" END """
def token_check(servicetoken):
    # (def line reconstructed from the call sites below)
    print('Checking code validity...', end=' ')
    check_r = common.jsonfetch(common.ws('/check/') + servicetoken, use_cache=False)
    if 'Error' in check_r:
        print(check_r['Message'])
        return False
    else:
        username = check_r['Username']
        print('Valid for ' + username)
        return True

try:
    f = open('token', 'r')
    servicetoken = f.read()
    f.close()
    if not token_check(servicetoken):
        rmfile('token')
        print('Please launch this script again to set your Token')
        servicetoken = None
except IOError:
    servicetoken = auth_prompt()
    if token_check(servicetoken):
        f = open('token', 'w')
        f.write(servicetoken)
        f.close()
    else:
        servicetoken = None

if servicetoken is None:
    exit()

print('\nSelect an operation:\n'
      '1 : Scrobble tracks.\n'
      '2 : Remove previous scrobbled tracks (revert).\n'
      '3 : Unregister service token.')
def main(args=None):
    # TODO: Add encrypt with Key
    parser = argparse.ArgumentParser()
    parser.add_argument("--decrypt", help="The CodeCryptor action",
                        action="store_true", default=False)
    parser.add_argument("--encrypt", help="The CodeCryptor action",
                        action="store_true", default=False)
    parser.add_argument("-v", "--verbose", help="Increase output verbosity",
                        action="store_true", default=False)
    parser.add_argument("-i", "--input", help="Input file", type=str, default=None)
    parser.add_argument("-o", "--output", help="Output file", type=str, default=None)
    parser.add_argument("--password", help="Password for encryption", type=str, default=None)
    parser.add_argument("--key", help="Key file for decryption", type=str, default=None)
    args = parser.parse_args()

    verbose = args.verbose
    _encrypt = args.encrypt
    _decrypt = args.decrypt
    input_file = args.input
    out_file = args.output
    password = args.password
    key_file = args.key
    use_key = False

    assert (_decrypt or _encrypt), "No action selected: use --decrypt or --encrypt!"
    assert not (_decrypt and _encrypt), "Cannot select --decrypt and --encrypt at the same time!"
    assert not (password and key_file), "Cannot select --password and --key at the same time!"
    assert input_file is not None, "No input file selected"

    if _encrypt:
        assert out_file is not None, "No output file selected"
        if password is None:
            print("No password chosen for encryption")
            sys.exit(1)
        if key_file is None and password is not None:
            print("Using password to encrypt...")
            key_file = "key.key"
        elif key_file is not None and password is None:
            print("Using key file to encrypt")
            password = None
            use_key = True
            key_file = path.abspath(key_file)
        else:
            key_file = path.abspath(key_file)
        if path.exists(key_file):
            print("The output key file already exists, careful not to overwrite it!")
            sys.exit(1)
        if not checkfile(key_file):
            print("The output key file path is invalid")
            sys.exit(1)
    elif _decrypt:
        assert key_file is not None, "No key file selected for decryption"
        key_file = path.abspath(key_file)

    input_file = path.abspath(input_file)
    out_file = path.abspath(out_file)
    if not path.isfile(input_file):
        print("The input file does not exist or is invalid")
        sys.exit(1)
    if path.exists(out_file):
        print("The output file already exists")
        sys.exit(1)
    if not checkfile(out_file):
        print("The output file path is invalid")
        sys.exit(1)

    time_start = 0
    if verbose:
        time_start = time()
    try:
        with open(input_file, 'rb') as in_file, \
                open(out_file, 'wb') as o_file, \
                open(key_file, 'wb' if _encrypt else 'rb') as keyf:
            if _encrypt:
                encrypt(in_file, password, o_file, keyf, use_key)
                print("DO NOT FORGET TO SAVE THE FILE {} IN ORDER TO "
                      "DECRYPT THE FILE LATER".format(key_file))
            elif _decrypt:
                decrypt(in_file, o_file, keyf)
            else:
                # Raising a plain string is invalid in Python 3; use an exception type
                raise ValueError("No action selected")
    except Exception as e:
        if verbose:
            print(str(e))
        print("An error occurred during the {} operation".format(
            "encrypt" if _encrypt else "decrypt"))
        rmfile(out_file)
        if _encrypt:
            rmfile(key_file)
        sys.exit(1)
    if verbose:
        time_end = time()
        print("Operation finished in {} seconds".format(time_end - time_start))
def install_mods():
    # download mods
    mods_dir_path = Path('mods')
    mods_dir_path.mkdir(exist_ok=True)
    local_info_mods = local_info['installed_mods']
    chdir(mods_dir_path.resolve())
    for mod_obj in pack_settings['mods']:
        simple_obj = True
        gdrive_type = False
        zip_type = False
        zip_obj = {}
        if isinstance(mod_obj, str):
            mod_url = mod_obj.strip()
        elif isinstance(mod_obj, dict):
            simple_obj = False
            if 'gdrive' in mod_obj:
                gdrive_type = True
                mod_name = mod_obj['name'].strip()
            elif 'zip' in mod_obj:
                zip_type = True
                zip_obj['name'] = mod_obj['zip_name']
                zip_obj['path'] = mod_obj['zip_path']
                mod_name = zip_obj['name'].strip()
            mod_url = mod_obj['url'].strip()
        if mod_url in local_info_mods:
            local_mod_name = local_info_mods[mod_url]
            mod_path = Path(local_mod_name)
            if mod_path.exists():
                print('Mod "{}" already downloaded.'.format(local_mod_name))
                continue
            else:
                del local_info_mods[mod_url]
        print('Downloading from "{}"'.format(mod_url))
        if gdrive_type:
            # if gdrive url
            gdrive.download_file_from_google_drive(mod_url, mod_name)
        else:
            # if regular url
            r = requests.get(mod_url)
            if simple_obj:
                mod_name = urllib.parse.unquote(str(r.url)).split('/')[-1]
            Path(mod_name).write_bytes(r.content)
        if zip_type:
            # if file zipped
            zip_ref = zipfile.ZipFile(zip_obj['name'], 'r')
            zip_ref.extract(zip_obj['path'])
            zip_ref.close()
            mod_name = mod_obj['name']
            copyfile(Path(zip_obj['path']).resolve(), Path().cwd().joinpath(mod_name))
            rmtree(Path(zip_obj['path']).parent)
            rmfile(Path(zip_obj['name']))
        print('Downloaded "{}"'.format(mod_name))
        local_info_mods[mod_url] = mod_name
        # to speed up debugging
        chdir('..')
        update_local_info()
        chdir('./mods/')

    to_remove_local_mods = []
    for mod_url_raw in local_info_mods:
        mod_url = mod_url_raw.strip()
        found = False
        for mod_obj in pack_settings['mods']:
            if isinstance(mod_obj, str):
                if mod_url_raw == mod_obj:
                    found = True
                    break
            elif isinstance(mod_obj, dict):
                if mod_url_raw == mod_obj['url']:
                    found = True
                    break
        if not found:
            to_remove_local_mods.append(mod_url_raw)
    for to_remove_mod in to_remove_local_mods:
        del local_info_mods[to_remove_mod]

    # install mods
    try:
        if MINECRAFT_MODS_PATH.exists():
            rmtree(MINECRAFT_MODS_PATH)
        MINECRAFT_MODS_PATH.mkdir()
        if MINECRAFT_CONFIG_PATH.exists():
            rmtree(MINECRAFT_CONFIG_PATH)
        copytree(LOCAL_VENDOR_CONFIG_PATH, MINECRAFT_CONFIG_PATH)
    except PermissionError:
        print('***!!!Close .minecraft folder and all .minecraft subdirectories and try again!!!***')
    for mod_url in local_info_mods:
        mod_name = local_info_mods[mod_url]
        mod_path = Path(mod_name)
        if mod_path.exists():
            copyfile(str(mod_path), str(MINECRAFT_MODS_PATH.joinpath(mod_name)))
        else:
            print('Mod at path "{}" does not exist.'.format(mod_path.resolve()))
            continue
    chdir('..')
    update_local_info()