def check07thModServerConnection():
    """Verify that the 07th-mod server (which hosts the mod downloads) is reachable.

    This check only exists because Japan is blocked from the 07th-mod website;
    general internet connectivity is already proven by the Github download the
    installer performs during startup. Remove this function if the block is lifted.

    Raises:
        Exception: with a user-facing troubleshooting message when the server
            cannot be reached; the underlying error is appended to the message.
    """
    try:
        _ = common.downloadFile("https://07th-mod.com/", is_text=True)
    except Exception as error:
        traceback.print_exc()
        raise Exception(
            """------------------------------------------------------------------------
Error: Couldn't reach 07th Mod Server! (https://07th-mod.com/)
If you have a working internet connection, most likely you are in Japan, which is blocked from the 07th-mod server.
Please visit https://www.07th-mod.com - if you get a "406 Not Acceptable" error, you are definitely blocked.
As a workaround, VPNs like ProtonVPN could be used...
Otherwise, please check the following:
 - You have a working internet connection
 - Check if our website is down (https://07th-mod.com/)
 - Check our Wiki for more solutions: https://07th-mod.com/wiki/Installer/faq/#connection-troubleshooting
------------------------------------------------------------------------
Dev Error Message: {}
""".format(error))
def main(argv):
    ''' Main Entry Point '''
    args = parseArguments(argv)
    logging.basicConfig(level=args.log_level)
    logging.info("%s v%s", __appname__, __version__)
    logging.info(args)

    samples = readSamples(args.bamsheet)
    # We don't need the last sample in the list, so drop it.
    samples.pop()

    working_dir = generate_working_dir(args.working_dir)
    logging.info("Working in %s", working_dir)

    # Pull each per-sample meltedResults file from the S3 cache folder.
    localMeltedResultsFiles = []
    total = len(samples)
    for position, sample in enumerate(samples, start=1):
        remotePath = "%s/%s.meltedResults.txt" % (args.s3_cache_folder, sample['name'])
        localPath = "%s/%s.meltedResults.txt" % (working_dir, sample['name'])
        logging.info("[%d/%d] Downloading %s -> %s", position, total, remotePath, localPath)
        downloadFile(remotePath, localPath)
        localMeltedResultsFiles.append(localPath)

    # Concatenate: first file is copied whole (header included); every
    # subsequent file contributes its data rows only.
    meltedResultsFile = "meltedResults.txt"
    with open(meltedResultsFile, 'w') as outfile:
        for position, fname in enumerate(localMeltedResultsFiles, start=1):
            logging.info("[%d/%d] Merging %s -> %s", position,
                         len(localMeltedResultsFiles), fname, meltedResultsFile)
            with open(fname) as infile:
                if position == 1:
                    outfile.write(infile.read())
                else:
                    next(infile)  # skip this file's header row
                    for line in infile:
                        outfile.write(line)

    s3_path = "%s/meltedResults.txt" % args.s3_cache_folder
    logging.info("Uploading %s -> %s", meltedResultsFile, s3_path)
    uploadFile(meltedResultsFile, s3_path)
    logging.info("Done.")
def downloadGameImage(self, gameId, gameName, path):
    """Locate or download thumbnail and fanart images for a GOG game.

    Previously cached images (matched by sanitized game name) are reused.
    Otherwise thegamesdb.net is queried for artwork; failing that, the
    game's goggame-<id>.ico is extracted, and finally the bundled GOG
    default images are used.

    :param gameId: GOG game identifier, used to locate goggame-<id>.ico
    :param gameName: display name of the game; sanitized for filenames
    :param path: a path inside the game's install dir, used to find the icon
    :return: two-element list [thumb image path, fanart image path]
    """
    valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
    safeGameName = ''.join(c for c in gameName if c in valid_chars)
    files = []
    # Timestamp suffix keeps freshly downloaded images distinct per run.
    timestamp = int(time.mktime(datetime.datetime.now().timetuple()))
    thumbFilePath = os.path.join(self.defaultThumbPath,
                                 'thumb_{0}_{1}.jpg'.format(safeGameName, timestamp))
    fanartPath = os.path.join(self.defaultFanartPath,
                              'fanart_{0}_{1}.jpg'.format(safeGameName, timestamp))
    isFanartFound = False
    isThumbImageFound = False
    # Reuse any image cached by a previous run.
    for fileName in glob(os.path.join(self.defaultThumbPath,
                                      'thumb_{0}*.*'.format(safeGameName))):
        thumbFilePath = fileName
        isThumbImageFound = True
    for fileName in glob(os.path.join(self.defaultFanartPath,
                                      'fanart_{0}*.*'.format(safeGameName))):
        fanartPath = fileName
        isFanartFound = True
    try:
        if (not isFanartFound):
            gameUrlQuery = {'exactname': safeGameName.encode('utf8')}
            requestURL = 'http://thegamesdb.net/api/GetGame.php?' + urllib.urlencode(gameUrlQuery)
            opener = urllib2.build_opener()
            opener.addheaders = [('User-agent', 'Mozilla/5.0')]
            xmlResponse = opener.open(requestURL, timeout=30)
            strXml = xmlResponse.read()
            root = ET.fromstring(strXml)
            baseImgUrl = root[0].text
            fanartNodes = root.findall('.//Game/Images/fanart/original')
            # BUG FIX: the original tested `fanartNodes.count > 0`, which
            # compares the bound list method (always truthy), not the number
            # of matches. Guard on the actual element count instead.
            if len(fanartNodes) > 0:
                fanartImg = fanartNodes[0].text
                thumbNodes = root.findall('.//Game/Images/fanart/thumb')
                thumbImg = thumbNodes[0].text
                downloadFile(baseImgUrl + fanartImg, fanartPath)
                downloadFile(baseImgUrl + thumbImg, thumbFilePath)
                isFanartFound = True
                isThumbImageFound = True
    except:
        # Best-effort scraping: any network/parse failure falls through to
        # icon extraction / bundled defaults below.
        pass
    if (not isThumbImageFound):
        thumbFilePath = os.path.join(self.defaultThumbPath,
                                     'thumb_{0}_{1}.png'.format(safeGameName, timestamp))
        if (not self.extractIcon(os.path.join(os.path.dirname(os.path.abspath(path)),
                                              string.replace("goggame-" + gameId + ".ico", "-GO", "")),
                                 thumbFilePath)):
            thumbFilePath = os.path.join(__addonpath__, 'resources', 'skins', 'Default',
                                         'media', 'alienware', 'gog.png')
    if (not isFanartFound):
        fanartPath = os.path.join(__addonpath__, 'resources', 'skins', 'Default',
                                  'media', 'alienware', 'fanart_gog.jpg')
    files.append(thumbFilePath)
    files.append(fanartPath)
    return files
def downloadGameImage(self, gameId, gameName, path):
    """Locate or download thumbnail and fanart images for an Origin game.

    Previously cached images (matched by sanitized game name) are reused.
    Otherwise thegamesdb.net is queried (preferring fanart, falling back to
    screenshots); failing that, the icon at *path* is extracted, and finally
    the bundled Origin default images are used.

    :param gameId: Origin game identifier
    :param gameName: display name of the game; sanitized for filenames
    :param path: path passed to extractIcon for the thumbnail fallback
    :return: two-element list [thumb image path, fanart image path]
    """
    valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
    safeGameName = ''.join(c for c in gameName if c in valid_chars)
    # thegamesdb lookups use the name without any ' Demo' suffix.
    findGameName = string.replace(safeGameName, ' Demo', '')
    files = []
    # Timestamp suffix keeps freshly downloaded images distinct per run.
    timestamp = int(time.mktime(datetime.datetime.now().timetuple()))
    thumbFilePath = os.path.join(self.defaultThumbPath,
                                 'thumb_{0}_{1}.jpg'.format(safeGameName, timestamp))
    fanartPath = os.path.join(self.defaultFanartPath,
                              'fanart_{0}_{1}.jpg'.format(safeGameName, timestamp))
    isFanartFound = False
    isThumbImageFound = False
    # Reuse any image cached by a previous run.
    for fileName in glob(os.path.join(self.defaultThumbPath,
                                      'thumb_{0}*.*'.format(safeGameName))):
        thumbFilePath = fileName
        isThumbImageFound = True
    for fileName in glob(os.path.join(self.defaultFanartPath,
                                      'fanart_{0}*.*'.format(safeGameName))):
        fanartPath = fileName
        isFanartFound = True
    try:
        if (not isFanartFound):
            gameUrlQuery = {'exactname': findGameName.encode('utf8')}
            requestURL = 'http://thegamesdb.net/api/GetGame.php?' + urllib.urlencode(gameUrlQuery)
            opener = urllib2.build_opener()
            opener.addheaders = [('User-agent', 'Mozilla/5.0')]
            xmlResponse = opener.open(requestURL, timeout=30)
            strXml = xmlResponse.read()
            root = ET.fromstring(strXml)
            baseImgUrl = root[0].text
            fanartNodes = root.findall('.//Game/Images/fanart/original')
            # BUG FIX: the original tested `fanartNodes.count > 0`, which
            # compares the bound list method (always truthy), not the number
            # of matches. Guard on the actual element count instead.
            if len(fanartNodes) > 0:
                fanartImg = fanartNodes[0].text
                thumbNodes = root.findall('.//Game/Images/fanart/thumb')
            else:
                screenshotNodes = root.findall('.//Game/Images/screenshot/original')
                # BUG FIX: the original read fanartNodes[0] here, but this
                # branch runs exactly when the fanart list is empty, which
                # raised IndexError (silently swallowed by the bare except).
                # Use the screenshot node that was just fetched.
                fanartImg = screenshotNodes[0].text
                thumbNodes = root.findall('.//Game/Images/screenshot/thumb')
            thumbImg = thumbNodes[0].text
            downloadFile(baseImgUrl + fanartImg, fanartPath)
            downloadFile(baseImgUrl + thumbImg, thumbFilePath)
            isFanartFound = True
            isThumbImageFound = True
    except:
        # Best-effort scraping: any network/parse failure falls through to
        # icon extraction / bundled defaults below.
        pass
    if (not isThumbImageFound):
        thumbFilePath = os.path.join(self.defaultThumbPath,
                                     'thumb_{0}_{1}.png'.format(safeGameName, timestamp))
        if (not self.extractIcon(path, thumbFilePath)):
            thumbFilePath = os.path.join(__addonpath__, 'resources', 'skins', 'Default',
                                         'media', 'alienware', 'origin.png')
    if (not isFanartFound):
        fanartPath = os.path.join(__addonpath__, 'resources', 'skins', 'Default',
                                  'media', 'alienware', 'fanart_origin.jpg')
    files.append(thumbFilePath)
    files.append(fanartPath)
    return files
def downloadGameImage(self, gameId, gameName):
    """Resolve thumbnail and fanart images for a Steam game.

    Order of preference: images already cached on disk, fanart scraped from
    thegamesdb.net, artwork from the Steam storefront appdetails API, and
    finally the bundled Steam default images.

    :param gameId: Steam app id, used for the storefront API lookup
    :param gameName: display name of the game; sanitized for filenames
    :return: two-element list [thumb image path, fanart image path]
    """
    log('Downloading Steam game images for {0} -> {1}'.format(gameId, gameName))
    files = []
    valid_chars = "-_.() %s%s" % (string.ascii_letters, string.digits)
    safeGameName = ''.join(c for c in gameName if c in valid_chars)
    thumbFilePath = os.path.join(
        self.defaultThumbPath,
        'thumb_{0}_{1}.jpg'.format(safeGameName,
                                   int((time.mktime(datetime.datetime.now().timetuple())))))
    fanartPath = os.path.join(
        self.defaultFanartPath,
        'fanart_{0}_{1}.jpg'.format(safeGameName,
                                    int((time.mktime(datetime.datetime.now().timetuple())))))
    haveThumb = False
    haveFanart = False
    # Reuse any image cached by a previous run.
    for cached in glob(os.path.join(self.defaultThumbPath,
                                    'thumb_{0}*.*'.format(safeGameName))):
        thumbFilePath = cached
        haveThumb = True
    for cached in glob(os.path.join(self.defaultFanartPath,
                                    'fanart_{0}*.*'.format(safeGameName))):
        fanartPath = cached
        haveFanart = True
    # First attempt: fanart from thegamesdb.net (best-effort).
    try:
        if (not haveFanart):
            query = {'exactname': gameName.encode('utf8')}
            requestURL = 'http://thegamesdb.net/api/GetGame.php?' + urllib.urlencode(query)
            opener = urllib2.build_opener()
            opener.addheaders = [('User-agent', 'Mozilla/5.0')]
            xmlResponse = opener.open(requestURL, timeout=30)
            root = ET.fromstring(xmlResponse.read())
            baseImgUrl = root[0].text
            fanartNodes = root.findall('.//Game/Images/fanart/original')
            downloadFile(baseImgUrl + fanartNodes[0].text, fanartPath)
            haveFanart = True
    except:
        pass
    # Second attempt: the Steam storefront API (best-effort).
    try:
        if (not haveThumb or not haveFanart):
            response = urllib.urlopen(
                'http://store.steampowered.com/api/appdetails?appids={0}'.format(gameId))
            gameData = json.loads(response.read())
            response.close()
            if (not haveThumb):
                downloadFile(gameData[gameId]['data']['header_image'], thumbFilePath)
                haveThumb = True
            if (not haveFanart):
                downloadFile(gameData[gameId]['data']['screenshots'][0]['path_full'], fanartPath)
                haveFanart = True
    except:
        pass
    # Fall back to the bundled defaults for anything still missing.
    if (not haveThumb):
        thumbFilePath = os.path.join(__addonpath__, 'resources', 'skins', 'Default',
                                     'media', 'alienware', 'steam.png')
    if (not haveFanart):
        fanartPath = os.path.join(__addonpath__, 'resources', 'skins', 'Default',
                                  'media', 'alienware', 'fanart_steam.png')
    files.append(thumbFilePath)
    files.append(fanartPath)
    return files
def downloadWaterResultData(root, state):
    """Download the water-result file for *state* into the *root* directory.

    The URL and local destination are derived from the state via
    generateUrl/generateFilePath; returns whatever downloadFile returns.
    """
    return downloadFile(generateUrl(state), generateFilePath(root, state))
def matchSamplesToPatients(patients, meltedResults_s3path, meltedResults_localpath, threshold, resultsFile):
    '''
    Match samples to patient, find swaps, etc.

    For each patient that has more than 1 sample
      i. For each sample
        1) Read <sample>.meltedResults.txt
        2) Check best matching sample (BMS)
        3) If BMS is from the same patient, all is okay, else report BMS.

    :param patients: list of patient -> samples
    :param meltedResults_s3path: S3 Path to meltedResults files (mutually exclusive with meltedResults_localpath)
    :param meltedResults_localpath: Local path to meltedResults files (m.e with meltedResults_s3path)
    :param threshold: Minimum # of SNPs compared against sample
    :param resultsFile: Output results files
    '''
    totalSamples = 0
    missingSamples = []
    badSamples = []
    mismatchedSamples = []
    matchedSamples = 0
    with open(resultsFile, 'w') as output:
        output.write(
            "Sample1\tSample2\tn_S1\tn_S2\tSNPs_Compared\tFraction_Match\n")
        for patient in patients:
            samples = patients[patient]
            logger.debug("Patient: %s, Samples: %s", patient, samples)
            # Single-sample patients have nothing to cross-check against.
            if len(samples) > 1:
                for sample in samples:
                    # Get the sample meltedResults (from S3 or local disk).
                    if meltedResults_s3path:
                        meltedResultsFile = "%s.meltedResults.txt" % sample
                        downloadFile(
                            "%s/%s.meltedResults.txt" % (meltedResults_s3path, sample),
                            meltedResultsFile)
                        if not os.path.exists(meltedResultsFile):
                            logger.error("Unable to download %s", meltedResultsFile)
                            missingSamples.append(sample)
                            continue
                    else:
                        meltedResultsFile = "%s/%s.meltedResults.txt" % (
                            meltedResults_localpath, sample)
                        if not os.path.exists(meltedResultsFile):
                            logger.error("%s not found", meltedResultsFile)
                            missingSamples.append(sample)
                            continue
                    cm = read_csv(meltedResultsFile, sep="\t")
                    if meltedResults_s3path:
                        # Downloaded copy is temporary; remove after reading.
                        os.remove(meltedResultsFile)
                    # Keep only comparisons with enough SNPs to be meaningful.
                    cm = cm[cm.SNPs_Compared >= threshold]
                    snpsCompared = len(cm)
                    if snpsCompared == 0:
                        # logger.warn is a deprecated alias; use warning.
                        logger.warning("%s -> Not enough SNPs", sample)
                        missingSamples.append(sample)
                    else:
                        totalSamples += 1
                        samples_to_print = 1
                        cm = cm.sort_values(by="Fraction_Match", ascending=False)
                        topMatch = cm.iloc[0]['Sample2']
                        fractionMatch = cm.iloc[0]['Fraction_Match']
                        if fractionMatch < 0.7:
                            badSamples.append(sample)
                        else:
                            if topMatch in samples:
                                matchedSamples += 1
                                logger.debug("%s -> %s", sample, topMatch)
                            else:
                                logger.warning("%s -> %s:%s", sample, patient, topMatch)
                                mismatchedSamples.append(sample)
                                # Report the top matches for a suspected swap.
                                samples_to_print = 5
                        # BUG FIX: clamp to the number of available rows so a
                        # frame with fewer than samples_to_print comparisons
                        # cannot raise IndexError via cm.iloc[i].
                        for i in range(0, min(samples_to_print, len(cm))):
                            output.write(
                                "%s\t%s\t%d\t%d\t%d\t%0.4f\n" %
                                (cm.iloc[i]['Sample1'], cm.iloc[i]['Sample2'],
                                 cm.iloc[i]['n_S1'], cm.iloc[i]['n_S2'],
                                 cm.iloc[i]['SNPs_Compared'],
                                 cm.iloc[i]['Fraction_Match']))
    logger.info("Samples compared: %d", totalSamples)
    logger.info("Matched samples: %d", matchedSamples)
    logger.info("Mismatched samples: %d", len(mismatchedSamples))
    logger.info("Missing samples: %d (no meltedResults files or low coverage)",
                len(missingSamples))
    logger.info("No matching samples: %d (Fraction_Match < 0.7)", len(badSamples))
def main(argv):
    ''' Main Entry Point '''
    args = parseArguments(argv)
    logging.basicConfig(level=args.log_level)
    logger.info("%s v%s", __appname__, __version__)
    logger.info(args)

    working_dir = generate_working_dir(args.working_dir)
    logger.info("Working in %s", working_dir)
    sampleName = args.sample

    # Download and read the bamsheet from the s3 cache directory.
    downloadFile("%s/bamsheet.txt" % args.s3_cache_folder,
                 "%s/bamsheet.txt" % working_dir)
    samples = readSamples("%s/bamsheet.txt" % working_dir)

    # Locate this sample's position in the bamsheet.
    sample_index = next(
        (i for i, s in enumerate(samples) if s['name'] == sampleName), -1)
    if sample_index == -1:
        logger.error("Unable to locate sample in bamsheet")
        return -1

    # Build the reference sample's variant list from its VCF.
    s3_vcfFile = "%s/%s.vcf" % (args.s3_cache_folder, sampleName)
    vcfFile = "%s/%s.vcf" % (working_dir, sampleName)
    downloadFile(s3_vcfFile, vcfFile)
    if not os.path.exists(vcfFile):
        logger.error("Failed to download %s", s3_vcfFile)
        return -1
    tsvFile = "%s/%s.tsv" % (working_dir, sampleName)
    VCFtoTSV(vcfFile, tsvFile)
    if not os.path.exists(tsvFile):
        logger.error("Failed to convert VCF to TSV, %s -> %s", vcfFile, tsvFile)
        return -1
    var_list = get_tsv_variants(tsvFile, args.dp_threshold)
    os.remove(tsvFile)
    os.remove(vcfFile)

    # Compare the reference sample against every sample after it.
    meltedResultsFile = "%s/%s.meltedResults.txt" % (working_dir, sampleName)
    with open(meltedResultsFile, "w") as fout:
        fout.write(
            "Sample1\tSample2\tn_S1\tn_S2\tSNPs_Compared\tFraction_Match\tJudgement\n")
        for idx in range(sample_index + 1, len(samples)):
            sample = samples[idx]
            logger.info("[%d/%d] Comparing %s - %s", idx + 1, len(samples),
                        sampleName, sample['name'])
            s3_vcfFile = "%s/%s.vcf" % (args.s3_cache_folder, sample['name'])
            vcfFile = "%s/%s.vcf" % (working_dir, sample['name'])
            downloadFile(s3_vcfFile, vcfFile)
            if not os.path.exists(vcfFile):
                logger.error("Error downloading %s. Aborting.", s3_vcfFile)
                return -1
            tsvFile = vcfFile.replace('.vcf', '.tsv')
            VCFtoTSV(vcfFile, tsvFile)
            var_list2 = get_tsv_variants(tsvFile, args.dp_threshold)
            os.remove(vcfFile)
            os.remove(tsvFile)
            # Compare the genotypes of the two samples.
            intersection = getIntersectingVariants(var_list, var_list2)
            results = compareGenotypes(var_list, var_list2, intersection)
            row = [sampleName, sample['name'],
                   '%d' % len(var_list), '%d' % len(var_list2),
                   '%d' % results['total_compared'],
                   '%.4f' % results['frac_common'],
                   results['short_judgement']]
            fout.write('\t'.join(row) + "\n")

    logger.info("Uploading %s to %s", meltedResultsFile, args.s3_cache_folder)
    uploadFile(meltedResultsFile,
               "%s/%s" % (args.s3_cache_folder, os.path.basename(meltedResultsFile)))

    logger.info('Cleaning up working dir')
    os.remove(meltedResultsFile)
    delete_working_dir(working_dir)
    logger.info('Completed')