def getHitsOnTheFly(seqid, seq):
    with nested.NestedTempDir(dir=workingDir, nesting=0) as tmpDir:
        queryFastaPath = os.path.join(tmpDir, 'query.faa')
        # add 'lcl|' to make ncbi blast happy.
        util.writeToFile('{0}\n{1}\n'.format('>lcl|' + seqid, seq), queryFastaPath)
        hitsDb = getBlastHits(queryFastaPath, genomeIndexPath, evalue,
                              workingDir=workingDir)
    return hitsDb.get(seqid)
def storeNewTicket(masterKey, ticket, bridge):
    """
    Store a new session ticket and the according master key for future use.

    This method is only called by clients.  The given data, `masterKey',
    `ticket' and `bridge', is YAMLed and stored in the global ticket
    dictionary.  If there already is a ticket for the given `bridge', it is
    overwritten.
    """

    assert len(masterKey) == const.MASTER_KEY_LENGTH
    assert len(ticket) == const.TICKET_LENGTH

    ticketFile = const.STATE_LOCATION + const.CLIENT_TICKET_FILE

    log.debug("Storing newly received ticket in `%s'." % ticketFile)

    # Add a new (key, ticket) tuple with the given bridge as hash key.
    tickets = dict()
    content = util.readFromFile(ticketFile)
    if (content is not None) and (len(content) > 0):
        tickets = yaml.safe_load(content)

    # We also store a timestamp so we later know if our ticket already expired.
    tickets[str(bridge)] = [int(time.time()), masterKey, ticket]
    util.writeToFile(yaml.dump(tickets), ticketFile)
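# A minimal read-back sketch (added for illustration; not part of the original
# module): storeNewTicket() writes plain YAML keyed by str(bridge), each value
# being [timestamp, masterKey, ticket].  The file path and bridge key below are
# hypothetical examples.
import time
import yaml

with open("/tmp/client-tickets.yaml") as ticketFile:  # hypothetical path
    tickets = yaml.safe_load(ticketFile.read())
timestamp, masterKey, ticket = tickets["203.0.113.5:443"]  # hypothetical bridge key
print("ticket age in seconds: %d" % (int(time.time()) - timestamp))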
def getDistanceForAlignedSeqPair(seqId, alignedSeq, hitSeqId, alignedHitSeq, workPath):
    # paranoid check: aligned and trimmed seqs need to be the same length.
    # if len(alignedSeq) != len(alignedHitSeq):
    #     raise Exception('getDistanceForAlignedSeqPairs: different lengths for seqs: '
    #                     + str(((seqId, alignedSeq), (hitSeqId, alignedHitSeq))))
    dataFileName = 'datafile.seq'
    treeFileName = 'treefile.seq'
    outFileName = 'outfile.seq'
    dataFilePath = os.path.join(workPath, dataFileName)
    treeFilePath = os.path.join(workPath, treeFileName)
    outFilePath = os.path.join(workPath, outFileName)

    # heading is number of seqs and length of each seq (which all need to be the same len).
    heading = '2 %s\n' % len(alignedSeq)
    pamlData = heading + '%s\n%s\n' % (seqId, alignedSeq) + '%s\n%s\n' % (hitSeqId, alignedHitSeq)
    # logging.debug('pamlData=%s' % pamlData)
    util.writeToFile(pamlData, dataFilePath)

    # workPath is simply your folder that will contain codeml (Yang 2000),
    # codeml.ctl (the codeml control file), and jones.dat (Jones et al., 1998).
    # write the codeml control file that will run codeml, then run codeml.
    try:
        with open(os.devnull, 'w') as devnull:
            subprocess.check_call(['codeml'], cwd=workPath, stdout=devnull)
        distance = pamlGetDistance(workPath)
        return distance
    finally:
        for filePath in [dataFilePath, treeFilePath, outFilePath]:
            if os.path.exists(filePath):
                os.remove(filePath)
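# The comment above mentions writing a codeml control file, but the snippet
# does not show it.  A sketch of what a minimal codeml.ctl for pairwise
# amino-acid distances under the JTT matrix could look like, based on the PAML
# documentation rather than this project's actual control file:
ctlData = '''\
seqfile = datafile.seq
outfile = outfile.seq
noisy = 0        * minimal screen output
verbose = 0
runmode = -2     * pairwise distance estimation (no tree file needed)
seqtype = 2      * amino acid sequences
model = 2        * empirical rate matrix, read from aaRatefile
aaRatefile = jones.dat
'''
util.writeToFile(ctlData, os.path.join(workPath, 'codeml.ctl'))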
def getRHashesMap(p, fInfos, rIdOption):
    postSetupFIds = p.getPostSetupFIds()
    util.writeToFile(list(postSetupFIds), util.postSetupFIdsFile, True)

    rHashesMap = {}
    rHashStrs = set([])
    for info in fInfos:
        fId = info.getFId()
        rId = info.getRId(rIdOption)
        # rHashesMap[fId] = rId
        rHashStrs.add(str(fId) + ":" + str(rId) + "\n")

    # write mappings to the file in the glob directory
    util.writeToFile(rHashStrs, util.globRHashesFile, True)

    # read all mappings from the file in the glob directory
    f = open(util.globRHashesFile, "r")
    mappings = f.readlines()
    for mapping in mappings:
        mapping = mapping.rstrip("\n")
        mappingParts = mapping.split(":")
        rHashesMap[mappingParts[0]] = mappingParts[1]
    f.close()
    return rHashesMap
def build():
    global elements, main_html
    class_id = {}
    elementy = sorted(elements, key=lambda element: element[2])
    htmlfinal = main_html
    # html and css
    for i in range(len(elementy)):
        e = elementy[i]
        element_type = e[0]
        x = e[1]
        y = e[2]
        w = e[3]
        h = e[4]
        id_ = e[5]
        randomColor = randint(0, len(colors) - 1)
        if id_ not in class_id:
            color = colors[randomColor]
            class_id[id_] = color
        else:
            color = class_id[id_]  # was class_id[_id], a NameError
        html = ('<div id="' + str(id_) + '" style="height:' + str(h) +
                'vh;width:' + str(w) + 'vw;position:absolute;top:' + str(y) +
                'vh;left:' + str(x) + 'vw;background-color:' + str(color) +
                '"></div>')
        htmlfinal = htmlfinal + html
    soup = bs(htmlfinal)
    prettyHTML = soup.prettify()
    util.writeToFile('./output.html', prettyHTML)
def __init__(self, model, client, symbol, balance):
    self.model = model
    self.symbol = symbol
    self.client = client
    self.balance = balance

    log = "NEW TRADE INSTANCE SYMBOL: {0}, BALANCE: {1}".format(symbol, balance)
    writeToFile(log, symbol)
def OnSave(self, event):
    dlg = wx.FileDialog(self, "Save Screenplay As",
                        wildcard="HTML files (*.html)|*.html|All files|*",
                        style=wx.SAVE | wx.OVERWRITE_PROMPT)

    if dlg.ShowModal() == wx.ID_OK:
        util.writeToFile(dlg.GetPath(), self.html, self)

    dlg.Destroy()
def OnSave(self, event):
    dlg = wx.FileDialog(self, "Filename to save as",
                        wildcard="HTML files (*.html)|*.html|All files|*",
                        style=wx.SAVE | wx.OVERWRITE_PROMPT)

    if dlg.ShowModal() == wx.ID_OK:
        util.writeToFile(dlg.GetPath(), self.html, self)

    dlg.Destroy()
def _receiveEncryptedTicket(self, data):
    expected = const.TICKET_LENGTH + const.MASTER_KEY_SIZE
    assert len(data) >= expected

    data = data.read(expected)
    decrypted = self.recvCrypter.decrypt(data[:expected])

    ticket = decrypted[:const.TICKET_LENGTH]
    nextMasterKey = decrypted[const.TICKET_LENGTH:expected]

    util.writeToFile(nextMasterKey + ticket,
                     const.DATA_DIRECTORY + const.TICKET_FILE)
def createFPtsFiles():
    fIdsLine = ""
    posExps = []
    for fPtName in os.listdir(util.fPtsDirPath):
        fIdsLine += fPtName[1:-4]
        fIdsLine += " "
        posExps.append(fPtName[1:-4] + "\n")
    fIdsLine = fIdsLine.rstrip(" ")
    fIdsLine += "\n"
    fIdsLine = ":" + fIdsLine
    # util.writeToFile([fIdsLine], util.allFPtsFile, True)
    util.writeToFile(posExps, util.subqFPtsFile, True)
def BoilerData(downloadDate):
    ret = False
    read_directory = os.path.join(Constants.ROOT_FOLDER, Constants.DATA_DIR, downloadDate)
    write_directory = os.path.join(Constants.ROOT_FOLDER, Constants.BOILER_DATA_DIR, downloadDate)
    if not os.path.exists(read_directory):
        util.logger.error("Boiler data can't be run because folder isn't present = " + downloadDate)
        return ret
    if not os.path.exists(write_directory):
        os.makedirs(write_directory)
    onlyfiles = [f for f in os.listdir(read_directory)
                 if os.path.isfile(os.path.join(read_directory, f))]
    count = 0
    try:
        for htmlFile in onlyfiles:
            htmlData = util.readFromFile(os.path.join(read_directory, htmlFile))
            html_filename = os.path.join(write_directory, htmlFile)
            if os.path.isfile(html_filename) is False:
                htmlText = remove_boiler(htmlData)
                result = util.writeToFile(htmlText, html_filename)
            else:
                result = True
            if result == True:
                count = count + 1
                util.logger.info('Boilered data done for =' + html_filename + str(count))
    except Exception, e:
        util.logger.error("Exception at boiler for data : %s" % read_directory)
def findSeqIdWithFasta(fastaSeq, subjectIndexPath):
    ''' return first hit '''
    try:
        path = nested.makeTempPath()
        util.writeToFile(fastaSeq, path)
        cmd = 'blastp -outfmt 6 -query %s -db %s' % (path, subjectIndexPath)
        results = util.run(cmd, shell=True)
    finally:
        os.remove(path)
    hitId = None
    for line in results.splitlines():
        # example line:
        # foo  sp|P39709|SEO1_YEAST  100.00  40  0  0  1  40  1  40  3e-18  84.7
        # the second field is from the hit nameline.
        hitId = fasta.idFromName(line.split()[1])
        break  # grab the first hit
    return hitId
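# Note (added; based on the standard BLAST+ tabular format, not on this
# project's docs): -outfmt 6 emits 12 tab-separated columns:
#   qseqid sseqid pident length mismatch gapopen qstart qend sstart send evalue bitscore
# which is why line.split()[1] above is the subject (hit) sequence id.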
def onSaveData(self):
    # dialog title "保存数据" = "Save data"
    fileName = QtGui.QFileDialog.getSaveFileName(self, u"保存数据", "",
                                                 "Data File(*.txt)")
    if fileName == "":
        return
    if not util.writeToFile(fileName, self.ui.getRecvWidgetContent()):
        # "错误" = "Error", "保存文件失败" = "Failed to save the file"
        QtGui.QMessageBox.critical(self, u"错误", u"保存文件失败")
def alignFastaClustalw(input, path):
    '''
    input: string containing fasta formatted sequences to be aligned.
    path: working directory where fasta will be written and clustal will write output files.
    runs alignment program clustalw
    Returns: fasta-formatted aligned sequences
    '''
    clustalFastaPath = os.path.join(path, CLUSTAL_INPUT_FILENAME)
    clustalAlignmentPath = os.path.join(path, CLUSTAL_ALIGNMENT_FILENAME)
    util.writeToFile(input, clustalFastaPath)
    try:
        subprocess.check_call('clustalw -output=fasta -infile=%s -outfile=%s 2>&1 >/dev/null'
                              % (clustalFastaPath, clustalAlignmentPath), shell=True)
    except Exception:
        logging.exception('runClustal Error: clustalFastaPath data = %s'
                          % open(clustalFastaPath).read())
        raise
    alignedFasta = util.readFromFile(clustalAlignmentPath)
    return alignedFasta
def findStoredTicket(bridge):
    """
    Retrieve a previously stored ticket from the ticket dictionary.

    The global ticket dictionary is loaded and the given `bridge' is used to
    look up the ticket and the master key.  If the ticket dictionary does not
    exist (yet) or the ticket data could not be found, `None' is returned.
    """

    assert bridge

    ticketFile = const.STATE_LOCATION + const.CLIENT_TICKET_FILE

    log.debug("Attempting to read master key and ticket from file `%s'." % ticketFile)

    # Load the ticket hash table from file.
    yamlBlurb = util.readFromFile(ticketFile)
    if (yamlBlurb is None) or (len(yamlBlurb) == 0):
        return None
    tickets = yaml.safe_load(yamlBlurb)

    try:
        timestamp, masterKey, ticket = tickets[str(bridge)]
    except KeyError:
        log.info("Found no ticket for bridge `%s'." % str(bridge))
        return None

    # We can remove the ticket now since we are about to redeem it.
    log.debug("Deleting ticket since it is about to be redeemed.")
    del tickets[str(bridge)]
    util.writeToFile(yaml.dump(tickets), ticketFile)

    # If our ticket is expired, we can't redeem it.
    ticketAge = int(time.time()) - timestamp
    if ticketAge > const.SESSION_TICKET_LIFETIME:
        log.warning("We did have a ticket but it already expired %s ago." %
                    str(datetime.timedelta(seconds=(ticketAge - const.SESSION_TICKET_LIFETIME))))
        return None

    return (masterKey, ticket)
def alignFastaClustalw(input, path):
    '''
    input: string containing fasta formatted sequences to be aligned.
    path: working directory where fasta will be written and clustal will write output files.
    runs alignment program clustalw
    Returns: fasta-formatted aligned sequences
    '''
    clustalFastaPath = os.path.join(path, CLUSTAL_INPUT_FILENAME)
    clustalAlignmentPath = os.path.join(path, CLUSTAL_ALIGNMENT_FILENAME)
    util.writeToFile(input, clustalFastaPath)
    try:
        cmd = ['clustalw', '-output', 'fasta', '-infile', clustalFastaPath,
               '-outfile', clustalAlignmentPath]
        with open(os.devnull, 'w') as devnull:
            subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
    except Exception:
        logging.exception('runClustal Error: clustalFastaPath data = %s'
                          % open(clustalFastaPath).read())
        raise
    alignedFasta = util.readFromFile(clustalAlignmentPath)
    return alignedFasta
def downloadGoogleNews(downloadDate):
    result = False
    read_directory = os.path.join(Constants.ROOT_FOLDER, Constants.RECOMMENDATION_DIR,
                                  Constants.GOOGLE_LINKS_DIR, downloadDate,
                                  Constants.GOOGLE_LINKS_FILE)
    write_directory = os.path.join(Constants.ROOT_FOLDER, Constants.RECOMMENDATION_DIR,
                                   Constants.GOOGLE_NEWS_DIR, downloadDate)
    if not os.path.exists(write_directory):
        os.makedirs(write_directory)
    writeJson = {}
    try:
        hyperlinks = [line.strip() for line in open(read_directory)]
        count = 0
        for link in hyperlinks:
            url = link.replace("http://", "")
            url = url.replace("www.", "")
            parsedUrl = re.sub(r'\W+', '', url)
            if len(parsedUrl) > 25:
                parsedUrl = parsedUrl[:25]
            try:
                html_filename = os.path.join(write_directory, parsedUrl)
                if os.path.isfile(html_filename) is False:
                    htmlfile = urllib2.urlopen(link)
                    html = htmlfile.read()
                    ret = util.writeToFile(html, html_filename)
                    if ret == True:
                        linkDict = {}
                        linkDict["url"] = link
                        linkDict["content"] = ""
                        soup = BeautifulSoup(html, 'html.parser')
                        if soup.title and soup.title.contents[0]:
                            title = soup.title.contents[0]
                        else:
                            title = ""
                        linkDict["title"] = title
                        writeJson[parsedUrl] = linkDict
                        count = count + 1
                        print 'downloaded link =' + url
            except Exception, e:
                util.logger.error("Exception at downloading link : %s" % url)
        if count > Constants.MIN_GOOGLELINKS_DAILY:
            result = writeUrlJson(writeJson, downloadDate)
        if result == True:
            util.saveSettings(Constants.LAST_GOOGLENEWS_DOWNLOAD, downloadDate)
            util.logger.info("Google news downloaded for =" + downloadDate + " links=" + str(count))
        else:
            util.logger.error("Google news failed to download for =" + downloadDate + " links=" + str(count))
    except Exception, e:
        print "Exception at opening Google news links for download: %s" % read_directory
def alignFastaClustalw(input, path): ''' input: string containing fasta formatted sequences to be aligned. path: working directory where fasta will be written and clustal will write output files. runs alignment program clustalw Returns: fasta-formatted aligned sequences ''' clustalFastaPath = os.path.join(path, CLUSTAL_INPUT_FILENAME) clustalAlignmentPath = os.path.join(path, CLUSTAL_ALIGNMENT_FILENAME) util.writeToFile(input, clustalFastaPath) try: cmd = [ 'clustalw', '-output', 'fasta', '-infile', clustalFastaPath, '-outfile', clustalAlignmentPath ] with open(os.devnull, 'w') as devnull: subprocess.check_call(cmd, stdout=devnull, stderr=devnull) except Exception: logging.exception('runClustal Error: clustalFastaPath data = %s' % open(clustalFastaPath).read()) raise alignedFasta = util.readFromFile(clustalAlignmentPath) return alignedFasta
def BoilerNews(downloadDate):
    jsonData = readBoilerJson(downloadDate)
    if jsonData is None:
        return False
    result = False
    read_directory = os.path.join(Constants.ROOT_FOLDER, Constants.RECOMMENDATION_DIR,
                                  Constants.GOOGLE_NEWS_DIR, downloadDate)
    write_directory = os.path.join(Constants.ROOT_FOLDER, Constants.RECOMMENDATION_DIR,
                                   Constants.BOILER_GOOGLE_NEWS_DIR, downloadDate)
    if not os.path.exists(read_directory):
        util.logger.error("Boiler news can't be run because folder isn't present = " + downloadDate)
        return result
    if not os.path.exists(write_directory):
        os.makedirs(write_directory)
    onlyfiles = [f for f in os.listdir(read_directory)
                 if os.path.isfile(os.path.join(read_directory, f))]
    count = 0
    for htmlFile in onlyfiles:
        try:
            htmlData = util.readFromFile(os.path.join(read_directory, htmlFile))
            if htmlData is not None:
                html_filename = os.path.join(write_directory, htmlFile)
                if os.path.isfile(html_filename) is False:
                    htmlText = remove_boiler(htmlData)
                    result = util.writeToFile(htmlText, html_filename)
                    if result == True:
                        if htmlFile in jsonData:
                            jsonData[htmlFile]["content"] = htmlText
                else:
                    result = True
                if result == True:
                    count = count + 1
                else:
                    if htmlFile in jsonData:
                        del jsonData[htmlFile]
                print 'Boilered done for =' + html_filename + str(count)
        except Exception, e:
            util.logger.error("Exception at boiler for google news : %s" % read_directory)
def BoilerData(downloadDate):
    ret = False
    read_directory = os.path.join(Constants.ROOT_FOLDER, Constants.DATA_DIR, downloadDate)
    write_directory = os.path.join(Constants.ROOT_FOLDER, Constants.BOILER_DATA_DIR, downloadDate)
    if not os.path.exists(read_directory):
        util.logger.error("Boiler data can't be run because folder isn't present = " + downloadDate)
        return ret
    if not os.path.exists(write_directory):
        os.makedirs(write_directory)
    onlyfiles = [f for f in os.listdir(read_directory)
                 if os.path.isfile(os.path.join(read_directory, f))]
    count = 0
    for htmlFile in onlyfiles:
        try:
            htmlData = util.readFromFile(os.path.join(read_directory, htmlFile))
            html_filename = os.path.join(write_directory, htmlFile)
            if os.path.isfile(html_filename) is False:
                htmlText = remove_boiler(htmlData)
                result = util.writeToFile(htmlText, html_filename)
            else:
                result = True
            if result == True:
                count = count + 1
                util.logger.info('Boilered data done for =' + html_filename + str(count))
        except Exception, e:
            util.logger.error("Exception at boiler for data : " + read_directory + "/" + htmlFile)
def refreshTradeSession(balance):
    log = " ------ NEW TRADE SESSION ------"
    writeToFile(log, "main")

    # construct a new pool
    pool = Pool.construct(binancecClient)
    log = logPool(pool)
    writeToFile(log, "main")

    # train the missing models in the new pool
    newModels = Train.start(pool, models)
    log = "Added New Models for Symbols: "
    log += logPool(newModels.keys())
    writeToFile(log, "main")

    # stop trading the models that are not in the new pool
    writeToFile("Terminating OUT-DATED symbols...", "main")
    for symbol in models.keys():
        if not (symbol in pool):
            writeToFile("terminating {0}...".format(symbol), "main")
            # terminate the process for this symbol
            processes[symbol].terminate()
            # liquidate any assets for this symbol if we have some
            balance += tradeIntances[symbol].liquidateAssets()
            # remove the symbol from the session
            del processes[symbol]
            del tradeIntances[symbol]
            del models[symbol]
    writeToFile("SUCCESS", "main")

    # refresh the instances for newModels
    writeToFile("Creating New Trade Instances...", "main")
    for symbol in newModels.keys():
        writeToFile("creating a Trade instance for {0}...".format(symbol), "main")
        # add the symbol to the models
        models[symbol] = newModels[symbol]
        # create a new trade instance
        newTradeInstance = Trade(models[symbol], binancecClient, symbol,
                                 int(balance / len(pool)))
        tradeIntances[symbol] = newTradeInstance
        # create and start a new process for the trade
        processes[symbol] = multiprocessing.Process(target=newTradeInstance.start)
        processes[symbol].start()
        writeToFile("SUCCESS for {0}...".format(symbol), "main")

    writeToFile(" ------- NEW SESSION STARTED!! -------", "main")
    return balance
def BoilerSuggNews(downloadDate):
    jsonData = readBoilerJson(downloadDate)
    if jsonData is None:
        return False
    result = False
    read_directory = os.path.join(Constants.ROOT_FOLDER, Constants.RECOMMENDATION_DIR,
                                  Constants.GOOGLE_NEWS_DIR, downloadDate,
                                  Constants.SUGG_GOOGLENEWS)
    write_directory = os.path.join(Constants.ROOT_FOLDER, Constants.RECOMMENDATION_DIR,
                                   Constants.BOILER_GOOGLE_NEWS_DIR, downloadDate,
                                   Constants.SUGG_GOOGLENEWS)
    if not os.path.exists(read_directory):
        util.logger.error("Boiler sugg news can't be run because folder isn't present = " + downloadDate)
        return result
    if not os.path.exists(write_directory):
        os.makedirs(write_directory)
    suggGoogle = jsonData['suggestGoogle']
    googleLinks = suggGoogle[Constants.GOOGLE]
    finalJson = {'GoogleNews': jsonData['GoogleNews'],
                 'suggestGoogle': {Constants.GOOGLE: []}}
    count = 0
    for linkObj in googleLinks:
        download = linkObj['download']
        htmlFile = linkObj['id']
        if download == 'yes':
            try:
                htmlData = util.readFromFile(os.path.join(read_directory, htmlFile))
                if htmlData is not None:
                    html_filename = os.path.join(write_directory, htmlFile)
                    if os.path.isfile(html_filename) is False:
                        htmlText = remove_boiler(htmlData)
                        result = util.writeToFile(htmlText, html_filename)
                        if result == True:
                            linkObj['content'] = htmlText
                            soup = BeautifulSoup(htmlData, 'html.parser')
                            if soup.title and soup.title.contents[0]:
                                title = soup.title.contents[0]
                            else:
                                title = ""
                            linkObj['title'] = title
                    else:
                        result = True
                    if result == True:
                        count = count + 1
                        util.logger.info('Boilered done for sugg_news =' + html_filename + str(count))
            except Exception, e:
                util.logger.error("Exception at boiler for google news : %s" % read_directory)
        finalJson['suggestGoogle'][Constants.GOOGLE].append(linkObj)
def OnGenerate(self, event):
    watermarks = self.itemsEntry.GetValue().split("\n")
    common = self.commonMark.GetValue()
    directory = self.dirEntry.GetValue()
    fontsize = self.markSize.GetValue()
    fnprefix = self.filenamePrefix.GetValue()

    watermarks = set(watermarks)

    # keep track of ids allocated so far, just on the off-chance we
    # randomly allocated the same id twice
    usedIds = set()

    if not directory:
        wx.MessageBox("Please set directory.", "Error", wx.OK, self)
        self.dirEntry.SetFocus()
        return

    count = 0
    for item in watermarks:
        s = item.strip()
        if not s:
            continue
        basename = item.replace(" ", "-")
        fn = directory + "/" + fnprefix + '-' + basename + ".pdf"

        pmldoc = self.sp.generatePML(True)

        ops = []

        # almost-not-there gray
        ops.append(pml.SetFillGray(0.85))

        if common:
            wm = pml.TextOp(util.cleanInput(common),
                            self.sp.cfg.marginLeft + 20,
                            self.sp.cfg.paperHeight * 0.45,
                            fontsize, pml.BOLD, angle=45)
            ops.append(wm)

        wm = pml.TextOp(util.cleanInput(s),
                        self.sp.cfg.marginLeft + 20,
                        self.sp.cfg.paperHeight * 0.6,
                        fontsize, pml.BOLD, angle=45)
        ops.append(wm)

        # ...and back to black
        ops.append(pml.SetFillGray(0.0))

        for page in pmldoc.pages:
            page.addOpsToFront(ops)

        pmldoc.uniqueId = self.getUniqueId(usedIds)

        pdfdata = pdf.generate(pmldoc)

        if not util.writeToFile(fn, pdfdata, self):
            wx.MessageBox("PDF generation aborted.", "Error", wx.OK, self)
            return
        else:
            count += 1

    if count > 0:
        wx.MessageBox("Generated %d files in directory %s." % (count, directory),
                      "PDFs generated", wx.OK, self)
    else:
        wx.MessageBox("No watermarks specified.", "Error", wx.OK, self)
def OnGenerate(self, event):
    watermarks = self.itemsEntry.GetValue().split("\n")
    common = self.commonMark.GetValue()
    directory = self.dirEntry.GetValue()
    fontsize = self.markSize.GetValue()
    fnprefix = self.filenamePrefix.GetValue()

    watermarks = set(watermarks)

    # keep track of ids allocated so far, just on the off-chance we
    # randomly allocated the same id twice
    usedIds = set()

    if not directory:
        wx.MessageBox("Please set directory.", "Error", wx.OK, self)
        self.dirEntry.SetFocus()
        return

    count = 0
    for item in watermarks:
        s = item.strip()
        if not s:
            continue
        basename = item.replace(" ", "-")
        fn = directory + "/" + fnprefix + '-' + basename + ".pdf"

        pmldoc = self.sp.generatePML(True)

        ops = []

        # almost-not-there gray
        ops.append(pml.PDFOp("0.85 g"))

        if common:
            wm = pml.TextOp(util.cleanInput(common),
                            self.sp.cfg.marginLeft + 20,
                            self.sp.cfg.paperHeight * 0.45,
                            fontsize, pml.BOLD, angle=45)
            ops.append(wm)

        wm = pml.TextOp(util.cleanInput(s),
                        self.sp.cfg.marginLeft + 20,
                        self.sp.cfg.paperHeight * 0.6,
                        fontsize, pml.BOLD, angle=45)
        ops.append(wm)

        # ...and back to black
        ops.append(pml.PDFOp("0.0 g"))

        for page in pmldoc.pages:
            page.addOpsToFront(ops)

        pmldoc.uniqueId = self.getUniqueId(usedIds)

        pdfdata = pdf.generate(pmldoc)

        if not util.writeToFile(fn, pdfdata, self):
            wx.MessageBox("PDF generation aborted.", "Error", wx.OK, self)
            return
        else:
            count += 1

    if count > 0:
        wx.MessageBox("Generated %d files in directory %s." % (count, directory),
                      "PDFs generated", wx.OK, self)
    else:
        wx.MessageBox("No watermarks specified.", "Error", wx.OK, self)
def trade(self):
    # set up the log
    noTradeMade = True
    log = "{0}USDT ".format(self.symbol)

    # get variables
    next_hour_closing_price_prediction, current_hour_open_val, last3_real_closing_price = self.getVariables()

    # calculate the correction
    self.calculateCorrection(next_hour_closing_price_prediction, last3_real_closing_price)

    # calculate delta
    delta = (next_hour_closing_price_prediction + self.correction) - current_hour_open_val

    # get the current price
    trades = self.client.get_recent_trades(symbol=self.symbol + 'USDT', limit=1)
    curent_price = float(trades[0]['price'])

    # set the threshold to 0.1%
    threshold = curent_price * 0.001

    # make a decision
    if (delta > threshold) and self.buy:
        # buy code goes here
        noTradeMade = False
        log += "Signal: BUY: "
        # try to buy with all of the balance
        quantity = int(self.balance / curent_price)
        buySuccess = False
        while not buySuccess:
            try:
                self.client.create_test_order(symbol=self.symbol + 'USDT',
                                              side=Client.SIDE_BUY,
                                              type=Client.ORDER_TYPE_MARKET,
                                              quantity=quantity)
                buySuccess = True
                self.assets += quantity
                self.balance = 0
                # we start to look for sell opportunities
                self.buy = False
                log += "BALANCE: {0}, {1}: {2}".format(self.balance, self.symbol, self.assets)
            except BinanceAPIException as e:
                # the market can move really quickly between the time we got the
                # last price and the time we place our order.  if this move is an
                # uptrend we can get an insufficient-balance exception; when that
                # happens we decrease the amount we want to buy and retry.
                buySuccess = True
                log += "Exception raised while trying to place a buy order: {0}".format(e.message)
                if (e.message == "Account has insufficient balance for requested action."):
                    quantity = quantity - int(quantity * 0.01)  # decrease the buying amount by 1%
                    buySuccess = False
    elif (delta < -threshold) and not self.buy:
        # sell code goes here
        noTradeMade = False
        log += "Signal: SELL: "
        try:
            self.client.create_test_order(symbol=self.symbol + 'USDT',
                                          side=Client.SIDE_SELL,
                                          type=Client.ORDER_TYPE_MARKET,
                                          quantity=self.assets)
            self.balance += curent_price * self.assets
            self.assets = 0
            # we start to look for buy opportunities
            self.buy = True
            log += "BALANCE: {0}, {1}: {2}".format(self.balance, self.symbol, self.assets)
        except BinanceAPIException as e:
            log += "Sell order failed, skipping this trade.. {0}".format(e.message)

    if noTradeMade:
        if self.buy:
            log += "No BUY signal found. Skipping this cycle..."
        else:
            log += "No SELL signal found. Skipping this cycle..."

    # write the log
    writeToFile(log, self.symbol)
# Give ScrambleSuit server operators a way to manually issue new session
# tickets for out-of-band distribution.
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("ip_addr", type=str, help="The IPv4 address of the "
                        "%s server." % const.TRANSPORT_NAME)
    parser.add_argument("tcp_port", type=int, help="The TCP port of the %s "
                        "server." % const.TRANSPORT_NAME)
    parser.add_argument("ticket_file", type=str, help="The file the newly "
                        "issued ticket is written to.")
    args = parser.parse_args()

    print "[+] Loading server state file."
    serverState = state.load()

    print "[+] Generating new session ticket."
    masterKey = mycrypto.strongRandom(const.MASTER_KEY_LENGTH)
    ticket = SessionTicket(masterKey, serverState).issue()

    print "[+] Writing new session ticket to `%s'." % args.ticket_file
    tickets = dict()
    server = IPv4Address('TCP', args.ip_addr, args.tcp_port)
    tickets[str(server)] = [int(time.time()), masterKey, ticket]

    util.writeToFile(yaml.dump(tickets), args.ticket_file)

    print "[+] Success."
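# A hypothetical invocation of the block above (the `ticket.py' module name is
# an assumption; the arguments follow the argparse definitions):
#
#   python ticket.py 203.0.113.5 56234 /tmp/new-ticket.yaml
#
# The written file uses the same YAML layout that storeNewTicket() produces on
# the client side: a mapping from str(server) to [timestamp, masterKey, ticket].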
def build():
    global elements, main_html
    doc = ('<?xml version="1.0" ?> <LinearLayout android:id="@+id/activity_main" '
           'android:layout_height="match_parent" android:layout_width="match_parent" '
           'android:orientation="vertical" '
           'android:paddingBottom="@dimen/activity_vertical_margin" '
           'android:paddingLeft="@dimen/activity_horizontal_margin" '
           'android:paddingRight="@dimen/activity_horizontal_margin" '
           'android:paddingTop="@dimen/activity_vertical_margin" '
           'ns1:context="com.example.d4079125.myapplication.MainActivity" '
           'xmlns:android="http://schemas.android.com/apk/res/android" '
           'xmlns:ns1="http://schemas.android.com/tools"> </LinearLayout>')
    filepath = "../../AndroidStudioProjects/MyApplication/app/src/main/res/layout/activity_main.xml"
    doc = ET.ElementTree(ET.fromstring(doc))
    # file.close()
    root = doc.getroot()
    # elementz = sorted(elements, key=lambda element: element[3] * element[4])
    elementx = sorted(elements, key=lambda element: element[2] + element[4])
    tree = []
    htmlfinal = main_html
    # html and css
    xmlFilePath = "../../AndroidStudioProjects/MyApplication/app/src/main/res/layout/activity_main.xml"
    for i in range(len(elementx)):
        e = elementx[i]
        print(e)
        margin_top = 0
        if i != 0:
            prev = elementx[i - 1]
            margin_top = e[2] - (prev[2] + prev[4])
        else:
            margin_top = e[2]
        print('Old', margin_top)
        margin_top = getClean(margin_top)
        print('New margin', margin_top)
        element_type = e[0]
        x = e[1]
        y = e[2]
        w = e[3]
        h = e[4]
        id_ = e[5]
        html = ''
        if element_type == 3:
            html = ImageView(i, margin_top, x, y, w, h, id_)
            xml = ImageViewXml(i, margin_top, x, y, w, h, id_)
            root = util.appendElementToXML(xmlFilePath, 10, root, xml)
        elif element_type == 4:
            html = TextView(i, margin_top, x, y, w, h, id_)
            xml = TextViewXml(i, margin_top, x, y, w, h, id_)
            root = util.appendElementToXML(xmlFilePath, 10, root, xml)
        elif element_type == 2:
            html = Header(i, margin_top, x, y, w, h, id_)
            xml1, xml2 = HeaderXml(i, margin_top, x, y, w, h, id_)
            root = util.appendElementToXML(xmlFilePath, 10, root, xml1)
            root = util.appendElementToXML(xmlFilePath, 10, root, xml2)
        elif element_type == 1:
            html = EditText(i, margin_top, x, y, w, h, id_)
            xml = EditTextXml(i, margin_top, x, y, w, h, id_)
            root = util.appendElementToXML(xmlFilePath, 10, root, xml)
        elif element_type == 0:
            html = Button(i, margin_top, x, y, w, h, id_)
            xml = ButtonXml(i, margin_top, x, y, w, h, id_)
            root = util.appendElementToXML(xmlFilePath, 10, root, xml)
        htmlfinal = htmlfinal + html
    roughString = ET.tostring(root, 'utf-8')
    soup = bs(htmlfinal)
    prettyHTML = soup.prettify()
    util.writeToFile('./output/output.html', prettyHTML)
    reparsed = minidom.parseString(roughString)
    links = browser.find_elements_by_xpath('//a')
    getLinksPerCategory(links, category)

    # Get all the main page links.
    # Initialize for it.
    browser.get('http://www.news.google.com')
    links = browser.find_elements_by_xpath('//a')
    getLinksPerCategory(links, 'HomePage')

    if len(downloadedLinks) > Constants.MIN_GOOGLELINKS_DAILY:
        linksToBeWritten = "\n".join(downloadedLinks)
        directory = os.path.join(Constants.ROOT_FOLDER, Constants.RECOMMENDATION_DIR,
                                 Constants.GOOGLE_LINKS_DIR, todayDate)
        if not os.path.exists(directory):
            os.makedirs(directory)
        result = util.writeToFile(linksToBeWritten,
                                  os.path.join(directory, Constants.GOOGLE_LINKS_FILE))
        if result == True:
            util.saveSettings(Constants.LAST_GOOGLELINKS_DOWNLOAD, todayDate)
            util.logger.info("Google links downloaded for =" + todayDate)
            return result
    util.logger.error("Google links not downloaded for =" + todayDate)
    return result

def GoogleNews():
    downloadedLinks = []
    todayDate = util.getTodayDateFolder()
    lastNewsDownloaded = util.loadSettings(Constants.LAST_GOOGLENEWS_DOWNLOAD)
    lastLinksDownloaded = util.loadSettings(Constants.LAST_GOOGLELINKS_DOWNLOAD)
    googleLinksStatus = True