def import_requests():
    """
    Return the 'requests' module, importing it on first use.

    If the package is missing, print a platform-specific help text,
    ask the user for permission to install it via pip, and run
    'python -m pip install --upgrade requests' in a subprocess.

    Returns:
        The imported 'requests' module (cached in global __requests).
    Raises:
        TradeException: if the user declines, pip is unavailable,
        or 'requests' still cannot be imported after installing.
    """
    global __requests
    global platform
    # Cached from a previous call: nothing to do.
    if __requests:
        return __requests
    # Build platform-specific installation hints for the error message.
    if platform.system() == 'Linux':
        extra = ("Ubuntu users: You may be able to install 'pip'\n"
                 "with 'apt-get install python3-pip' and requests with\n"
                 "'pip3 install --upgrade requests'.\n")
    elif platform.system() == 'Windows':
        extra = ("\n\nThe requests package can be installed with\n"
                 "'pip3 install --upgrade requests'.\n\n"
                 "If requests is installed, you may have bits\n"
                 "of 32-bit and 64-bit Python installed.\n"
                 "Consider using control panel to uninstall Python,\n"
                 "delete the Python folder (usually C:\\Python34\\),\n"
                 "and then re-install Python.\n")
    else:
        extra = ""
    print("ERROR: Unable to load the Python 'requests' package.\n" + extra)
    approval = input(
        "I can try and install the package automatically using 'pip'.\n"
        "Try to install 'requests' now (y/n)? ")
    # Idiot-proofing: take just the first character in case the user typed
    # 'YES' (upper or lower case) instead of 'Y'.
    if approval[0:1].lower() != 'y':
        raise TradeException("Missing package: 'requests'")
    try:
        # Imported only to verify that pip is present; the install itself
        # goes through the CLI below.
        import pip  # noqa: F401
    except ImportError as e:
        raise TradeException(
            "Python 3.4.2 includes a package manager called 'pip', "
            "except it doesn't appear to be installed on your system:\n"
            "{}{}".format(str(e), extra)) from None
    # Invoke pip via its CLI: "The most reliable approach, and the one that
    # is fully supported." The old pip.main(...) API fails on newer pips
    # (e.g. Python 3.6: "AttributeError: 'module' object has no attribute
    # 'main'").
    subprocess.check_call(
        [sys.executable, '-m', 'pip', 'install', '--upgrade', 'requests'])
    try:
        import requests
        __requests = requests
    except ImportError:
        raise TradeException(
            "The requests module did not install correctly.{}".format(
                extra)) from None
    return __requests
def importDataFromFile(tdb, tdenv, path, pricesFh=None, reset=False):
    """
    Import price data from a file on a per-station basis, that is
    when a new station is encountered, delete any existing records
    for that station in the database.

    tdb       TradeDB instance providing getDB() and pricesPath.
    tdenv     TradeEnv used for DEBUG0 logging.
    path      Path of the .prices file to import.
    pricesFh  [optional] already-open file handle to read instead.
    reset     When True, wipe the whole StationItem table first.

    Raises TradeException when neither a handle nor the file exists.
    """
    if not (pricesFh or path.exists()):
        raise TradeException("No such file: {}".format(str(path)))

    if reset:
        tdenv.DEBUG0("Resetting price data")
        with tdb.getDB() as conn:
            conn.execute("DELETE FROM StationItem")
            conn.commit()

    tdenv.DEBUG0("Importing data from {}".format(str(path)))
    processPricesFile(
        tdenv,
        db=tdb.getDB(),
        pricesPath=path,
        pricesFh=pricesFh,
    )

    # The .prices file mirrors the DB: rebuild it unless we imported
    # from the canonical prices file itself.
    if path != tdb.pricesPath:
        regeneratePricesFile(tdb, tdenv)
def import_requests():
    """
    Return the 'requests' module, importing it on first use.

    If the package is missing, print a platform-specific help text,
    ask the user for permission to install it via pip, and run
    'python -m pip install --upgrade requests' in a subprocess.

    Returns:
        The imported 'requests' module (cached in global __requests).
    Raises:
        TradeException: if the user declines, pip is unavailable,
        or 'requests' still cannot be imported after installing.
    """
    global __requests
    global platform
    # Cached from a previous call: nothing to do.
    if __requests:
        return __requests
    # Build platform-specific installation hints for the error message.
    if platform.system() == 'Linux':
        # Fixed typo: "requets" -> "requests".
        extra = ("\nUbuntu users: You may be able to install 'pip' "
                 "with 'apt-get install python3-pip' and requests with "
                 "'pip3 install --upgrade requests'.")
    elif platform.system() == 'Windows':
        extra = ("\nThis often happens if you have bits of 32-bit and "
                 "64-bit Python installed.\n"
                 "Consider using control panel to uninstall Python, "
                 "delete the Python folder (usually C:\\Python34\\) "
                 "and re-install Python.")
    else:
        extra = ""
    print("ERROR: Unable to load the Python 'requests' package." + extra + "\n")
    approval = input(
        "I can try and install the package automatically using 'pip'.\n"
        "Try to install 'requests' now (y/n)? ")
    # Accept 'y', 'Y', 'yes', 'YES', ...: look only at the first character
    # (an empty reply slices to "" and is treated as a refusal).
    if approval[0:1].lower() != 'y':
        raise TradeException("Missing package: 'requests'")
    try:
        # Imported only to verify that pip is present; the install itself
        # goes through the CLI below.
        import pip  # noqa: F401
    except ImportError as e:
        raise TradeException(
            "Python 3.4.2 includes a package manager called 'pip', "
            "except it doesn't appear to be installed on your system:\n"
            "{}{}".format(str(e), extra)) from None
    # pip.main() was removed from pip's public API (it raises
    # "AttributeError: 'module' object has no attribute 'main'" on newer
    # pips); invoking the CLI in a subprocess is the supported approach.
    import subprocess
    import sys
    subprocess.check_call(
        [sys.executable, '-m', 'pip', 'install', '--upgrade', 'requests'])
    try:
        import requests
        __requests = requests
    except ImportError:
        raise TradeException(
            "The requests module did not install correctly.{}".format(
                extra)) from None
    return __requests
def render(results, cmdenv, tdb):
    """
    Print one table row per station in 'results'.

    The station-name column is always shown; at quiet < 2 the age,
    distance-from-star and pad-size columns are added, plus a
    distance-in-ly column when a near system was specified.

    Raises TradeException when there are no rows to display.
    """
    from formatting import RowFormat, ColumnFormat

    if not results or not results.rows:
        raise TradeException("No data found")

    # Width of the widest station name so the column lines up.
    nameWidth = max(len(row.station.name()) for row in results.rows)

    fmt = RowFormat()
    fmt.append(ColumnFormat(
        "Station", '<', nameWidth,
        key=lambda row: row.station.name()))

    if cmdenv.quiet < 2:
        if cmdenv.nearSystem:
            fmt.addColumn('DistLy', '>', 6, '.2f',
                          key=lambda row: row.dist)
        fmt.append(ColumnFormat(
            "Age/days", '>', '8', '.2f',
            key=lambda row: row.age))
        fmt.append(ColumnFormat(
            "StnLs", '>', '10',
            key=lambda row: row.station.distFromStar()))
        fmt.append(ColumnFormat(
            "Pad", '>', '3',
            key=lambda row: TradeDB.padSizes[row.station.maxPadSize]))

    if not cmdenv.quiet:
        heading, underline = fmt.heading()
        print(heading, underline, sep='\n')

    for row in results.rows:
        print(fmt.format(row))
def exportTableToFile(tdb, tdenv, tableName, dataPath=None): """ Generate the csv file for tableName in dataPath returns lineCount, exportPath """ # path for csv file dataPath = dataPath or tdb.dataPath if not dataPath.is_dir(): raise TradeException("Save location '{}' not found.".format(str(dataPath))) # connect to the database conn = tdb.getDB() conn.row_factory = sqlite3.Row # prefix for unique/ignore columns uniquePfx = "unq:" ignorePfx = "!" # create CSV files exportPath = (dataPath / Path(tableName)).with_suffix(".csv") tdenv.DEBUG0("Export Table '{table}' to '{file}'".format( table=tableName, file=str(exportPath) )) lineCount = 0 with exportPath.open("w", encoding='utf-8', newline="\n") as exportFile: exportOut = csv.writer(exportFile, delimiter=",", quotechar="'", doublequote=True, quoting=csv.QUOTE_NONNUMERIC, lineterminator="\n") cur = conn.cursor() # check for single PRIMARY KEY pkCount = 0 for columnRow in cur.execute("PRAGMA table_info('%s')" % tableName): # count the columns of the primary key if columnRow['pk'] > 0: pkCount += 1 # build column list columnList = [] for columnRow in cur.execute("PRAGMA table_info('%s')" % tableName): # if there is only one PK column, ignore it if columnRow['pk'] > 0 and pkCount == 1: continue columnList.append(columnRow) if len(columnList) == 0: raise TradeException("No columns to export for table '{}'.".format(tableName)) # reverse the first two columns for some tables if tableName in reverseList: columnList[0], columnList[1] = columnList[1], columnList[0] # initialize helper lists csvHead = [] stmtColumn = [] stmtTable = [ tableName ] stmtOrder = [] unqIndex = getUniqueIndex(conn, tableName) keyList = getFKeyList(conn, tableName) tdenv.DEBUG1('UNIQUE: ' + ", ".join(unqIndex)) # iterate over all columns of the table for col in columnList: # check if the column is a foreign key key = search_keyList(keyList, col['name']) if key: # make the join statement keyStmt = buildFKeyStmt(conn, tableName, key) for keyRow in 
keyStmt: tdenv.DEBUG1('FK-Stmt: {}'.format(list(keyRow))) # is the join for the same table if keyRow['table'] == tableName: csvPfx = '' joinStmt = 'USING({})'.format(keyRow['joinColumn']) else: # this column must be ignored by the importer, it's only # used to resolve the FK relation csvPfx = ignorePfx joinStmt = 'ON {}.{} = {}.{}'.format(keyRow['table'], keyRow['joinColumn'], keyRow['joinTable'], keyRow['joinColumn']) if col['name'] in unqIndex: # column is part of an unique index csvPfx = uniquePfx + csvPfx csvHead += [ "{}{}@{}.{}".format(csvPfx, keyRow['column'], keyRow['joinTable'], keyRow['joinColumn']) ] stmtColumn += [ "{}.{}".format(keyRow['joinTable'], keyRow['column']) ] if col['notnull']: stmtTable += [ 'INNER JOIN {} {}'.format(keyRow['joinTable'], joinStmt) ] else: stmtTable += [ 'LEFT OUTER JOIN {} {}'.format(keyRow['joinTable'], joinStmt) ] stmtOrder += [ "{}.{}".format(keyRow['joinTable'], keyRow['column']) ] else: # ordinary column if col['name'] in unqIndex: # column is part of an unique index csvHead += [ uniquePfx + col['name'] ] stmtOrder += [ "{}.{}".format(tableName, col['name']) ] else: csvHead += [ col['name'] ] stmtColumn += [ "{}.{}".format(tableName, col['name']) ] # build the SQL statement sqlStmt = "SELECT {} FROM {}".format(",".join(stmtColumn)," ".join(stmtTable)) if len(stmtOrder) > 0: sqlStmt += " ORDER BY {}".format(",".join(stmtOrder)) tdenv.DEBUG1("SQL: %s" % sqlStmt) # finally generate the csv file # write header line without quotes exportFile.write("{}\n".format(",".join(csvHead))) for line in cur.execute(sqlStmt): lineCount += 1 tdenv.DEBUG2("{count}: {values}".format(count=lineCount, values=list(line))) exportOut.writerow(list(line)) tdenv.DEBUG1("{count} {table}s exported".format(count=lineCount, table=tableName)) # Update the DB file so we don't regenerate it. os.utime(str(tdb.dbPath)) return lineCount, exportPath
def render(results, cmdenv, tdb):
    """
    Print the systems found by the search, optionally with one
    sub-row per station.

    results  result set with .rows (system rows, each carrying
             .system, .dist and .stations) and .summary (.ly radius
             and .near system used for the search).
    cmdenv   command environment; .detail > 0 turns on the station
             listing, .detail > 1 adds the item-count column,
             .quiet suppresses headings.
    tdb      TradeDB instance (not referenced in this body).

    Raises TradeException when no systems were found.
    """
    if not results or not results.rows:
        raise TradeException("No systems found within {}ly of {}.".format(
            results.summary.ly, results.summary.near.name()))

    # Compare system names so we can tell
    maxSysLen = max_len(results.rows, key=lambda row: row.system.name())

    sysRowFmt = RowFormat().append(
        ColumnFormat("System", '<', maxSysLen,
                     key=lambda row: row.system.name())).append(
        ColumnFormat("Dist", '>', '7', '.2f',
                     key=lambda row: row.dist))

    # Any detail level at all enables the per-station sub-rows.
    showStations = cmdenv.detail
    if showStations:
        maxStnLen = max_len(
            chain.from_iterable(row.stations for row in results.rows),
            key=lambda row: row.station.dbname)
        maxLsLen = max_len(
            chain.from_iterable(row.stations for row in results.rows),
            key=lambda row: row.station.distFromStar())
        # keep at least enough width for the "StnLs" heading
        maxLsLen = max(maxLsLen, 5)
        stnRowFmt = RowFormat(prefix=' / ').append(
            ColumnFormat("Station", '.<', maxStnLen + 2,
                         key=lambda row: row.station.dbname)
        ).append(
            ColumnFormat("StnLs", '>', maxLsLen,
                         key=lambda row: row.station.distFromStar())
        ).append(
            ColumnFormat("Age/days", '>', 7,
                         key=lambda row: row.age)
        ).append(
            # One-character facility flags: market, black market,
            # shipyard, outfitting, rearm, refuel, repair, pad, planetary.
            ColumnFormat("Mkt", '>', '3',
                         key=lambda row: TradeDB.marketStates[row.station.market])
        ).append(
            ColumnFormat("BMk", '>', '3',
                         key=lambda row: TradeDB.marketStates[row.station.blackMarket])
        ).append(
            ColumnFormat("Shp", '>', '3',
                         key=lambda row: TradeDB.marketStates[row.station.shipyard])
        ).append(
            ColumnFormat("Out", '>', '3',
                         key=lambda row: TradeDB.marketStates[row.station.outfitting])
        ).append(
            ColumnFormat("Arm", '>', '3',
                         key=lambda row: TradeDB.marketStates[row.station.rearm])
        ).append(
            ColumnFormat("Ref", '>', '3',
                         key=lambda row: TradeDB.marketStates[row.station.refuel])
        ).append(
            ColumnFormat("Rep", '>', '3',
                         key=lambda row: TradeDB.marketStates[row.station.repair])
        ).append(
            ColumnFormat("Pad", '>', '3',
                         key=lambda row: TradeDB.padSizes[row.station.maxPadSize])
        ).append(
            ColumnFormat("Plt", '>', '3',
                         key=lambda row: TradeDB.planetStates[row.station.planetary])
        )
        if cmdenv.detail > 1:
            stnRowFmt.append(
                ColumnFormat("Itms", ">", 4,
                             key=lambda row: row.station.itemCount))

    cmdenv.DEBUG0(
        "Systems within {ly:<5.2f}ly of {sys}.\n",
        sys=results.summary.near.name(),
        ly=results.summary.ly,
    )

    if not cmdenv.quiet:
        heading, underline = sysRowFmt.heading()
        # When stations are shown, print the system heading bare and
        # let the underline belong to the (wider) station heading.
        if showStations:
            print(heading)
            heading, underline = stnRowFmt.heading()
        print(heading, underline, sep='\n')

    for row in results.rows:
        print(sysRowFmt.format(row))
        for stnRow in row.stations:
            print(stnRowFmt.format(stnRow))
def __init__(self, tdb, tdenv=None, fit=None, items=None):
    """
    Constructs the TradeCalc object and loads sell/buy data.

    Parameters:
        tdb     The TradeDB() object to use to access data,
        tdenv   [optional] TradeEnv() that controls behavior,
        fit     [optional] Lets you specify a fitting function,
        items   [optional] Iterable [itemID or Item()] that
                restricts loading,

    TradeEnv options:
        tdenv.avoidItems
            Iterable of [Item] that prevents items being loaded
        tdenv.maxAge
            Maximum age in days of data that gets loaded
        tdenv.supply
            Require at least this much supply to load an item
        tdenv.demand
            Require at least this much demand to load an item
    """
    if not tdenv:
        tdenv = tdb.tdenv
    self.tdb = tdb
    self.tdenv = tdenv
    # Default fitting algorithm; the BRUTE_FIT env var forces the
    # exhaustive version instead.
    self.defaultFit = fit or self.fastFit
    if "BRUTE_FIT" in os.environ:
        self.defaultFit = self.bruteForceFit

    # 0 means "no minimum" in the unit filters below.
    minSupply = self.tdenv.supply or 0
    minDemand = self.tdenv.demand or 0

    db = tdb.getDB()

    # Accumulate WHERE fragments and their bound parameters.
    wheres, binds = [], []
    if tdenv.maxAge:
        # Only load rows modified within the last maxAge days.
        maxDays = datetime.timedelta(days=tdenv.maxAge)
        cutoff = datetime.datetime.now() - maxDays
        wheres.append("(modified >= ?)")
        binds.append(str(cutoff.replace(microsecond=0)))

    if tdenv.avoidItems or items:
        avoidItemIDs = set(item.ID for item in tdenv.avoidItems)
        loadItems = items or tdb.itemByID.values()
        loadItemIDs = set()
        for item in loadItems:
            # 'items' may hold raw integer IDs or Item() instances.
            ID = item if isinstance(item, int) else item.ID
            if ID not in avoidItemIDs:
                loadItemIDs.add(str(ID))
        if not loadItemIDs:
            raise TradeException("No items to load.")
        # IDs are generated locally (not user input), so embedding them
        # directly in the SQL text rather than binding is safe here.
        loadItemIDs = ",".join(str(ID) for ID in loadItemIDs)
        wheres.append("(item_id IN ({}))".format(loadItemIDs))

    # Per-station lists of (itemID, price, units, level, ageSeconds).
    demand = self.stationsBuying = defaultdict(list)
    supply = self.stationsSelling = defaultdict(list)

    whereClause = " AND ".join(wheres) or "1"
    # NOTE(review): stnAppend is never used below; dmdAppend/supAppend
    # are (re)bound inside the loop whenever the station id changes.
    lastStnID, stnAppend = 0, None
    dmdCount, supCount = 0, 0
    stmt = """
        SELECT station_id, item_id,
               strftime('%s', modified),
               demand_price, demand_units, demand_level,
               supply_price, supply_units, supply_level
          FROM StationItem
         WHERE {where}
    """.format(where=whereClause)
    tdenv.DEBUG1("TradeCalc loading StationItem values")
    tdenv.DEBUG2("sql: {}, binds: {}", stmt, binds)
    cur = db.execute(stmt, binds)
    now = int(time.time())
    for (stnID, itmID, timestamp,
         dmdCr, dmdUnits, dmdLevel,
         supCr, supUnits, supLevel) in cur:
        # Rebind the append methods only when the station changes
        # (micro-optimization). NOTE(review): this assumes rows come
        # out grouped by station_id even though the query has no
        # ORDER BY -- confirm against the table's key layout.
        if stnID != lastStnID:
            dmdAppend = demand[stnID].append
            supAppend = supply[stnID].append
            lastStnID = stnID
        try:
            ageS = now - int(timestamp)
        except TypeError:
            # int(None) when the modified timestamp is missing/NULL.
            raise BadTimestampError(self.tdb, stnID, itmID, timestamp)
        if dmdCr > 0:
            if not minDemand or dmdUnits >= minDemand:
                dmdAppend((itmID, dmdCr, dmdUnits, dmdLevel, ageS))
                dmdCount += 1
        if supCr > 0 and supUnits:
            if not minSupply or supUnits >= minSupply:
                supAppend((itmID, supCr, supUnits, supLevel, ageS))
                supCount += 1
    tdenv.DEBUG0("Loaded {} buys, {} sells".format(dmdCount, supCount))
def download(
        tdenv, url, localFile,
        headers=None,
        backup=False,
        shebang=None,
        chunkSize=4096,
):
    """
    Fetch data from a URL and save the output to a local file.
    Returns the response headers.

    tdenv:      TradeEnv we're working under
    url:        URL we're fetching (http, https or ftp)
    localFile:  Name of the local file to open.
    headers:    dict() of additional HTTP headers to send
    backup:     keep the old copy as localFile + ".bak" when True
    shebang:    function to call on the first line
    chunkSize:  streaming read size in bytes

    The payload is streamed into "tmp/<name>.dl" and renamed into
    place only after the download completes, so an interrupted
    transfer never clobbers the existing file.
    # NOTE(review): assumes a "tmp/" directory exists next to the
    # working directory -- tmpPath.open() fails otherwise; confirm.
    """
    requests = import_requests()
    tdenv.NOTE("Requesting {}".format(url))
    req = requests.get(url, headers=headers or None, stream=True)
    req.raise_for_status()

    encoding = req.headers.get('content-encoding', 'uncompress')
    length = req.headers.get('content-length', None)
    transfer = req.headers.get('transfer-encoding', None)
    if transfer != 'chunked':
        # chunked transfer-encoding doesn't need a content-length
        if length is None:
            raise Exception(
                "Remote server replied with invalid content-length.")
        length = int(length)
        if length <= 0:
            raise TradeException(
                "Remote server gave an empty response. Please try again later."
            )

    if tdenv.detail > 1:
        if length:
            tdenv.NOTE("Downloading {} {}ed data", makeUnit(length), encoding)
        else:
            tdenv.NOTE("Downloading {} {}ed data", transfer, encoding)
    # Escape braces so the header dump survives NOTE/DEBUG's .format().
    tdenv.DEBUG0(str(req.headers).replace("{", "{{").replace("}", "}}"))

    # Figure out how much data we have
    if length and not tdenv.quiet:
        progBar = pbar.Progress(length, 20)
    else:
        progBar = None

    actPath = Path(localFile)
    tmpPath = Path("tmp/{}.dl".format(actPath.name))

    # Rolling window of recent chunk throughputs for the rate display.
    histogram = deque()
    fetched = 0
    lastTime = started = time.time()
    spinner, spinners = 0, [
        '. ', '.. ', '... ', ' ... ', ' ...', ' ..', ' .'
    ]
    with tmpPath.open("wb") as fh:
        for data in req.iter_content(chunk_size=chunkSize):
            fh.write(data)
            fetched += len(data)
            if shebang:
                # Give the caller the first line of the payload once,
                # then disable the callback.
                bangLine = data.decode().partition("\n")[0]
                tdenv.DEBUG0("Checking shebang of {}", bangLine)
                shebang(bangLine)
                shebang = None
            if progBar:
                now = time.time()
                # Clamp to avoid division by zero on very fast chunks.
                deltaT = max(now - lastTime, 0.001)
                lastTime = now
                if len(histogram) >= 15:
                    histogram.popleft()
                histogram.append(len(data) / deltaT)
                # The lambda deliberately closes over fetched/spinner/
                # histogram so the postfix reflects current values.
                progBar.increment(
                    len(data),
                    postfix=lambda value, goal: \
                    " {:>7s} [{:>7s}/s] {:>3.0f}% {:1s}".format(
                        makeUnit(value),
                        makeUnit(sum(histogram) / len(histogram)),
                        (fetched * 100. / length),
                        spinners[spinner]
                    )
                )
                if deltaT > 0.200:
                    spinner = (spinner + 1) % len(spinners)
        tdenv.DEBUG0("End of data")
    if not tdenv.quiet:
        if progBar:
            progBar.clear()
        # 'or 1' guards the rate division when elapsed rounds to zero.
        elapsed = (time.time() - started) or 1
        tdenv.NOTE("Downloaded {} of {}ed data {}/s",
                   makeUnit(fetched), encoding,
                   makeUnit(fetched / elapsed))

    # Swap the file into place
    if backup:
        bakPath = Path(localFile + ".bak")
        if bakPath.exists():
            bakPath.unlink()
        if actPath.exists():
            actPath.rename(localFile + ".bak")
    if actPath.exists():
        actPath.unlink()
    tmpPath.rename(actPath)
    req.close()
    return req.headers