Example #1
    def __init__(self, tdb, tdenv):
        super().__init__(tdb, tdenv)

        logDirName = os.getenv(self.LOGDIR_NAME, None)
        if not logDirName:
            raise PluginException("Environment variable '{}' not set "
                                  "(see 'README.md' for help).".format(
                                      self.LOGDIR_NAME))
        tdenv.NOTE("{}={}", self.LOGDIR_NAME, logDirName)

        self.logPath = pathlib.Path(logDirName)
        if not self.logPath.is_dir():
            raise PluginException("{}: is not a directory.".format(
                str(self.logPath)))
Example #2
def init_plugin(name, module=None, optional=False):
    """
    Installs a plug-in specified by the supplied name (or name and module).

    arguments:
    name -- the name of the plug-in
    module -- if supplied then name->module inference is skipped and init_plugin
              uses this module as a source of the plug-in
    optional -- if True then the module is installed only if it is configured
    """
    if not optional or has_configured_plugin(name):
        try:
            if module is None:
                if not settings.contains('plugins', name):
                    raise PluginException(
                        'Missing configuration for the "%s" plugin' % name)
                plugin_module = plugins.load_plugin_module(
                    settings.get('plugins', name)['module'])
            else:
                plugin_module = module
            plugins.install_plugin(name, plugin_module, settings)
        except ImportError as e:
            logging.getLogger(__name__).warning(
                'Plugin [%s] configured but the following error occurred: %r' %
                (name, e))
        except Exception as e:
            logging.getLogger(__name__).critical(
                'Failed to initiate plug-in %s: %s' % (name, e))
            raise e
    else:
        plugins.add_missing_plugin(name)
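A minimal usage sketch for init_plugin() (the plug-in names 'db' and 'query_suggest' are made up for illustration, not taken from the example):

    # mandatory plug-in: module is resolved via settings.get('plugins', 'db')
    init_plugin('db')
    # optional plug-in: installed only if it is configured
    init_plugin('query_suggest', optional=True)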
Example #3
    def getJournalDirList(self):
        """
        get all Journal files
        """
        tdenv = self.tdenv
        optDate = self.getOption("date")
        logLast = self.getOption("last")

        logDate = None
        if isinstance(optDate, str):
            fmtLen = len(optDate)
            fmtDate = self.DATE_FORMATS.get(fmtLen, None)
            if fmtDate:
                tdenv.DEBUG0("date format: {}", fmtDate[0])
                try:
                    logDate = datetime.strptime(optDate, fmtDate[0])
                except ValueError:
                    logDate = None
            if logDate:
                globDat = logDate.strftime(fmtDate[2])
                self.logGlob = "Journal." + globDat + "*.log"
                tdenv.NOTE("using date: {}", logDate.strftime(fmtDate[0]))
            else:
                raise PluginException(
                    "Invalid date '{}'. Must be given in one of these "
                    "formats: '{}'".format(
                        optDate,
                        "','".join(d[1] for d in self.DATE_FORMATS.values())))
        tdenv.NOTE("using pattern: {}", self.logGlob)

        for filePath in sorted(self.logPath.glob(self.logGlob)):
            tdenv.DEBUG0("logfile: {}", str(filePath))
            self.filePathList.append(filePath)

        listLen = len(self.filePathList)
        if listLen == 0:
            raise PluginException("No journal file found.")
        elif listLen == 1:
            tdenv.NOTE("Found one journal file.")
        else:
            tdenv.NOTE("Found {} journal files.", listLen)

        if logLast and listLen > 1:
            del self.filePathList[:-1]
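getJournalDirList() looks up DATE_FORMATS by the length of the supplied date string; each entry appears to be a tuple of (strptime format, human-readable form, strftime format used to build the filename glob). A plausible sketch of such a mapping (the values are an assumption, not taken from the plug-in source):

    DATE_FORMATS = {
        2: ("%y",       "YY",       "%y"),      # "17"       -> glob "Journal.17*.log"
        5: ("%y-%m",    "YY-MM",    "%y%m"),    # "17-04"    -> glob "Journal.1704*.log"
        8: ("%y-%m-%d", "YY-MM-DD", "%y%m%d"),  # "17-04-30" -> glob "Journal.170430*.log"
    }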
Example #4
    def getLogDirList(self):
        """
        get all netLog files
        """
        tdenv = self.tdenv
        logDate = self.getOption("date")
        logLast = self.getOption("last")

        self.logDate = None
        if isinstance(logDate, str):
            self.fmtDate = len(logDate)
            fmt = self.DATE_FORMATS.get(self.fmtDate, (None, None, None))
            if fmt[0]:
                tdenv.DEBUG0("date format: {}", fmt[0])
                try:
                    self.logDate = datetime.strptime(logDate, fmt[0])
                except ValueError:
                    pass
            if self.logDate:
                globDat = self.logDate.strftime(fmt[2])
                self.logGlob = "netLog." + globDat + "*.log"
            else:
                raise PluginException(
                    "Invalid date '{}'. Must be given in one of these "
                    "formats: '{}'".format(
                        logDate,
                        "','".join(d[1] for d in self.DATE_FORMATS.values())))
            tdenv.NOTE("using date: {}", self.logDate.strftime(fmt[0]))
        tdenv.NOTE("using pattern: {}", self.logGlob)

        self.filePathList = []
        for filePath in sorted(self.logPath.glob(self.logGlob)):
            tdenv.DEBUG0("logfile: {}", str(filePath))
            self.filePathList.append(filePath)

        if logLast and len(self.filePathList) > 1:
            del self.filePathList[:-1]

        listLen = len(self.filePathList)
        if listLen == 0:
            raise PluginException("No logfile found.")
        elif listLen == 1:
            tdenv.NOTE("Found one logfile.")
        else:
            tdenv.NOTE("Found {} logfiles.", listLen)
Example #5
    def download_prices(self, lastRunDays):
        """ Figure out which file to download and fetch it. """

        # Argument checking
        use3h = 1 if self.getOption("use3h") else 0
        use2d = 1 if self.getOption("use2d") else 0
        usefull = 1 if self.getOption("usefull") else 0
        if use3h + use2d + usefull > 1:
            raise PluginException(
                "Only one of use3h/use2d/usefull can be used at once.")
        if self.getOption("skipdl"):
            if (use3h or use2d or usefull):
                raise PluginException(
                    "use3h/use2d/usefull has no effect with --opt=skipdl")
            return

        # Overrides
        if use3h:
            lastRunDays = 0.01
        elif use2d:
            lastRunDays = 1.0
        elif usefull:
            lastRunDays = 3.0

        # Use age/options to determine which file
        if lastRunDays < 3 / 24:
            priceFile = "prices-3h.asp"
        elif lastRunDays < 1.9:
            priceFile = "prices-2d.asp"
        else:
            priceFile = "prices.asp"

        # Fetch!
        transfers.download(
            self.tdenv,
            BASE_URL + priceFile,
            self.filename,
            shebang=lambda line: self.check_shebang(line, True),
        )
Example #6
    def check_shebang(self, line, checkAge):
        m = re.match(r'^#!\s*trade.py\s*import\s*.*\s*--timestamp\s*"([^"]+)"',
                     line)
        if not m:
            raise PluginException(
                "Data is not Maddavo's prices list format: " + line)
        self.importDate = m.group(1)
        if checkAge and not self.getOption("force"):
            if self.importDate <= self.prevImportDate:
                raise SystemExit("Local data is already current [{}].".format(
                    self.importDate))
            if self.tdenv.detail:
                print("New timestamp: {}, Old timestamp: {}".format(
                    self.importDate, self.prevImportDate))
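For reference, a shebang line accepted by check_shebang() would look roughly like the one below (the timestamp value is invented); the first capture group becomes importDate:

    import re
    line = '#! trade.py import --timestamp "2017-04-30 12:06:45"'
    m = re.match(r'^#!\s*trade.py\s*import\s*.*\s*--timestamp\s*"([^"]+)"', line)
    print(m.group(1))  # -> 2017-04-30 12:06:45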
Example #7
    def download_fdevids(self):
        """
            Download the current data from EDCD,
            see https://github.com/EDCD/FDevIDs
        """
        tdb, tdenv = self.tdb, self.tdenv

        BASE_URL = "https://raw.githubusercontent.com/EDCD/FDevIDs/master/"
        downloadList = []
        if self.getOption("shipyard"):
            downloadList.append(
                ('FDevShipyard', 'shipyard.csv', self.process_fdevids_table)
            )
        if self.getOption("outfitting"):
            downloadList.append(
                ('FDevOutfitting', 'outfitting.csv', self.process_fdevids_table)
            )
        if self.getOption("commodity"):
            downloadList.append(
                ('FDevCommodity', 'commodity.csv', self.process_fdevids_items)
            )

        if len(downloadList) == 0:
            tdenv.NOTE("I don't know what do to, give me some options!")
            return

        optLocal = self.getOption("local")
        for tableName, fileEDCD, callMeBack in downloadList:
            localPath = tdb.dataPath / pathlib.Path(tableName).with_suffix(".csv")
            if optLocal:
                if not localPath.exists():
                    raise PluginException(
                        "CSV-file '{}' not found.".format(str(localPath))
                    )
            else:
                transfers.download(
                    tdenv,
                    BASE_URL + fileEDCD,
                    str(localPath),
                )
            if callMeBack:
                callMeBack(localPath, tableName)
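With the "shipyard" option, the loop above downloads BASE_URL + 'shipyard.csv' and stores it as FDevShipyard.csv next to the other data files. A stand-alone sketch of the path handling (the data directory name is a stand-in for tdb.dataPath):

    import pathlib
    dataPath = pathlib.Path("data")  # stand-in for tdb.dataPath
    localPath = dataPath / pathlib.Path("FDevShipyard").with_suffix(".csv")
    print(localPath)  # -> data/FDevShipyard.csv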
Example #8
    def import_prices(self):
        """ Download and import data price data """

        tdb, tdenv = self.tdb, self.tdenv

        # It takes a while to download these files, so we want
        # to record the start time before we download. What we
        # care about is when we downloaded relative to when the
        # files were previously generated.
        startTime = time.time()

        prevImportDate, lastRunDays = self.load_timestamp()
        self.prevImportDate = prevImportDate

        self.download_prices(lastRunDays)
        if tdenv.download:
            return

        # Scan the file for the latest data.
        firstDate = None
        newestDate = prevImportDate
        numNewLines = 0
        minLen = len(prevImportDate) + 10
        dateRe = ImportPlugin.dateRe
        lastStn = None
        updatedStations = set()
        tdenv.DEBUG0("Reading prices data")
        with open(self.filename, "rUb") as fh:
            # skip the shebang.
            firstLine = fh.readline().decode(encoding="utf-8")
            self.check_shebang(firstLine, False)
            importDate = self.importDate

            lineNo = 0
            while True:
                lineNo += 1
                try:
                    line = next(fh)
                except StopIteration:
                    break
                try:
                    line = line.decode(encoding="utf-8")
                except UnicodeDecodeError as e:
                    try:
                        line = line.decode(encoding="latin1")
                        line = line.encode("utf-8")
                        line = line.decode()
                    except UnicodeDecodeError:
                        raise DecodingError(
                            "{} line {}: "
                            "Invalid (unrecognized, non-utf8) character "
                            "sequence: {}\n{}".format(
                                self.filename,
                                lineNo,
                                str(e),
                                line,
                            )) from None
                    raise DecodingError("{} line {}: "
                                        "Invalid (latin1, non-utf8) character "
                                        "sequence:\n{}".format(
                                            self.filename,
                                            lineNo,
                                            line,
                                        ))
                if line.startswith('@'):
                    lastStn = line[2:line.find('#')].strip()
                    continue
                if not line.startswith(' ') or len(line) < minLen:
                    continue
                m = dateRe.search(line)
                if not m:
                    continue
                date = m.group(1) + ' ' + m.group(2)
                if not firstDate or date < firstDate:
                    firstDate = date
                if date > prevImportDate:
                    updatedStations.add(lastStn)
                    numNewLines += 1
                    if date > newestDate:
                        newestDate = date
                        if date > importDate:
                            raise PluginException(
                                "Station {} has suspicious date: {} "
                                "(newer than the import?)".format(
                                    lastStn, date))

        if numNewLines == 0:
            tdenv.NOTE("No new price entries found.")

        forceParse = self.getOption("force") or self.getOption("skipdl")
        if numNewLines > 0 or forceParse:
            if tdenv.detail:
                print("Date of last import   : {}\n"
                      "Timestamp of import   : {}\n"
                      "Oldest update in file : {}\n"
                      "Newest update in file : {}\n"
                      "Number of new entries : {}\n".format(
                          prevImportDate,
                          importDate,
                          firstDate,
                          newestDate,
                          numNewLines,
                      ))

            numStationsUpdated = len(updatedStations)
            if not tdenv.quiet and numStationsUpdated:
                if len(updatedStations) > 12 and tdenv.detail < 2:
                    updatedStations = list(updatedStations)[:10] + ["..."]
                tdenv.NOTE("{} {} updated:\n{}", numStationsUpdated,
                           "stations" if numStationsUpdated > 1 else "station",
                           ', '.join(updatedStations))

            cache.importDataFromFile(
                tdb,
                tdenv,
                pathlib.Path(self.filename),
            )

        self.save_timestamp(importDate, startTime)
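Note that import_prices() compares timestamps as plain strings (date > prevImportDate, date > newestDate). This is safe as long as all timestamps share the fixed 'YYYY-MM-DD HH:MM:SS' layout, because that layout sorts lexicographically in chronological order:

    >>> "2017-04-30 12:06:45" > "2017-04-29 23:59:59"
    True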
Example #9
    def parseJournalDirList(self):
        """
        parse Journal files
        see: https://forums.frontier.co.uk/showthread.php/275151-Commanders-log-manual-and-data-sample
        """
        tdenv = self.tdenv

        logSysList = {}
        stnSysList = {}
        blkStnList = []
        for filePath in self.filePathList:
            tdenv.NOTE("parsing '{}'", filePath.name)
            aktStation = False
            inMultiCrew = False
            sysCount = stnCount = blkCount = 0
            with filePath.open() as logFile:
                lineCount = 0
                statHeader = True
                for line in logFile:
                    lineCount += 1
                    if inMultiCrew:
                        # ignore all events in multicrew
                        continue
                    try:
                        # parse the json-event-line of the journal
                        event = json.loads(line)
                        logDate = datetime.strptime(
                            event["timestamp"],
                            "%Y-%m-%dT%H:%M:%SZ").replace(tzinfo=timezone.utc)
                        if statHeader:
                            # check the first line
                            statHeader = False
                            if event["event"] == "Fileheader":
                                if "beta" in event["gameversion"].lower():
                                    # don't parse data from beta versions
                                    tdenv.NOTE("Ignoring Beta-Version.")
                                    break
                                # ignore the header-event
                                continue
                            else:
                                # don't stop parsing if it's not the header-line
                                tdenv.WARN(
                                    "Doesn't seem do be a FDEV Journal file")
                        if event["event"] == "JoinACrew":
                            inMultiCrew = True
                        if event["event"] == "QuitACrew":
                            inMultiCrew = False
                        if event["event"] == "FSDJump":
                            sysCount += 1
                            sysDate = logDate
                            sysName = event["StarSystem"]
                            sysPosA = event["StarPos"]
                            sysPosX, sysPosY, sysPosZ = sysPosA
                            sysPosX = snapToGrid32(sysPosX)
                            sysPosY = snapToGrid32(sysPosY)
                            sysPosZ = snapToGrid32(sysPosZ)
                            tdenv.DEBUG0("  SYSTEM: {} {} {} {} {}", sysDate,
                                         sysName, sysPosX, sysPosY, sysPosZ)
                            logSysList[sysName] = (sysPosX, sysPosY, sysPosZ,
                                                   sysDate)
                        if event["event"] == "Location":
                            if event.get("Docked", False):
                                event["event"] = "Docked"
                                tdenv.DEBUG0(
                                    "   EVENT: Changed Location to Docked")
                        if event["event"] == "Docked":
                            stnCount += 1
                            sysName = event["StarSystem"]
                            stnList = stnSysList.get(sysName, None)
                            if not stnList:
                                stnList = stnSysList[sysName] = {}
                            stnDate = logDate
                            stnName = event["StationName"]
                            lsFromStar = event.get("DistFromStarLS", 0)
                            if lsFromStar > 0:
                                lsFromStar = int(lsFromStar + 0.5)
                            stnType = event.get("StationType", None)
                            if stnType:
                                # conclusions from the station type
                                stnPlanet = "Y" if stnType == "SurfaceStation" else "N"
                                stnPadSize = "M" if stnType.startswith(
                                    "Outpost") else "L"
                            else:
                                stnPlanet = "?"
                                stnPadSize = "?"
                            tdenv.DEBUG0(
                                " STATION: {} {}/{} {}ls Plt:{} Pad:{}",
                                stnDate, sysName, stnName, lsFromStar,
                                stnPlanet, stnPadSize)
                            stnList[stnName] = (lsFromStar, stnPlanet,
                                                stnPadSize, stnDate)
                            aktStation = True
                            sysPosA = event.get("StarPos", None)
                            if sysPosA:
                                # we got system data inside a docking event
                                # use it (changed Location or maybe EDDN capture)
                                sysCount += 1
                                sysDate = logDate
                                sysPosX, sysPosY, sysPosZ = sysPosA
                                sysPosX = snapToGrid32(sysPosX)
                                sysPosY = snapToGrid32(sysPosY)
                                sysPosZ = snapToGrid32(sysPosZ)
                                tdenv.DEBUG0("  SYSTEM: {} {} {} {} {}",
                                             sysDate, sysName, sysPosX,
                                             sysPosY, sysPosZ)
                                logSysList[sysName] = (sysPosX, sysPosY,
                                                       sysPosZ, sysDate)
                        if event["event"] == "MarketSell" and aktStation:
                            # check for BlackMarket
                            if event.get("BlackMarket", False):
                                stnBlackMarket = (sysName, stnName)
                                if stnBlackMarket not in blkStnList:
                                    tdenv.DEBUG0("B/MARKET: {}/{}", sysName,
                                                 stnName)
                                    blkCount += 1
                                    blkStnList.append(stnBlackMarket)
                        if event["event"] == "Undocked":
                            aktStation = False
                    except Exception as e:
                        raise PluginException(
                            "Something wrong with line {}: {}".format(
                                lineCount, e)) from e

            tdenv.NOTE(
                "Found {} System{}, {} Station{} and {} BlackMarket{}.",
                sysCount,
                "" if sysCount == 1 else "s",
                stnCount,
                "" if stnCount == 1 else "s",
                blkCount,
                "" if blkCount == 1 else "s",
            )
        self.sysList = logSysList
        self.stnList = stnSysList
        self.blkList = blkStnList
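Journal files are JSON lines, one event per line. A hand-made FSDJump line containing only the fields the parser above actually reads (system name and position borrowed from the netLog sample in the next example):

    import json
    line = ('{"timestamp":"2017-04-30T12:06:45Z","event":"FSDJump",'
            '"StarSystem":"Caelinus","StarPos":[0.188,-18.625,52.063]}')
    event = json.loads(line)
    print(event["event"], event["StarSystem"], event["StarPos"])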
Example #10
    def parseLogDirList(self):
        """
        parse netLog files
        """
        # HEADER: 16-07-02-00:18 Mitteleuropäische Sommerzeit  (22:18 GMT) - part 1
        # SYSTEM: {00:20:24} System:"Caelinus" StarPos:(0.188,-18.625,52.063)ly  NormalFlight
        # or (since 2.3)
        # HEADER: ============================================
        # HEADER: Logs/netLog.170430120645.01.log (part 1)
        # HEADER: 2017-04-30 12:06 Mitteleuropäische Sommerzeit
        # HEADER: ============================================
        # SYSTEM: {10:13:33GMT 407.863s} System:"Huokang" StarPos:(-12.188,35.469,-25.281)ly  NormalFlight
        tdb, tdenv = self.tdb, self.tdenv
        optShow = self.getOption("show")

        oldHeadRegEx = re.compile(
            r"^(?P<headDateTime>\d\d-\d\d-\d\d-\d\d:\d\d)\s+(?P<headTZName>.*[^\s])\s+(?P<headTimeGMT>\(.*GMT\))"
        )
        newHeadRegEx = re.compile(
            r"^(?P<headDateTime>\d\d\d\d-\d\d-\d\d\s+\d\d:\d\d)\s+(?P<headTZName>.*[^\s])"
        )

        sysRegEx = re.compile(
            r'^\{[^\}]+\}\s+System:"(?P<sysName>[^"]+)".*StarPos:\((?P<sysPos>[^)]+)\)ly'
        )
        dateRegEx = re.compile(r'^\{(?P<logTime>\d\d:\d\d:\d\d)')

        def calcSeconds(h=0, m=0, s=0):
            return 3600 * h + 60 * m + s

        sysCount = 0
        logSysList = {}
        for filePath in self.filePathList:
            tdenv.NOTE("parsing '{}'", filePath.name)
            oldCount = sysCount
            with filePath.open() as logFile:
                headDate, headMatch = None, None
                lineCount = 0
                statHeader = True
                for line in logFile:
                    lineCount += 1
                    line = line.strip('\r\n')
                    if statHeader:
                        # parse header line to get the date and timezone
                        tdenv.DEBUG0(
                            " HEADER: {}",
                            line.replace("{", "{{").replace("}", "}}"))
                        if lineCount == 1:
                            # old format
                            headMatch = oldHeadRegEx.match(line)
                            timeFormat = '%y-%m-%d-%H:%M'
                        if lineCount == 3:
                            # new format since 2.3
                            headMatch = newHeadRegEx.match(line)
                            timeFormat = '%Y-%m-%d %H:%M'
                        if headMatch:
                            headDate = headMatch.group('headDateTime')
                            headTZName = headMatch.group('headTZName')
                            if headTZName == _time.tzname[1]:
                                # daylight saving time
                                headTZInfo = timedelta(seconds=-_time.altzone)
                            else:
                                # normal time
                                headTZInfo = timedelta(seconds=-_time.timezone)
                            tdenv.DEBUG0(" HEADER: Date {}".format(headDate))
                            tdenv.DEBUG0(
                                " HEADER: TZInfo {}".format(headTZInfo))
                            try:
                                # convert it into something usable
                                headDate = datetime.fromtimestamp(
                                    _time.mktime(
                                        _time.strptime(headDate, timeFormat)),
                                    timezone(headTZInfo))
                            except Exception:
                                headDate = None
                        if not headDate:
                            if lineCount > 3:
                                raise PluginException(
                                    "Doesn't seem do be a FDEV netLog file")
                        else:
                            statHeader = False
                            if lineCount == 3:
                                # new format since 2.3, switch to UTC
                                headDate = headDate.astimezone()
                            tdenv.DEBUG0("   DATE: {}", headDate)
                            headSecs = calcSeconds(headDate.hour,
                                                   headDate.minute,
                                                   headDate.second)
                            lastDate = logDate = headDate
                            lastSecs = logSecs = headSecs
                    else:
                        tdenv.DEBUG1(
                            "LOGLINE: {}",
                            line.replace("{", "{{").replace("}", "}}"))
                        # check every line for new time to enhance the lastDate
                        # use time difference because of different timezone usage
                        logTimeMatch = dateRegEx.match(line)
                        if logTimeMatch:
                            h, m, s = logTimeMatch.group('logTime').split(":")
                            logSecs = calcSeconds(int(h), int(m), int(s))
                            logDiff = logSecs - lastSecs
                            if logDiff < 0:
                                # it's a new day
                                logDiff += 86400
                            logDate = lastDate + timedelta(seconds=logDiff)
                            tdenv.DEBUG1("LOGDATE: {}", logDate)

                        sysMatch = sysRegEx.match(line)
                        if sysMatch:
                            # we found a system, yeah
                            sysDate = logDate
                            sysName = sysMatch.group('sysName')
                            sysPos = sysMatch.group('sysPos')
                            sysPosX, sysPosY, sysPosZ = sysPos.split(',')
                            sysPosX = snapToGrid32(sysPosX)
                            sysPosY = snapToGrid32(sysPosY)
                            sysPosZ = snapToGrid32(sysPosZ)
                            tdenv.DEBUG0(" SYSTEM: {} {} {} {} {}", sysDate,
                                         sysName, sysPosX, sysPosY, sysPosZ)
                            logSysList[sysName] = (sysPosX, sysPosY, sysPosZ,
                                                   sysDate)

                        lastDate = logDate
                        lastSecs = logSecs
            sysCount = len(logSysList)
            tdenv.NOTE("Found {} System(s).", sysCount - oldCount)

        if not optShow:
            try:
                idNetLog = tdb.lookupAdded(self.ADDED_NAME)
            except KeyError:
                tdenv.WARN("Entry '{}' not found in 'Added' table.",
                           self.ADDED_NAME)
                tdenv.WARN("Trying to add it myself.")
                db = tdb.getDB()
                cur = db.cursor()
                cur.execute(
                    """
                    INSERT INTO Added(name) VALUES(?)
                    """, [self.ADDED_NAME])
                db.commit()
                tdenv.NOTE("Export Table 'Added'")
                _, path = csvexport.exportTableToFile(tdb, tdenv, "Added")

        addCount = 0
        oldCount = 0
        newCount = 0
        for sysName in logSysList:
            sysPosX, sysPosY, sysPosZ, sysDate = logSysList[sysName]
            utcDate = sysDate.astimezone(
                timezone.utc).strftime('%Y-%m-%d %H:%M:%S')
            tdenv.DEBUG0("log system '{}' ({}, {}, {}, '{}')", sysName,
                         sysPosX, sysPosY, sysPosZ, utcDate)
            if sysName.upper() in self.ignoreSysNames:
                tdenv.NOTE("Ignoring system: '{}'", sysName)
                continue
            systemTD = tdb.systemByName.get(sysName.upper(), None)
            if systemTD:
                # we already know the system, check coords
                tdenv.DEBUG0("Old system '{}' ({}, {}, {})", systemTD.dbname,
                             systemTD.posX, systemTD.posY, systemTD.posZ)
                oldCount += 1
                if not (systemTD.posX == sysPosX and systemTD.posY == sysPosY
                        and systemTD.posZ == sysPosZ):
                    tdenv.WARN("System '{}' has different coordinates:",
                               sysName)
                    tdenv.WARN("   database: {}, {}, {}", systemTD.posX,
                               systemTD.posY, systemTD.posZ)
                    tdenv.WARN("     netlog: {}, {}, {}", sysPosX, sysPosY,
                               sysPosZ)
            else:
                # new system
                tdenv.NOTE("New system '{}' ({}, {}, {}, '{}')", sysName,
                           sysPosX, sysPosY, sysPosZ, utcDate)
                newCount += 1
                if not optShow:
                    tdb.addLocalSystem(sysName.upper(),
                                       sysPosX,
                                       sysPosY,
                                       sysPosZ,
                                       added=self.ADDED_NAME,
                                       modified=utcDate,
                                       commit=False)
                    addCount += 1

        tdenv.NOTE("Found {:>3} System(s) altogether.", sysCount)
        if oldCount:
            tdenv.NOTE("      {:>3} old", oldCount)
        if newCount:
            tdenv.NOTE("      {:>3} new", newCount)
        if addCount:
            tdenv.NOTE("      {:>3} added", addCount)
            tdb.getDB().commit()
            tdenv.NOTE("Export Table 'System'")
            _, path = csvexport.exportTableToFile(tdb, tdenv, "System")