Example no. 1
0
def loaddata(data, store, path):
    '''Loads data given to it'''
    datakeys = data.keys()
    for key in loadlist:
        if key in datakeys:
            id = data[key]['description']['identifier']
            if not blacklistcheck(settings.settings['recipeBlacklist'], key):
                outcome = data[key][loadlist[key]]
                dirs.makedir(dirs.tempDir + "\\" + dirs.datasetFolder)  # make temp folder
                if isinstance(outcome, list):  # outcome is a list of dicts
                    items = [item['item'] for item in outcome]
                    store.append(items)
                    document(id, ",".join(items), path)
                elif isinstance(outcome, dict):  # outcome is a single dict
                    store.append(outcome)
                    document(id, outcome['item'], path)
                elif isinstance(outcome, str):  # outcome is a plain string
                    store.append(outcome)
                    document(id, outcome, path)
            else:  # blacklisted recipe: run the check once, not twice
                print("Skipped blacklisted recipe " + id)
            debug(str(len(outcomes)) + "/" +
                  str(dirs.countDir(dirs.dataDir + "\\" + dirs.datasetFolder +
                                    "\\recipes")) + " outcomes loaded.")
Example no. 2
0
def journal_entry(cmdr, is_beta, system, station, entry, state):
    '''
    Process a new journal entry.
    '''
    # capture some stats when we launch
    # not ready for that yet
    startup_stats(cmdr)

    if "SystemFaction" in entry:
        ''' "SystemFaction": { "Name":"Mob of Eranin", "FactionState":"CivilLiberty" } '''
        SystemFaction = entry.get("SystemFaction")
        # debug(SystemFaction)
        try:
            this.SysFactionState = SystemFaction["FactionState"]
        except (KeyError, TypeError):
            this.SysFactionState = None
        debug("SysFaction's state is " + str(this.SysFactionState))

    if "SystemAllegiance" in entry:

        SystemAllegiance = entry.get("SystemAllegiance")
        debug(SystemAllegiance)
        try:
            this.SysFactionAllegiance = SystemAllegiance
        except:
            this.SysFactionAllegiance = None
        debug("SysFaction's allegiance is" + str(this.SysFactionAllegiance))

    if "DistFromStarLS" in entry:
        '''"DistFromStarLS":144.821411'''
        try:
            this.DistFromStarLS = entry.get("DistFromStarLS")
        except:
            this.DistFromStarLS = None
        debug("DistFromStarLS=" + str(this.DistFromStarLS))

    if entry.get("event") == "FSDJump":
        Systems.storeSystem(system, entry.get("StarPos"))
        this.DistFromStarLS = None
        this.body = None

    if 'Body' in entry:
        this.body = entry['Body']
        debug(this.body)

    if entry["event"] == "JoinedSquadron":
        Squadronsend(cmdr, entry["SquadronName"])

    if system:
        x, y, z = Systems.edsmGetSystem(system)
    else:
        x = None
        y = None
        z = None

    return journal_entry_wrapper(
        cmdr, is_beta, system, this.SysFactionState, this.SysFactionAllegiance,
        this.DistFromStarLS, station, entry, state, x, y, z, this.body_name,
        this.nearloc['Latitude'], this.nearloc['Longitude'],
        this.client_version)
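For reference, a minimal FSDJump entry carrying the fields this handler reads might look like the following; the values are hypothetical, but the keys match the journal fragments quoted in the docstrings above.

entry = {
    "event": "FSDJump",
    "StarSystem": "Eranin",  # hypothetical sample values throughout
    "StarPos": [-22.84375, 36.53125, -1.18750],
    "SystemAllegiance": "Independent",
    "SystemFaction": {"Name": "Mob of Eranin", "FactionState": "CivilLiberty"},
    "Body": "Eranin 2",
}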
Example no. 3
0
def scramble():
    '''Scramble recipes'''
    print("Scrambling data... ", end="", flush=True)
    for file in os.listdir(dirs.tempDir + dirs.datasetFolder + "\\recipes"):
        filepath = dirs.tempDir + dirs.datasetFolder + "\\recipes\\" + file
        deletefile = False
        with open(filepath, "r") as json_file:
            data = json.load(json_file)
            datakeys = data.keys()
        for key in datakeys:  # check whether the file is blacklisted
            if blacklistcheck(settings.settings['recipeBlacklist'], key):
                deletefile = True
                break
        if deletefile:
            os.remove(filepath)
            debug("Removed " + filepath)
            continue
        with open(filepath, "w+") as json_file:
            json.dump(scrambledata(data, scrambles, dirs.randomized),
                      json_file,
                      indent=4)
    print("Scrambled " + str(len(scrambles)) + " outcomes.")
Example no. 4
0
    def run(self):

        debug("sending gSubmitCodex")
        # let requests build and encode the query string; hand-concatenated
        # .encode('utf8') values would serialise as b'...' under Python 3
        params = {
            "cmdrName": self.cmdr,
            "system": self.system,
            "body": self.body,
            "x": self.x,
            "y": self.y,
            "z": self.z,
            "latitude": self.lat,
            "longitude": self.lon,
            "entryid": self.entry.get("EntryID"),
            "name": self.entry.get("Name"),
            "name_localised": self.entry.get("Name_Localised"),
            "category": self.entry.get("Category"),
            "category_localised": self.entry.get("Category_Localised"),
            "sub_category": self.entry.get("SubCategory"),
            "sub_category_localised": self.entry.get("SubCategory_Localised"),
            "region_name": self.entry.get("Region"),
            "region_name_localised": self.entry.get("Region_Localised"),
            "is_beta": self.is_beta,
        }
        url = "https://us-central1-canonn-api-236217.cloudfunctions.net/submitCodex"

        r = requests.get(url, params=params)
        debug(r.url)

        if r.status_code != requests.codes.ok:
            error("gSubmitCodex {} ".format(r.url))
            error(r.status_code)
            error(r.json())
Example no. 5
0
def scrambledata(data, store, path):
    '''Scrambles data given to it'''
    datakeys = data.keys()
    new_outcome = random.choice(outcomes)
    for key in loadlist:
        if key in datakeys:
            id = data[key]['description']['identifier']
            data[key][loadlist[key]] = new_outcome
            if isinstance(new_outcome, list):  # outcome is a list of dicts
                items = []
                for item in new_outcome:
                    try:
                        items.append(item['item'])
                    except (TypeError, KeyError):  # entry is a plain string
                        items.append(item)
                store.append(items)
                document(id, ",".join(items), path)
            elif isinstance(new_outcome, dict):  # outcome is a single dict
                store.append(new_outcome['item'])
                document(id, new_outcome['item'], path)
            elif isinstance(new_outcome, str):  # outcome is a string
                store.append(new_outcome)
                document(id, new_outcome, path)
            outcomes.remove(new_outcome)
            debug("Scrambled: " + str(len(scrambles)))
    return data
Example no. 6
0
    def aphelion(self, tag, major, eccentricity):
        # aphelion distance from the focus = a * (1 + e)
        a = float(self.light_seconds(tag, major))
        e = float(eccentricity or 0)
        aphelion = a * (1 + e)
        debug("aphelion {}ls".format(aphelion))

        return aphelion
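Since the same semi-major axis yields both apsides, the matching perihelion is a one-liner; a minimal companion sketch (hypothetical method, reusing the light_seconds conversion above):

    def perihelion(self, tag, major, eccentricity):
        # perihelion distance from the focus = a * (1 - e)
        a = float(self.light_seconds(tag, major))
        e = float(eccentricity or 0)
        return a * (1 - e)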
Example no. 7
0
def setdatasetFolder(name):
    '''Set the dataset folder to use'''
    global datasetFolder
    global randomized
    global vanilla
    datasetFolder = "\\" + name
    debug("updated datasetFolder to " + datasetFolder)
    randomized = tempDir + datasetFolder + "\\randomized.txt"
    vanilla = tempDir + datasetFolder + "\\vanilla.txt"
Example no. 8
0
def Squadronsend(CMDR, entry):
    if this.SQNag == 0:
        debug("SQName need to be sended")
        url = "https://docs.google.com/forms/d/e/1FAIpQLScZvs3MB2AK6pPwFoSCpdaarfAeu_P-ineIhtO1mOPgr09q8A/formResponse?usp=pp_url"
        url += "&entry.558317192=" + quote_plus(CMDR)
        url += "&entry.1042067605=" + quote_plus(entry)
        this.SQNag = this.SQNag + 1

        legacy.Reporter(url).start()
Example no. 9
0
def Easter_Egs(entry):
    if this.AllowEasternEggs:
        debug("Easter Check")
        # only react to hull damage on the player's own ship, not a fighter
        if (entry['event'] == "HullDamage" and entry['PlayerPilot']
                and not entry["Fighter"]):
            if entry['Health'] < 0.3:
                debug("playing sound")
                Player(this.plugin_dir, ["sounds\\hullAlarm.wav"]).start()
Example no. 10
0
    def merge_poi(self, hud_category, english_name, body):
        debug("Merge POI")
        found = False
        for poi in self.poidata:
            if poi.get("english_name") == english_name and poi.get("hud_category") == hud_category:
                if body not in poi.get("body").split(','):
                    poi["body"] = "{},{}".format(poi.get("body"), body)
                found = True
        if not found:
            self.poidata.append({"hud_category": hud_category, "english_name": english_name, "body": body})
Example no. 11
0
def load_file():
    """
    Load the URL map file.
    """
    try:
        urlmap = unserialize_object(FILEPATH)
        return urlmap
    except SerializeError:
        debug("search", sys.exc_info())

    return None
Example no. 12
0
def main():
    """
    main function; import historical data
    """
    debug.stdout("Startup import historical covidstats to influxdb ...")
    config = configparser.ConfigParser()
    config.sections()
    config.read('coviddata.ini')

    parser = argparse.ArgumentParser(description='Options for historical data import', allow_abbrev=False)
    datafolder = config['ages']['data_folder']
    bundeslaender = json.loads(config['ages']['bundeslaender'])
    filename = "CovidFaelle_Timeline.csv"

    # add opts
    parser.add_argument('--from-date', help="Date to start import from (Format: dd.mm.yyyy) - this date is inclusive", default=False, action='store')
    parser.add_argument('--to-date', help="Date used to import to (Format: dd.mm.yyyy) - this date is inclusive", default=False, action='store')

    parser.print_help()

    args = parser.parse_args()

    if not args.from_date:
        parser.error("Option --from-date needs to be set")

    if not args.to_date:
        parser.error("Option --to-date needs to be set")

    try:
        fromdate = datetime.strptime(args.from_date, "%d.%m.%Y")
        todate = datetime.strptime(args.to_date, "%d.%m.%Y")
    except ValueError as err:
        sys.exit(err)

    delta = todate - fromdate

    if os.path.isfile(datafolder + "/" + filename):
        print("Start processing file " + filename)
        print("I will import data for federal states " + str(bundeslaender) +
              " between " + str(fromdate) + " and " + str(todate))

        for i in range(delta.days + 1):
            day = fromdate + timedelta(days=i)
            print("Parsing "+filename+" now with date "+str(day))
            covid_data = parse_faelle_timeline_csv(datafolder, filename, bundeslaender, day)

            if config['debug']['debug'] == 'yes':
                debug.debug(covid_data)
            endpoint_influxdb.insert_influxdb(config,covid_data,'cases')

    else:
        sys.exit(filename+" does not exist in data folder - EXIT!")

    debug.stdout("Shutdown import historical covidstats to influxdb ...")
Example no. 13
0
    def getReportTypes(self, id):
        if not codexEmitter.reporttypes.get(id):
            url = "{}/reporttypes?journalID={}&_limit=1000".format(self.getUrl(), id)
            debug(url)
            r = requests.get("{}/reporttypes?journalID={}&_limit=1000".format(self.getUrl(), id))
            if r.status_code == requests.codes.ok:

                for exc in r.json():
                    codexEmitter.reporttypes["{}".format(exc["journalID"])] = {"endpoint": exc["endpoint"],
                                                                               "location": exc["location"],
                                                                               "type": exc["type"]}

            else:
                error("error in getReportTypes")
Example no. 14
0
def Alegiance_get(CMDR, SQ_old):

    global SQ
    if CMDR != this.CMDR:
        debug("Community Check started")
        url = "https://docs.google.com/spreadsheets/d/e/2PACX-1vTXE8HCavThmJt1Wshy3GyF2ZJ-264SbNRVucsPUe2rbEgpm-e3tqsX-8K2mwsG4ozBj6qUyOOd4RMe/pub?gid=1832580214&single=true&output=tsv"
        with closing(requests.get(url, stream=True)) as r:
            try:
                # r.content is bytes; try the raw lines first
                reader = csv.reader(r.content.splitlines(), delimiter='\t')
                next(reader)
            except Exception:
                reader = csv.reader(r.content.decode('utf-8').splitlines(),
                                    delimiter='\t')
                next(reader)

            for row in reader:

                cmdr, squadron, SQID = row

                if cmdr == CMDR:
                    SQ = SQID
                    debug("your SQID is " + str(SQ))

        if SQ is not None:
            debug("SQ ID IS OK")
            this.CMDR = CMDR
            Discord.SQID_set(SQ)
            this.patrol.SQID_set(SQ)  # pass the squadron data on to the other
            # modules; use this call as a template
            return SQ
        else:
            if this.Nag == 0:
                debug("SQID need to be instaled")
                url = "https://docs.google.com/forms/d/e/1FAIpQLSeERKxF6DlrQ3bMqFdceycSlBV0kwkzziIhYD0ctDzrytm8ug/viewform?usp=pp_url"
                url += "&entry.42820869=" + quote_plus(CMDR)
                this.Nag = this.Nag + 1
                debug("SQID " + str(url))
                webbrowser.open(url)
    else:
        return SQ_old
Example no. 15
0
    def run(self):

        self.getExcluded()

        # is this a codex entry and do we want to record it?
        if not codexEmitter.excludecodices.get(self.entry.get("Name").lower()) and not self.entry.get(
                "Category") == '$Codex_Category_StellarBodies;':
            self.getReportTypes(self.entry.get("EntryID"))
            url = self.getUrl()

            modules.emitter.post("https://us-central1-canonn-api-236217.cloudfunctions.net/postCodex",
                                {
                                    "cmdr": self.cmdr,
                                    "beta": self.is_beta,
                                    "system": self.system,
                                    "x": self.x,
                                    "y": self.y,
                                    "z": self.z,
                                    "entry": self.entry,
                                    "body": self.body,
                                    "lat": self.lat,
                                    "lon": self.lon,
                                    "client": self.client}
                                )

            jid = self.entry.get("EntryID")
            reportType = codexEmitter.reporttypes.get(str(jid))

            if reportType:
                debug(reportType)
                if reportType.get("location") == "body":
                    payload = self.getBodyPayload(reportType.get("type"))
                    self.modelreport = reportType.get("endpoint")
                else:
                    payload = self.getSystemPayload(reportType.get("type"))
                    self.modelreport = reportType.get("endpoint")
            else:
                payload = self.getCodexPayload()
                self.modelreport = "reportcodices"

            debug("Send Reports {}/{}".format(url, self.modelreport))

            self.send(payload, url)
Example no. 16
0
    def run(self):
        if self.modelreport and self.system:
            payload = self.setPayload()
            payload["userType"] = 'pc'
            payload["reportType"] = 'new'
            payload["reportStatus"] = 'pending'
            payload["type"] = self.gstype
            payload["systemAddress"] = self.entry.get("SystemAddress")
            payload["bodyName"] = self.body
            payload["latitude"] = self.lat
            payload["longitude"] = self.lon
            payload["reportComment"] = json.dumps(self.entry, indent=4)
            payload["frontierID"] = self.index

            url = self.getUrl()
            debug(payload)

            debug(url)
            self.send(payload, url)
Example no. 17
0
    def __init__(self, cmdr, is_beta, system, x, y, z, entry, body, lat, lon, client):

        Emitter.__init__(self, cmdr, is_beta, system, x, y, z, entry, entry.get("BodyName"), entry.get("Latitude"),
                         entry.get("Longitude"), client)

        example = {"timestamp": "2019-10-10T10:23:32Z",
                   "event": "ApproachSettlement",
                   "Name": "$Ancient_Tiny_003:#index=1;", "Name_Localised": "Guardian Structure",
                   "SystemAddress": 5079737705833,
                   "BodyID": 25, "BodyName": "Synuefe LY-I b42-2 C 2",
                   "Latitude": 52.681084, "Longitude": 115.240822}

        example = {
            "timestamp": "2019-10-10T10:21:36Z",
            "event": "ApproachSettlement",
            "Name": "$Ancient:#index=2;", "Name_Localised": "Ancient Ruins (2)",
            "SystemAddress": 5079737705833,
            "BodyID": 25, "BodyName": "Synuefe LY-I b42-2 C 2",
            "Latitude": -10.090128, "Longitude": 114.505409}

        if ":" in entry.get("Name"):
            prefix, suffix = entry.get("Name").split(':')
            self.index = self.get_index(entry.get("Name"))

            self.modelreport = None

            if prefix:
                prefix = prefix.lower()[1:]
                debug("prefix {}".format(prefix))
                if prefix in guardianSites.gstypes:
                    # This is a guardian structure
                    # self.gstype = guardianSites.gstypes.get(prefix)
                    self.gstype = prefix
                    debug("gstype {} {}".format(prefix, self.gstype))
                    self.modelreport = 'gsreports'
                if prefix == 'ancient':
                    # this is a guardian ruin
                    # self.gstype = 1
                    self.gstype = 'Unknown'
                    self.modelreport = 'grreports'
Example no. 18
0
    def visualise(self):

        debug("visualise")
        # we may want to try again if the data hasn't been fetched yet
        if CodexTypes.waiting:
            debug("Still waiting");
        else:

            self.set_image("Geology", False)
            self.set_image("Cloud", False)
            self.set_image("Anomaly", False)
            self.set_image("Thargoid", False)
            self.set_image("Biology", False)
            self.set_image("Guardian", False)
            self.set_image("Human", False)
            self.set_image("Ring", False)
            self.set_image("None", False)
            self.set_image("Other", False)
            self.set_image("Planets", False)
            self.set_image("Tourist", False)

            if self.poidata:
                self.grid()
                self.visible()
                for r in self.poidata:
                    debug(r)
                    self.set_image(r.get("hud_category"), True)
            else:
                self.grid()
                self.grid_remove()
Example no. 19
0
    def split_nearest_destination(self, nearest_destination):

        # abort if no index
        if not "index" in nearest_destination:
            return None, None

        ndarray = []
        signal_type = None

        ndarray = nearest_destination.split('#')
        if len(ndarray) == 2:
            dummy, c = nearest_destination.split('#')
            dummy, index_id = c.split("=")
            index_id = index_id[:-1]
        else:
            dummy, b, c = ndarray
            dummy, signal_type = b.split("=")
            dummy, index_id = c.split("=")
            signal_type = signal_type[:-1]
            index_id = index_id[:-1]
        debug("signal {} index {}".format(signal_type, index_id))
        return signal_type, index_id
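A quick illustration of the two accepted shapes (hypothetical strings; the one-segment form matches the `$Ancient...:#index=n;` names in the ApproachSettlement samples earlier in this collection):

# assuming `s` is an instance of the class above
s.split_nearest_destination("$Ancient:#index=2;")
# -> (None, "2")
s.split_nearest_destination("$Fixed_Event_Life_Cloud:#type=Cloud;#index=3;")
# -> ("Cloud", "3")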
Example no. 20
0
def spider():
    """
    Spider program entry point.
    """
    global opt, conf
    print("spider normal loading. ")
    spider_usage(opt)
    spider_version(opt)
    if opt.dbg:
        debug("spider entry debug mode.")
        debug("spider normal loading.")
        debug("options", opt.url, opt.deep, opt.pages, opt.thread_number)
    try:
        spider_conf(opt, conf)
        spider_stop(conf)
        if opt.dbg:
            debug("spider normal quit.")
        spider_result()
    except Exception:
        debug("Spider Exception", sys.exc_info())

    logger.info("spider normal stop.")
Example no. 21
0
    def light_seconds(self, tag, value):
        debug("light seconds {} {}".format(tag, value))
        if tag in ("distanceToArrival", "DistanceFromArrivalLS"):
            return value

        # Things measured in metres (journal values)
        if tag in ("Radius", "SemiMajorAxis"):
            # one light second is ~299,792,000 metres
            return value / 299792000

        # Things measured in kilometres (EDSM values)
        if tag == "radius":
            return value * 1000 / 299792000

        # Things measured in astronomical units: 1 AU = 499.005 ls
        if tag == "semiMajorAxis":
            return value * 499.005

        # Things measured in solar radii: 1 solar radius = 2.32061 ls
        if tag == "solarRadius":
            return value * 2.32061
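A minimal sanity check of the conversion factors, assuming the metre-based tags divide as above (`body` here stands for any instance of the class):

assert body.light_seconds("DistanceFromArrivalLS", 144.8) == 144.8  # already in ls
assert round(body.light_seconds("SemiMajorAxis", 299792000)) == 1   # metres -> ls
assert body.light_seconds("semiMajorAxis", 1) == 499.005            # AU -> ls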
Example no. 22
0
    def _crawler_update_url(self):
        """
        Update the current list of URLs to download.
        """
        logger.info("crawler_update_url normal loading.")
        if self.opt.dbg:
            debug("crawler_update_url normal loading.")
            debug("url_pages", self.conf.url_pages)
        url_new = []
        for s in self.conf.url_pages:
            url_new += get_urls(s)
        self.conf.url_current = list(set(url_new) -
                                     set(self.conf.url_success) -
                                     set(self.conf.url_failed))
        if self.opt.dbg:
            debug("url_current", self.conf.url_current)
        logger.info("crawler_update_url normal quit.")
Example no. 23
0
def spider_stop(conf):
    """
    Stop the program.
    """
    if opt.dbg:
        debug("spider_stop normal loading.")
        #debug("url_map", conf.url_map)
    try:
        if conf.url_map:
            serialize_object(conf.url_file, conf.url_map)
    except SerializeError:
        logger.debug("SerializeError")
        debug("SerializeError", sys.exc_info())
    conf.event.set()
    for x in conf.thread_pool:
        if x.is_alive():
            x.join(30)
    if opt.dbg:
        debug("spider_stop normal quit.")
    logger.info("spider_stop normal finish.")
Example no. 24
0
def main():
    """
    main function
    """
    debug.stdout("covidstats application startup ...")
    config = configparser.ConfigParser()
    config.sections()
    config.read('coviddata.ini')

    # log
    logname = config['log']['name']
    logfolder = config['log']['log_folder']

    #AGES Data
    zipurl = config['ages']['ages_zip_url']
    bezirke = json.loads(config['ages']['bezirke'])
    bundeslaender = json.loads(config['ages']['bundeslaender'])
    datafolder = config['ages']['data_folder']
    zipf = config['ages']['zipf']
    csvf = json.loads(config['ages']['csvf'])

    #Opendata Data
    og_base_url = config['opendata']['od_base_url']
    og_csv_files = json.loads(config['opendata']['og_csv_files'])
    og_data_folder = config['opendata']['og_data_folder']

    #download and get csv data - AGES
    ages_processflag = download_and_read(datafolder, zipurl, zipf, csvf)

    #download and get csv data - OpenData
    og_processflag = og_download(og_base_url, og_csv_files, og_data_folder)

    #process ages csv data
    for name, status in ages_processflag.items():
        if status:
            print("We need to process " + name + " as this is a new file." +
                  str(ages_processflag))
            print("Start parsing file: " + name + " now")
            notification(config, "covidstats: Parsing file: " + name)
            if name == 'CovidFaelle_GKZ.csv':
                covid_data = parse_faelle_csv(datafolder, name, bezirke)
            if name == 'CovidFaelle_Timeline.csv':
                covid_data = parse_faelle_timeline_csv(datafolder, name,
                                                       bundeslaender)

            if config['debug']['debug'] == 'yes':
                debug.debug(covid_data)
            if config['mqtt']['usemqtt'] == 'yes':
                endpoint_mqtt.insert_mqtt(config, covid_data, 'cases')
            if config['influxdb']['useinfluxdb'] == 'yes':
                endpoint_influxdb.insert_influxdb(config, covid_data, 'cases')
        else:
            print(
                "No need to parse " + name +
                ". Hashes match, I have already seen this file. Status of flag: "
                + str(ages_processflag))

    #process opendata csv data
    for name, status in og_processflag.items():
        if status:
            print("We need to process " + name + " as this is a new file." +
                  str(og_processflag))
            print("Start parsing file: " + name + " now")
            if name == 'timeline-eimpfpass.csv':
                covid_data = parse_vac_timeline_eimpfpass_csv(
                    og_data_folder, name, bundeslaender)
            if config['debug']['debug'] == 'yes':
                debug.debug(covid_data)
            if config['mqtt']['usemqtt'] == 'yes':
                endpoint_mqtt.insert_mqtt(config, covid_data, 'vac')
            if config['influxdb']['useinfluxdb'] == 'yes':
                endpoint_influxdb.insert_influxdb(config, covid_data, 'vac')
            notification(config, "covidstats: Parsing file: " + name)
        else:
            print(
                "No need to parse " + name +
                ". Hashes match, I have already seen this file. Status of flag: "
                + str(og_processflag))

    #cleanup
    cleanup(datafolder)
    cleanup(og_data_folder)

    debug.stdout("covidstats application shutdown ...")
Example no. 25
0
def spider_conf(opt, conf):
    """
    Initial spider configuration.
    """
    s1 = datetime.datetime.now()
    d1 = s1.strftime("%Y-%m-%d %H:%M:%S")
    if opt.dbg:
        debug("spider_conf normal loading.")
        debug("spider start time", d1)
    logger.info("spider_conf normal loading")
    try:
        crl = CrawlerConf(opt, conf)
        crl.crawler()
    except CrawlerConfError:
        logger.debug("CrawlerConfError")
        debug("CrawlerConfError", sys.exc_info())
    except Exception:
        logger.debug("spider_conf Exception")
        debug("spider_conf", sys.exc_info())
    s2 = datetime.datetime.now()
    d2 = s2.strftime("%Y-%m-%d %H:%M:%S")
    if opt.dbg:
        debug("spider_conf normal quit.")
        debug("spider end time", d2)
    d = s2 - s1
    conf.time_cost = d.seconds
    logger.info("spider_conf normal finish.")
Example no. 26
0
def test(url):
    # requires `import urllib.request` at module level
    fu = urllib.request.urlopen(url)
    s = fu.read()
    urls = get_urls(s)
    debug(urls)
Example no. 27
0
def dashboard_entry(cmdr, is_beta, entry):
    debug(entry)
    if this.plug_start == 0:
        this.plug_start = 1
        this.fuel = entry["Fuel"]
        this.old_time = datetime.strptime(entry["timestamp"],
                                          "%Y-%m-%dT%H:%M:%SZ")
    try:
        debug("Checking fuel consumption " + str(this.FuelCount))
        if this.FuelCount == 10:
            this.fuel_cons = fuel_consumption(entry, this.fuel, this.old_time,
                                              this.fuel_cons)
            this.old_time = datetime.strptime(entry["timestamp"],
                                              "%Y-%m-%dT%H:%M:%SZ")
            this.fuel = entry["Fuel"]
            this.FuelCount = 0
        else:
            this.FuelCount += 1
    except NameError:
        # can't check fuel consumption yet, still waiting for data
        this.fuel_cons = 0

    # debug("Dashboard update " + str(entry["Fuel"]))

    # decode the dashboard Flags bitmask
    this.landed = bool(entry['Flags'] & 1 << 1)
    this.SCmode = bool(entry['Flags'] & 1 << 4)
    this.SRVmode = bool(entry['Flags'] & 1 << 26)
    this.Fightermode = bool(entry['Flags'] & 1 << 25)
    this.landed = this.landed or this.SRVmode
    if entry['Flags'] & 1 << 21:  # latitude/longitude available
        if 'Latitude' in entry:
            this.nearloc['Latitude'] = entry['Latitude']
            this.nearloc['Longitude'] = entry['Longitude']
    else:
        this.nearloc['Latitude'] = None
        this.nearloc['Longitude'] = None
    if entry.get("BodyName"):
        this.body_name = entry.get("BodyName")
    else:
        this.body_name = None
    debug(this.body_name)
    this.cmdr_SQID = Alegiance_get(cmdr, this.cmdr_SQID)

    # debug(this.cmdr_SQID)

def cmdr_data(data, is_beta):
    '''
    We have new data on our commander
    '''
    # debug("def cmdr_data")
    # CAPIDebug = json.dumps(data, indent=4)
    # debug(CAPIDebug)
    this.patrol.cmdr_data(data, is_beta)
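The bit positions tested in dashboard_entry come from the dashboard's Flags field; naming them makes the mask checks self-describing. A small sketch (the constant names are mine, the values are the ones used above):

FLAG_LANDED = 1 << 1
FLAG_SUPERCRUISE = 1 << 4
FLAG_HAS_LATLONG = 1 << 21
FLAG_IN_FIGHTER = 1 << 25
FLAG_IN_SRV = 1 << 26

def flag_set(flags, mask):
    """Return True if the given dashboard flag bit is set."""
    return bool(flags & mask)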
Example no. 28
0
    def dump(cls):
        for name, coords in cls.systemCache.items():
            debug('"{}":[{},{},{}],'.format(name, coords[0],
                                            coords[1], coords[2]))
Example no. 29
0
def plugin_stop():
    '''
    EDMC is closing
    '''
    debug('Stopping the plugin')
    this.patrol.plugin_stop()
Example no. 30
0
def recent_journal():
    list_of_files = glob.glob(
        os.path.join(config.default_journal_dir, 'journal*.log'))
    latest_file = max(list_of_files, key=os.path.getctime)
    debug("latest journal: {}".format(latest_file))
    return latest_file
Example no. 31
0
    def crawler(self):
        """
        Breadth-first crawl entry point.
        """
        logger.info("crawler normal loading.")
        if self.opt.dbg:
            debug("crawler normal loading.")
        self.conf.url_current.append(self.opt.url)
        if self.opt.dbg:
            debug("root url", self.conf.url_current)
        try:
            depth = 1
            while (depth <= self.opt.deep and self.conf.url_current
                   and self.conf.url_count < self.opt.pages):
                if self.opt.dbg:
                    debug("current depth", depth)
                logger.info("current depth : " + str(depth))
                depth += 1
                self._crawler_download_url()
                self._crawler_update_url()
            if self.opt.dbg:
                debug("crawler normal quit.")
        except Exception:
            if self.opt.dbg:
                debug("crawler abnormal quit.")
                debug("crawler", sys.exc_info())
            raise CrawlerConfError