Пример #1
0
    def __create_start_threads(self):
        """Create and start the application's background worker threads.

        Spins up two queue-draining threads, an auth-status checker and an
        update-service thread, connecting each thread's Qt signal to the
        matching handler.  All threads share ``self.stop_event`` so they can
        be shut down together.
        """
        # Drains the GUI queue (0.05 presumably the poll interval — TODO
        # confirm) and forwards each item to the frame-data handler.
        self.item_widgets_thread = GetFromQueueToSignalThread(
            self.stop_event, 0.05, self.gui_queue)
        self.item_widgets_thread.item_ready.connect(self.__process_frame_data)
        self.item_widgets_thread.start()

        # Same polling mechanism for the cloud RX queue; items refresh the list.
        self.item_widgets_update_thread = GetFromQueueToSignalThread(
            self.stop_event, 0.05, self.cloud_rx_queue)
        self.item_widgets_update_thread.item_ready.connect(self.__update_list)
        self.item_widgets_update_thread.start()

        # Reports authentication state changes to the UI.
        self.auth_status_thread = CheckAuthThread(self.stop_event,
                                                  self.config.config)
        self.auth_status_thread.state_signal.connect(self.set_auth_status)
        self.auth_status_thread.start()

        # Update checker: shows its own update window when one is available.
        self.update_service_thread = Updater(self.stop_event,
                                             self.config.config,
                                             self.application_path)
        self.update_service_thread.to_update.connect(
            self.update_service_thread.show_update_window)
        self.update_service_thread.start()

        # NOTE(review): this button wiring is unrelated to thread startup —
        # confirm it belongs in this method rather than a UI-setup method.
        self.ui.credentialsLoadButton.clicked.connect(
            self.__credentials_file_load_check_propagate)
Пример #2
0
def run_all(server, dt):
    """Run one server-side game tick.

    In the lobby phase (``gameData["waitingToStart"]``) the round starts once
    every connected player (at least one) is ready.  During play each player
    is advanced by an ``Updater``; when all gums are eaten the round ends,
    team scores are tallied and per-player state is reset for the next round.

    :param server: game server facade (``getGame``/``getGumCount``/``reloadGrid``).
    :param dt: delta time for this tick.
    :return: list of grid changes produced this tick; empty in the lobby
        phase (and after a round just ended — see NOTE below).
    """
    # NOTE(review): ``gridUpdates`` is only mutated (.clear()), never
    # rebound, so the ``global`` declaration is not strictly required —
    # confirm intent.
    global gridUpdates
    if gameData["waitingToStart"]:
        if "players" in gameData:
            # Count ready players; start only when all (>= 1) are ready.
            numberOfPlayersReady = 0
            for player in gameData["players"]:
                if gameData["players"][player]["isReady"]:
                    numberOfPlayersReady += 1
            if numberOfPlayersReady == len(
                    gameData["players"]) and numberOfPlayersReady >= 1:
                # Transition lobby -> playing: zero all scores.
                gameData["waitingToStart"] = False
                gameData["isGameFinished"] = False
                gameData["scores"]["pacteam"] = 0
                gameData["scores"]["ghostteam"] = 0
                for player in gameData["players"]:
                    gameData["players"][player]["score"] = 0

    else:
        # Snapshot state so every player's update runs against the same
        # pre-tick data while results are written back to gameData.
        safeData = copy.deepcopy(gameData)
        gridChanges = []
        for player in safeData['players']:
            updater = Updater(player, dt, dt)
            gameData["players"][player], gridChange = updater.run(
                safeData["players"][player], server.getGame())
            if gridChange is not None:
                gridChanges.append(gridChange)
        # Re-snapshot after movement so defeat detection sees updated positions.
        safeData = copy.deepcopy(gameData)
        gameData["players"] = playerDefeat(safeData["players"])
        gameData["gumCount"] = server.getGumCount()
        if gameData["gumCount"] == 0:
            # Round over: back to lobby, tally team scores, reset players.
            gameData["waitingToStart"] = True
            gameData["isGameFinished"] = True
            gridUpdates.clear()
            # NOTE(review): clearing gridChanges means this tick returns an
            # empty list even though changes occurred — confirm intended.
            gridChanges.clear()
            server.reloadGrid()
            for player in gameData["players"]:
                if gameData["players"][player]["isPacman"]:
                    gameData["scores"]["pacteam"] += gameData["players"][
                        player]["score"]
                else:
                    gameData["scores"]["ghostteam"] += gameData["players"][
                        player]["score"]
                gameData["players"][player]["isReady"] = False
                gameData["players"][player]["speed"] = [0, 0]
                gameData["players"][player]["inputs"] = [0, 0]
                gameData["players"][player]["idleMove"] = True
                gameData["players"][player]["respawn"] = True
        return gridChanges
    return []
Пример #3
0
 def run_all(self, dt):
     """Client-side catch-up: replay the per-player updater roughly
     ``dt / last_server_tick`` times to bring local state up to real time.

     NOTE(review): this block appears truncated in this view — the loop
     body may continue past what is shown here.
     """
     # Time since the local player last changed inputs; unused in the
     # visible code — presumably consumed further down. TODO confirm.
     input_delta_time = time.time() - self.last_input_change_time
     if "executionTimes" in self.gameData:
         # Guard against division by zero when the server reported a 0 tick.
         if self.gameData["executionTimes"][-1] != 0:
             loops = dt / self.gameData["executionTimes"][-1]
         else:
             loops = 1
         print("Client delta time : ", dt, " server last delta time :",
               self.gameData["executionTimes"][-1], " loops to do : ",
               loops)
     else:
         loops = 1
     for i in range(round(loops)):
         if "players" in self.gameData:
             # Snapshot so every player updates against the same pre-tick
             # state.  NOTE(review): this branch reads
             # self.gameData["executionTimes"] unconditionally — KeyError if
             # "players" exists but "executionTimes" does not; confirm.
             safeData = copy.deepcopy(self.gameData)
             for player in safeData['players']:
                 updater = Updater(player,
                                   self.gameData["executionTimes"][-1],
                                   self.gameData["executionTimes"][-1])
                 self.gameData["players"][player], gridChange = updater.run(
                     safeData["players"][player], self.game)
Пример #4
0
    # Optional output file for the freshly fetched info JSON.
    parser.add_argument(
        "-o",
        "--outfile",
        default=None,
        type=str,
        help="Output file to write newly fetched info JSON (optional).")
    # Optional destination for the human-readable update diff (stdout if omitted).
    parser.add_argument(
        "-u",
        "--updatefile",
        default=None,
        type=str,
        help=
        "File to write the update diff to (in human-readable format). If not provided, writes to stdout."
    )
    parser.add_argument("-v",
                        "--verbose",
                        action="store_true",
                        help="If set, print out (to stdout) scraping updates.")

    args = parser.parse_args()

    # NOTE(review): ``args.infile`` must be declared by an add_argument call
    # outside this excerpt — confirm.
    upd = Updater(args.infile, args.outfile)

    if args.updatefile:
        # Write the update to a file
        with open(args.updatefile, "w") as fs:
            upd.update_studies(fs=fs, verbose=args.verbose)
    else:
        # Write the update to stdout
        upd.update_studies(verbose=args.verbose)
Пример #5
0
def main():
    """Run the main code of the program.

    Parses command-line arguments, loads state either from a JSON dump
    (``-j``) or from the latest Journal file, logs in to the Frontier
    Companion API, validates the returned data against the Journal, then
    writes the requested export files and/or sends data to EDDN.  Exits via
    ``sys.exit`` with a specific ``EXIT_*`` code on failure.
    """
    try:
        # arg parsing
        parser = argparse.ArgumentParser(
            prog=appcmdname,
            description='Prints the current system and station (if docked) to stdout and optionally writes player '
                        'status, ship locations, ship loadout and/or station data to file. '
                        'Requires prior setup through the accompanying GUI app.'
        )

        parser.add_argument('-v', '--version', help='print program version and exit', action='store_const', const=True)
        group_loglevel = parser.add_mutually_exclusive_group()
        group_loglevel.add_argument('--loglevel',
                                    metavar='loglevel',
                                    help='Set the logging loglevel to one of: '
                                         'CRITICAL, ERROR, WARNING, INFO, DEBUG, TRACE',
                                    )
        group_loglevel.add_argument('--trace',
                                    help='Set the Debug logging loglevel to TRACE',
                                    action='store_true',
                                    )
        parser.add_argument('-a', metavar='FILE', help='write ship loadout to FILE in Companion API json format')
        parser.add_argument('-e', metavar='FILE', help='write ship loadout to FILE in E:D Shipyard plain text format')
        parser.add_argument('-l', metavar='FILE', help='write ship locations to FILE in CSV format')
        parser.add_argument('-m', metavar='FILE', help='write station commodity market data to FILE in CSV format')
        parser.add_argument('-o', metavar='FILE', help='write station outfitting data to FILE in CSV format')
        parser.add_argument('-s', metavar='FILE', help='write station shipyard data to FILE in CSV format')
        parser.add_argument('-t', metavar='FILE', help='write player status to FILE in CSV format')
        parser.add_argument('-d', metavar='FILE', help='write raw JSON data to FILE')
        parser.add_argument('-n', action='store_true', help='send data to EDDN')
        parser.add_argument('-p', metavar='CMDR', help='Returns data from the specified player account')
        parser.add_argument('-j', help=argparse.SUPPRESS)  # Import JSON dump
        args = parser.parse_args()

        # --version: report the running version and any available update, then quit.
        if args.version:
            updater = Updater(provider='internal')
            newversion: Optional[EDMCVersion] = updater.check_appcast()
            if newversion:
                print(f'{appversion} ({newversion.title!r} is available)')
            else:
                print(appversion)

            return

        if args.trace:
            edmclogger.set_channels_loglevel(logging.TRACE)

        elif args.loglevel:
            if args.loglevel not in ('CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG', 'TRACE'):
                print('loglevel must be one of: CRITICAL, ERROR, WARNING, INFO, DEBUG, TRACE', file=sys.stderr)
                sys.exit(EXIT_ARGS)
            edmclogger.set_channels_loglevel(args.loglevel)

        logger.debug(f'Startup v{appversion} : Running on Python v{sys.version}')
        logger.debug(f'''Platform: {sys.platform}
argv[0]: {sys.argv[0]}
exec_prefix: {sys.exec_prefix}
executable: {sys.executable}
sys.path: {sys.path}'''
                     )

        log_locale('Initial Locale')

        if args.j:
            logger.debug('Import and collate from JSON dump')
            # Import and collate from JSON dump
            # NOTE(review): the file handle from open() is never closed —
            # consider a ``with`` block.
            data = json.load(open(args.j))
            config.set('querytime', int(getmtime(args.j)))

        else:
            # Get state from latest Journal file
            logger.debug('Getting state from latest journal file')
            try:
                logdir = config.get('journaldir') or config.default_journal_dir
                logger.debug(f'logdir = "{logdir}"')
                # Sort journal files by the dot-separated parts after the
                # first, so the last list entry is the most recent journal.
                logfiles = sorted((x for x in os.listdir(logdir) if JOURNAL_RE.search(x)),
                                  key=lambda x: x.split('.')[1:])

                logfile = join(logdir, logfiles[-1])

                logger.debug(f'Using logfile "{logfile}"')
                with open(logfile, 'r') as loghandle:
                    for line in loghandle:
                        try:
                            monitor.parse_entry(line)
                        except Exception:
                            logger.debug(f'Invalid journal entry {line!r}')

            except Exception:
                logger.exception("Can't read Journal file")
                sys.exit(EXIT_SYS_ERR)

            if not monitor.cmdr:
                logger.error('Not available while E:D is at the main menu')
                sys.exit(EXIT_SYS_ERR)

            # Get data from Companion API
            if args.p:
                logger.debug(f'Attempting to use commander "{args.p}"')
                cmdrs = config.get('cmdrs') or []
                if args.p in cmdrs:
                    idx = cmdrs.index(args.p)

                else:
                    # Fall back to a case-insensitive match; the for/else
                    # raises when no known commander matches at all.
                    for idx, cmdr in enumerate(cmdrs):
                        if cmdr.lower() == args.p.lower():
                            break

                    else:
                        raise companion.CredentialsError()

                companion.session.login(cmdrs[idx], monitor.is_beta)

            else:
                logger.debug(f'Attempting to use commander "{monitor.cmdr}" from Journal File')
                cmdrs = config.get('cmdrs') or []
                if monitor.cmdr not in cmdrs:
                    raise companion.CredentialsError()

                companion.session.login(monitor.cmdr, monitor.is_beta)

            querytime = int(time())
            data = companion.session.station()
            config.set('querytime', querytime)

        # Validation
        # NOTE(review): the message below says data['command']['name'] but
        # the check is on the 'commander' key — typo in the message.
        if not deep_get(data, 'commander', 'name', default='').strip():
            logger.error("No data['command']['name'] from CAPI")
            sys.exit(EXIT_SERVER)

        elif not deep_get(data, 'lastSystem', 'name') or \
                data['commander'].get('docked') and not \
                deep_get(data, 'lastStarport', 'name'):  # Only care if docked

            logger.error("No data['lastSystem']['name'] from CAPI")
            sys.exit(EXIT_SERVER)

        elif not deep_get(data, 'ship', 'modules') or not deep_get(data, 'ship', 'name', default=''):
            logger.error("No data['ship']['modules'] from CAPI")
            sys.exit(EXIT_SERVER)

        elif args.j:
            pass  # Skip further validation

        elif data['commander']['name'] != monitor.cmdr:
            logger.error(f'Commander "{data["commander"]["name"]}" from CAPI doesn\'t match "{monitor.cmdr}" from Journal')  # noqa: E501
            sys.exit(EXIT_CREDENTIALS)

        elif data['lastSystem']['name'] != monitor.system or \
                ((data['commander']['docked'] and data['lastStarport']['name'] or None) != monitor.station) or \
                data['ship']['id'] != monitor.state['ShipID'] or \
                data['ship']['name'].lower() != monitor.state['ShipType']:

            logger.error('Mismatch(es) between CAPI and Journal for at least one of: StarSystem, Last Star Port, Ship ID or Ship Name/Type')  # noqa: E501
            sys.exit(EXIT_LAGGING)

        # stuff we can do when not docked
        if args.d:
            logger.debug(f'Writing raw JSON data to "{args.d}"')
            out = json.dumps(data, ensure_ascii=False, indent=2, sort_keys=True, separators=(',', ': '))
            with open(args.d, 'wb') as f:
                f.write(out.encode("utf-8"))

        if args.a:
            logger.debug(f'Writing Ship Loadout in Companion API JSON format to "{args.a}"')
            loadout.export(data, args.a)

        if args.e:
            logger.debug(f'Writing Ship Loadout in ED Shipyard plain text format to "{args.e}"')
            edshipyard.export(data, args.e)

        if args.l:
            logger.debug(f'Writing Ship Locations in CSV format to "{args.l}"')
            stats.export_ships(data, args.l)

        if args.t:
            logger.debug(f'Writing Player Status in CSV format to "{args.t}"')
            stats.export_status(data, args.t)

        # Print current system (and station when docked) to stdout.
        if data['commander'].get('docked'):
            print(f'{deep_get(data, "lastSystem", "name", default="Unknown")},'
                  f'{deep_get(data, "lastStarport", "name", default="Unknown")}'
                  )

        else:
            print(deep_get(data, 'lastSystem', 'name', default='Unknown'))

        # The remaining options only make sense while docked at a station.
        if (args.m or args.o or args.s or args.n or args.j):
            if not data['commander'].get('docked'):
                logger.error("Can't use -m, -o, -s, -n or -j because you're not currently docked!")
                return

            elif not deep_get(data, 'lastStarport', 'name'):
                logger.error("No data['lastStarport']['name'] from CAPI")
                sys.exit(EXIT_LAGGING)

            # Ignore possibly missing shipyard info
            elif not (data['lastStarport'].get('commodities') or data['lastStarport'].get('modules')):
                logger.error("No commodities or outfitting (modules) in CAPI data")
                return

        else:
            return

        # Finally - the data looks sane and we're docked at a station

        if args.j:
            logger.debug('Importing data from the CAPI return...')
            # Collate from JSON dump
            collate.addcommodities(data)
            collate.addmodules(data)
            collate.addships(data)

        if args.m:
            logger.debug(f'Writing Station Commodity Market Data in CSV format to "{args.m}"')
            if data['lastStarport'].get('commodities'):
                # Fixup anomalies in the commodity data
                fixed = companion.fixup(data)
                commodity.export(fixed, COMMODITY_DEFAULT, args.m)

            else:
                logger.error("Station doesn't have a market")

        if args.o:
            if data['lastStarport'].get('modules'):
                logger.debug(f'Writing Station Outfitting in CSV format to "{args.o}"')
                outfitting.export(data, args.o)

            else:
                logger.error("Station doesn't supply outfitting")

        # If the station advertises a shipyard but the first response had no
        # ship list, wait and re-query once before exporting/sending.
        if (args.s or args.n) and not args.j and not \
                data['lastStarport'].get('ships') and data['lastStarport']['services'].get('shipyard'):

            # Retry for shipyard
            sleep(SERVER_RETRY)
            new_data = companion.session.station()
            # might have undocked while we were waiting for retry in which case station data is unreliable
            if new_data['commander'].get('docked') and \
                    deep_get(new_data, 'lastSystem', 'name') == monitor.system and \
                    deep_get(new_data, 'lastStarport', 'name') == monitor.station:

                data = new_data

        if args.s:
            if deep_get(data, 'lastStarport', 'ships', 'shipyard_list'):
                logger.debug(f'Writing Station Shipyard in CSV format to "{args.s}"')
                shipyard.export(data, args.s)

            elif not args.j and monitor.stationservices and 'Shipyard' in monitor.stationservices:
                logger.error('Failed to get shipyard data')

            else:
                logger.error("Station doesn't have a shipyard")

        if args.n:
            try:
                eddn_sender = eddn.EDDN(None)
                logger.debug('Sending Market, Outfitting and Shipyard data to EDDN...')
                eddn_sender.export_commodities(data, monitor.is_beta)
                eddn_sender.export_outfitting(data, monitor.is_beta)
                eddn_sender.export_shipyard(data, monitor.is_beta)

            except Exception:
                logger.exception('Failed to send data to EDDN')

    except companion.ServerError:
        logger.error('Frontier CAPI Server returned an error')
        sys.exit(EXIT_SERVER)

    except companion.SKUError:
        logger.error('Frontier CAPI Server SKU problem')
        sys.exit(EXIT_SERVER)

    except companion.CredentialsError:
        logger.error('Frontier CAPI Server: Invalid Credentials')
        sys.exit(EXIT_CREDENTIALS)
Пример #6
0
def updateSystem():
    """Warn the user, wait 30 seconds, then run the Clarissa updater."""
    # Give the user time to close any open Clarissa files before updating.
    print("UPDATING IN 30 SECONDS! CLOSE ALL CLARISSA FILES!")
    time_check.sleep(30)
    Updater(dir_path).update()
Пример #7
0
    def run(self, opts=None):
        """Dispatch on the parsed options: validate the proxy, optionally
        check the Tor connection, search/test 'zombie' servers, launch an
        attack, and/or self-update.

        NOTE(review): this method mixes ``print(...)`` calls with Python 2
        ``print`` statements and uses ``urllib2``/``raw_input`` — it can
        only run under Python 2.

        :param opts: optional raw options to parse and apply before running;
            when omitted, the previously set ``self.options`` are used.
        """
        if opts:
            options = self.create_options(opts)
            self.set_options(options)
        options = self.options

        # check proxy options
        proxy = options.proxy
        if options.proxy:
            try:
                # Accepts only http(s)://<IPv4>:<4-digit port>; hostnames and
                # other port lengths are treated as malformed.
                pattern = 'http[s]?://(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]):[0-9][0-9][0-9][0-9]'
                m = re.search(pattern, proxy)
                if m is None:
                    self.banner()
                    print("\n[Error] - Proxy malformed!\n")
                    sys.exit(2)
            except Exception:
                self.banner()
                print("\n[Error] - Proxy malformed!\n")
                sys.exit(2)

        # check tor connection
        if options.checktor:
            self.banner()
            try:
                # Scrape check.torproject.org: the congratulations text only
                # appears when traffic is routed through Tor.
                print("\nSending request to: https://check.torproject.org\n")
                tor_reply = urllib2.urlopen(
                    "https://check.torproject.org").read()
                your_ip = tor_reply.split('<strong>')[1].split(
                    '</strong>')[0].strip()
                if not tor_reply or 'Congratulations' not in tor_reply:
                    print("It seems that Tor is not properly set.\n")
                    print("Your IP address appears to be: " + your_ip + "\n")
                else:
                    print("Congratulations!. Tor is properly being used :-)\n")
                    print("Your IP address appears to be: " + your_ip + "\n")
            except:
                # NOTE(review): bare except also swallows KeyboardInterrupt.
                print(
                    "Cannot reach TOR checker system!. Are you correctly connected?\n"
                )

        # search for 'zombies' on google results
        if options.search:
            try:
                self.banner()
                print(
                    "\nSearching for 'zombies' on google results. Good Luck ;-)\n"
                )
                zombies = self.search_zombies()
                # Interactive confirmation; any answer except n/N proceeds.
                check_url_link_reply = raw_input(
                    "Wanna check if they are valid zombies? (Y/n)\n")
                print '-' * 25
                if check_url_link_reply == "n" or check_url_link_reply == "N":
                    print "\nBye!\n"
                else:
                    test = self.testing(zombies)
            except Exception:
                print("[Error] - Something wrong searching!\n")

        # test web 'zombie' servers -> show statistics
        if options.test:
            try:
                self.banner()
                zombies = self.extract_zombies()
                test = self.testing(zombies)
            except Exception:
                print("\n[Error] - Something wrong testing!\n")

        # attack target -> exploit Open Redirect massively and connect all vulnerable servers to a target
        if options.target:
            try:
                self.banner()
                zombies = self.extract_zombies()
                attack = self.attacking(zombies)
            except Exception:
                print("\n[Error] - Something wrong attacking!\n")

        # check/update for latest stable version
        if options.update:
            self.banner()
            try:
                print(
                    "\nTrying to update automatically to the latest stable version\n"
                )
                # Updater performs the update as a side effect of construction.
                Updater()
            except:
                print(
                    "\nSomething was wrong!. You should checkout UFONet manually with:\n"
                )
                print("$ git clone https://github.com/epsylon/ufonet\n")
Пример #8
0
def main(url, path, replace, push, html, update):
    """
    Start vulnerability analysis using command line tool.
    :param url: URL to a github repository
    :param path: path to a locally stored project
    :param replace: flag of whether detected vulnerabilities
    should be automatically replaced with safe alternative
    :param push: flag indicating whether pull-request should
    automatically be created
    :param html: path to html file which will contain report
    :param update: unused in this function body — TODO confirm whether it
    should gate the Updater step
    """
    # analyze source code of provided project
    print("Start analysis")

    # setup logging
    # NOTE(review): metrics path is hardcoded to /home/metrics.json, which
    # requires write access there — confirm this is intended.
    logger = logging.getLogger()
    logHandler = logging.FileHandler('/home/metrics.json')
    logger.addHandler(logHandler)
    formatter = jsonlogger.JsonFormatter()
    logHandler.setFormatter(formatter)
    logger.setLevel(logging.INFO)

    # store remote repo locally in /tmp
    local_repo_path = '/tmp/' + str(uuid.uuid4())
    gh_handler = None

    if url:
        # download remote repo
        gh_handler = GithubRepoHandler(url)
        gh_handler.download_repository(local_repo_path)
        gh_handler.get_repository_metrics()
    elif path:
        # analyze local repo
        local_repo_path = path

    updater = None

    try:
        updater = Updater(local_repo_path)
    except:
        # NOTE(review): bare except hides the actual failure cause.
        print("Cannot update due to error")

    vulnerability_analyzer = VulnerabilityAnalyzer(local_repo_path)

    try:
        # check for vulnerable functions and vulnerable dependencies
        vulnerability_analyzer.analyze()
    except:
        print("Python AST cannot be parsed. Terminating analysis")

        if url:
            logger.info(url, extra={'analysis_failed': True})

        sys.exit(1)

    vulnerable_functions = vulnerability_analyzer.detected_vulnerable_functions
    vulnerable_imports = vulnerability_analyzer.detected_vulnerable_imports
    vulnerable_installed_dependencies = vulnerability_analyzer.detected_vulnerable_installed_dependencies

    outdated_dependencies = []

    if updater:
        outdated_dependencies = updater.outdated_dependencies

    pre_test_results = {}
    test_metrics_before = {}

    if len(vulnerable_functions) > 0:
        # run tests before any replacement to establish a baseline
        pre_tester = TestInfo(local_repo_path)

        try:
            pre_tester.runToxTest()
        except:
            print("An error occured while executing tests")

        print("Tests done")
        pre_test_results = pre_tester.getTestLog()
        test_metrics_before = pre_tester.get_test_metrics()

    post_test_metrics = {}
    post_test_results = {}

    if replace and len(vulnerable_functions) > 0:
        # automatically replace detected vulnerabilities if available
        print("Replace detected vulnerabilities")
        vulnerability_analyzer.replace_vulnerabilities_in_ast()

        # run tests again to verify the replacements did not break anything
        post_tester = TestInfo(local_repo_path)

        try:
            post_tester.runToxTest()
            post_test_results = post_tester.getTestLog()
            post_test_metrics = post_tester.get_test_metrics()
        except:
            print("An error occured while executing tests")

    report = Report(vulnerable_functions, vulnerable_imports, pre_test_results,
                    post_test_results, outdated_dependencies, [], replace)

    # automatically create pull request
    # NOTE(review): gh_handler is None when no url was given — push without
    # url would raise AttributeError here; confirm callers prevent this.
    if push and (len(vulnerable_functions) > 0 or len(vulnerable_imports) > 0):
        print("Create pull-request")

        gh_handler.push_updates("*****@*****.**",
                                "bugrevelio", "Vulnerabilities",
                                report.pull_request_report(),
                                "bugrevelio:master", "master")

    print(report.plain_text_report())

    if html:
        report.html_report(html)

    if url:
        # collect relevant metrics
        repo_metrics = gh_handler.get_repository_metrics()
        vulnerability_metrics = vulnerability_analyzer.get_vulnerability_metrics(
        )
        repo_metrics.update(vulnerability_metrics)
        repo_metrics.update(test_metrics_before)
        repo_metrics.update(post_test_metrics)

        logger.info(url, extra=repo_metrics)

    # delete downloaded repo
    # NOTE(review): when ``path`` was supplied, local_repo_path IS the user's
    # project directory and this deletes it — confirm intended.
    shutil.rmtree(local_repo_path)