def __create_start_threads(self):
        self.item_widgets_thread = GetFromQueueToSignalThread(
            self.stop_event, 0.05, self.gui_queue)
        self.item_widgets_thread.item_ready.connect(self.__process_frame_data)
        self.item_widgets_thread.start()

        self.item_widgets_update_thread = GetFromQueueToSignalThread(
            self.stop_event, 0.05, self.cloud_rx_queue)
        self.item_widgets_update_thread.item_ready.connect(self.__update_list)
        self.item_widgets_update_thread.start()

        self.auth_status_thread = CheckAuthThread(self.stop_event,
                                                  self.config.config)
        self.auth_status_thread.state_signal.connect(self.set_auth_status)
        self.auth_status_thread.start()

        self.update_service_thread = Updater(self.stop_event,
                                             self.config.config,
                                             self.application_path)
        self.update_service_thread.to_update.connect(
            self.update_service_thread.show_update_window)
        self.update_service_thread.start()

        self.ui.credentialsLoadButton.clicked.connect(
            self.__credentials_file_load_check_propagate)
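
Note: GetFromQueueToSignalThread belongs to the PW-Sat2 ground station project and is not shown in these examples. A minimal sketch of the pattern its usage suggests (poll a queue until a shared stop event is set and emit each item through a Qt signal) could look like the following; only the constructor arguments and the item_ready signal are taken from the calls above, everything else is an assumption.

from Queue import Empty  # Python 2 stdlib, matching the PyQt4 code above

from PyQt4 import QtCore


class QueueToSignalThreadSketch(QtCore.QThread):
    """Hypothetical sketch only -- the real GetFromQueueToSignalThread may differ."""

    item_ready = QtCore.pyqtSignal(object)

    def __init__(self, stop_event, poll_interval, queue, parent=None):
        super(QueueToSignalThreadSketch, self).__init__(parent)
        self.stop_event = stop_event        # threading.Event shared with the app
        self.poll_interval = poll_interval  # e.g. 0.05 s in the calls above
        self.queue = queue

    def run(self):
        # Keep draining the queue until the application asks all threads to stop.
        while not self.stop_event.is_set():
            try:
                item = self.queue.get(timeout=self.poll_interval)
            except Empty:
                continue
            self.item_ready.emit(item)      # delivered to the connected slot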
Example #2
def run_all(server, dt):
    global gridUpdates
    if gameData["waitingToStart"]:
        if "players" in gameData:
            numberOfPlayersReady = 0
            for player in gameData["players"]:
                if gameData["players"][player]["isReady"]:
                    numberOfPlayersReady += 1
            if numberOfPlayersReady == len(
                    gameData["players"]) and numberOfPlayersReady >= 1:
                gameData["waitingToStart"] = False
                gameData["isGameFinished"] = False
                gameData["scores"]["pacteam"] = 0
                gameData["scores"]["ghostteam"] = 0
                for player in gameData["players"]:
                    gameData["players"][player]["score"] = 0

    else:
        safeData = copy.deepcopy(gameData)
        gridChanges = []
        for player in safeData['players']:
            updater = Updater(player, dt, dt)
            gameData["players"][player], gridChange = updater.run(
                safeData["players"][player], server.getGame())
            if gridChange is not None:
                gridChanges.append(gridChange)
        safeData = copy.deepcopy(gameData)
        gameData["players"] = playerDefeat(safeData["players"])
        gameData["gumCount"] = server.getGumCount()
        if gameData["gumCount"] == 0:
            gameData["waitingToStart"] = True
            gameData["isGameFinished"] = True
            gridUpdates.clear()
            gridChanges.clear()
            server.reloadGrid()
            for player in gameData["players"]:
                if gameData["players"][player]["isPacman"]:
                    gameData["scores"]["pacteam"] += gameData["players"][
                        player]["score"]
                else:
                    gameData["scores"]["ghostteam"] += gameData["players"][
                        player]["score"]
                gameData["players"][player]["isReady"] = False
                gameData["players"][player]["speed"] = [0, 0]
                gameData["players"][player]["inputs"] = [0, 0]
                gameData["players"][player]["idleMove"] = True
                gameData["players"][player]["respawn"] = True
        return gridChanges
    return []
Example #3
def main():
	u = Updater(0.5)
	with open(sys.argv[1],'r') as f:
		last = update_pool(get_a_line(f.readline()))
		lines = f.readlines()
		u.start()
		for l in lines:
			r = get_a_line(l)
			if equal(r,last):
				last['data']=last['data']+r['data']
			else:
				result.append(last)
				last = r
				last = update_pool(last)
				u.output = '%s.%s%s%s'%(len(result),last['city'],last['area'],last['road'])
		pool_dup = sorted(pool,reverse=True,key=lambda x:len(x))
		for r in result:
			r['feat']=find_feature(pool_dup,r['city']+r['area']+r['road'])
			u.output = 'changing:%s%s%s,%s'%(r['city'],r['area'],r['road'],r['feat'])
		u.stop()
		print 'writing files'
		po = codecs.open('word_pool.js','w','utf-8')
		po.write(u'var pool=')
		po.write(json.dumps(pool_dup,encoding='utf8',ensure_ascii=False))
		po.write(u';')
		po.close()
		ro = codecs.open('data.js','w','utf-8')
		ro.write(u'var database=')
		ro.write(json.dumps(result,encoding='utf8',ensure_ascii=False))
		ro.write(u';')
		ro.close()
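
Note: the Updater in this example (and in the similar one further down) is used purely as a console progress display: it is started once, its output attribute is overwritten inside the processing loop, and it is stopped at the end. A minimal sketch with that behaviour, assuming it simply reprints output at a fixed interval from a background thread, could be:

import sys
import threading
import time


class ProgressUpdaterSketch(object):
    """Hypothetical sketch only -- the project's real Updater is not shown here."""

    def __init__(self, interval):
        self.interval = interval           # seconds between refreshes, e.g. 0.5
        self.output = ''                   # callers overwrite this attribute
        self._stop = threading.Event()
        self._thread = threading.Thread(target=self._loop)
        self._thread.daemon = True

    def _loop(self):
        while not self._stop.is_set():
            sys.stdout.write('\r' + self.output)   # redraw the status line
            sys.stdout.flush()
            time.sleep(self.interval)

    def start(self):
        self._thread.start()

    def stop(self):
        self._stop.set()
        self._thread.join()
        sys.stdout.write('\n')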
Example #4
def run_all(self, dt):
    input_delta_time = time.time() - self.last_input_change_time
    if "executionTimes" in self.gameData:
        if self.gameData["executionTimes"][-1] != 0:
            loops = dt / self.gameData["executionTimes"][-1]
        else:
            loops = 1
        print("Client delta time : ", dt, " server last delta time :",
              self.gameData["executionTimes"][-1], " loops to do : ",
              loops)
    else:
        loops = 1
    for i in range(round(loops)):
        if "players" in self.gameData:
            safeData = copy.deepcopy(self.gameData)
            for player in safeData['players']:
                updater = Updater(player,
                                  self.gameData["executionTimes"][-1],
                                  self.gameData["executionTimes"][-1])
                self.gameData["players"][player], gridChange = updater.run(
                    safeData["players"][player], self.game)
Example #5
def main():
	u = Updater(0.5)
	extracted = {}
	with codecs.open(sys.argv[1],'r','utf8') as f:
		lines = f.readlines()
		u.start()
		for l in lines:
			for n in l:
				if n!=u'\n' and n!=u'\u3000' and n!=u'\r':
					extracted[n]=1
					u.output=n
		u.stop()
	op = codecs.open('extracted.txt','w','utf-8')
	k = extracted.keys()
	k.sort()
	for c in k:
		op.write(c)
	op.close()
Example #6
    parser.add_argument(
        "-o",
        "--outfile",
        default=None,
        type=str,
        help="Output file to write newly fetched info JSON (optional).")
    parser.add_argument(
        "-u",
        "--updatefile",
        default=None,
        type=str,
        help=
        "File to write the update diff to (in human-readable format). If not provided, writes to stdout."
    )
    parser.add_argument("-v",
                        "--verbose",
                        action="store_true",
                        help="If set, print out (to stdout) scraping updates.")

    args = parser.parse_args()

    upd = Updater(args.infile, args.outfile)

    if args.updatefile:
        # Write the update to a file
        with open(args.updatefile, "w") as fs:
            upd.update_studies(fs=fs, verbose=args.verbose)
    else:
        # Write the update to stdout
        upd.update_studies(verbose=args.verbose)
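
Note: the help text above says the update diff goes to the given file, or to stdout if no file is provided, so update_studies presumably defaults its fs argument to sys.stdout. A minimal sketch of that file-or-stdout pattern (only the constructor and method signatures come from the snippet; the bodies are assumptions):

import sys


class UpdaterSketch(object):
    """Hypothetical sketch only -- the real Updater is not reproduced here."""

    def __init__(self, infile, outfile=None):
        self.infile = infile
        self.outfile = outfile

    def update_studies(self, fs=None, verbose=False):
        fs = fs or sys.stdout              # write the diff to the file or to stdout
        if verbose:
            print("scraping updates for %s" % self.infile)
        fs.write("update diff goes here\n")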
Example #7
def main():
    """Run the main code of the program."""
    try:
        # arg parsing
        parser = argparse.ArgumentParser(
            prog=appcmdname,
            description='Prints the current system and station (if docked) to stdout and optionally writes player '
                        'status, ship locations, ship loadout and/or station data to file. '
                        'Requires prior setup through the accompanying GUI app.'
        )

        parser.add_argument('-v', '--version', help='print program version and exit', action='store_const', const=True)
        group_loglevel = parser.add_mutually_exclusive_group()
        group_loglevel.add_argument('--loglevel',
                                    metavar='loglevel',
                                    help='Set the logging loglevel to one of: '
                                         'CRITICAL, ERROR, WARNING, INFO, DEBUG, TRACE',
                                    )
        group_loglevel.add_argument('--trace',
                                    help='Set the Debug logging loglevel to TRACE',
                                    action='store_true',
                                    )
        parser.add_argument('-a', metavar='FILE', help='write ship loadout to FILE in Companion API json format')
        parser.add_argument('-e', metavar='FILE', help='write ship loadout to FILE in E:D Shipyard plain text format')
        parser.add_argument('-l', metavar='FILE', help='write ship locations to FILE in CSV format')
        parser.add_argument('-m', metavar='FILE', help='write station commodity market data to FILE in CSV format')
        parser.add_argument('-o', metavar='FILE', help='write station outfitting data to FILE in CSV format')
        parser.add_argument('-s', metavar='FILE', help='write station shipyard data to FILE in CSV format')
        parser.add_argument('-t', metavar='FILE', help='write player status to FILE in CSV format')
        parser.add_argument('-d', metavar='FILE', help='write raw JSON data to FILE')
        parser.add_argument('-n', action='store_true', help='send data to EDDN')
        parser.add_argument('-p', metavar='CMDR', help='Returns data from the specified player account')
        parser.add_argument('-j', help=argparse.SUPPRESS)  # Import JSON dump
        args = parser.parse_args()

        if args.version:
            updater = Updater(provider='internal')
            newversion: Optional[EDMCVersion] = updater.check_appcast()
            if newversion:
                print(f'{appversion} ({newversion.title!r} is available)')
            else:
                print(appversion)

            return

        if args.trace:
            edmclogger.set_channels_loglevel(logging.TRACE)

        elif args.loglevel:
            if args.loglevel not in ('CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG', 'TRACE'):
                print('loglevel must be one of: CRITICAL, ERROR, WARNING, INFO, DEBUG, TRACE', file=sys.stderr)
                sys.exit(EXIT_ARGS)
            edmclogger.set_channels_loglevel(args.loglevel)

        logger.debug(f'Startup v{appversion} : Running on Python v{sys.version}')
        logger.debug(f'''Platform: {sys.platform}
argv[0]: {sys.argv[0]}
exec_prefix: {sys.exec_prefix}
executable: {sys.executable}
sys.path: {sys.path}'''
                     )

        log_locale('Initial Locale')

        if args.j:
            logger.debug('Import and collate from JSON dump')
            # Import and collate from JSON dump
            data = json.load(open(args.j))
            config.set('querytime', int(getmtime(args.j)))

        else:
            # Get state from latest Journal file
            logger.debug('Getting state from latest journal file')
            try:
                logdir = config.get('journaldir') or config.default_journal_dir
                logger.debug(f'logdir = "{logdir}"')
                logfiles = sorted((x for x in os.listdir(logdir) if JOURNAL_RE.search(x)),
                                  key=lambda x: x.split('.')[1:])

                logfile = join(logdir, logfiles[-1])

                logger.debug(f'Using logfile "{logfile}"')
                with open(logfile, 'r') as loghandle:
                    for line in loghandle:
                        try:
                            monitor.parse_entry(line)
                        except Exception:
                            logger.debug(f'Invalid journal entry {line!r}')

            except Exception:
                logger.exception("Can't read Journal file")
                sys.exit(EXIT_SYS_ERR)

            if not monitor.cmdr:
                logger.error('Not available while E:D is at the main menu')
                sys.exit(EXIT_SYS_ERR)

            # Get data from Companion API
            if args.p:
                logger.debug(f'Attempting to use commander "{args.p}"')
                cmdrs = config.get('cmdrs') or []
                if args.p in cmdrs:
                    idx = cmdrs.index(args.p)

                else:
                    for idx, cmdr in enumerate(cmdrs):
                        if cmdr.lower() == args.p.lower():
                            break

                    else:
                        raise companion.CredentialsError()

                companion.session.login(cmdrs[idx], monitor.is_beta)

            else:
                logger.debug(f'Attempting to use commander "{monitor.cmdr}" from Journal File')
                cmdrs = config.get('cmdrs') or []
                if monitor.cmdr not in cmdrs:
                    raise companion.CredentialsError()

                companion.session.login(monitor.cmdr, monitor.is_beta)

            querytime = int(time())
            data = companion.session.station()
            config.set('querytime', querytime)

        # Validation
        if not deep_get(data, 'commander', 'name', default='').strip():
            logger.error("No data['command']['name'] from CAPI")
            sys.exit(EXIT_SERVER)

        elif not deep_get(data, 'lastSystem', 'name') or \
                data['commander'].get('docked') and not \
                deep_get(data, 'lastStarport', 'name'):  # Only care if docked

            logger.error("No data['lastSystem']['name'] from CAPI")
            sys.exit(EXIT_SERVER)

        elif not deep_get(data, 'ship', 'modules') or not deep_get(data, 'ship', 'name', default=''):
            logger.error("No data['ship']['modules'] from CAPI")
            sys.exit(EXIT_SERVER)

        elif args.j:
            pass  # Skip further validation

        elif data['commander']['name'] != monitor.cmdr:
            logger.error(f'Commander "{data["commander"]["name"]}" from CAPI doesn\'t match "{monitor.cmdr}" from Journal')  # noqa: E501
            sys.exit(EXIT_CREDENTIALS)

        elif data['lastSystem']['name'] != monitor.system or \
                ((data['commander']['docked'] and data['lastStarport']['name'] or None) != monitor.station) or \
                data['ship']['id'] != monitor.state['ShipID'] or \
                data['ship']['name'].lower() != monitor.state['ShipType']:

            logger.error('Mismatch(es) between CAPI and Journal for at least one of: StarSystem, Last Star Port, Ship ID or Ship Name/Type')  # noqa: E501
            sys.exit(EXIT_LAGGING)

        # stuff we can do when not docked
        if args.d:
            logger.debug(f'Writing raw JSON data to "{args.d}"')
            out = json.dumps(data, ensure_ascii=False, indent=2, sort_keys=True, separators=(',', ': '))
            with open(args.d, 'wb') as f:
                f.write(out.encode("utf-8"))

        if args.a:
            logger.debug(f'Writing Ship Loadout in Companion API JSON format to "{args.a}"')
            loadout.export(data, args.a)

        if args.e:
            logger.debug(f'Writing Ship Loadout in ED Shipyard plain text format to "{args.e}"')
            edshipyard.export(data, args.e)

        if args.l:
            logger.debug(f'Writing Ship Locations in CSV format to "{args.l}"')
            stats.export_ships(data, args.l)

        if args.t:
            logger.debug(f'Writing Player Status in CSV format to "{args.t}"')
            stats.export_status(data, args.t)

        if data['commander'].get('docked'):
            print(f'{deep_get(data, "lastSystem", "name", default="Unknown")},'
                  f'{deep_get(data, "lastStarport", "name", default="Unknown")}'
                  )

        else:
            print(deep_get(data, 'lastSystem', 'name', default='Unknown'))

        if (args.m or args.o or args.s or args.n or args.j):
            if not data['commander'].get('docked'):
                logger.error("Can't use -m, -o, -s, -n or -j because you're not currently docked!")
                return

            elif not deep_get(data, 'lastStarport', 'name'):
                logger.error("No data['lastStarport']['name'] from CAPI")
                sys.exit(EXIT_LAGGING)

            # Ignore possibly missing shipyard info
            elif not (data['lastStarport'].get('commodities') or data['lastStarport'].get('modules')):
                logger.error("No commodities or outfitting (modules) in CAPI data")
                return

        else:
            return

        # Finally - the data looks sane and we're docked at a station

        if args.j:
            logger.debug('Importing data from the CAPI return...')
            # Collate from JSON dump
            collate.addcommodities(data)
            collate.addmodules(data)
            collate.addships(data)

        if args.m:
            logger.debug(f'Writing Station Commodity Market Data in CSV format to "{args.m}"')
            if data['lastStarport'].get('commodities'):
                # Fixup anomalies in the commodity data
                fixed = companion.fixup(data)
                commodity.export(fixed, COMMODITY_DEFAULT, args.m)

            else:
                logger.error("Station doesn't have a market")

        if args.o:
            if data['lastStarport'].get('modules'):
                logger.debug(f'Writing Station Outfitting in CSV format to "{args.o}"')
                outfitting.export(data, args.o)

            else:
                logger.error("Station doesn't supply outfitting")

        if (args.s or args.n) and not args.j and not \
                data['lastStarport'].get('ships') and data['lastStarport']['services'].get('shipyard'):

            # Retry for shipyard
            sleep(SERVER_RETRY)
            new_data = companion.session.station()
            # might have undocked while we were waiting for retry in which case station data is unreliable
            if new_data['commander'].get('docked') and \
                    deep_get(new_data, 'lastSystem', 'name') == monitor.system and \
                    deep_get(new_data, 'lastStarport', 'name') == monitor.station:

                data = new_data

        if args.s:
            if deep_get(data, 'lastStarport', 'ships', 'shipyard_list'):
                logger.debug(f'Writing Station Shipyard in CSV format to "{args.s}"')
                shipyard.export(data, args.s)

            elif not args.j and monitor.stationservices and 'Shipyard' in monitor.stationservices:
                logger.error('Failed to get shipyard data')

            else:
                logger.error("Station doesn't have a shipyard")

        if args.n:
            try:
                eddn_sender = eddn.EDDN(None)
                logger.debug('Sending Market, Outfitting and Shipyard data to EDDN...')
                eddn_sender.export_commodities(data, monitor.is_beta)
                eddn_sender.export_outfitting(data, monitor.is_beta)
                eddn_sender.export_shipyard(data, monitor.is_beta)

            except Exception:
                logger.exception('Failed to send data to EDDN')

    except companion.ServerError:
        logger.error('Frontier CAPI Server returned an error')
        sys.exit(EXIT_SERVER)

    except companion.SKUError:
        logger.error('Frontier CAPI Server SKU problem')
        sys.exit(EXIT_SERVER)

    except companion.CredentialsError:
        logger.error('Frontier CAPI Server: Invalid Credentials')
        sys.exit(EXIT_CREDENTIALS)
Example #8
def updateSystem():
    #Update Clarissa System
    print("UPDATING IN 30 SECONDS! CLOSE ALL CLARISSA FILES!")
    time_check.sleep(30)
    update = Updater(dir_path)
    update.update()
Example #9
class StartQT4(QtGui.QMainWindow):
    def __init__(self, stop_event, config, gui_queue, cloud_tx_queue,
                 cloud_rx_queue, error_queue, path_queue, send_active,
                 upload_cloud_thread, application_path):
        QtGui.QWidget.__init__(self, None)

        self.ui = Ui_mainWindow()
        self.ui.setupUi(self)

        QtGui.QWidget.setWindowTitle(
            self, "PW-Sat2 Ground Station (ver. {0}) - Main Window".format(
                config.config['APP_VERSION']))

        self.__add_available_signal_sources()
        self.exit_message_handler = ExitMessageHandler(gui_queue,
                                                       cloud_tx_queue,
                                                       cloud_rx_queue,
                                                       error_queue, path_queue)

        self.config = config
        self.stop_event = stop_event
        self.application_path = application_path

        self.gui_queue = gui_queue
        self.error_queue = error_queue
        self.cloud_tx_queue = cloud_tx_queue
        self.cloud_rx_queue = cloud_rx_queue
        self.send_active = send_active
        self.upload_cloud_thread = upload_cloud_thread

        self.first_frame = True

        self.__init_ribbon()

        self.__handle_frames_file_picker()
        self.__create_start_threads()
        self.__set_send_active()
        self.__connect_demodulator_button()
        self.__connect_run_source_button()

        self.ui.credentialsButton.clicked.connect(
            self.auth_status_thread.check)

        self.validate_credentials = ValidateCredentials(
            os.path.join(os.path.dirname(__file__), '..',
                         self.config.config['CREDENTIALS_FILE']))
        self.logger = logging.getLogger(__name__ + "." +
                                        self.__class__.__name__)

        if self.validate_credentials.file_blank():
            self.__add_credentials_widget()
        else:
            self.__add_welcome_widget()

    def __add_credentials_widget(self):
        self.ui.framesListWidget.setSelectionMode(
            QtGui.QAbstractItemView.NoSelection)
        item_widget = CredentialsChooseWidget()
        text = "<html><head/><body><p><span style=\" " \
                "font-size:14pt; font-weight:600;\">Sign in and download credentials</span><br/>" \
                "Go to our page {0} and download file with your credentials to enable " \
                "upload to cloud.</p></body></html>".format(self.config.config['CLOUD_URL'])

        item_widget.label_6.setText(text)

        item_widget.choose_credentials_file_button.clicked.connect(
            self.__handle_credentials_file_widget_button)

        item = QtGui.QListWidgetItem(self.ui.framesListWidget)
        item.setSizeHint(QtCore.QSize(0, 500))
        self.ui.framesListWidget.addItem(item)
        self.ui.framesListWidget.setItemWidget(item, item_widget)

    def __handle_credentials_file_widget_button(self):
        if self.__credentials_file_load_check_propagate(
        ) == CorrectCredentials:
            self.__remove_last_widget()
            self.__add_welcome_widget()
        else:
            pass

    def __add_welcome_widget(self):
        self.ui.framesListWidget.setSelectionMode(
            QtGui.QAbstractItemView.NoSelection)
        item_widget = UiFrameListEmptyWidget()

        item_widget.help_step_1.clicked.connect(
            lambda: webbrowser.open(self.config.config['HELP_WELCOME_STEP_1']))
        item_widget.help_step_2.clicked.connect(
            lambda: webbrowser.open(self.config.config['HELP_WELCOME_STEP_2']))
        item_widget.help_step_3.clicked.connect(
            lambda: webbrowser.open(self.config.config['HELP_WELCOME_STEP_3']))

        item = QtGui.QListWidgetItem(self.ui.framesListWidget)
        item.setSizeHint(QtCore.QSize(0, 400))
        self.ui.framesListWidget.addItem(item)
        self.ui.framesListWidget.setItemWidget(item, item_widget)

    def __remove_last_widget(self):
        self.ui.framesListWidget.setSelectionMode(
            QtGui.QAbstractItemView.SingleSelection)
        self.ui.framesListWidget.takeItem(0)

    def __handle_frames_file_picker(self):
        self.from_file_thread = FromFileToGuiQueueThreadFactory(
            self.stop_event, self.config.config, self.gui_queue)
        self.ui.loadFramesFromFileButton.clicked.connect(
            self.from_file_thread.load_from_file)

    def __connect_frame_uploads_buttons(self):
        self.ui.autoUploadToolButton.clicked.connect(self.__autosend_handle)
        self.ui.sendUnsuccessfulButton.clicked.connect(
            self.__resend_errors_handle)

    def __connect_demodulator_button(self):
        self.ui.runDemodulatorButton.clicked.connect(self.__run_demodulator)

    def __connect_run_source_button(self):
        self.ui.runSourceButton.clicked.connect(self.__run_source)

    def __run_source(self):
        item = self.ui.signalSourceDropdownButton.currentText()
        if item == "Recorded IQ File":
            self.source_thread = Thread(target=self.__source_iq_file_command)
            self.source_thread.start()
        elif item == "FUNcube Dongle Pro+":
            self.source_thread = Thread(target=self.__source_fcd_plus_command)
            self.source_thread.start()
        elif item == "RTL-SDR":
            self.source_thread = Thread(target=self.__source_rtl_sdr_command)
            self.source_thread.start()
        elif item == "PlutoSDR":
            self.source_thread = Thread(target=self.__source_pluto_sdr_command)
            self.source_thread.start()
        elif item == "Audio in (SSB - USB)":
            self.source_thread = Thread(target=self.__source_ssb_command)
            self.source_thread.start()
        else:
            print "Not implemented yet!"

    def __source_iq_file_command(self):
        print os.path.join(os.path.dirname(__file__), '..',
                           self.config.config['GRC_BINARY'] + ' -s "iq_file"')
        subprocess.call([
            os.path.join(os.path.dirname(__file__), '..',
                         self.config.config['GRC_BINARY']), '-s', "iq_file"
        ])

    def __source_ssb_command(self):
        print os.path.join(os.path.dirname(__file__), '..',
                           self.config.config['GRC_BINARY'] + ' -s "iq_file"')
        subprocess.call([
            os.path.join(os.path.dirname(__file__), '..',
                         self.config.config['GRC_BINARY']), '-s', "ssb"
        ])

    def __source_fcd_plus_command(self):
        print os.path.join(os.path.dirname(__file__), '..',
                           self.config.config['GRC_BINARY'] + ' -s "fcd+"')
        subprocess.call([
            os.path.join(os.path.dirname(__file__), '..',
                         self.config.config['GRC_BINARY']), '-s', "fcd+"
        ])

    def __source_rtl_sdr_command(self):
        print os.path.join(os.path.dirname(__file__), '..',
                           self.config.config['GRC_BINARY'] + ' -s "rtl-sdr"')
        subprocess.call([
            os.path.join(os.path.dirname(__file__), '..',
                         self.config.config['GRC_BINARY']), '-s', "rtl-sdr"
        ])

    def __source_pluto_sdr_command(self):
        print os.path.join(os.path.dirname(__file__), '..',
                           self.config.config['GRC_BINARY'] + ' -s "pluto"')
        subprocess.call([
            os.path.join(os.path.dirname(__file__), '..',
                         self.config.config['GRC_BINARY']), '-s', "pluto"
        ])

    def __demodulator_command(self):
        print os.path.join(
            os.path.dirname(__file__), '..',
            self.config.config['GRC_BINARY'] + ' -s "demodulator"')
        subprocess.call([
            os.path.join(os.path.dirname(__file__), '..',
                         self.config.config['GRC_BINARY']), '-s', "demodulator"
        ])

    def __run_demodulator(self):
        self.demodulator_thread = Thread(target=self.__demodulator_command)
        self.demodulator_thread.start()

    def __connect_help_buttons(self):
        self.ui.helpAccountButton.clicked.connect(
            lambda: webbrowser.open(self.config.config['HELP_ACCOUNT']))
        self.ui.helpSignalSourceButton.clicked.connect(
            lambda: webbrowser.open(self.config.config['HELP_SOURCE']))
        self.ui.helpDemodulatorButton.clicked.connect(
            lambda: webbrowser.open(self.config.config['HELP_DEMODULATOR']))
        self.ui.helpCloudUploadButton.clicked.connect(
            lambda: webbrowser.open(self.config.config['HELP_UPLOAD']))

    def __create_start_threads(self):
        self.item_widgets_thread = GetFromQueueToSignalThread(
            self.stop_event, 0.05, self.gui_queue)
        self.item_widgets_thread.item_ready.connect(self.__process_frame_data)
        self.item_widgets_thread.start()

        self.item_widgets_update_thread = GetFromQueueToSignalThread(
            self.stop_event, 0.05, self.cloud_rx_queue)
        self.item_widgets_update_thread.item_ready.connect(self.__update_list)
        self.item_widgets_update_thread.start()

        self.auth_status_thread = CheckAuthThread(self.stop_event,
                                                  self.config.config)
        self.auth_status_thread.state_signal.connect(self.set_auth_status)
        self.auth_status_thread.start()

        self.update_service_thread = Updater(self.stop_event,
                                             self.config.config,
                                             self.application_path)
        self.update_service_thread.to_update.connect(
            self.update_service_thread.show_update_window)
        self.update_service_thread.start()

        self.ui.credentialsLoadButton.clicked.connect(
            self.__credentials_file_load_check_propagate)

    def __credentials_file_load_check_propagate(self):
        result = LoadCredentialsFile.load_with_dialog(self.config.config)
        if result is CorrectCredentials:
            self.auth_status_thread.cloud.load_credentials()
            self.upload_cloud_thread.cloud.load_credentials()
            self.auth_status_thread.check()
            self.logger.log(logging.DEBUG, "Updated credential file")
        elif result is WrongOrEmptyCredentials:
            self.logger.log(
                logging.DEBUG,
                "Not updated credential file - wrong or empty file")
            self.__show_dialog_credentials_not_loaded_wrong_file()
        elif result is UnknownError:
            self.logger.log(
                logging.DEBUG,
                "Not updated credential file - wrong or empty path")
            self.__show_dialog_credentials_not_loaded_unknown_error()
        return result

    def __show_dialog_credentials_not_loaded_wrong_file(self):
        msg = "<h2>Credentials file not loaded!</h2>" \
              "Credentials file is empty or corrupted.<br>" \
              "Go to radio.pw-sat.pl and generate a new credentials file."
        QtGui.QMessageBox.warning(None, 'Credentials file not loaded!', msg,
                                  QtGui.QMessageBox.Ok)

    def __show_dialog_credentials_not_loaded_unknown_error(self):
        msg = "<h2>Credentials file not loaded!</h2>" \
              "Unknown error occured!"
        QtGui.QMessageBox.warning(None, 'Credentials file not loaded!', msg,
                                  QtGui.QMessageBox.Ok)

    def __set_send_active(self):
        self.send_active.set()

    def __init_ribbon(self):
        self.__connect_help_buttons()
        self.__connect_frame_uploads_buttons()
        self.__init_credential_buttons()

    def __init_credential_buttons(self):
        set_btn_icon(self.ui.credentialsButton,
                     ":/user/img/user-alt-slash-solid.svg")
        self.ui.credentialsButton.setText("Signing in...")
        self.ui.credentialsButton.setToolTip("Signing in in progress...")

    def __first_frame(self):
        if self.first_frame:
            self.first_frame = False
            return True
        return False

    def __process_frame_data(self, packet):
        # remove welcome widget when the first frame is received
        if self.__first_frame():
            self.__remove_last_widget()

        data = self.__produce_list_item(packet)
        self.__add_to_list(data)
        self.ui.framesListWidget.scrollToBottom()
        self.cloud_tx_queue.append(data)

    def __produce_list_item(self, packet):
        item_widget = UiFrameListWidgetFactory.get(packet)
        item = QtGui.QListWidgetItem(self.ui.framesListWidget)
        item.setSizeHint(QtCore.QSize(0, 65))
        return PacketListData(item, item_widget, packet)

    def __add_to_list(self, data):
        self.ui.framesListWidget.addItem(data.item)
        self.ui.framesListWidget.setItemWidget(data.item, data.widget_item)

    def __update_list(self, data):
        base_url = self.config.config['CLOUD_URL']
        if data.upload_status:
            UiFrameListWidgetFactory.set_sent(data.widget_item, data.uuid,
                                              base_url)
        else:
            UiFrameListWidgetFactory.set_send_error(data.widget_item)

    def __autosend_handle(self):
        if self.ui.autoUploadToolButton.isChecked():
            self.ui.autoUploadToolButton.setText("Auto-Upload Enabled")
            self.send_active.set()
        else:
            self.send_active.clear()
            self.ui.autoUploadToolButton.setText("Auto-Upload Disabled")

    def __resend_errors_handle(self):
        upload = UploadCloudError(self.stop_event, self.config.config,
                                  self.cloud_rx_queue, self.error_queue)
        upload.start()

    def set_auth_status(self, auth_status):
        self.validate_credentials.load()

        if auth_status:
            set_btn_icon(self.ui.credentialsButton,
                         ":/user/img/user-check-solid.svg")
            self.ui.credentialsButton.setText(
                self.validate_credentials.credentials_data["identifier"])
            self.ui.credentialsButton.setToolTip("Signed up successfully")

        elif self.validate_credentials.file_blank():
            set_btn_icon(self.ui.credentialsButton,
                         ":/user/img/user-alt-slash-solid.svg")
            self.ui.credentialsButton.setText("No credentials loaded")
            self.ui.credentialsButton.setToolTip(
                "Download credentials from radio.pw-sat.pl and load file"
                " clicking button load credentials from file.")

        elif not auth_status and not self.validate_credentials.file_blank():
            set_btn_icon(self.ui.credentialsButton,
                         ":/user/img/user-alt-slash-solid.svg")
            self.ui.credentialsButton.setText(
                "Cannot sign in, trying again...")
            self.ui.credentialsButton.setToolTip(
                "Cannot sign in - restart application, check internet connection or"
                " download and load new credentials from radio.pw-sat.pl.")

    def __add_available_signal_sources(self):
        self.ui.signalSourceDropdownButton.addItem("Recorded IQ File")
        self.ui.signalSourceDropdownButton.addItem("RTL-SDR")
        self.ui.signalSourceDropdownButton.addItem("Audio in (SSB - USB)")

        if platform.startswith('linux'):
            self.ui.signalSourceDropdownButton.addItem("FUNcube Dongle Pro+")
            self.ui.signalSourceDropdownButton.addItem("PlutoSDR")

    def closeEvent(self, event):
        self.exit_message_handler.exit_action(event)
Example #10
    def run(self, opts=None):
        if opts:
            options = self.create_options(opts)
            self.set_options(options)
        options = self.options

        # check proxy options
        proxy = options.proxy
        if options.proxy:
            try:
                pattern = 'http[s]?://(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]):[0-9][0-9][0-9][0-9]'
                m = re.search(pattern, proxy)
                if m is None:
                    self.banner()
                    print("\n[Error] - Proxy malformed!\n")
                    sys.exit(2)
            except Exception:
                self.banner()
                print("\n[Error] - Proxy malformed!\n")
                sys.exit(2)

        # check tor connection
        if options.checktor:
            self.banner()
            try:
                print("\nSending request to: https://check.torproject.org\n")
                tor_reply = urllib2.urlopen(
                    "https://check.torproject.org").read()
                your_ip = tor_reply.split('<strong>')[1].split(
                    '</strong>')[0].strip()
                if not tor_reply or 'Congratulations' not in tor_reply:
                    print("It seems that Tor is not properly set.\n")
                    print("Your IP address appears to be: " + your_ip + "\n")
                else:
                    print("Congratulations!. Tor is properly being used :-)\n")
                    print("Your IP address appears to be: " + your_ip + "\n")
            except:
                print(
                    "Cannot reach TOR checker system!. Are you correctly connected?\n"
                )

        # search for 'zombies' on google results
        if options.search:
            try:
                self.banner()
                print(
                    "\nSearching for 'zombies' on google results. Good Luck ;-)\n"
                )
                zombies = self.search_zombies()
                check_url_link_reply = raw_input(
                    "Wanna check if they are valid zombies? (Y/n)\n")
                print '-' * 25
                if check_url_link_reply == "n" or check_url_link_reply == "N":
                    print "\nBye!\n"
                else:
                    test = self.testing(zombies)
            except Exception:
                print("[Error] - Something wrong searching!\n")

        # test web 'zombie' servers -> show statistics
        if options.test:
            try:
                self.banner()
                zombies = self.extract_zombies()
                test = self.testing(zombies)
            except Exception:
                print("\n[Error] - Something wrong testing!\n")

        # attack target -> exploit Open Redirect massively and connect all vulnerable servers to a target
        if options.target:
            try:
                self.banner()
                zombies = self.extract_zombies()
                attack = self.attacking(zombies)
            except Exception:
                print("\n[Error] - Something wrong attacking!\n")

        # check/update for latest stable version
        if options.update:
            self.banner()
            try:
                print(
                    "\nTrying to update automatically to the latest stable version\n"
                )
                Updater()
            except:
                print(
                    "\nSomething was wrong!. You should checkout UFONet manually with:\n"
                )
                print("$ git clone https://github.com/epsylon/ufonet\n")
Example #11
def main(url, path, replace, push, html, update):
    """
    Start vulnerability analysis using command line tool.
    :param url: URL to a github repository
    :param path: path to a locally stored project
    :param replace: flag of whether detected vulnerabilities
    should be automatically replaced with safe alternative
    :param push: flag indicating whether pull-request should
    automatically be created
    :param html: path to html file which will contain report
    """
    # analyze source code of provided project
    print("Start analysis")

    # setup logging
    logger = logging.getLogger()
    logHandler = logging.FileHandler('/home/metrics.json')
    logger.addHandler(logHandler)
    formatter = jsonlogger.JsonFormatter()
    logHandler.setFormatter(formatter)
    logger.setLevel(logging.INFO)

    # store remote repo locally in /tmp
    local_repo_path = '/tmp/' + str(uuid.uuid4())
    gh_handler = None

    if url:
        # download remote repo
        gh_handler = GithubRepoHandler(url)
        gh_handler.download_repository(local_repo_path)
        gh_handler.get_repository_metrics()
    elif path:
        # analyze local repo
        local_repo_path = path

    updater = None

    try:
        updater = Updater(local_repo_path)
    except:
        print("Cannot update due to error")

    vulnerability_analyzer = VulnerabilityAnalyzer(local_repo_path)

    try:
        # check for vulnerable functions and vulnerable dependencies
        vulnerability_analyzer.analyze()
    except:
        print("Python AST cannot be parsed. Terminating analysis")

        if url:
            logger.info(url, extra={'analysis_failed': True})

        sys.exit(1)

    vulnerable_functions = vulnerability_analyzer.detected_vulnerable_functions
    vulnerable_imports = vulnerability_analyzer.detected_vulnerable_imports
    vulnerable_installed_dependencies = vulnerability_analyzer.detected_vulnerable_installed_dependencies

    outdated_dependencies = []

    if updater:
        outdated_dependencies = updater.outdated_dependencies

    pre_test_results = {}
    test_metrics_before = {}

    if len(vulnerable_functions) > 0:
        # run tests
        pre_tester = TestInfo(local_repo_path)

        try:
            pre_tester.runToxTest()
        except:
            print("An error occured while executing tests")

        print("Tests done")
        pre_test_results = pre_tester.getTestLog()
        test_metrics_before = pre_tester.get_test_metrics()

    post_test_metrics = {}
    post_test_results = {}

    if replace and len(vulnerable_functions) > 0:
        # automatically replace detected vulnerabilities if available
        print("Replace detected vulnerabilities")
        vulnerability_analyzer.replace_vulnerabilities_in_ast()

        # run tests
        post_tester = TestInfo(local_repo_path)

        try:
            post_tester.runToxTest()
            post_test_results = post_tester.getTestLog()
            post_test_metrics = post_tester.get_test_metrics()
        except:
            print("An error occured while executing tests")

    report = Report(vulnerable_functions, vulnerable_imports, pre_test_results,
                    post_test_results, outdated_dependencies, [], replace)

    # automatically create pull request
    if push and (len(vulnerable_functions) > 0 or len(vulnerable_imports) > 0):
        print("Create pull-request")

        gh_handler.push_updates("*****@*****.**",
                                "bugrevelio", "Vulnerabilities",
                                report.pull_request_report(),
                                "bugrevelio:master", "master")

    print(report.plain_text_report())

    if html:
        report.html_report(html)

    if url:
        # collect relevant metrics
        repo_metrics = gh_handler.get_repository_metrics()
        vulnerability_metrics = vulnerability_analyzer.get_vulnerability_metrics(
        )
        repo_metrics.update(vulnerability_metrics)
        repo_metrics.update(test_metrics_before)
        repo_metrics.update(post_test_metrics)

        logger.info(url, extra=repo_metrics)

    # delete downloaded repo
    shutil.rmtree(local_repo_path)
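
Note: the Updater in this example only contributes an outdated_dependencies attribute for the analysed project; its implementation is not shown. A minimal sketch of one way such a check could work, assuming it shells out to pip (the real class may inspect the repository's requirements instead):

import json
import subprocess


class DependencyUpdaterSketch(object):
    """Hypothetical sketch only -- not the project's actual Updater."""

    def __init__(self, repo_path):
        self.repo_path = repo_path
        self.outdated_dependencies = self._find_outdated()

    def _find_outdated(self):
        # 'pip list --outdated --format=json' reports name, version and latest_version.
        raw = subprocess.check_output(
            ['pip', 'list', '--outdated', '--format=json'])
        return json.loads(raw)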