Example #1
def start(override: bool = False):
    """If no lock file, make one and start onionr.

    Error exit if there is one and it's not overridden
    """
    if os.path.exists(filepaths.lock_file) and not override:
        if os.path.exists(filepaths.restarting_indicator):
            try:
                os.remove(filepaths.restarting_indicator)
            except FileNotFoundError:
                pass
            else:
                return
        logger.fatal('Cannot start. Daemon is already running,' +
                     ' or it did not exit cleanly.\n' +
                     ' (if you are sure that there is not a daemon running,' +
                     f' delete {filepaths.lock_file} & try again).',
                     terminal=True)
    else:
        if not onionrvalues.DEVELOPMENT_MODE:
            lock_file = open(filepaths.lock_file, 'w')
            lock_file.write('delete at your own risk')
            lock_file.close()

        # Start Onionr daemon
        daemon()

        try:
            os.remove(filepaths.lock_file)
        except FileNotFoundError:
            pass
Example #2
    def startTor(self):
        '''
            Start Tor with onion service on port 80 & socks proxy on random port
        '''

        self.generateTorrc()

        if os.path.exists('./tor'):
            torBinary = './tor'
        elif os.path.exists('/usr/bin/tor'):
            torBinary = '/usr/bin/tor'
        else:
            torBinary = 'tor'

        try:
            tor = subprocess.Popen([torBinary, '-f', self.torConfigLocation],
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE)
        except FileNotFoundError:
            logger.fatal(
                "Tor was not found in your path or the Onionr directory. Please install Tor and try again."
            )
            sys.exit(1)
        else:
            # Test Tor Version
            torVersion = subprocess.Popen([torBinary, '--version'],
                                          stdout=subprocess.PIPE,
                                          stderr=subprocess.PIPE)
            for line in iter(torVersion.stdout.readline, b''):
                if 'Tor 0.2.' in line.decode():
                    logger.warn(
                        "Running 0.2.x Tor series, no support for v3 onion peers"
                    )
                    break
            torVersion.kill()

        # wait for tor to get to 100% bootstrap
        for line in iter(tor.stdout.readline, b''):
            if 'Bootstrapped 100%: Done' in line.decode():
                break
            elif 'Opening Socks listener' in line.decode():
                logger.debug(line.decode().replace('\n', ''))
        else:
            logger.fatal(
                'Failed to start Tor. Try killing any other Tor processes owned by this user.'
            )
            return False

        logger.info('Finished starting Tor')
        self.readyState = True

        myID = open('data/hs/hostname', 'r')
        self.myID = myID.read().replace('\n', '')
        myID.close()

        torPidFile = open('data/torPid.txt', 'w')
        torPidFile.write(str(tor.pid))
        torPidFile.close()

        return True
Example #3
 def __init__(self, name, size):
     self.name = name + '.TTF'
     try:
         self.font = pygame.font.Font(
             os.path.join(constants.fontPath, self.name), size)
     except Exception, e:
         logger.fatal("Unable to load font named %s: [%s]" % (self.name, e))
Example #4
    def makeSpanningTree(self, nodeGroup):
        for i in xrange(NUM_SPANNING_ATTEMPTS):
            # Pick a random starting node in the group.
            modGroup = set(nodeGroup)
            newEdges = dict([(node, set()) for node in list(nodeGroup)])
            temp = list(modGroup)
            random.shuffle(temp)
            start = temp[0]
            seenNodes = set([start])

            # Use None to mark the end of tree expansion at a given depth.
            queue = [start, None]
            while queue:
                node = queue.pop(0)
                if node is None:
                    if not queue:
                        # Out of nodes to add to the spanning tree!
                        break
                    # Finished connecting to all the nodes at this depth.
                    # Shuffle this set of nodes, to break up patterns that would
                    # otherwise emerge in the less random node layouts, and to
                    # give us a chance of finding a working arrangement in
                    # more unusual patterns.
                    random.shuffle(queue)
                    queue.append(None)
                    continue
                modGroup.remove(node)
                for neighbor in self.edges[node]:
                    if neighbor not in seenNodes and neighbor in nodeGroup:
                        # Make certain that adding the neighbor would not create
                        # too acute an angle with any existing edges.
                        neighborVector = neighbor.sub(node)
                        canAddEdge = True
                        for alt in newEdges[node]:
                            altVector = alt.sub(node)
                            angleDistance = altVector.angleWithVector(
                                neighborVector)
                            if abs(angleDistance) < MIN_ANGLE_DISTANCE:
                                canAddEdge = False
                        if canAddEdge:
                            seenNodes.add(neighbor)
                            newEdges[node].add(neighbor)
                            newEdges[neighbor].add(node)
                            queue.append(neighbor)
#                self.drawAll(interiorNodes = list(seenNodes),
#                        edges = newEdges, allNodes = list(nodeGroup))
#        self.drawAll(edges = newEdges, shouldForceSave = True)
            if not modGroup:
                # Every node was added to the spanning tree, so we're good
                # to go!
                return newEdges
        self.drawAll(interiorNodes=list(nodeGroup),
                     edges=dict([(node, self.edges[node])
                                 for node in nodeGroup]),
                     allNodes=list(nodeGroup),
                     shouldLabelNodes=True)
        logger.fatal(
            "Unable to make a spanning tree that hit every edge in %s" %
            nodeGroup)
        return None
Example #5
    def __init__(self):
        ## Maps config keys to their values.
        self.config = {
            'sound': 1,
            'fullscreen': 0,
        }
        ## Maps action names (e.g. 'jump', 'left') to PyGame key identifiers.
        self.controls = defaultPlayerKeys
        filename = self.getConfigPath()
        self.isFirstTimePlaying = False
        if not os.path.exists(filename):
            self.isFirstTimePlaying = True
            self.writeConfig()
        else:
            logger.debug("Reading a file to get config")
            fh = open(filename, 'r')
            for line in fh:
                (action, key) = line.split(':', 1)
                key = key.rstrip()
                if action in defaultPlayerKeys:
                    self.controls[action] = int(key)
                else:
                    self.config[action] = int(key)
            fh.close()

            for action, key in defaultPlayerKeys.items():
                if not action in self.controls:
                    logger.fatal('Controls do not have an entry for action [' + action + ']')
        logger.debug("Controls are",str(self.controls))
Example #6
def HeuristicRMSafetyCheck(path):
  """A heuristic safety check to prevent rm -rf / and the like.

  Logs to fatal if the safety assertions are false.
  """
  path_elements = path.split('/')
  if len(path_elements) < 4: # path must have at least 4 elements.
    logger.fatal('Cannot rm -rf %s: path too shallow.' % path)
Example #7
 def loadModuleItems(self, path, items):
     path = path.replace(os.sep, '.')
     module = None
     try:
         module = __import__(path, globals(), locals(), items)
     except Exception, e:
         logger.fatal('Unable to load', items, " from ", path, ':',
                      e.message)
Example #8
    def loadAnimations(self, spriteName, shouldCreateCopy=True):
        if spriteName in self.animationsCache:
            if shouldCreateCopy:
                return self.copy(spriteName)
            return self.animationsCache[spriteName]

        # Search for a file named 'spriteConfig.py' through the path specified
        # in spriteName. Use the deepest one we find. This lets us share
        # spriteConfigs for similar sprites.
        directories = spriteName.split(os.sep)
        modulePath = None
        path = constants.spritePath
        for directory in directories:
            path += os.sep + directory
            if os.path.exists(
                    os.path.join(path, constants.spriteFilename + '.py')):
                modulePath = path

        if modulePath is None:
            logger.fatal(
                "Unable to find a spriteConfig.py file anywhere in the path",
                spriteName)

        modulePath = os.path.join(modulePath, constants.spriteFilename)
        spriteModule = game.dynamicClassManager.loadModuleItems(
            modulePath, ['sprites'])
        animations = {}
        for animationName, data in spriteModule.sprites.iteritems():
            # Load the bounding polygon, and all optional flags, with sane
            # defaults.
            animPolygon = polygon.Polygon(
                [Vector2D(point) for point in data['polygon']])
            shouldLoop = True
            if 'loop' in data:
                shouldLoop = data['loop']
            updateRate = 1
            if 'updateRate' in data:
                updateRate = data['updateRate']
            updateFunc = None
            if 'updateFunc' in data:
                updateFunc = data['updateFunc']
            drawOffset = Vector2D(0, 0)
            if 'drawOffset' in data:
                drawOffset = Vector2D(data['drawOffset'])
            moveOffset = Vector2D(0, 0)
            if 'moveOffset' in data:
                moveOffset = Vector2D(data['moveOffset'])
            frameActions = dict()
            if 'frameActions' in data:
                frameActions = data['frameActions']
            animations[animationName] = animation.Animation(
                spriteName, animationName, animPolygon, shouldLoop, updateRate,
                updateFunc, drawOffset, moveOffset, frameActions)

        self.animationsCache[spriteName] = animations
        if shouldCreateCopy:
            return self.copy(spriteName)
        return self.animationsCache[spriteName]
Example #9
def start(o_inst, input = False, override = False):
    if os.path.exists('.onionr-lock') and not override:
        logger.fatal('Cannot start. Daemon is already running, or it did not exit cleanly.\n(if you are sure that there is not a daemon running, delete .onionr-lock & try again).')
    else:
        if not o_inst.debug and not o_inst._developmentMode:
            lockFile = open('.onionr-lock', 'w')
            lockFile.write('')
            lockFile.close()
        o_inst.running = True
        o_inst.daemon()
        o_inst.running = False
        if not o_inst.debug and not o_inst._developmentMode:
            try:
                os.remove('.onionr-lock')
            except FileNotFoundError:
                pass
Example #10
def start(input: bool = False, override: bool = False):
    """If no lock file, make one and start onionr, error if there is and its not overridden"""
    if os.path.exists(filepaths.lock_file) and not override:
        logger.fatal(
            'Cannot start. Daemon is already running, or it did not exit cleanly.\n(if you are sure that there is not a daemon running, delete onionr.lock & try again).',
            terminal=True)
    else:
        if not onionrvalues.DEVELOPMENT_MODE:
            lockFile = open(filepaths.lock_file, 'w')
            lockFile.write('delete at your own risk')
            lockFile.close()
        daemon()
        try:
            os.remove(filepaths.lock_file)
        except FileNotFoundError:
            pass
Example #11
def getFieldDataHelper(perfDataDict, perfJ2eeType, whitelistDict=None):
    '''
    Returns a list of (field, value) tuples
    '''
    def makeTuple(perfData, name, id):
        return (name + "." + id, perfData[id])

    rtnList = []
    perfName = perfDataDict["name"].replace(" ", "_")

    if (perfDataDict["classificaton"] == "CountStatistic"):
        for value in COUNTSTATISTIC_VALUES:
            if (isWhitelisted(perfJ2eeType, perfDataDict["name"], value,
                              whitelistDict) == True):
                # rtnList.append((perfName + ".count", perfDataDict["count"]))
                rtnList.append(makeTuple(perfDataDict, perfName, value))

    elif (perfDataDict["classificaton"] == "AverageStatistic"):
        for value in AVERAGESTATISTIC_VALUES:
            if (isWhitelisted(perfJ2eeType, perfDataDict["name"], value,
                              whitelistDict) == True):
                rtnList.append(makeTuple(perfDataDict, perfName, value))
    elif (perfDataDict["classificaton"] == "TimeStatistic"):
        for value in TIMESTATISTIC_VALUES:
            if (isWhitelisted(perfJ2eeType, perfDataDict["name"], value,
                              whitelistDict) == True):
                rtnList.append(makeTuple(perfDataDict, perfName, value))

    elif (perfDataDict["classificaton"] == "RangeStatistic"):
        for value in RANGESTATISTIC_VALUES:
            if (isWhitelisted(perfJ2eeType, perfDataDict["name"], value,
                              whitelistDict) == True):
                rtnList.append(makeTuple(perfDataDict, perfName, value))

    elif (perfDataDict["classificaton"] == "BoundedRangeStatistic"):
        for value in BOUNDEDRANGESTATISTIC_VALUES:
            if (isWhitelisted(perfJ2eeType, perfDataDict["name"], value,
                              whitelistDict) == True):
                rtnList.append(makeTuple(perfDataDict, perfName, value))

    else:
        l.fatal(
            "Invalid classificaton in perfDataDict found: '%s'. Exiting ..." %
            (perfDataDict["classificaton"]))
        raise Exception

    return rtnList
Example #12
    def start(self):
        '''
            Starts the Onionr daemon
        '''

        if os.path.exists('.onionr-lock'):
            logger.fatal(
                'Cannot start. Daemon is already running, or it did not exit cleanly.\n(if you are sure that there is not a daemon running, delete .onionr-lock & try again).'
            )
        else:
            if not self.debug and not self._developmentMode:
                lockFile = open('.onionr-lock', 'w')
                lockFile.write('')
                lockFile.close()
            self.daemon()
            if not self.debug and not self._developmentMode:
                os.remove('.onionr-lock')
Example #13
def start(override: bool = False):
    """If no lock file, make one and start onionr.

    Error exit if there is one and it's not overridden
    """
    if os.path.exists(filepaths.lock_file) and not override:
        if os.path.exists(filepaths.restarting_indicator):
            try:
                os.remove(filepaths.restarting_indicator)
            except FileNotFoundError:
                pass
            else:
                return
        with open(filepaths.lock_file, 'r') as lock_file:
            try:
                proc = psutil.Process(int(lock_file.read())).name()
            except psutil.NoSuchProcess:
                proc = ""
            if not proc.startswith("python"):
                logger.info(
                    f"Detected stale run file, deleting {filepaths.lock_file}", terminal=True)
                try:
                    os.remove(filepaths.lock_file)
                except FileNotFoundError:
                    pass
                start(override=True)
                return
        logger.fatal('Cannot start. Daemon is already running,'
                     + ' or it did not exit cleanly.\n'
                     + ' (if you are sure that there is not a daemon running,'
                     + f' delete {filepaths.lock_file} & try again).',
                     terminal=True)
    else:
        if not onionrvalues.DEVELOPMENT_MODE:
            lock_file = open(filepaths.lock_file, 'w')
            lock_file.write(str(os.getpid()))
            lock_file.close()

        # Start Onionr daemon
        daemon()

        try:
            os.remove(filepaths.lock_file)
        except FileNotFoundError:
            pass
Example #14
 def loadDynamicClass(self, path):
     if path in self.classPathToClassMap:
         return self.classPathToClassMap[path]
     try:
         logger.debug("Loading module", path)
         # In order to allow arbitrary naming of these classes, we first
         # import a function that tells us the name of the class, then we
         # import the class itself.
         # \todo: seems like this could be done better somehow.
         nameFuncModule = __import__(path, globals(), locals(),
                                     ['getClassName'])
         className = nameFuncModule.getClassName()
         classModule = __import__(path, globals(), locals(), [className])
         initFunc = getattr(classModule, className)
         self.classPathToClassMap[path] = initFunc
         return initFunc
     except Exception, e:
         logger.fatal("Failed to load module", path, ":", e)
Example #15
def getTagDataTuples(perfDataEntry):
    '''
    returns list of tag tuples.
    [ (tag1-name, tag1-value), (tag2-name, tag2-value), ... ]
    '''
    rawTagString = perfDataEntry["tags"]

    tagsList = rawTagString.split(NODE_SEPARATOR)
    if len(tagsList) > len(TAG_NAMES):
        l.fatal("you need more labels in TAG_NAMES!")

    rtnList = []
    for x in range(len(tagsList)):
        tagName = TAG_NAMES[x]
        tagValue = tagsList[x].replace(" ", "_")
        if (x == 3):
            tagValue = translateStatName(tagValue)
        rtnList.append((tagName, tagValue))
    l.debug("raw  tags data tuples list: '%s'", rtnList)
    return rtnList
Example #16
def validate():
    """
    Should be run after logging is enabled.

    """
    logger.debug("Running script with argv: " + str(sys.argv))
    logger.debug("Parsed options: " + str(options))
    logger.debug("Unparsed args: " + str(args))
    
    if args:
        logger.fatal("Unrecognized command line arguments: " + string.join(args))

    if options.non_root_user:
        try:
            utils.get_user_idinfo(options.non_root_user)
        except KeyError:
            logger.fatal("User '%s' doesn't exist" % options.non_root_user)

    if options.non_root_user and not utils.is_root():
        logger.fatal("[-u | --non-root-user] flags are invalid when current user is not root")

    if options.break_at_failure:
        options.debug = True

    if options.filter_tests:
        options.filter_tests = re.compile(options.filter_tests)
    
    logger.debug("Preprocessed options: " + str(options))
Example #17
def init():
    global key, readlock

    logger.info('Initializing...')
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

    logger.log(f'Using pack version {pack.version}')

    logger.info(
        f'Connecting to the server: {server[0]} on port {str(server[1])}')
    s.connect(server)

    logger.info('Starting listening thread...')
    t = threading.Thread(target=connection_keep_recv, args=(s, server))
    t.setDaemon(True)
    t.start()

    readlock = True
    logger.info('Registering existence...')
    rand = secrets.token_urlsafe(32)
    s.send(pack.pack('register', json.dumps({'rand': rand}).encode()))
    logger.info('Existence registered, waiting for the reply...')

    logger.info('Obtaining key for encryption...')
    # Obtain secret key for encryption, via HTTPS
    r = requests.get('https://' + keyserver + '/obtain', {'rand': rand})
    if 'key' not in r.json():
        logger.fatal('Failed to obtain key!')
    key = base64.b64decode(r.json()['key'])
    logger.log(key=key)
    readlock = False

    logger.info('Successfully obtained key!')

    # while True:
    #     logger.info('Loading...')
    t.join()
Example #18
def process_file(path, raw_file):
    # Open the file
    print("Starting processing " + raw_file)
    win32api.ShellExecute(0, 'open', path + raw_file, '', '', 1)
    hwnd_main_sep = get_window(None, None, None, 'SEP Reader - [' + raw_file + ']')
    if 0 == hwnd_main_sep:
        logger.fatal("FATAL ERROR: SEP Reader Window not found! program terminated.")
        exit(100)

    # Send the print command
    win32gui.SetForegroundWindow(hwnd_main_sep)
    win32api.keybd_event(17, 0, 0, 0)  # Ctrl
    win32api.keybd_event(80, 0, 0, 0)  # P
    win32api.keybd_event(17, 0, win32con.KEYEVENTF_KEYUP, 0)
    win32api.keybd_event(80, 0, win32con.KEYEVENTF_KEYUP, 0)

    # Wait for the print dialog, press Enter, and confirm the dialog disappears
    hwnd_printer = get_window(None, None, None, 'SEP Reader')
    win32gui.SetForegroundWindow(hwnd_printer)
    win32api.keybd_event(13, 0, 0, 0)  # Enter
    win32api.keybd_event(13, 0, win32con.KEYEVENTF_KEYUP, 0)
    if not wait_window_disappear(hwnd_printer):
        logger.error("File [" + raw_file + "] print-window-disappear timed out; WM_CLOSE signal sent.")
        win32api.SendMessage(hwnd_printer, win32con.WM_CLOSE, 0, 0)

    # Window cleanup (SEP closes the file but keeps the program window)
    win32gui.SetForegroundWindow(hwnd_main_sep)
    win32api.keybd_event(17, 0, 0, 0)  # Ctrl
    win32api.keybd_event(87, 0, 0, 0)  # W
    win32api.keybd_event(17, 0, win32con.KEYEVENTF_KEYUP, 0)
    win32api.keybd_event(87, 0, win32con.KEYEVENTF_KEYUP, 0)
    print("File " + raw_file + " printed.")
    # win32api.SendMessage(hwnd_main, win32con.WM_CLOSE, 0, 0)

    printed_file = os.path.join(path_printed, file_printed)
    renamed_file = os.path.join(path_printed, raw_file + '.pdf')
    moved_file = os.path.join(path_result, raw_file + '.pdf')

    # Rename the file
    try:
        rename_file(printed_file, renamed_file)
    except Exception as e:
        logger.fatal("FATAL ERROR: rename file failed, exception is "+str(e)+", process terminated.")
        exit(100)

    # Move the file
    try:
        move_file(renamed_file, moved_file, path_result)
    except Exception as e:
        logger.fatal("FATAL ERROR: move file failed, exception is "+str(e)+", process terminated.")
        exit(101)
Example #19
    def __init__(self, debug):
        '''
            Initialize the api server, prepping variables for later use

            This initialization defines all of the API entry points and handlers for the endpoints and errors
            This also saves the used host (random localhost IP address) to the data folder in host.txt
        '''

        config.reload()

        if config.get('devmode', True):
            self._developmentMode = True
            logger.set_level(logger.LEVEL_DEBUG)
        else:
            self._developmentMode = False
            logger.set_level(logger.LEVEL_INFO)

        self.debug = debug
        self._privateDelayTime = 3
        self._core = Core()
        self._crypto = onionrcrypto.OnionrCrypto(self._core)
        self._utils = onionrutils.OnionrUtils(self._core)
        app = flask.Flask(__name__)
        bindPort = int(config.get('client')['port'])
        self.bindPort = bindPort
        self.clientToken = config.get('client')['client_hmac']
        if not os.environ.get("WERKZEUG_RUN_MAIN") == "true":
            logger.debug('Your HMAC token: ' + logger.colors.underline +
                         self.clientToken)

        if not debug and not self._developmentMode:
            hostNums = [
                random.randint(1, 255),
                random.randint(1, 255),
                random.randint(1, 255)
            ]
            self.host = '127.' + str(hostNums[0]) + '.' + str(
                hostNums[1]) + '.' + str(hostNums[2])
        else:
            self.host = '127.0.0.1'
        hostFile = open('data/host.txt', 'w')
        hostFile.write(self.host)
        hostFile.close()

        @app.before_request
        def beforeReq():
            '''
                Simply define the request as not having yet failed, before every request.
            '''
            self.requestFailed = False

            return

        @app.after_request
        def afterReq(resp):
            if not self.requestFailed:
                resp.headers['Access-Control-Allow-Origin'] = '*'
            else:
                resp.headers['server'] = 'Onionr'
            resp.headers['Content-Type'] = 'text/plain'
            resp.headers["Content-Security-Policy"] = "default-src 'none'"
            resp.headers['X-Frame-Options'] = 'deny'
            resp.headers['X-Content-Type-Options'] = "nosniff"

            return resp

        @app.route('/client/')
        def private_handler():
            startTime = math.floor(time.time())
            # we should keep a hash DB of requests (with hmac) to prevent replays
            action = request.args.get('action')
            #if not self.debug:
            token = request.args.get('token')
            if not self.validateToken(token):
                abort(403)
            self.validateHost('private')
            if action == 'hello':
                resp = Response('Hello, World! ' + request.host)
            elif action == 'shutdown':
                request.environ.get('werkzeug.server.shutdown')()
                resp = Response('Goodbye')
            elif action == 'stats':
                resp = Response('me_irl')
            else:
                resp = Response('(O_o) Dude what? (invalid command)')
            endTime = math.floor(time.time())
            elapsed = endTime - startTime
            if elapsed < self._privateDelayTime:
                time.sleep(self._privateDelayTime - elapsed)

            return resp

        @app.route('/public/')
        def public_handler():
            # Public means it is publicly network accessible
            self.validateHost('public')
            action = request.args.get('action')
            requestingPeer = request.args.get('myID')
            data = request.args.get('data')
            if action == 'firstConnect':
                pass
            elif action == 'ping':
                resp = Response("pong!")
            elif action == 'getHMAC':
                resp = Response(self._crypto.generateSymmetric())
            elif action == 'getSymmetric':
                resp = Response(self._crypto.generateSymmetric())
            elif action == 'getDBHash':
                resp = Response(self._utils.getBlockDBHash())
            elif action == 'getBlockHashes':
                resp = Response(self._core.getBlockList())
            # setData should be something the communicator initiates, not this api
            elif action == 'getData':
                resp = self._core.getData(data)
                if resp == False:
                    abort(404)
                    resp = ""
                resp = Response(resp)
            elif action == 'pex':
                response = ','.join(self._core.listAdders())
                if len(response) == 0:
                    response = 'none'
                resp = Response(response)
            elif action == 'kex':
                response = ','.join(self._core.listPeers())
                if len(response) == 0:
                    response = 'none'
                resp = Response(response)
            else:
                resp = Response("")

            return resp

        @app.errorhandler(404)
        def notfound(err):
            self.requestFailed = True
            resp = Response("")

            return resp

        @app.errorhandler(403)
        def authFail(err):
            self.requestFailed = True
            resp = Response("403")

            return resp

        @app.errorhandler(401)
        def clientError(err):
            self.requestFailed = True
            resp = Response("Invalid request")

            return resp

        if not os.environ.get("WERKZEUG_RUN_MAIN") == "true":
            logger.info('Starting client on ' + self.host + ':' +
                        str(bindPort) + '...')

        try:
            app.run(host=self.host, port=bindPort, debug=True, threaded=True)
        except Exception as e:
            logger.error(str(e))
            logger.fatal('Failed to start client on ' + self.host + ':' +
                         str(bindPort) + ', exiting...')
            exit(1)
Example #20
    def __enter__(self):
        '''Initialize the test framework or exit on failure'''

        os.environ['__DCGM_TESTING_FRAMEWORK_ACTIVE'] = '1'
        # Make sure that the MPS server is disabled before running the test-suite
        if utils.is_mps_server_running():
            print('DCGM Testing framework is not interoperable with MPS server. Please disable MPS server.')
            sys.exit(1)
        
        # Various setup steps
        option_parser.parse_options()
        utils.verify_user_file_permissions()
        utils.verify_localhost_dns()
        if not option_parser.options.use_running_hostengine:
            utils.verify_hostengine_port_is_usable()
        utils.verify_dcgm_service_not_active()
        if not test_utils.verify_dcgmi_executible_visible_for_all_users():
            print('DCGM Testing framework is located in a directory that does not have proper permissions to run ' \
                  'tests under an unprivileged service account.')
            print('See the logs to understand which part of the path lacks the read+execute permissions')
            print('Either run `chmod o+rx <directory>` or move the DCGM Testing framework to another location')
            sys.exit(1)
        utils.verify_nvidia_fabricmanager_service_active_if_needed()

        if not test_utils.noLogging:
            logger.setup_environment()

            if logger.log_dir:
                logger.close()
            
        option_parser.validate()
        
        if not test_utils.is_framework_compatible():
            logger.fatal("The test framework and dcgm versions are incompatible. Exiting Test Framework.")
            sys.exit(1)

        # Directory where DCGM test*.py files reside
        test_utils.set_tests_directory('tests')

        # Verify that package architecture matches python architecture
        if utils.is_64bit():
            # ignore this check on ppc64le and armv8 for now
            if not (platform.machine() == "ppc64le" or platform.machine() == "aarch64"):
                if not os.path.exists(os.path.join(utils.script_dir, "apps/amd64")):
                    print("Testing package is missing 64bit binaries, are you sure you're using package of correct architecture?")
                    sys.exit(1)
        else:
            if not os.path.exists(os.path.join(utils.script_dir, "apps/x86")):
                print("Testing package is missing 32bit binaries, are you sure you're using package of correct architecture?")
                sys.exit(1)

        # Stops the framework if running python 32bits on 64 bits OS
        if utils.is_windows():
            if os.name == "nt" and "32 bit" in sys.version and platform.machine() == "AMD64":
                print("Running Python 32-bit on a 64-bit OS is not supported. Please install Python 64-bit")
                sys.exit(1)

        if utils.is_linux():
            python_exec = str(sys.executable)
            python_arch = check_output(["file", "-L", python_exec])

            if "32-bit" in python_arch.decode('utf-8') and utils.is_64bit() == True:
                print("Running Python 32-bit on a 64-bit OS is not supported. Please install Python 64-bit")
                sys.exit(1)

        #Tell DCGM how to find our testing package's NVVS
        test_utils.set_nvvs_bin_path()
Example #21
    moved_file = os.path.join(path_result, raw_file + '.pdf')

    # Rename the file
    try:
        rename_file(printed_file, renamed_file)
    except Exception as e:
        logger.fatal("FATAL ERROR: rename file failed, exception is "+str(e)+", process terminated.")
        exit(100)

    # Move the file
    try:
        move_file(renamed_file, moved_file, path_result)
    except Exception as e:
        logger.fatal("FATAL ERROR: move file failed, exception is "+str(e)+", process terminated.")
        exit(101)


while 1:
    for i in os.walk(path_init):
        for fileName in i[2]:
            try:
                process_file(path_init, fileName)
                if path_init != path_processed:
                    move_file(path_init+fileName, path_processed+fileName, path_processed)
            except Exception as e:
                except_string = "FATAL ERROR: something error, exception is >>>"+str(e)+"<<<."
                print(except_string)
                logger.fatal(except_string)
    print("Process finished or no file, sleep 10 seconds.")
    time.sleep(10)
Example #22
    def startTor(self):
        '''
            Start Tor with onion service on port 80 & socks proxy on random port
        '''

        self.generateTorrc()

        if os.path.exists('./tor'):
            self.torBinary = './tor'
        elif os.path.exists('/usr/bin/tor'):
            self.torBinary = '/usr/bin/tor'
        else:
            self.torBinary = 'tor'

        try:
            tor = subprocess.Popen(
                [self.torBinary, '-f', self.torConfigLocation],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE)
        except FileNotFoundError:
            logger.fatal(
                "Tor was not found in your path or the Onionr directory. Please install Tor and try again."
            )
            sys.exit(1)
        else:
            # Test Tor Version
            torVersion = subprocess.Popen([self.torBinary, '--version'],
                                          stdout=subprocess.PIPE,
                                          stderr=subprocess.PIPE)
            for line in iter(torVersion.stdout.readline, b''):
                if 'Tor 0.2.' in line.decode():
                    logger.error('Tor 0.3+ required')
                    sys.exit(1)
                    break
            torVersion.kill()

        # wait for tor to get to 100% bootstrap
        try:
            for line in iter(tor.stdout.readline, b''):
                if 'bootstrapped 100' in line.decode().lower():
                    break
                elif 'opening socks listener' in line.decode().lower():
                    logger.debug(line.decode().replace('\n', ''))
            else:
                logger.fatal(
                    'Failed to start Tor. Maybe a stray instance of Tor used by Onionr is still running? This can also be a result of file permissions being too open'
                )
                return False
        except KeyboardInterrupt:
            logger.fatal('Got keyboard interrupt. Onionr will exit soon.',
                         timestamp=False,
                         level=logger.LEVEL_IMPORTANT)
            return False

        logger.debug('Finished starting Tor.', timestamp=True)
        self.readyState = True

        try:
            myID = open(self.dataDir + 'hs/hostname', 'r')
            self.myID = myID.read().replace('\n', '')
            myID.close()
        except FileNotFoundError:
            self.myID = ""

        torPidFile = open(self.dataDir + 'torPid.txt', 'w')
        torPidFile.write(str(tor.pid))
        torPidFile.close()

        return True
Example #23
    def walkWalls(self,
                  slopeAveragingDistance,
                  skipRate=1,
                  shouldUseOnlyRealWalls=False):
        # Find a point in the grid that's part of our sector, by walking
        # our line in gridspace.
        # This breaks down at junctions (where start and end
        # points are the same), but we know they own their endpoints.
        originalSpace = self.start.toGridspace()
        if not self.isJunction():
            endSpace = self.end.toGridspace()
            delta = endSpace.sub(originalSpace).normalize()
            while (not self.getIsOurSpace(originalSpace)
                   and originalSpace.distance(endSpace) > 2):
                originalSpace = originalSpace.add(delta)

        # Find a wall from that point by dropping straight down.
        while (self.getIsOurSpace(originalSpace)):
            originalSpace = originalSpace.addY(1)
        # And back out.
        originalSpace = originalSpace.addY(-1).toInt()

        # If we can't find our sector, then probably all of it
        # got absorbed by other sectors or pushed into walls, so don't
        # do the wallwalker. Otherwise, carry on.
        if not self.getIsOurSpace(originalSpace):
            return

        first = True
        numSteps = 0
        currentSpace = originalSpace.copy()
        recentSpaces = []
        while first or currentSpace.distanceSquared(
                originalSpace) > constants.EPSILON:
            first = False
            recentSpaces.append(currentSpace)
            startSpace = None
            if len(recentSpaces) >= slopeAveragingDistance:
                startSpace = recentSpaces.pop(0)
            if startSpace is not None:
                shouldReturnSpace = (numSteps % skipRate) == 0
                if shouldReturnSpace and shouldUseOnlyRealWalls:
                    # Check neighboring spaces for walls
                    shouldReturnSpace = False
                    for space in currentSpace.perimeter():
                        if game.map.getBlockAtGridLoc(
                                space) != generator.BLOCK_EMPTY:
                            shouldReturnSpace = True
                            break
                if shouldReturnSpace:
                    delta = currentSpace.sub(startSpace)
                    # We know the wallwalker travels clockwise; thus, the
                    # surface normal is always to our right.
                    angle = delta.angle() - math.pi / 2.0
                    yield (currentSpace, angle)
            numSteps += 1
            if numSteps > maxWallwalkerSteps:
                # This should never happen, and indicates something went
                # wrong in map generation.
                marks = [self.start.average(self.end).toGridspace()]
                game.map.markLoc = currentSpace
                game.map.drawStatus(deadSeeds=game.map.deadSeeds, marks=marks)
                logger.fatal("Hit maximum steps for node", self.id)
            # Get the space adjacent to our own that continues the walk,
            # by using the Marching Squares algorithm
            # (http://en.wikipedia.org/wiki/Marching_squares)
            x = currentSpace.x
            y = currentSpace.y
            marchIndex = 0
            if not self.getIsOurSpace(Vector2D(x, y)):
                marchIndex += 1
            if not self.getIsOurSpace(Vector2D(x, y + 1)):
                marchIndex += 2
            if not self.getIsOurSpace(Vector2D(x - 1, y + 1)):
                marchIndex += 4
            if not self.getIsOurSpace(Vector2D(x - 1, y)):
                marchIndex += 8
            currentSpace = currentSpace.add(marchingSquares[marchIndex])
Example #24
    t = threading.Thread(target=connection_keep_recv, args=(s, server))
    t.setDaemon(True)
    t.start()

    readlock = True
    logger.info('Registering existence...')
    rand = secrets.token_urlsafe(32)
    s.send(pack.pack('register', json.dumps({'rand': rand}).encode()))
    logger.info('Existence registered, waiting for the reply...')

    logger.info('Obtaining key for encryption...')
    # Obtain secret key for encryption, via HTTPS
    r = requests.get('https://' + keyserver + '/obtain', {'rand': rand})
    if 'key' not in r.json():
        logger.fatal('Failed to obtain key!')
    key = base64.b64decode(r.json()['key'])
    logger.log(key=key)
    readlock = False

    logger.info('Successfully obtained key!')

    # while True:
    #     logger.info('Loading...')
    t.join()


try:
    init()
except Exception as e:
    logger.fatal('An exception occurred:', Exception=e)
Example #25
from logger import InitLogConfig
import logger
import logging

InitLogConfig(loglevel=logging.INFO)

logger.debug("DEBUG")
logger.info("INFO")
logger.error("INFO")
logger.warning("INFO")
logger.fatal("INFO")
Example #26
    def startTor(self, gen_torrc=True) -> BooleanSuccessState:
        """
            Start Tor with onion service on port 80 & socks proxy on random port
        """
        if gen_torrc:
            gentorrc.generate_torrc(self, self.apiServerIP)

        if os.path.exists('./tor'):
            self.torBinary = './tor'
        elif os.path.exists('/usr/bin/tor'):
            self.torBinary = '/usr/bin/tor'
        else:
            self.torBinary = 'tor'

        try:
            tor = subprocess.Popen(
                [self.torBinary, '-f', self.torConfigLocation],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE)
        except FileNotFoundError:
            logger.fatal(
                "Tor was not found in your path or the Onionr directory. Please install Tor and try again.",
                terminal=True)
            return False
        else:
            # Test Tor Version
            torVersion = subprocess.Popen([self.torBinary, '--version'],
                                          stdout=subprocess.PIPE,
                                          stderr=subprocess.PIPE)
            for line in iter(torVersion.stdout.readline, b''):
                if 'Tor 0.2.' in line.decode():
                    logger.fatal('Tor 0.3+ required', terminal=True)
                    return False
            torVersion.kill()

        # wait for tor to get to 100% bootstrap
        try:
            for line in iter(tor.stdout.readline, b''):
                for word in ('bootstrapped', '%'):
                    if word not in line.decode().lower():
                        break
                else:
                    if '100' not in line.decode():
                        logger.info(line.decode().strip(), terminal=True)
                if 'bootstrapped 100' in line.decode().lower():
                    logger.info(line.decode())
                    break
                elif 'opening socks listener' in line.decode().lower():
                    logger.debug(line.decode().replace('\n', ''))
                else:
                    if 'err' in line.decode():
                        logger.error(line.decode().replace('\n', ''))
                    elif 'warn' in line.decode():
                        logger.warn(line.decode().replace('\n', ''))
                    else:
                        logger.debug(line.decode().replace('\n', ''))
            else:
                logger.fatal(
                    'Failed to start Tor. Maybe a stray instance of Tor used by Onionr is still running? This can also be a result of file permissions being too open',
                    terminal=True)
                return False
        except KeyboardInterrupt:
            logger.fatal('Got keyboard interrupt. Onionr will exit soon.',
                         timestamp=False,
                         terminal=True)
            return False

        try:
            myID = open(self.dataDir + 'hs/hostname', 'r')
            self.myID = myID.read().replace('\n', '')
            myID.close()
        except FileNotFoundError:
            self.myID = ""

        with open(self.dataDir + 'torPid.txt', 'w') as tor_pid_file:
            tor_pid_file.write(str(tor.pid))

        multiprocessing.Process(target=watchdog.watchdog,
                                args=[os.getpid(), tor.pid],
                                daemon=True).start()

        logger.info('Finished starting Tor.', terminal=True)

        self.readyState = True
        return True
Example #27
    async def fetchNextSegment(self, segmentNum=1, bitrate=0):
        # if not bitrate:
        # 	return

        segment_Duration = 0
        bitIdx = bitrate
        # for i, b in enumerate(self.manifest_data['bitrates_kbps']):
        # 	if b == bitrate:
        # 		segment_Duration = int(self.manifest_data['segment_duration_ms']) / int(self.manifest_data['timescale'])
        # 		bitIdx = i+1
        # 		break

        # for fname in sorted(glob(segment_list)):
        # 	_, self.segment_baseName = fname.rsplit('/', 1)
        # 	# self.args.urls[0] = self.baseUrl + '/' + str(os.stat(fname).st_size)
        # 	logger.info('aakash '+self.args.urls[0])
        # 	start = time.time()
        # 	res = await perform_download(self.configuration, self.args)
        # 	elapsed = time.time() - start

        # _, self.segment_baseName = fname.rsplit('/', 1)
        # self.args.urls[0] = self.baseUrl + '/' + str(os.stat(fname).st_size)

        urlsToDwnld: List[str] = []

        for i in range(3):
            t_str = self.baseUrl + '/video_' + str(bitIdx) + '_dash' + str(
                segmentNum + i)
            urlsToDwnld.append(t_str)

        urlsToDwnld = self.genUrlsForSegment(segmentNum, bitIdx + 1)

        # self.args.urls[0] = self.baseUrl + '/video_' + str(bitIdx) + '_dash' + str(segmentNum)
        # logger.info('aakash :{}'.format(urlsToDwnld))
        start = time.time()
        # res = await perform_download(self.configuration, self.args)
        res = await perform_download(self.configuration, self.args,
                                     urlsToDwnld)

        elapsed = time.time() - start

        data = res[0][0]
        if data is not None:
            self.lastDownloadTime = elapsed
            self.lastDownloadSize = data
            self.latest_tput = res[0][1]

            # TODO: put data in queue instead of urls
            await self.segmentQueue.put(urlsToDwnld)

            # QOE parameters update
            self.perf_parameters['bitrate_change'].append(
                (self.currentSegment + 1, bitrate))
            self.perf_parameters['tput_observed'].append(
                (self.currentSegment + 1, res[0][1]))
            self.perf_parameters['avg_bitrate'] += bitrate
            self.perf_parameters['avg_bitrate_change'] += abs(
                bitrate - self.perf_parameters['prev_rate'])

            if not self.perf_parameters['prev_rate'] or self.perf_parameters[
                    'prev_rate'] != bitrate:
                self.perf_parameters['prev_rate'] = bitrate
                self.perf_parameters['change_count'] += 1

            self.currentSegment += 1
            async with self.lock:
                self.currBuffer += segment_Duration

            ret = True
        else:
            logger.fatal(
                "Error: downloaded segment is none!! Playback will stop shortly"
            )
            ret = False
        return ret
Example #28
def parse_args():
    def lbytes32(s):
        """Converts a hex string into a 32 bytes long byte array, litte endian"""
        if len(s) > 32:
            warn(
                'Nonce can be at most 32 bytes long, is {:d}! Will be truncated'
                .format(len(s)))
            return lx(s[:64])
        return lx('0' * (64 - len(s)) + s)

    def split(s):
        """Runs s.split()"""
        return s.split()

    def smartint(i):
        if i.startswith('0x'):
            return int(i, 16)
        else:
            return int(i, 10)

    parser = argparse.ArgumentParser(
        description=
        "This script uses any Equihash solver to find a solution for the specified genesis block"
    )
    parser.add_argument(
        "-c",
        "--chainparams",
        dest="chain",
        default="mainnet",
        choices=["mainnet", "testnet", "regtest"],
        help="""Select the core chain parameters for PoW limit and parameters
            N and K.""")
    parser.add_argument(
        "-t",
        "--time",
        dest="time",
        action="store",
        type=int,
        default=int(time.time()),
        help="unix time to set in block header (defaults to current time)")
    parser.add_argument(
        "-C",
        "--coinname",
        dest="coinname",
        default="Zcash",
        help=
        "the coin name prepends the blake2s hash of timestamp in pszTimestamp")
    parser.add_argument(
        "-z",
        "--timestamp",
        dest="timestamp",
        default=
        "The Economist 2016-10-29 Known unknown: Another crypto-currency is born. BTC#436254 0000000000000000044f321997f336d2908cf8c8d6893e88dbf067e2d949487d ETH#2521903 483039a6b6bd8bd05f0584f9a078d075e454925eb71c1f13eaff59b405a721bb DJIA close on 27 Oct 2016: 18,169.68",
        help="""the pszTimestamp found in the input coinbase transaction
            script. Will be blake2s'd and then prefixed by coin name. Default
            is Zcash's mainnet pszTimestamp. You may use tokens of the form
            {XYZ}, which will be replaced by the current block index and hash
            of coin XYZ (BTC, ETH or ZEC). Always the latest block is retrieved,
            regardless of time argument.""")
    parser.add_argument(
        "-Z",
        "--pszTimestamp",
        dest="pszTimestamp",
        default=None,
        help="Specify the pszTimestamp directly. Will ignore options -C and -z"
    )
    parser.add_argument(
        "-p",
        "--pubkey",
        dest="pubkey",
        type=x,
        default=
        x("04678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5f"
          ),
        help="the pubkey found in the output transaction script")
    parser.add_argument(
        "-b",
        "--bits",
        dest="bits",
        type=smartint,
        default=0x1f07ffff,
        help="the target in compact representation, defining a difficulty of 1"
    )
    parser.add_argument(
        "-E",
        "--extra-nonce",
        dest="extranonce",
        type=smartint,
        default=None,
        help="Usually, the coinbase script contains the nBits as fixed first"
        " data, which in bitcoin is also referred to as extra nonce. This"
        " conventional behaviour can be changed by specifying this parameter"
        " (not recommended for mainnet, useful for testnet).")
    parser.add_argument(
        "-V",
        "--value",
        dest="value",
        default=0,
        type=int,
        help="output transaction value in zatoshi (1 ZEC = 100000000 zatoshi)")
    parser.add_argument(
        "-n",
        "--nonce",
        dest="nonce",
        default=b'\x00' * 32,
        type=lbytes32,
        help="nonce to start with when searching for a valid"
        " equihash solution; parsed as hex, leading zeros may be omitted.")
    parser.add_argument("-r",
                        "--rounds",
                        dest="rounds",
                        default=1,
                        type=int,
                        help="how many nonces to check at most")
    parser.add_argument(
        "-s",
        "--solver",
        dest="solver",
        type=split,
        default=split("../equihash/equi"),
        help="""path to solver binary. Currently supported are silentarmy
            (sa-solver) and Tromp (equi/equi485). Command line arguments may be
            passed, although that should be unnecessary.""")
    parser.add_argument("-S",
                        "--solver-type",
                        dest="solver_type",
                        default=None,
                        choices=["tromp", "silentarmy"],
                        help="""Set the type of solver explicitly.
            Otherwise GenesisZ tries to infer the type from the binary name
            (equi* -> tromp, sa-solver -> silentarmy)""")
    parser.add_argument(
        "-T",
        "--threads",
        dest="threads",
        default=1,
        type=int,
        help="How many CPU threads to use when solving with Tromp.")
    parser.add_argument("-v",
                        "--verbose",
                        dest="verbose",
                        action="store_true",
                        help="verbose output")

    args = parser.parse_args()
    logger.verbose = args.verbose
    SelectCoreParams(args.chain)
    # infer solver type from binary if not set
    if not args.solver_type:
        if args.solver[0].endswith('sa-solver'):
            args.solver_type = 'silentarmy'
        elif re.search(r'eq\w*$', args.solver[0]):
            args.solver_type = 'tromp'
        else:
            fatal(
                "Couldn't infer solver type from binary name and type not set with -S"
            )

    # silentarmy only supports N,K=200,9
    if args.solver_type == 'silentarmy' and args.chain == 'regtest':
        fatal(
            "Silentarmy doesn't support regtest parameters N,K=48,5. Get Tromp's equihash solver."
        )

    verb('Chain: ' + args.chain)
    verb('Time: {:d}'.format(args.time))
    verb('Start Nonce: ' + b2lx(args.nonce))
    verb('Pubkey: ' + b2x(args.pubkey))
    verb('Solver: {}'.format(args.solver))
    verb('Solver type: ' + args.solver_type)

    return args
Example #29
'''
import sqlite3, os, sys, time, json, uuid
import logger, netcontroller, config
from onionrblockapi import Block
import deadsimplekv as simplekv
import onionrutils, onionrcrypto, onionrproofs, onionrevents as events, onionrexceptions
import onionrblacklist
from onionrusers import onionrusers
import dbcreator, onionrstorage, serializeddata, subprocesspow
from etc import onionrvalues, powchoice

if sys.version_info < (3, 6):
    try:
        import sha3
    except ModuleNotFoundError:
        logger.fatal(
            'On Python 3 versions prior to 3.6.x, you need the sha3 module')
        sys.exit(1)


class Core:
    def __init__(self, torPort=0):
        '''
            Initialize Core Onionr library
        '''
        # set data dir
        self.dataDir = os.environ.get('ONIONR_HOME',
                                      os.environ.get('DATA_DIR', 'data/'))
        if not self.dataDir.endswith('/'):
            self.dataDir += '/'

        try:
Example #30
    def __init__(self, torPort=0):
        '''
            Initialize Core Onionr library
        '''
        # set data dir
        self.dataDir = os.environ.get('ONIONR_HOME',
                                      os.environ.get('DATA_DIR', 'data/'))
        if not self.dataDir.endswith('/'):
            self.dataDir += '/'

        try:
            self.onionrInst = None
            self.queueDB = self.dataDir + 'queue.db'
            self.peerDB = self.dataDir + 'peers.db'
            self.blockDB = self.dataDir + 'blocks.db'
            self.blockDataLocation = self.dataDir + 'blocks/'
            self.blockDataDB = self.blockDataLocation + 'block-data.db'
            self.publicApiHostFile = self.dataDir + 'public-host.txt'
            self.privateApiHostFile = self.dataDir + 'private-host.txt'
            self.addressDB = self.dataDir + 'address.db'
            self.hsAddress = ''
            self.i2pAddress = config.get('i2p.own_addr', None)
            self.bootstrapFileLocation = 'static-data/bootstrap-nodes.txt'
            self.bootstrapList = []
            self.requirements = onionrvalues.OnionrValues()
            self.torPort = torPort
            self.dataNonceFile = self.dataDir + 'block-nonces.dat'
            self.dbCreate = dbcreator.DBCreator(self)
            self.forwardKeysFile = self.dataDir + 'forward-keys.db'
            self.keyStore = simplekv.DeadSimpleKV(self.dataDir +
                                                  'cachedstorage.dat',
                                                  refresh_seconds=5)

            # Socket data, defined here because of multithreading constraints with gevent
            self.killSockets = False
            self.startSocket = {}
            self.socketServerConnData = {}
            self.socketReasons = {}
            self.socketServerResponseData = {}

            self.usageFile = self.dataDir + 'disk-usage.txt'
            self.config = config

            self.maxBlockSize = 10000000  # max block size in bytes

            if not os.path.exists(self.dataDir):
                os.mkdir(self.dataDir)
            if not os.path.exists(self.dataDir + 'blocks/'):
                os.mkdir(self.dataDir + 'blocks/')
            if not os.path.exists(self.blockDB):
                self.createBlockDB()
            if not os.path.exists(self.forwardKeysFile):
                self.dbCreate.createForwardKeyDB()
            if not os.path.exists(self.peerDB):
                self.createPeerDB()
            if not os.path.exists(self.addressDB):
                self.createAddressDB()

            if os.path.exists(self.dataDir + '/hs/hostname'):
                with open(self.dataDir + '/hs/hostname', 'r') as hs:
                    self.hsAddress = hs.read().strip()

            # Load bootstrap address list
            if os.path.exists(self.bootstrapFileLocation):
                with open(self.bootstrapFileLocation, 'r') as bootstrap:
                    bootstrap = bootstrap.read()
                for i in bootstrap.split('\n'):
                    self.bootstrapList.append(i)
            else:
                logger.warn('Warning: address bootstrap file not found ' +
                            self.bootstrapFileLocation)

            self.use_subprocess = powchoice.use_subprocess(self)
            self._utils = onionrutils.OnionrUtils(self)
            # Initialize the crypto object
            self._crypto = onionrcrypto.OnionrCrypto(self)
            self._blacklist = onionrblacklist.OnionrBlackList(self)
            self.serializer = serializeddata.SerializedData(self)

        except Exception as error:
            logger.error('Failed to initialize core Onionr library.',
                         error=error)
            logger.fatal('Cannot recover from error.')
            sys.exit(1)
        return
Example #31
    def __enter__(self):
        '''Initialize the test framework or exit on failure'''

        os.environ['__DCGM_TESTING_FRAMEWORK_ACTIVE'] = '1'
        # Make sure that the MPS server is disabled before running the test-suite
        if utils.is_mps_server_running():
            print('DCGM Testing framework is not interoperable with MPS server. Please disable MPS server.')
            sys.exit(1)
        
        # Various setup steps
        option_parser.parse_options() 
        utils.verify_user_file_permissions()
        utils.verify_localhost_dns()
        if not option_parser.options.use_running_hostengine:
            utils.verify_hostengine_port_is_usable()

        if not test_utils.noLogging:
            logger.setup_environment()

            if logger.log_dir:
                logger.close()
            
        option_parser.validate()
        
        if not test_utils.is_framework_compatible():
            logger.fatal("The test framework and dcgm versions are incompatible. Exiting Test Framework.")
            sys.exit(1)

        # Directory where DCGM test*.py files reside
        test_utils.set_tests_directory('tests')

        # Verify that package architecture matches python architecture
        if utils.is_64bit():
            # ignore this check on ppc64le and armv8 for now
            if not (platform.machine() == "ppc64le" or platform.machine() == "aarch64"):
                if not os.path.exists(os.path.join(utils.script_dir, "apps/amd64")):
                    print("Testing package is missing 64bit binaries, are you sure you're using package of correct architecture?")
                    sys.exit(1)
        else:
            if not os.path.exists(os.path.join(utils.script_dir, "apps/x86")):
                print("Testing package is missing 32bit binaries, are you sure you're using package of correct architecture?")
                sys.exit(1)

        # Stops the framework if running python 32bits on 64 bits OS
        if utils.is_windows():
            if os.name == "nt" and "32 bit" in sys.version and platform.machine() == "AMD64":
                print("Running Python 32-bit on a 64-bit OS is not supported. Please install Python 64-bit")
                sys.exit(1)

        if utils.is_linux():
            python_exec = str(sys.executable)
            python_arch = check_output(["file", "-L", python_exec])

            if "32-bit" in python_arch and utils.is_64bit() == True:
                print("Running Python 32-bit on a 64-bit OS is not supported. Please install Python 64-bit")
                sys.exit(1)

        #Tell DCGM how to find our testing package's NVVS
        if not os.path.isfile('./apps/nvvs/nvvs'):
            logger.warning("NVVS is missing from the test framework install. Hopefully it's installed.")
        else:
            nvvsDir = os.getcwd() + '/apps/nvvs'
            logger.debug("NVVS directory: %s" % nvvsDir)
            os.environ['NVVS_BIN_PATH'] = nvvsDir #The env variable parser in DcgmDiagManager is only the directory