Example #1
 def olDelPlayer(self, account):
     try:
         ASSERT(
             self.olCheckPlayer(account) == 1,
             'LoginService:olDelPlayer, olCheckPlayer != 1')
         for queueData in self.loginQueuePlayerList:
             if queueData.account == account:
                 self.loginQueuePlayerList.remove(queueData)
                 Log(
                     'Login',
                     'del player from loginQueuePlayerList, playerId = %d, account = %s',
                     queueData.playerId, queueData.account)
                 break
         if account in self.loginPlayingPlayerList:
             Log(
                 'Login',
                 'del player from loginPlayingPlayerList, playerId = %d, account = %s',
                 self.loginPlayingPlayerList[account], account)
             del self.loginPlayingPlayerList[account]
         if account in self.scenePlayingPlayerList:
             Log(
                 'Login',
                 'del player from scenePlayingPlayerList, playerId = %d, account = %s',
                 self.scenePlayingPlayerList[account], account)
             del self.scenePlayingPlayerList[account]
     except BaseException as err:
         ASSERT(False, 'LoginService:olDelPlayer, ' + str(err))
Example #2
    def Init(self):
        '''Initialize our Pygame game / app
		App.Init() -> bool'''
        try:
            pygame.init()
        except Exception as e:
            Log("Unable to Init Pygame:", e)
            return False

        try:
            self.Window = pygame.display.set_mode(
                (self.WindowWidth, self.WindowHeight))
            pygame.display.set_caption("My Pygame game")
        except Exception as e:
            Log("Unable to create Pygame Window:", e)
            return False

        # Adaptation for SDL_SetRenderDrawColor
        self.DisplayDrawColor = (255, 255, 255)

        self.textureBank = TextureBank()
        if not self.textureBank.Init():
            Log("Unable to init TextureBank")
            return False

        return True
Example #3
    def SetDirectory(self, newdir: str, Create: bool = False) -> bool:
        Log("**SetDirectory: " + newdir)

        # Split newdir into components
        if newdir is None or len(newdir) == 0:
            return True

        components = []
        if newdir[0] == "/":
            components.append("/")
            newdir = newdir[1:]
        components.extend(newdir.split("/"))
        components = [c.strip() for c in components if len(c) > 0]

        # Now walk the component list
        for component in components:
            # Does the directory exist?
            if not self.FileExists(component):
                # If not, are we allowed to create it?
                if not Create:
                    Log("SetDirectory was called for a non-existent directory with Create=False")
                    return False
                if not self.MKD(component):
                    Log("mkd failed...bailing out...")
                    return False

            # Now cwd to it.
            if not self.CWD(component):
                Log("cwd failed...bailing out...")
                return False

        return True
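
A minimal standalone sketch of the path-splitting step used by SetDirectory above; the helper name split_path is hypothetical and not part of the original code.

def split_path(newdir: str) -> list:
    """Split a POSIX-style path into the components that SetDirectory walks."""
    components = []
    if newdir.startswith("/"):
        components.append("/")  # keep the leading root as its own component
        newdir = newdir[1:]
    components.extend(newdir.split("/"))
    return [c.strip() for c in components if len(c) > 0]

# e.g. split_path("/fanzines/index") == ["/", "fanzines", "index"]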
Example #4
 def __init__(self, metadataFile, csvFile, ncOutput):
     Log().set_log_info("[Begin] conversion to NetCDF of: " + metadataFile +
                        "  " + csvFile + "  " + ncOutput)
     self.init_elements(metadataFile, csvFile, ncOutput)
     if not os.path.exists(self.ncOutput):
         try:
             self.ncFile = Dataset(self.ncOutput,
                                   'w',
                                   format='NETCDF' + self.version)
         except Exception:
             Log().set_log_error(
                 "The netCDF_version is wrong. Assigning the default value (netCDF4_CLASSIC) to "
                 "netCDF_version")
             self.version = '4_CLASSIC'
             self.ncFile = Dataset(self.ncOutput,
                                   'w',
                                   format='NETCDF' + self.version)
         self.create_netcdf()
     else:  # Append start
         self.ncFile = Dataset(self.ncOutput, 'r+')
         self.append_netcdf()
     self.metadata.globalAttributes.max_min_attribute(self.ncFile)
     self.ncFile.close()
     Log().set_log_info("[Finished] conversion to NetCDF of : " +
                        metadataFile + "  " + csvFile + "  " + ncOutput)
Example #5
    def retAccountValidate(self, result, validateType, account, deviceId,
                           deviceType, deviceVersion, channelId, mediaId, oid,
                           accessToken, shouldCache, rapidValidateCode):
        try:
            if result == VALIDATERESULT_SUCCESS:
                self.player.setStatus(PLAYERSTATUS_VALIDATE_OK)
                print('ObjLogin.retAccountValidate Success')
                Log('Login', 'ObjLogin.retAccountValidate Success, account=%s',
                    account)
                self.validateType = validateType
                self.account = account
                self.deviceId = deviceId
                self.deviceType = deviceType
                self.deviceVersion = deviceVersion
                self.channelId = channelId
                self.mediaId = mediaId
                self.oid = oid
                self.accessToken = accessToken
                self.shouldCache = shouldCache
                self.rapidValidateCode = rapidValidateCode

                checkMsg = AccountStateCheckMsg()
                checkMsg.playerId = self.player.getId()
                checkMsg.account = self.account
                gServiceManager.SendMessage2Srv(SERVICE_ID_LOGIN, checkMsg)
                self.player.setStatus(PLAYERSTATUS_ACCOUNTSTATE_CHECKING)
            else:
                self.player.setStatus(PLAYERSTATUS_VALIDATE_FAILED)
                print('ObjLogin.retAccountValidate Failed')
                Log('Login', 'ObjLogin.retAccountValidate Failed, account=%s',
                    account)
        except BaseException as err:
            ASSERT(False, 'Obj_Login:retAccountValidate, ' + str(err))
Example #6
def Bailout(e, msg: str, title: str) -> None:
    Log("exception: " + str(e), isError=True)
    Log("   title: " + title, isError=True)
    Log("   msg: " + msg, isError=True)
    LogClose()
    ctypes.windll.user32.MessageBoxW(0, msg, title, 1)
    raise e
Example #7
    def DownloadConSeries(self, seriesname) -> bool:  # MainConSeriesFrame

        # Clear out any old information
        self._grid.Datasource = ConSeries()

        if seriesname is None or len(seriesname) == 0:
            # Nothing to load. Just return.
            return False

        if self._basedirectoryFTP is None:
            assert False  # Never take this branch.  Delete when I'm sure.

        ProgressMessage(self).Show("Loading " + self.Seriesname +
                                   "/index.html from fanac.org")
        file = FTP().GetFileAsString("/" + self.Seriesname, "index.html")

        pathname = self.Seriesname + "/index.html"
        if len(self._basedirectoryFTP) > 0:
            pathname = self._basedirectoryFTP + "/" + pathname

        if file is not None:

            # Get the JSON from the file
            j = FindBracketedText(file, "fanac-json")[0]
            if j is None or j == "":
                Log("DownloadConSeries: Can't load convention information from "
                    + pathname)
                wx.MessageBox("Can't load convention information from " +
                              pathname)
                return False

            try:
                self.FromJson(j)
            except json.decoder.JSONDecodeError:
                Log("DownloadConSeries: JSONDecodeError when loading convention information from "
                    + pathname)
                wx.MessageBox(
                    "JSONDecodeError when loading convention information from "
                    + pathname)
                return False
        else:
            # Offer to download the data from Fancy 3
            self.Seriesname = seriesname
            resp = wx.MessageBox(
                "Do you wish to download the convention series " + seriesname +
                " from Fancyclopedia 3?", 'Shortcut',
                wx.YES | wx.NO | wx.ICON_QUESTION)
            if resp == wx.YES:
                self.DownloadConSeriesFromFancy(seriesname)

        if self.TextFancyURL is None or len(self.TextFancyURL) == 0:
            self.TextFancyURL = "fancyclopedia.org/" + WikiPagenameToWikiUrlname(
                seriesname)

        self._grid.MakeTextLinesEditable()
        self.RefreshWindow()
        ProgressMessage(self).Show(self.Seriesname + " Loaded",
                                   close=True,
                                   delay=0.5)
        return True
Example #8
    def Update(self):
        while not self.dead:
            try:
                if not self.initialized:
                    self.Init()
                    continue

                frame = self.cap.read()
                if frame is None:
                    self.missedFrames += 1
                    if self.missedFrames > self.missedFrameLimit:
                        self.initialized = False
                        Log("Device has become unresponsive", self)
                    continue
                self.missedFrames = 0

                if self.ShouldSavePreview():
                    self.SavePreview(frame)

                self.prevFrames.appendleft(frame.copy())

                if self.motionEnabled:
                    frame = imutils.resize(frame, width=500)
                    gray = self.ProcessFrame(frame)

                    if self.compareFrame is None or self.ShouldUpdateCompareFrame():
                        self.UpdateCompareFrame(gray)
                        continue

                    frameDelta, thresh, boxes = self.CompareFrames(
                        self.compareFrame, gray)

                    text = "Unoccupied"
                    motion = False
                    biggest = self.GetBiggestBox(boxes)
                    if biggest:
                        cv2.rectangle(
                            frame, (biggest.x, biggest.y),
                            (biggest.x + biggest.w, biggest.y + biggest.h),
                            (0, 0, 255), 2)
                        text = "Occupied"
                        motion = True

                    cv2.putText(frame, "Room Status: {}".format(text),
                                (10, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5,
                                (0, 0, 255), 2)

                    #cv2.imshow(self.name + " Security Feed", frame)
                    #cv2.imshow(self.name + " Thresh", thresh)
                    #cv2.imshow(self.name + " Frame Delta", frameDelta)

                    if motion:
                        self.SaveVideo()
                        #email.SendMessage("Motion detected from '" + self.name + "'")

                cv2.waitKey(1)
            except Exception as e:
                Log("Exception in Update: " + str(e), self)
Example #9
 def UpdateMessage(self, s: str):
     if ProgressMessage._progressMessageDlg is None:
         Log("ProgressMessage.UpdateMessage() called without an existing ProgressDialog")
         return
     Log("ProgressMessage.Update('" + s + "')")
     ProgressMessage._progressMessageDlg.Update(0, s)
     ProgressMessage._progressMessageDlg.Pulse(s)
Example #10
 def create_attribute_list(self):
     try:
         for key, value in self.data["global_attributes"].items():
             self.attributesList[key] = value
     except Exception:
         Log().set_log_error('global_attributes not found in .json file.')
         Log().set_log_info('The script has closed unsatisfactorily')
         sys.exit(-1)
Example #11
 def GetFileAsString(self, directory: str, fname: str) -> Optional[str]:
     if not self.SetDirectory(directory):
         Log("GetFileAsString: Bailing out...")
         return None
     s = FTP().GetAsString(fname)
     if s is None:
         Log("Could not load " + directory + "/" + fname)
     return s
Example #12
 def set_dimensions_by_netcdf(self, dimensions):
     try:
         for dimension in dimensions:
             self.dimensions[dimension] = len(dimensions[dimension])
             self.dimensionsList.append(dimension)
     except Exception:
         Log().set_log_error('dimensions not found in .json file.')
         Log().set_log_info('The script has closed unsatisfactorily')
         sys.exit(-1)
Example #13
 def mill_grains(self):
     try:
         log = Log(1, "Mashing.Milling", "Milling Started", datetime.datetime.now(), "pass")
         print(log.generate_log())
         #mill grains
         log = Log(2, "Mashing.Milling", "Milling Ended", datetime.datetime.now(), "pass")
         print(log.generate_log())
         return "Grains milled"
     except Exception as e:
         print(e)
Example #14
 def write_dimensions(self, ncFile):
     try:
         dimensions = self.metadata['dimensions']
         for dimension in dimensions:
             if not dimension['length'] == "":
                 ncFile.createDimension(dimension['dimension_name'], dimension['length'])
     except Exception:
         Log().set_log_warning('Error writing dimensions')
         Log().set_log_info('The script has closed unsatisfactorily')
         sys.exit(-1)
Example #15
def ReadAllFanacFanzineMainPages() -> list[tuple[str, str]]:
    Log("----Begin reading Classic and Modern tables")
    # This is a list of fanzines on Fanac.org
    # Each item is a tuple of (compressed name,  link name,  link url)
    fanacFanzineDirectoriesList: list[tuple[str, str]] = []
    directories = ReadList("control-topleveldirectories.txt")
    for dirs in directories:
        ReadModernOrClassicTable(fanacFanzineDirectoriesList, dirs)

    Log("----Done reading Classic and Modern tables")
    return fanacFanzineDirectoriesList
Example #16
 def write_attributes(self, netCDF):
     try:
         for attribute in self.attributesList:
             if self.attributesList[attribute]:
                 setattr(netCDF, attribute, self.attributesList[attribute])
         netCDF.date_created = str(datetime.now().date()) + 'T' + str(
             datetime.now().time()) + 'Z'
     except Exception:
         Log().set_log_error('Error writing attributes')
         Log().set_log_info('The script has closed unsatisfactorily')
         sys.exit(-1)
Example #17
    def Nlst(self, directory: str) -> List[str]:
        if self.g_ftp is None:
            Log("FTP.Nlst: FTP not initialized")
            return []

        if not self.SetDirectory(directory):
            Log("FTP.Nlst: Bailing out...")
            return []

        return [x for x in self.g_ftp.nlst()
                if x != "." and x != ".."]  # Ignore the . and .. elements
Example #18
    def load_climbs(self, ukc_name: str):
        """
        Load all the climbs for a climber, saving all the individual climbs to the climbs set and all instances of
        these climbs to the logbook.

        :param ukc_name: The UKC name of a climber
        :return: All climb information loaded
        """
        # Get the logbook file name for the climber
        log_book = [
            file for file in os.listdir(ClimbManager.DIRECTORY)
            if file.split('_')[0] == ukc_name
        ][0]
        with open(ClimbManager.DIRECTORY + '\\' + log_book) as file:  # Load the file
            read_csv = csv.reader(file)
            first = True  # First line contains the header so skip
            for row in read_csv:
                if first:
                    first = False
                    continue
                name = row[0]
                # When downloading info from UKC the grades and stars are located together
                # Separate this information, store the grade and count the stars
                grade_info = row[1].split(' ')
                grade = ' '.join(grade_info[:-1])
                grade = grade.rstrip()
                stars = len(grade_info[-1])
                climb_style = row[2]
                # The grade of the route hides information about what kind of route it is, trad/sport/boulder etc.
                style = self.find_style(grade)
                partners = row[3].split(', ')
                notes = row[4]
                # Get the date the route was done
                date = self.determine_date(row[5])
                crag = row[6]
                climb = Climb(name, style, grade, stars,
                              crag)  # Create a Climb object and add
                # If the climb has not been loaded yet, load it
                if name + '_' + crag not in self.get_climbs_in_climb():
                    self.add_climb(climb)
                    log = Log(date, climb_style, partners, notes,
                              climb)  # Create a Log object and append
                    self.add_log(log)
                else:
                    # Find the correct climb and add a log of that climb
                    load_climb = [
                        climb for climb in self.get_climbs()
                        if climb.name_crag() == name + '_' + crag
                    ][0]
                    log = Log(date, climb_style, partners, notes,
                              load_climb)  # Create a Log object and append
                    self.add_log(log)
Example #19
 def MKD(self, newdir: str) -> bool:
     Log("**make directory: '" + newdir + "'")
     try:
         msg = self.g_ftp.mkd(newdir)
     except Exception as e:
         Log("FTP connection failure. Exception=" + str(e))
         if not self.Reconnect():
             return False
         msg = self.g_ftp.mkd(newdir)
     Log(msg + "\n")
     # Web doc shows all three as possible.
     return msg == newdir or msg.startswith("250 ") or msg.startswith("257 ")
Example #20
    def MoveRows(self, oldrow: int, numrows: int, newrow: int):  # Grid
        rows = self._datasource.Rows

        dest = newrow
        start = oldrow
        end = oldrow + numrows - 1
        if newrow < oldrow:
            # Move earlier
            b1 = rows[0:dest]
            i1 = list(range(0, dest))
            b2 = rows[dest:start]
            i2 = list(range(dest, start))
            b3 = rows[start:end + 1]
            i3 = list(range(start, end + 1))
            b4 = rows[end + 1:]
            i4 = list(range(end + 1, len(rows)))
        else:
            # Move later
            b1 = rows[0:start]
            i1 = list(range(0, start))
            b2 = rows[start:end + 1]
            i2 = list(range(start, end + 1))
            b3 = rows[end + 1:end + 1 + dest - start]
            i3 = list(range(end + 1, end + 1 + dest - start))
            b4 = rows[end + 1 + dest - start:]
            i4 = list(range(end + 1 + dest - start, len(rows)))

        rows = b1 + b3 + b2 + b4
        self._datasource.Rows = rows

        tpermuter = i1 + i3 + i2 + i4
        # This next bit of code inverts the permuter. (There ought to be a more elegant way to generate it!)
        permuter = [None] * len(tpermuter)
        for i, r in enumerate(tpermuter):
            permuter[r] = i

        Log("\npermuter: " + str(permuter))
        Log("old editable rows: " +
            str(sorted(set(x[0] for x in self._datasource.AllowCellEdits))))
        # Now use the permuter to update the row numbers of the cells which are allowed to be edited
        for i, (row, col) in enumerate(self._datasource.AllowCellEdits):
            try:
                self._datasource.AllowCellEdits[i] = (permuter[row], col)
            except Exception:
                pass
        Log("new editable rows: " +
            str(sorted(set(x[0] for x in self._datasource.AllowCellEdits))))
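
A small worked sketch of the permuter inversion in MoveRows above (not part of the original code): with three rows and the last row moved to the top, tpermuter maps each new row position to its old position, and the loop builds the inverse mapping.

tpermuter = [2, 0, 1]                    # new position -> old position (last row moved to the top)
permuter = [None] * len(tpermuter)
for new_pos, old_pos in enumerate(tpermuter):
    permuter[old_pos] = new_pos          # old position -> new position
assert permuter == [1, 2, 0]

# One compact alternative to the inversion loop (O(n^2), but arguably more elegant):
permuter = [tpermuter.index(i) for i in range(len(tpermuter))]
assert permuter == [1, 2, 0]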
Example #21
 def check_source(self, metadataFile, csvFile, ncOutput):
     if not os.path.exists(metadataFile):
         Log().set_log_error('.json file not found. (Metadata file)')
         Log().set_log_info('The script has closed unsatisfactorily')
         sys.exit(-1)
     elif not os.path.exists(csvFile):
         Log().set_log_error('.csv / .data file not found. (Data file)')
         Log().set_log_info('The script has closed unsatisfactorily')
         sys.exit(-1)
     if not ncOutput.endswith('/'):
         ncOutput += '/'
     return ncOutput
Example #22
    def __initialise_loging__(self):
        """
        Check to see if logging has been set
        """
        if hasattr(self, 'logging'):
            # redirect = self.logging['redirectallouput'] == "true"
            loc = self.logging['loglocation'] if 'loglocation' in self.logging else None
            if self.logging['logtofile'] == "true":
                self.log = Log(self.logging['loglevel'], True, loc)
            else:
                self.log = Log(self.logging['loglevel'], False)
        else:
            self.log = Log("error")

        self.log.do_message('Log created app ready')
Example #23
    def PreferredName(self) -> str:
        # If this is itself tagged as a Locale we return the page name even if it is also a redirect
        # E.g., we return "Cambridge, MA" because it is tagged Locale even though it points to "Boston, MA"
        if self.IsTaggedLocale:
            if len(self.DisplayName) > 0:
                return self.DisplayName
            return self.PageName

        # If all of the real page names are empty, we just return the NonPageName -- either it's real and we need to return it or it is also empty which is still correct
        if self.DisplayName == "" and self.PageName == "" and self.Redirect == "":
            return self.NonPageName

        # At this point we know it's a real page and not itself a locale.
        # If it's a Wikidot redirect, we always return the redirect
        if self.IsWikidotRedirect:  # Note that this test is not perfect, since it can't look at the page's contents.
            return self.Redirect

        # Locale or not, if it's a redirect, follow the redirect!
        if self.Redirect:
            return self.Redirect    #TODO: should go to redirect's Locale to see if it has some other preferred name?

        # Looks like an error
        Log(f"@@@Locale page '{self.PageName}' is not tagged as Locale, but is not in Wikidot format either", Print=False, isError=True)
        if self.PageName != "":
            return self.PageName

        return ""
Example #24
 def reconnect(self):
     try:
         Log('DB', 'db serviceexec index=%d, try to reconnect to db',
             self.index)
         self.dbConnectorInterface.reconnect()
     except BaseException as err:
         ASSERT(False, 'DBServiceExec:reconnect, ' + str(err))
Example #25
 def commit_to_log_file(msg, log_path, entity=None):
     #log changes
     log = Log(log_path)
     if entity is None:
         log.commit_log(msg)
     else:
         log.commit_log(msg, entity)
Example #26
 def delete_attributes(self, variablesName, variable):
     try:
         for key in variablesName:
             if key in variable:
                 del variable[key]
     except Exception:
         Log().set_log_warning('Error deleting attribute')
Example #27
 def add_attribute_to_variable(self, variable, attributes):
     try:
         for attribute in attributes:
             if attributes[attribute]:
                 setattr(variable, attribute, attributes[attribute])
     except Exception:
         Log().set_log_warning('Error adding attribute')
Example #28
    def compare(self, networks, losses):
        folders = [
            dir for dir in os.listdir('.')
            if os.path.isdir(dir) and not dir.startswith('.')
        ]
        networks = tb.wildcardMatch(folders, networks)

        logs = []
        measureNames = []
        for net in networks:
            logfile = '%s/training/log.txt' % net
            print('reading %s' % logfile)
            logs.append(Log(net, logfile))
            for name in logs[-1].measureNames():
                if name not in measureNames: measureNames.append(name)

        if losses is not None:
            selectedNames = tb.unique(tb.wildcardMatch(measureNames, losses))
        else:
            selectedNames = tb.unique(measureNames)

        print('comparing networks:')
        for net in networks:
            print("   ", net)
        print('comparing losses: ')
        for name in selectedNames:
            print("   ", name)

        Log.plotComparison(selectedNames, logs)
Example #29
    def create_from_config_file(self):
        '''
        Method to create LVM Volume Groups based on a JSON config file.
        It relies on the 'vgcreate' command.
        '''
        usrsap = UsrSap()
        data = Data()
        log = Log()
        shared = Shared()
        purposes = [usrsap, data, log, shared]

        with open('/opt/hope/config/config.json', 'r') as config_file:
            config = json.load(config_file)

        for purpose in purposes:
            for purpose_key, purpose_value in config.items():
                if purpose_key == purpose.name:
                    pv_names = ''
                    for pv in purpose_value['pvs']:
                        pv_names += '/dev/mapper/%s ' % (pv['alias'])
                    os.system('vgcreate %s %s %s' %
                              (purpose.vg_args, purpose_value['vg'], pv_names))

        self.show()
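
If quoting of the PV aliases were a concern, the same vgcreate call could be issued through subprocess instead of os.system. A minimal sketch under that assumption; run_vgcreate is a hypothetical helper, not part of the class above.

import subprocess

def run_vgcreate(vg_args: str, vg_name: str, pv_paths: list) -> int:
    """Hypothetical alternative to the os.system() call above; no shell string building."""
    cmd = ['vgcreate'] + vg_args.split() + [vg_name] + pv_paths
    return subprocess.run(cmd).returncode  # returncode 0 means vgcreate succeeded

# e.g. run_vgcreate(purpose.vg_args, purpose_value['vg'], ['/dev/mapper/pv01'])  # illustrative path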
Example #30
def extractSessions(source, filename=None):
    Sessons = Utils.File.readCSV(source)
    tmp = {}
    logs = []
    print('...start to extract data')
    for row in Sessons:
        try:
            tmpLog = Log(row)
            tmpLog.setType(setTypeOfLogs(tmpLog.Message))
            logs.append(tmpLog)
        except BaseException as e:
            print(e)
    Sessions = {}
    for log in logs:
        id = log.getId()
        if id in Sessions:
            Sessions[id].update(log)
        else:
            Sessions[id] = Session(id)
            Sessions[id].update(log)
    print('...start to calc features')
    for x in Sessions:
        Sessions[x].calcFeature()
    if filename is not None:
        print('...start to save session to {0}'.format(filename))
        with open(filename, 'w') as f:
            json.dump(Sessions, f, default=lambda x: x.__dict__)
    print('...finish')
    return json.loads(json.dumps(Sessions, default=lambda x: x.__dict__))