def importConfigIter():
    makeDir(newConfDir)
    makeDir(newEventDir)
    #########
    funcs = [
        importBasicConfigIter,
        importEventBasicJsonIter,
        importPluginsIter,
        importGroupsIter,
        importAccountsIter,
        importTrashIter,
        importEventsIter,
    ]
    ###
    iters = [func() for func in funcs]
    ###
    # each sub-iterator first yields its own total step count
    counts = [itr.send(None) for itr in iters]
    totalCount = sum(counts)
    ###
    totalRatio = 0.0
    delta = 1.0 / totalCount
    for iterIndex, itr in enumerate(iters):
        iterCount = counts[iterIndex]
        for stepIndex in itr:
            yield totalRatio + stepIndex * delta
        totalRatio += iterCount * delta
        yield totalRatio
    ###
    yield 1.0
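# A minimal consumer sketch (an assumption, not part of the original code)
# showing how the 0.0-1.0 fractions yielded by importConfigIter() could drive
# a progress report; a GUI would update a progress bar instead of printing.
def runImportWithProgress():
    for ratio in importConfigIter():
        print("import progress: %d%%" % (ratio * 100))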
def init():
    global log
    import os
    from io import StringIO
    from os.path import join, isdir
    from scal3.path import confDir, rootDir, APP_NAME
    from scal3.os_utils import makeDir
    if os.path.exists(confDir):
        if not isdir(confDir):
            # something other than a directory is in the way; move it aside
            os.rename(confDir, confDir + "-old")
            os.mkdir(confDir)
    else:
        os.mkdir(confDir)
    makeDir(join(confDir, "log"))
    try:
        import logging
        import logging.config
        logConfText = open(join(rootDir, "conf", "logging-user.conf")).read()
        # the conf file uses the literal names "confDir" and "APP_NAME" as
        # placeholders; substitute their actual values before parsing
        for varName in ("confDir", "APP_NAME"):
            logConfText = logConfText.replace(varName, eval(varName))
        logging.config.fileConfig(StringIO(logConfText))
        log = logging.getLogger(APP_NAME)
    except Exception as e:
        print("failed to setup logger:", e)
        from scal3.utils import FallbackLogger
        log = FallbackLogger()
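# Illustrative only: a self-contained sketch of the substitution trick used by
# init() above. The config text below is an invented stand-in, NOT the
# project's real conf/logging-user.conf; it just shows how the literal
# placeholders "confDir" and "APP_NAME" get replaced before fileConfig()
# parses the result.
def demoLoggingSetup():
    import os
    import logging
    import logging.config
    from io import StringIO
    confDir = "/tmp/starcal-demo"  # assumed path, just for the demo
    APP_NAME = "starcal"  # assumed name, just for the demo
    os.makedirs(os.path.join(confDir, "log"), exist_ok=True)
    logConfText = """
[loggers]
keys=root,APP_NAME

[handlers]
keys=file

[formatters]
keys=basic

[logger_root]
level=WARNING
handlers=file

[logger_APP_NAME]
level=DEBUG
handlers=file
qualname=APP_NAME
propagate=0

[handler_file]
class=FileHandler
args=("confDir/log/APP_NAME.log", "a")
formatter=basic

[formatter_basic]
format=%(asctime)s %(levelname)s: %(message)s
"""
    # same substitution loop as in init()
    for varName in ("confDir", "APP_NAME"):
        logConfText = logConfText.replace(varName, eval(varName))
    logging.config.fileConfig(StringIO(logConfText))
    return logging.getLogger(APP_NAME)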
def saveBsonObject(data):
    data = getSortedDict(data)
    bsonBytes = bytes(bson.dumps(data))
    _hash = sha1(bsonBytes).hexdigest()
    dpath = join(objectDir, _hash[:2])
    fpath = join(dpath, _hash[2:])
    if not isfile(fpath):
        makeDir(dpath)
        open(fpath, "wb").write(bsonBytes)
    return _hash
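# A hypothetical counterpart for illustration only (the name and error
# handling are assumptions, not taken from the code above): resolve a hash
# back to its file in the two-level content-addressed store (first two hex
# digits name the directory) and decode it, assuming the bson module used by
# saveBsonObject() also provides loads().
def loadBsonObjectByHash(_hash):
    fpath = join(objectDir, _hash[:2], _hash[2:])
    with open(fpath, "rb") as fp:
        bsonBytes = fp.read()
    if sha1(bsonBytes).hexdigest() != _hash:
        raise IOError("corrupted object file: \"%s\"" % fpath)
    return bson.loads(bsonBytes)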
def importAccountsIter():
    makeDir(newAccountsDir)
    ###
    oldFiles = os.listdir(oldAccountsDir)
    yield len(oldFiles)  # first yield: total number of steps
    index = 0
    ###
    for fname in oldFiles:
        yield index
        index += 1
        jsonPath = join(oldAccountsDir, fname)
        newJsonPath = join(newAccountsDir, fname)
        if not isfile(jsonPath):
            print("\"%s\": no such file" % jsonPath)
            continue
        jsonPathNoX, ext = splitext(fname)
        if ext != ".json":
            continue
        try:
            _id = int(jsonPathNoX)
        except ValueError:
            continue
        try:
            data = json.loads(open(jsonPath).read())
        except Exception as e:
            print("error while loading json file \"%s\": %s" % (jsonPath, e))
            continue
        if "history" in data:
            print("skipping \"%s\": history already exists" % jsonPath)
            continue
        try:
            tm = data.pop("modified")
        except KeyError:
            tm = now()
        ###
        basicData = {}
        #basicData["modified"] = tm
        ###
        ## remove extra params from data and add them to basicData
        for param in ("enable",):
            try:
                basicData[param] = data.pop(param)
            except KeyError:
                pass
        ###
        _hash = saveBsonObject(data)
        basicData["history"] = [(tm, _hash)]
        open(newJsonPath, "w").write(
            dataToPrettyJson(basicData, sort_keys=True)
        )
def importEventsIter():
    makeDir(newEventEventsDir)
    oldFiles = os.listdir(oldEventEventsDir)
    yield len(oldFiles)  # first yield: total number of steps
    index = 0
    for dname in oldFiles:
        yield index
        index += 1
        ####
        try:
            _id = int(dname)
        except ValueError:
            continue
        dpath = join(oldEventEventsDir, dname)
        newDpath = join(newEventEventsDir, dname)
        if not isdir(dpath):
            print("\"%s\" must be a directory" % dpath)
            continue
        jsonPath = join(dpath, "event.json")
        if not isfile(jsonPath):
            print("\"%s\": no such file" % jsonPath)
            continue
        try:
            data = json.loads(open(jsonPath).read())
        except Exception as e:
            print("error while loading json file \"%s\": %s" % (jsonPath, e))
            continue
        try:
            tm = data.pop("modified")
        except KeyError:
            tm = now()
        ###
        basicData = {}
        #basicData["modified"] = tm
        ###
        ## remove extra params from data and add them to basicData
        for param in (
            "remoteIds",
            "notifiers",  ## FIXME
        ):
            try:
                basicData[param] = data.pop(param)
            except KeyError:
                pass
        ###
        _hash = saveBsonObject(data)
        basicData["history"] = [(tm, _hash)]
        open(newDpath + ".json", "w").write(
            dataToPrettyJson(basicData, sort_keys=True)
        )
def addStartup():
    if osName == "win":
        from scal3.windows import winMakeShortcut, winStartupFile
        makeDir(winStartupDir)
        #fname = APP_NAME + ("-qt" if uiName == "qt" else "") + ".pyw"
        fname = core.COMMAND + ".pyw"
        fpath = join(rootDir, fname)
        #open(winStartupFile, "w").write(
        #    'execfile(%r, {"__file__": %r})' % (fpath, fpath)
        #)
        try:
            winMakeShortcut(fpath, winStartupFile)
        except Exception:
            return False
        else:
            return True
    elif isdir("%s/.config" % homeDir):
        # osName in ("linux", "mac")
        # maybe Gnome/KDE on Solaris, *BSD, ...
        text = """[Desktop Entry]
Type=Application
Name=%s %s
Icon=%s
Exec=%s""" % (
            core.APP_DESC,
            core.VERSION,
            APP_NAME,
            core.COMMAND,
        )  # double quotes are needed when the exec path contains a space
        makeDir(comDeskDir)
        try:
            fp = open(comDesk, "w")
        except Exception:
            core.myRaise(__file__)
            return False
        else:
            fp.write(text)
            return True
    elif osName == "mac":
        # FIXME
        pass
    return False
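# Hypothetical inverse of addStartup(), shown only to illustrate which startup
# artifacts the function above creates (the Windows shortcut at winStartupFile,
# or the .desktop entry at comDesk); this sketch is not part of the original code.
def removeStartup():
    import os
    if osName == "win":
        from scal3.windows import winStartupFile
        if os.path.isfile(winStartupFile):
            os.remove(winStartupFile)
            return True
    elif os.path.isfile(comDesk):
        os.remove(comDesk)
        return True
    return False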
def importGroupsIter():
    groupsEnableDict = {}  ## {groupId -> enable}
    ###
    makeDir(newGroupsDir)
    ###
    oldFiles = os.listdir(oldGroupsDir)
    yield len(oldFiles) + 1  # first yield: total number of steps
    index = 0
    ###
    for fname in oldFiles:
        yield index
        index += 1
        jsonPath = join(oldGroupsDir, fname)
        newJsonPath = join(newGroupsDir, fname)
        if not isfile(jsonPath):
            print("\"%s\": no such file" % jsonPath)
            continue
        jsonPathNoX, ext = splitext(fname)
        if ext != ".json":
            continue
        try:
            _id = int(jsonPathNoX)
        except ValueError:
            continue
        try:
            data = json.loads(open(jsonPath).read())
        except Exception as e:
            print("error while loading json file \"%s\": %s" % (jsonPath, e))
            continue
        ####
        groupsEnableDict[_id] = data.pop("enable", True)
        ####
        if "history" in data:
            print("skipping \"%s\": history already exists" % jsonPath)
            continue
        try:
            tm = data.pop("modified")
        except KeyError:
            tm = now()
        ###
        basicData = {}
        #basicData["modified"] = tm
        ###
        ## remove extra params from data and add them to basicData
        for param in ("remoteIds",):
            basicData[param] = data.pop(param, None)
        for param in (
            "enable",
            "idList",
            "remoteSyncData",
            "deletedRemoteEvents",
        ):
            try:
                basicData[param] = data.pop(param)
            except KeyError:
                pass
        ###
        _hash = saveBsonObject(data)
        basicData["history"] = [(tm, _hash)]
        open(newJsonPath, "w").write(
            dataToPrettyJson(basicData, sort_keys=True)
        )
    ####
    yield index
    index += 1
    oldGroupListFile = join(oldEventDir, "group_list.json")
    newGroupListFile = join(newEventDir, "group_list.json")
    try:
        groupIds = json.loads(open(oldGroupListFile).read())
    except Exception as e:
        print("error while loading %s: %s" % (oldGroupListFile, e))
    else:
        if isinstance(groupIds, list):
            # encode each group's enable flag in the sign of its id
            signedGroupIds = [
                (1 if groupsEnableDict.get(gid, True) else -1) * gid
                for gid in groupIds
            ]
            try:
                open(newGroupListFile, "w").write(
                    dataToPrettyJson(signedGroupIds)
                )
            except Exception as e:
                print("error while writing %s: %s" % (newGroupListFile, e))
        else:
            print(
                "file \"%s\" contains invalid data, must contain a list"
                % oldGroupListFile
            )
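# Small sketch (the example ids are made up) of how a reader of the new
# group_list.json recovers the enable flag that importGroupsIter() encoded in
# the sign of each group id: negative means disabled, positive means enabled.
def decodeSignedGroupIds(signedGroupIds):
    # e.g. decodeSignedGroupIds([3, -7, 12]) -> [(3, True), (7, False), (12, True)]
    return [(abs(sgid), sgid > 0) for sgid in signedGroupIds]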