def f_read(f_name):
    """Read *f_name* from the user's ``.klb`` directory, best-effort.

    If the ``SLAVE`` environment variable is set, the matching file from
    the ``.slave`` directory is appended as well.  Missing or unreadable
    files are silently skipped.

    Args:
        f_name (str): File name relative to the config directory.

    Returns:
        str: Concatenated file contents, or ``""`` if nothing was readable.
    """
    ret = ""
    # FIX: bare ``except:`` also swallowed KeyboardInterrupt/SystemExit;
    # only I/O failures should be ignored for a best-effort read.
    try:
        with open(_join(_expanduser("~"), ".klb/" + f_name)) as f:
            ret += f.read()
    except OSError:
        pass
    if _getenv("SLAVE"):
        try:
            with open(_join(_expanduser("~"), ".slave/" + f_name)) as f:
                ret += f.read()
        except OSError:
            pass
    return ret
def _validate_file(self, file_name):
    """Validate *file_name* before parsing.

    Checks that the argument is a string, that the file exists on disk,
    and that its extension is listed in ``self.extensions``.  Override
    this hook to change or remove the checks, as long as it still
    returns the file name.

    Args:
        file_name: Path of the file to validate.

    Returns:
        The canonical (real, user-expanded) path of the file.

    Raises:
        IOError: If the argument is not a string, the file is missing,
            or the extension is unsupported.
    """
    if not isinstance(file_name, _string_types):
        raise IOError('{}: file_name must be a string'.format(file_name))
    file_name = _realpath(_expanduser(file_name))
    if not _isfile(file_name):
        raise IOError('{}: File not found.'.format(file_name))
    extension = file_name.split('.')[-1]
    if extension not in self.extensions:
        supported = '(' + ', '.join(self.extensions) + ')'
        raise IOError('{name}: Unsupported extension. Supported '
                      'extensions are {exts}'.format(
                          name=file_name, exts=supported))
    return file_name
def getDownloadDir():
    """
    @return: filepath of download_entry dir
    @rtype: str

    Builds the Windows-style path ``<home>\\Downloads`` and verifies it
    exists.  (SHGetKnownFolderPath with the Downloads GUID would be the
    proper Windows API, but it needs the GUID passed as a C struct —
    see http://msdn.microsoft.com/en-us/library/windows/desktop/bb762188(v=vs.85).aspx)
    """
    try:
        home = _expanduser("~")
    except:  # Todo- figure out how to find dl folder on mac?
        raise
    dl_dir = '\\'.join([home, "Downloads"])
    if not _exists(dl_dir):
        raise FileNotFoundError("Couldn't find downloads folder")
    # Normalize any forward slashes to backslashes for Windows callers.
    return dl_dir.replace('/', '\\')
def f_readlines(f_name):
    """Read the lines of *f_name* from the user's ``.klb`` directory.

    If the ``SLAVE`` environment variable is set, lines from the matching
    ``.slave`` file are appended as well.  Missing or unreadable files are
    silently skipped.  Newlines are stripped from each line.

    Args:
        f_name (str): File name relative to the config directory.

    Returns:
        list[str]: Collected lines without surrounding newlines
        (``[]`` if nothing was readable).
    """
    ret = []
    # FIX: bare ``except:`` narrowed to OSError so only I/O failures are
    # ignored, not KeyboardInterrupt/SystemExit or programming errors.
    try:
        with open(_join(_expanduser("~"), ".klb/" + f_name)) as f:
            ret += f.readlines()
    except OSError:
        pass
    if _getenv("SLAVE"):
        try:
            with open(_join(_expanduser("~"), ".slave/" + f_name)) as f:
                ret += f.readlines()
        except OSError:
            pass
    # Strip newlines in one pass instead of an index-based mutation loop.
    return [line.strip("\n") for line in ret]
def getWorkDir():
    """Get current user's Documents folder.

    On Windows, tries the registry-backed helpers first; otherwise probes
    ``<home>\\Documents`` and ``<home>\\My Documents``.

    @rtype: str or None (None when no Documents folder is found)
    """
    # OS-specific attempt for Windows
    if os_name == 'nt':
        try:
            return getWinUserDocs()
        except OSError:
            pass
        try:
            return getWinCommonDocs()
        except OSError:
            pass
    user = _expanduser("~")
    folders = ('Documents', 'My Documents')
    for folder in folders:
        # BUG FIX: the original used ''.join((user, folder)), which glued the
        # home path and folder name together with no separator (e.g.
        # "C:\Users\meDocuments"), so the folder was effectively never found.
        # Use the same backslash join convention as getDownloadDir().
        workdir = '\\'.join((user, folder))
        if _exists(workdir):
            return workdir.replace('/', '\\')
    return None
def _validate_file(self, file_name):
    """Validate *file_name* before parsing.

    Verifies the argument is a string, the file exists, and its extension
    (case-insensitively) matches one of ``self.extensions``.  This hook may
    be overridden to perform different checks or none at all, provided it
    still returns the file name.

    Args:
        file_name: Path of the file to validate.

    Returns:
        The canonical (real, user-expanded) path of the file.

    Raises:
        ParseError: If the argument is not a string, the file is missing,
            or the extension is absent/unsupported.
    """
    if not isinstance(file_name, _string_types):
        raise ParseError(
            '{}: file_name must be a string'.format(file_name))
    file_name = _realpath(_expanduser(file_name))
    if not _isfile(file_name):
        raise ParseError('{}: File not found.'.format(file_name))
    parts = file_name.split('.')
    known = [ext.upper() for ext in self.extensions]
    # A name with no '.' has no extension at all; treat it as unsupported.
    if len(parts) < 2 or parts[-1].upper() not in known:
        raise ParseError('{name}: Unsupported extension. Supported '
                         'extensions are {exts}'.format(
                             name=file_name,
                             exts='(' + ', '.join(self.extensions) + ')'))
    return file_name
def f_read_json(f_name):
    """Load JSON from *f_name* in the user's ``.klb`` directory, best-effort.

    If the ``SLAVE`` environment variable is set, keys from the matching
    ``.slave`` file are merged on top.  Missing, unreadable, or malformed
    files are silently skipped.

    Args:
        f_name (str): File name relative to the config directory.

    Returns:
        dict: Merged key/value pairs (``{}`` if nothing was readable).
    """
    ret = {}
    # FIX: bare ``except:`` narrowed; json.load raises ValueError
    # (JSONDecodeError) on malformed content, open raises OSError.
    try:
        with open(_join(_expanduser("~"), ".klb/" + f_name)) as f:
            ret.update(_json.load(f))
    except (OSError, ValueError):
        pass
    if _getenv("SLAVE"):
        try:
            with open(_join(_expanduser("~"), ".slave/" + f_name)) as f:
                ret.update(_json.load(f))
        except (OSError, ValueError):
            pass
    return ret
def _update_env(): ## Can't import modules, only concrete objects/functions. ## All vars (including imported things) need to have an underscore prefix if you want them ignored by invoke. from os import environ as _environ from os.path import join as _join, expanduser as _expanduser, exists as _exists from getpass import getuser as _getuser _release_auth_file = _expanduser(_join("~", "etc", "release.auth")) if _exists(_release_auth_file): _auth = open(_release_auth_file).read().strip().split(':') _environ['WIKI_USER'] = _auth[0] _environ['WIKI_PASS'] = _auth[1] _environ['JIRA_USER'] = _auth[0] _environ['JIRA_PASS'] = _auth[1] _passwd_file = _expanduser(_join("~", "etc", "passwd")) if _exists(_passwd_file): _pass = open(_passwd_file).read().strip() _environ['SMB_USER'] = _getuser() _environ['SMB_PASS'] = _pass
def f_read_json(f_name):
    """Load JSON from *f_name* in the user's ``.klb`` directory, best-effort.

    If the ``SLAVE`` environment variable is set, keys from the matching
    ``.slave`` file are merged on top of the ``.klb`` values.  Missing,
    unreadable, or malformed files are silently skipped.

    Args:
        f_name (str): File name relative to the config directory.

    Returns:
        dict: Merged key/value pairs (``{}`` if nothing was readable).
    """
    ret = {}
    # FIX: bare ``except:`` narrowed to the errors open()/json.load() raise.
    try:
        with open(_join(_expanduser("~"), ".klb/" + f_name)) as f:
            ret.update(_json.load(f))
    except (OSError, ValueError):
        pass
    if _getenv("SLAVE"):
        try:
            with open(_join(_expanduser("~"), ".slave/" + f_name)) as f:
                # dict.update covers both the "ret is empty" and the
                # key-by-key merge branches of the original.
                ret.update(_json.load(f))
        except (OSError, ValueError):
            pass
    return ret
def _annotate_hosts_with_ssh_config_info():
    """Rewrite ``env.hosts`` using the user's ``~/.ssh/config``.

    Applies per-host ``hostname``/``user``/``port`` overrides and collects
    ``identityfile`` entries into ``env.key_filename``.  Does nothing when
    the config file cannot be read.
    """
    def hostinfo(host, config):
        # Fold the matched config entry into a "user@host:port" string.
        hive = config.lookup(host)
        if 'hostname' in hive:
            host = hive['hostname']
        if 'user' in hive:
            host = '%s@%s' % (hive['user'], host)
        if 'port' in hive:
            host = '%s:%s' % (host, hive['port'])
        return host

    try:
        # FIX: ``file()`` is a Python 2-only builtin and a NameError on
        # Python 3; ``open()`` behaves identically here on both.
        config_file = open(_expanduser('~/.ssh/config'))
    except IOError:
        pass
    else:
        config = _SSHConfig()
        config.parse(config_file)
        # FIX: close the handle once parsed instead of leaking it.
        config_file.close()
        keys = [config.lookup(host).get('identityfile', None)
                for host in env.hosts]
        env.key_filename = [_expanduser(key)
                            for key in keys if key is not None]
        env.hosts = [hostinfo(host, config) for host in env.hosts]
def _check_path(self, path):
    """
    Check path and ensure it is absolute.

    Args:
        path (str): path

    Returns:
        str: Absolute path

    Raises:
        FileNotFoundError: If the path does not name an existing file
            (routed through ``self._handle_exception``).
    """
    with self._handle_exception(OSError):
        resolved = _realpath(_expanduser(path))
        if not _isfile(resolved):
            raise FileNotFoundError('No such file: ' + resolved)
        return resolved
_realpath) from collections.abc import Mapping as _Mapping from subprocess import run as _run, PIPE as _PIPE try: # Use LibYAML if available from yaml import CSafeLoader as _Loader, CDumper as _Dumper except ImportError: # Else use pure-Python library from yaml import SafeLoader as _Loader, Dumper as _Dumper from yaml import dump as _yaml_dump, load as _yaml_load from accelpy.exceptions import RuntimeException as _RuntimeException #: User configuration directory HOME_DIR = _expanduser('~/.accelize') # Ensure directory exists and have restricted access rights _makesdirs(HOME_DIR, exist_ok=True) _chmod(HOME_DIR, 0o700) def yaml_read(path): """ Read a YAML file. Args: path (path-like object): Path to file to load. Returns: dict or list: Un-serialized content
from os.path import join as _join
from os.path import expanduser as _expanduser
from os.path import dirname as _dirname
from os.path import realpath as _realpath
from os.path import isdir as _isdir
from os import mkdir as _mkdir
from os import getenv as _getenv
import json as _json
import pickle

# Default storage directory: ~/.slave/ when running as a SLAVE node,
# otherwise ~/.klb/.
if _getenv("SLAVE"):
    default_path = _join(_expanduser("~"), ".slave/")
else:
    default_path = _join(_expanduser("~"), ".klb/")

def test_dir(directory):
    # Create *directory* if it does not exist; failures (e.g. permission
    # errors) are deliberately ignored — best-effort setup.
    if not _isdir(directory):
        try:
            _mkdir(directory)
        except:
            pass

def open_local(f_name, method="r"):
    # Open *f_name* from ~/.klb; on any failure fall back to the empty
    # "blank_file" shipped next to this module so callers always get a
    # readable file object.
    try:
        return open(_join(_expanduser("~"), ".klb/" + f_name),method)
    except:
        folder = _dirname(_realpath(__file__))
        return open(_join(folder, "blank_file"))

# NOTE(review): this definition continues past the end of this chunk.
def f_readlines(f_name):
    ret = []
def open_local(f_name, method="r"):
    """Open *f_name* from the user's ``.klb`` directory.

    On any failure, falls back to the empty "blank_file" placeholder that
    ships alongside this module, so callers always receive a readable
    file object.
    """
    target = _join(_expanduser("~"), ".klb/" + f_name)
    try:
        return open(target, method)
    except:
        here = _dirname(_realpath(__file__))
        return open(_join(here, "blank_file"))
from os.path import join as _join
from os.path import expanduser as _expanduser
from os.path import dirname as _dirname
from os.path import realpath as _realpath
from os.path import isdir as _isdir
from os import mkdir as _mkdir
from os import getenv as _getenv
import json as _json
import pickle
import config

# Default storage directory: ~/.slave/ when running as a SLAVE node,
# otherwise ~/.klb/.
if _getenv("SLAVE"):
    default_path = _join(_expanduser("~"), ".slave/")
else:
    default_path = _join(_expanduser("~"), ".klb/")

def test_dir(directory):
    # Create *directory* if it does not exist; failures (e.g. permission
    # errors) are deliberately ignored — best-effort setup.
    if not _isdir(directory):
        try:
            _mkdir(directory)
        except:
            pass

# Ensure the storage directory exists at import time.
test_dir(default_path)

# NOTE(review): this definition continues past the end of this chunk.
def open_local(f_name, method="r"):
    try:
        return open(_join(_expanduser("~"), ".klb/" + f_name), method)
def open_local(f_name, method="r"):
    """Return a file object for *f_name* under ``~/.klb``.

    Any failure (missing file, bad permissions, ...) is masked by opening
    the empty "blank_file" located next to this module instead.
    """
    try:
        path = _join(_expanduser("~"), ".klb/" + f_name)
        return open(path, method)
    except:
        fallback = _join(_dirname(_realpath(__file__)), "blank_file")
        return open(fallback)
# -*- coding: utf-8 -*- """ Parse PSE and config data. Tries user-config, if not found, falls back to default """ from collections import OrderedDict as _ODict from os.path import dirname as _dirname, expanduser as _expanduser from json import load as _load, dump as _dump with open(_dirname(__file__) + '/default.json') as _f: default = _load(_f, object_pairs_hook=_ODict) try: with open(_expanduser('~/.vipster.json')) as _f: _cfile = _load(_f, object_pairs_hook=_ODict) except: from copy import deepcopy as _deepcopy _cfile = _deepcopy(default) pse = _cfile['PSE'] config = _cfile['General'] _paramdict = _cfile['Parameters'] def saveConfig(): """Write config and PSE to json-file""" with open(_expanduser('~/.vipster.json'), 'w') as f: _dump(_ODict([('PSE', pse), ('General', config), ('Parameters', _paramdict)]), f, indent=2) __all__ = ["pse", "config", "default", "saveConfig"]
def sign_rpm_packages(packages_dir, private_key=None, public_key=None,
                      pass_phrase=None, quiet=False):
    """
    Sign all RPM packages in a directory.

    Args:
        packages_dir (str): Directory containing packages.
        private_key (str): Path to GPG private key to use. If no private
            key specified, use current GPG configuration.
        public_key (str): Path to GPG public key to use.
        pass_phrase (str): GPG key pass phrase.
        quiet (bool): Hide commands output.

    Raises:
        RuntimeError: If the GPG user ID cannot be determined or a
            package fails signature verification.
    """
    _init_gpg_configuration(private_key)

    # Import public key
    if public_key:
        _run(['rpm', '--import', public_key], quiet=quiet)

    # Sign packages
    packages = [
        package for package in _listdir(packages_dir)
        if _splitext(package)[1].lower() == '.rpm']

    # Extract the GPG user ID from the exported key's packet listing;
    # rpm's %_gpg_name macro needs it to pick the signing key.
    gpg_info = _run('gpg --export | gpg --list-packets', shell=True,
                    quiet=True).stdout
    for line in gpg_info.strip().splitlines():
        if ':user ID packet' in line:
            gpg_user_id = line.rsplit(':', 1)[1].strip().strip('"')
            break
    else:
        raise RuntimeError('Unable to read GPG User ID')

    # rpm macros controlling the signing backend.
    macros = [
        '_signature gpg',
        '_gpg_path %s' % _expanduser("~/.gnupg"),
        '_gpg_name %s' % gpg_user_id]

    if pass_phrase:
        # Override rpm's sign command so the pass phrase is supplied
        # non-interactively (--batch --passphrase).
        macros += [
            '_gpgbin /usr/bin/gpg',
            ' '.join(
                ('__gpg_sign_cmd %{__gpg}', 'gpg', '--force-v3-sigs',
                 '--batch', '--verbose', '--no-armor',
                 '--passphrase "%s"' % pass_phrase,
                 '--no-secmem-warning', '-u', '"%{_gpg_name}"', '-sbo',
                 '%{__signature_filename}', '--digest-algo', 'sha256',
                 '%{__plaintext_filename}'))]

    define = []
    for macro in macros:
        define.extend(["--define", macro])

    _run(['rpm', '--addsign'] + define + packages,
         quiet=True, cwd=packages_dir)

    # Verify signatures
    result = _run(['rpm', '--checksig'] + packages,
                  quiet=True, cwd=packages_dir)
    for line in result.stdout.splitlines():
        line = line.rstrip()
        # Accept the "OK" suffixes emitted by different rpm versions.
        if (not line.endswith('gpg OK') and
                not line.endswith('pgp md5 OK') and
                not line.endswith('digests signatures OK')):
            raise RuntimeError('Package signature verification failure: %s'
                               % line)
def saveConfig():
    """Write config and PSE to json-file"""
    payload = _ODict([('PSE', pse),
                      ('General', config),
                      ('Parameters', _paramdict)])
    with open(_expanduser('~/.vipster.json'), 'w') as f:
        _dump(payload, f, indent=2)
#!/usr/bin/env python # -*- coding: UTF-8 -*- # # PyStore: Flat-file datastore for timeseries data # https://github.com/ranaroussi/pystore # # Copyright 2018-2019 Ran Aroussi # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from os.path import expanduser as _expanduser DEFAULT_PATH = _expanduser('~/pystore')
def sign_rpm_packages(packages_dir, private_key=None, public_key=None,
                      pass_phrase=None, quiet=False):
    """
    Sign all RPM packages in a directory.

    Args:
        packages_dir (str): Directory containing packages.
        private_key (str): Path to GPG private key to use. If no private
            key specified, use current GPG configuration.
        public_key (str): Path to GPG public key to use.
        pass_phrase (str): GPG key pass phrase.
        quiet (bool): Hide commands output.

    Raises:
        RuntimeError: If the GPG user ID cannot be determined or a
            package fails signature verification.
    """
    _init_gpg_configuration(private_key)

    # Import public key
    if public_key:
        _run(["rpm", "--import", public_key], quiet=quiet)

    # Sign packages
    packages = [
        package for package in _listdir(packages_dir)
        if _splitext(package)[1].lower() == ".rpm"]

    # Extract the GPG user ID from the exported key's packet listing;
    # rpm's %_gpg_name macro needs it to pick the signing key.
    gpg_info = _run("gpg --export | gpg --list-packets", shell=True,
                    quiet=True).stdout
    for line in gpg_info.strip().splitlines():
        if ":user ID packet" in line:
            gpg_user_id = line.rsplit(":", 1)[1].strip().strip('"')
            break
    else:
        raise RuntimeError("Unable to read GPG User ID")

    # rpm macros controlling the signing backend.
    macros = [
        "_signature gpg",
        "_gpg_path %s" % _expanduser("~/.gnupg"),
        "_gpg_name %s" % gpg_user_id,
    ]

    if pass_phrase:
        # Override rpm's sign command so the pass phrase is supplied
        # non-interactively (--batch --passphrase).
        macros += [
            "_gpgbin /usr/bin/gpg",
            " ".join((
                "__gpg_sign_cmd %{__gpg}",
                "gpg",
                "--force-v3-sigs",
                "--batch",
                "--verbose",
                "--no-armor",
                '--passphrase "%s"' % pass_phrase,
                "--no-secmem-warning",
                "-u",
                '"%{_gpg_name}"',
                "-sbo",
                "%{__signature_filename}",
                "--digest-algo",
                "sha256",
                "%{__plaintext_filename}",
            )),
        ]

    define = []
    for macro in macros:
        define.extend(["--define", macro])

    _run(["rpm", "--addsign"] + define + packages,
         quiet=True, cwd=packages_dir)

    # Verify signatures
    result = _run(["rpm", "--checksig"] + packages,
                  quiet=True, cwd=packages_dir)
    for line in result.stdout.splitlines():
        line = line.rstrip()
        # Accept the "OK" suffixes emitted by different rpm versions.
        if (not line.endswith("gpg OK") and
                not line.endswith("pgp md5 OK") and
                not line.endswith("digests signatures OK")):
            raise RuntimeError("Package signature verification failure: %s"
                               % line)

    if not quiet:
        print("Signed packages:\n - %s" % "\n- ".join(packages))
# Cycle definitions for the various display states.
CAMERA_CYCLE = './config/camera.yaml'
COUNTDOWN_CYCLE = './config/countdown.yaml'
GROVEL_CYCLE = './config/grovel.yaml'

ATTRACTOR_BACKGROUND = './media/images/hills-cars.jpg'
PHOTO_DIRECTORY = './photos/'

# Capture back-ends; CAMERA_SCRIPT selects which one is actually used.
CAPTURE_SCRIPT = 'scripts/capture.sh'
FAKE_CAPTURE_SCRIPT = 'scripts/fake-capture.sh'
XO_CAPTURE_SCRIPT = 'scripts/olpc-capture.sh'
#CAMERA_SCRIPT = FAKE_CAPTURE_SCRIPT
CAMERA_SCRIPT = CAPTURE_SCRIPT
CAPTURE_IMAGE_TYPE = 'jpg' #png for XO_CAPTURE_SCRIPT

TEST_IMAGE = _expanduser('~/images/tetuhi-examples/cowboy-tanks.jpg')

# Obviousness thresholds for detected elements and blobs.
MIN_OBVIOUSNESS_ELEMENT = 100
MIN_OBVIOUSNESS_ELEMENT_SUM = 200
MIN_OBVIOUSNESS_ELEMENT_GLOBAL = 20
MIN_OBVIOUSNESS_BLOB = 150
MIN_OBVIOUSNESS_BLOB_SUM = 400
MIN_OBVIOUSNESS_BLOB_GLOBAL = 20

#for evolving games
IDEAL_LENGTH = 550
TARGET_LENGTH = (400, 700)
CUTOFF_LENGTH = 800
RULE_GROWING_ATTEMPTS = 30
# Cycle definitions for the various display states.
INSTRUCTIONS_CYCLE = './config/instructions.yaml'
CREDITS_CYCLE = './config/credits.yaml'
CAMERA_CYCLE = './config/camera.yaml'
COUNTDOWN_CYCLE = './config/countdown.yaml'
GROVEL_CYCLE = './config/grovel.yaml'

ATTRACTOR_BACKGROUND = './media/images/hills-cars.jpg'
PHOTO_DIRECTORY = './photos/'

# Capture back-ends; CAMERA_SCRIPT selects which one is actually used.
CAPTURE_SCRIPT = 'scripts/capture.sh'
FAKE_CAPTURE_SCRIPT = 'scripts/fake-capture.sh'
XO_CAPTURE_SCRIPT = 'scripts/olpc-capture.sh'
#CAMERA_SCRIPT = FAKE_CAPTURE_SCRIPT
CAMERA_SCRIPT = CAPTURE_SCRIPT
CAPTURE_IMAGE_TYPE = 'jpg' #png for XO_CAPTURE_SCRIPT

TEST_IMAGE = _expanduser('~/images/tetuhi-examples/cowboy-tanks.jpg')

# Obviousness thresholds for detected elements and blobs.
MIN_OBVIOUSNESS_ELEMENT = 100
MIN_OBVIOUSNESS_ELEMENT_SUM = 200
MIN_OBVIOUSNESS_ELEMENT_GLOBAL = 20
MIN_OBVIOUSNESS_BLOB = 150
MIN_OBVIOUSNESS_BLOB_SUM = 400
MIN_OBVIOUSNESS_BLOB_GLOBAL = 20

#for evolving games
IDEAL_LENGTH = 550
TARGET_LENGTH = (400, 700)
CUTOFF_LENGTH = 800
RULE_GROWING_ATTEMPTS = 30
RULE_GROWING_TIMEOUT = 6