def generate_aliases(self):
    self._aliases = {}
    self.counter = 0
    do_queue(Reader, self._generate_aliases_helper, self.get_urls())
    with open(join(save_data_path('arm-pack-manager'), "aliases.json"), "wb+") as out:
        dump(self._aliases, out)
    stdout.write("\n")
def __init__(self, target):
    cache = Cache(True, False)
    data_path = join(save_data_path('arm-pack-manager'), "index.json")
    if not exists(data_path) or not self.check_version(data_path):
        cache.cache_descriptors()
    t = TARGET_MAP[target]
    self.core = t.core
    try:
        cpu_name = t.device_name
        target_info = cache.index[cpu_name]
    # Target does not have device name or pdsc file
    except:
        try:
            # Try to find the core as a generic CMSIS target
            cpu_name = self.cpu_cmsis()
            target_info = cache.index[cpu_name]
        except:
            raise TargetNotSupportedException("Target not in CMSIS packs")
    self.target_info = target_info
    self.url = target_info['pdsc_file']
    self.pack_url, self.pack_id = ntpath.split(self.url)
    self.dname = cpu_name
    self.dfpu = target_info['processor']['fpu']
    self.debug, self.dvendor = self.vendor_debug(target_info['vendor'])
    self.dendian = target_info['processor'].get('endianness', 'Little-endian')
    self.debug_svd = target_info.get('debug', '')
    self.compile_header = target_info['compile']['header']
def cache_file(self, curl, url):
    """Low level interface to caching a single file.

    :param curl: The user is responsible for providing a curl.Curl object as the curl parameter.
    :type curl: curl.Curl
    :param url: The URL to cache.
    :type url: str
    :rtype: None
    """
    if not self.silent:
        print("Caching {}...".format(url))
    dest = join(save_data_path('arm-pack-manager'), strip_protocol(url))
    try:
        makedirs(dirname(dest))
    except OSError as exc:
        if exc.errno != EEXIST:
            raise
    with open(dest, "wb+") as fd:
        curl.setopt(curl.URL, url)
        curl.setopt(curl.FOLLOWLOCATION, True)
        curl.setopt(curl.WRITEDATA, fd)
        if not self.no_timeouts:
            curl.setopt(curl.CONNECTTIMEOUT, 2)
            curl.setopt(curl.LOW_SPEED_LIMIT, 50 * 1024)
            curl.setopt(curl.LOW_SPEED_TIME, 2)
        try:
            curl.perform()
        except Exception as e:
            stderr.write("[ ERROR ] file {} did not download {}\n".format(url, str(e)))
    self.counter += 1
    self.display_counter("Caching Files")
def aliases(self):
    """An index of most of the important data in all cached PDSC files.

    :Example:

    >>> from ArmPackManager import Cache
    >>> a = Cache()
    >>> a.index["LPC1768"]
    {u'algorithm': {u'RAMsize': u'0x0FE0',
                    u'RAMstart': u'0x10000000',
                    u'name': u'Flash/LPC_IAP_512.FLM',
                    u'size': u'0x80000',
                    u'start': u'0x00000000'},
     u'compile': [u'Device/Include/LPC17xx.h', u'LPC175x_6x'],
     u'debug': u'SVD/LPC176x5x.svd',
     u'pdsc_file': u'http://www.keil.com/pack/Keil.LPC1700_DFP.pdsc',
     u'memory': {u'IRAM1': {u'size': u'0x8000', u'start': u'0x10000000'},
                 u'IRAM2': {u'size': u'0x8000', u'start': u'0x2007C000'},
                 u'IROM1': {u'size': u'0x80000', u'start': u'0x00000000'}}}
    """
    if not self._aliases:
        try:
            with open(join(save_data_path('arm-pack-manager'), "aliases.json")) as i:
                self._aliases = load(i)
        except IOError:
            self.generate_aliases()
    return self._aliases
def main():
    """Program entry point."""
    state_file = os.path.join(save_data_path('hnmail'), 'state.pickle')
    with State(state_file) as state:
        run(state=state)
def generate_index(self):
    self._index = {}
    self.counter = 0
    do_queue(Reader, self._generate_index_helper, self.get_urls())
    with open(join(save_data_path('arm-pack-manager'), "index.json"), "wb+") as out:
        self._index["version"] = "0.1.0"
        dump(self._index, out)
    stdout.write("\n")
def _get_db(self):
    db = self.conf.get('main', 'db')
    if db != ':memory:':
        from xdg.BaseDirectory import save_data_path
        from os.path import join
        res_data_path = save_data_path(self.xdg_resource)
        db = join(res_data_path, db)
    return db
def load_data_series() -> Iterator[Tuple[datetime, Bug]]:
    """Yield timestamps along with bugs stored in the database."""
    data_dir = save_data_path("buglog")
    files = sorted(Path(data_dir).glob("*.json"))
    for file in files:
        time = datetime.fromisoformat(file.stem)
        with open(file) as fin:
            for bug_name, bug_fields in json.load(fin).items():
                bug_cls = str_to_bug(bug_name)
                yield time, bug_cls(**bug_fields)
def pack_from_cache(self, url):
    """Low level interface for extracting a PACK file from the cache.

    Assumes that the file specified is a PACK file and is in the cache.

    :param url: The URL of a PACK file.
    :type url: str
    :return: A parsed representation of the PACK file.
    :rtype: ZipFile
    """
    return ZipFile(join(save_data_path('arm-pack-manager'), strip_protocol(url)))
def __init__(self):
    from xdg.BaseDirectory import save_config_path, save_data_path
    # TODO: Should we use save_config_path or load_first_config?
    self.config_dir = save_config_path('coinbox')
    self.data_dir = save_data_path('coinbox')
    # TODO: the default locale directory should be determined in a better way
    # some package it in the egg as resources (yuck)
    # some have a $prefix set up, and from that determine the path to $prefix/share/locale
    # some have it in the data directory
    self.locale_dir = '/usr/share/locale'
def pdsc_from_cache(self, url):
    """Low level interface for extracting a PDSC file from the cache.

    Assumes that the file specified is a PDSC file and is in the cache.

    :param url: The URL of a PDSC file.
    :type url: str
    :return: A parsed representation of the PDSC file.
    :rtype: BeautifulSoup
    """
    dest = join(save_data_path('arm-pack-manager'), strip_protocol(url))
    with open(dest, "r") as fd:
        return BeautifulSoup(fd, "html.parser")
def load_selfsigned_cert(hostname):
    from xdg.BaseDirectory import save_data_path
    DATA_DIR = save_data_path('crackomatic')
    key_file = os.path.join(DATA_DIR, '%s.key.pem' % hostname)
    cert_file = os.path.join(DATA_DIR, '%s.cert.pem' % hostname)
    if not (os.path.exists(key_file) and os.path.exists(cert_file)):
        key_pem, cert_pem = generate_selfsigned_cert(hostname)
        open(key_file, 'wb').write(key_pem)
        open(cert_file, 'wb').write(cert_pem)
        os.chmod(key_file, 0o600)
        os.chmod(cert_file, 0o600)
    return key_file, cert_file
def __init__(self, user, password, max_parallel_downloads=3,
             download_directory="~/downloads", res=None):
    super(PlowBot, self).__init__(user, password, res)
    self.download_queue = Queue.Queue()
    self.parallel_downloads_sema = threading.BoundedSemaphore(
        max_parallel_downloads)
    self.download_directory = os.path.expanduser(download_directory)
    assert os.path.exists(self.download_directory), "Download directory does not exist"
    # logging stuff - newer versions of jabberbot
    if not callable(self.log):
        logfile = os.path.join(save_data_path("plowbot"), "plowbot.log")
        handler = logging.handlers.RotatingFileHandler(logfile, maxBytes=100000)
        formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
        handler.setFormatter(formatter)
        self.log.addHandler(handler)
        self.log.setLevel(logging.INFO)
def get_options():
    config_dir = load_first_config('hnss')
    data_dir = save_data_path('hnss')
    auth_file_default = os.path.join(config_dir, 'auth.json')
    data_file_default = os.path.join(data_dir, 'data.json')

    if os.path.exists('auth.json'):
        with open('auth.json', 'rb') as fh:
            auth = json.loads(fh.read())

    parser = argparse.ArgumentParser(description="""
        Download saved stories from HackerNews and dump the resultant data into a
        .json file. Subsequent runs using a previous data file will only scrape the
        newest saved stories. (Note: There is a 30 second delay between requests.)
        """)
    parser.add_argument('-u', '--username', default=None,
                        help="HackerNews username")
    parser.add_argument('-p', '--password', default=None,
                        help="HackerNews password")
    parser.add_argument('-a', '--auth-file', default=None,
                        help="Auth file (JSON format). (default: %s)" % auth_file_default)
    parser.add_argument('-f', '--file', default=data_file_default,
                        help="File to download to. '-' can be used to redirect output to stdout. (default: %s)" % data_file_default)
    parser.add_argument('-m', '--max-pages', type=int, default=1,
                        help="The maximum number of pages to go into the past. 0 goes back all the way to the beginning of time. (default: 1)")
    parser.add_argument('-d', '--debug', action='store_true', help="Debug mode.")
    parser.add_argument('-v', '--verbose', action='store_true', help="Verbose output.")
    options = parser.parse_args()

    if options.auth_file:
        with open(options.auth_file, 'rb') as fh:
            auth_info = json.loads(fh.read())
        options.username = options.username or auth_info['username']
        options.password = options.password or auth_info['password']
    else:
        if os.path.exists(auth_file_default):
            with open(auth_file_default, 'rb') as fh:
                auth_info = json.loads(fh.read())
            options.username = options.username or auth_info['username']
            options.password = options.password or auth_info['password']

    if not options.username:
        sys.exit("Error: No username given.")
    if not options.password:
        sys.exit("Error: No password given.")

    return options
def define_store_location():
    '''
    According to DES-EMA (http://www.nautilus-actions.org/?q=node/377),
    .desktop files are searched for in "XDG_DATA_DIRS/file-manager/actions".
    This module installs user-specific versions of .desktop files; therefore,
    it uses "$XDG_DATA_HOME/file-manager/actions" as defined in the XDG Base
    Directory Specification. Also according to that specification, if
    $XDG_DATA_HOME is either not set or empty, a default equal to
    $HOME/.local/share should be used.
    '''
    # Returns xdg_data_home or the default
    XDG_DATA_HOME = xdg_data_home
    DESKTOP_FILE_DIR = os.path.join('file-manager', 'actions')
    # Ensure desktop data dir exists
    store_location = save_data_path(DESKTOP_FILE_DIR)
    return store_location
def get_service():
    """
    Handle oauth's shit (copy-pasta from http://code.google.com/apis/tasks/v1/using.html)

    Yes I do publish a secret key here, apparently it is normal
    http://stackoverflow.com/questions/7274554/why-google-native-oauth2-flow-require-client-secret
    """
    FLAGS = gflags.FLAGS
    FLOW = OAuth2WebServerFlow(
        client_id='617841371351.apps.googleusercontent.com',
        client_secret='_HVmphe0rqwxqSR8523M6g_g',
        scope='https://www.googleapis.com/auth/tasks',
        user_agent='michel/0.0.1')
    FLAGS.auth_local_webserver = False
    storage = Storage(os.path.join(save_data_path("michel"), "oauth.dat"))
    credentials = storage.get()
    if credentials is None or credentials.invalid == True:
        credentials = run(FLOW, storage)
    http = httplib2.Http()
    http = credentials.authorize(http)
    return build(serviceName='tasks', version='v1', http=http)
def __init__(self, client_id, client_secret, encode_xoauth2, **kwargs):
    super().__init__(client_id, client_secret, encode_xoauth2, **kwargs)
    # Basic
    self._tenant_id = "common"
    # Some needed redirection
    self._redirect_host = "localhost"
    self._redirect_port = "5000"
    # self._redirect_path = "/getToken/"
    self._redirect_path = "/"
    self._redirect_uri = self._generate_redirect_uri()
    #
    self._credentials_file = save_data_path(APP_NAME) + f"/{client_id}.bin"
    # Scopes
    self._scopes = [
        "https://outlook.office365.com/IMAP.AccessAsUser.All",
        # "https://outlook.office.com/SMTP.Send",
    ]
    self._browser_activated = False
    sys.exit(0)

config = _load_config(options.config)

if not options.sender:
    options.sender = config.sender
if not options.to:
    options.to = config.to
if not options.relay:
    options.relay = config.relay

if not options.sender or not options.to or not options.relay:
    logger.error("need to specify --from and --to and --relay")
    sys.exit(1)

datadir = save_data_path('opensuse.org', config.name)
current_fn = os.path.join(datadir, "announcer-current-version")

if not options.version:
    u = urlparse(urljoin(config.url, config.iso))
    conn = httplib.HTTPConnection(u.hostname, 80)
    conn.request('HEAD', u.path)
    res = conn.getresponse()
    if res.status != 302:
        raise Exception("http fail: %s %s" % (res.status, res.reason))
    loc = res.getheader('location')
    if loc is None:
        raise Exception("empty location!")
import argparse
from logging import getLogger
import os
import sys

from xdg.BaseDirectory import save_data_path
import toml

from ._version import __version__

DATA_DIR = save_data_path('crackomatic')
DB_PATH = os.path.join(DATA_DIR, 'crackomatic.db')

logger = getLogger(__name__)

parser = argparse.ArgumentParser(
    description="Find and notify users in your Active Directory "
                "with weak passwords",
)
parser.add_argument(
    '-v', '--version',
    action='version',
    version='Crack-O-Matic ' + __version__,
)
parser.add_argument(
    '-d', '--debug',
    action='store_true',
    default=False,
    help="log debugging information (default: %(default)s)",
)
def get_credentials_file(app=None, user=None, override=False):
    mode = 'wb' if override else 'rb+'
    return open(save_data_path('oauth2token', app) + '/' + user, mode)
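A minimal usage sketch for the helper above, assuming get_credentials_file and its pyxdg dependency are in scope; the 'myapp' and 'alice' names are purely illustrative:

# Write a token blob for a hypothetical app/user pair, then read it back.
with get_credentials_file(app='myapp', user='alice', override=True) as fh:
    fh.write(b'example-token-bytes')

with get_credentials_file(app='myapp', user='alice') as fh:
    token = fh.read()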
#!/usr/bin/python

from xdg.BaseDirectory import save_cache_path, save_data_path

CACHEDIR = save_cache_path('opensuse.org', 'abi-checker')
DATADIR = save_data_path('opensuse.org', 'abi-checker')

import abichecker_dbmodel as DB
import sqlalchemy.orm.exc


class Config(object):
    def __init__(self, session):
        self.session = session
        if self.session is None:
            self.session = DB.db_session()

    def set(self, key, value):
        try:
            entry = self.session.query(DB.Config).filter(DB.Config.key == key).one()
            entry.value = value
        except sqlalchemy.orm.exc.NoResultFound as e:
            entry = DB.Config(key=key, value=value)
            self.session.add(entry)
        self.session.commit()

    def get(self, key, default=None):
        try:
            entry = self.session.query(DB.Config).filter(DB.Config.key == key).one()
            return entry.value
        except sqlalchemy.orm.exc.NoResultFound as e:
            pass
parser.add_option("--to", metavar="EMAIL", help="recepient email address") parser.add_option("--relay", metavar="RELAY", help="SMTP relay server address") parser.add_option("--version", metavar="VERSION", help="announce specific version") (options, args) = parser.parse_args() if not options.sender or not options.to or not options.relay: print >> sys.stderr, "need to specify --from and --to and --relay" sys.exit(1) url = "http://download.opensuse.org/tumbleweed/iso/" iso = "openSUSE-Tumbleweed-DVD-x86_64-Current.iso" changes = "Changes.%s.txt" DATADIR = save_data_path('opensuse.org', 'factory-announcer') intro = """ Please note that this mail was generated by a script. The described changes are computed based on the x86_64 DVD. The full online repo contains too many changes to be listed here. Please check the known defects of this snapshot before upgrading: https://openqa.opensuse.org/tests/overview?distri=opensuse&groupid=1&version=Tumbleweed&build={} When you reply to report some issues, make sure to change the subject. It is not helpful to keep the release announcement subject in a thread while discussing a specific problem. """ current_fn = os.path.join(DATADIR, "announcer-current-version")
#! /usr/bin/env python3

from setuptools.dist import Distribution
from setuptools import setup

Distribution(dict(setup_requires="pyxdg==0.25"))

from os.path import join
from xdg.BaseDirectory import save_data_path, load_data_paths

# plugins
# according to the Debian Python Policy, they should go in /usr/share/<program>
# NOTE: for the time being, we're gonna install them in the user's xdg.data dir
# first, make sure the dir exists
save_data_path("pbt/")
save_data_path("pbt/plugins/")
# the user's dir comes first
user_xdg_data_dir = next(load_data_paths('pbt/plugins/'))

setup(
    name='pbt',
    version='0.0.1',
    description='python build tool',
    url='https://github.com/pebete/pbt',
    packages=['pbt'],
    scripts=['bin/pbt'],
    license='Apache 2.0',
    install_requires=["PyYAML==3.10", "flake8==2.0",
                      "cookiecutter==0.7.0", "pyxdg==0.25"],
    data_files=[
        (join(user_xdg_data_dir, 'dump'), ['plugins/dump/main.py', ]),
import datetime
import pickle
import os
from functools import total_ordering
from bzoing.playme import Playme
import time
import threading
import subprocess
from xdg.BaseDirectory import save_data_path

share_dir = save_data_path("bzoing")


@total_ordering
class Task():
    """Defines tasks, their representation and ordering."""

    def __init__(self, id, description, alarm, sound, function, notify):
        self.id = id
        self.description = description
        self.alarm = alarm
        self.function = function
        self.sound = sound
        self.notify = notify

    def __repr__(self):
        return '{}: {} {} {}'.format(self.__class__.__name__, self.id,
                                     self.description, self.alarm)

    def __lt__(self, other):
        if hasattr(other, 'alarm'):
            return self.alarm.__lt__(other.alarm)
import os

from xdg.BaseDirectory import save_data_path

DATA_DIR = save_data_path('aurifere')
def get_main_db():
    return 'sqlite:///' + join(save_data_path('worklog'), 'db.sqlite')
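The 'sqlite:///' prefix is an SQLAlchemy-style database URL, so a typical consumer would look like the sketch below; using SQLAlchemy here is an assumption, not something the snippet itself shows:

from sqlalchemy import create_engine

# Build an engine from the worklog database URL returned above
# (assumes get_main_db is importable in the current scope).
engine = create_engine(get_main_db())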
def db_path() -> Path:
    """Get path to current database."""
    time_str = datetime.now().isoformat(" ", "seconds")
    data_dir = save_data_path("buglog")
    return Path(data_dir) / f"{time_str}.json"
def _buffer_file() -> Path:
    return Path(save_data_path("ledgerclock")) / "data.json"