Example #1
def extract(url, *, include_psl_private_domains=False):
  extractor = _extractors.get(include_psl_private_domains)
  if extractor is None:
    if include_psl_private_domains:
      tld_data = os.path.join(
        save_cache_path('tldextract'), 
        'tld_data_psl'
      )
    else:
      tld_data = os.path.join(
        save_cache_path('tldextract'), 
        'tld_data'
      )
    extractor = tldextract.TLDExtract(
      include_psl_private_domains = include_psl_private_domains,
    )
    extractor.cache_file = tld_data
    try:
      t = os.path.getmtime(tld_data)
      if time.time() - t > 86400 * 7:
        extractor.update()
    except FileNotFoundError:
      pass
    _extractors[include_psl_private_domains] = extractor

  return extractor(url)
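For reference, the pattern these examples share is small: save_cache_path creates the per-application directory under $XDG_CACHE_HOME if it does not already exist and returns its path, and callers join file names onto it and check os.path.getmtime to decide when a cached file is stale. Below is a minimal, self-contained sketch of that pattern; the application name, file name, and producer callback are illustrative placeholders, not taken from any example on this page.

import os
import time

from xdg.BaseDirectory import save_cache_path

MAX_AGE = 86400 * 7  # refresh after a week, mirroring the example above


def cached_text(producer, app='example-app', name='data.txt'):
    # save_cache_path ensures $XDG_CACHE_HOME/<app> exists and returns it.
    path = os.path.join(save_cache_path(app), name)
    try:
        stale = time.time() - os.path.getmtime(path) > MAX_AGE
    except FileNotFoundError:
        stale = True
    if stale:
        # Regenerate the cached file via the (hypothetical) producer callback.
        with open(path, 'w', encoding='utf-8') as f:
            f.write(producer())
    with open(path, encoding='utf-8') as f:
        return f.read()
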
Example #2
def initialize_argument_parser(parser):
	# Artifact cache config
	parser.add_argument('--artifact-cache-dir',
	                    default=save_cache_path('byod', 'artifacts'))
	parser.add_argument('--git-cache-dir',
	                    default=save_cache_path('byod', 'gits'))
	parser.add_argument('--dependent-artifact', nargs='*')
Example #3
def initialize_argument_parser(parser):
    # Artifact cache config
    parser.add_argument('--artifact-cache-dir',
                        default=save_cache_path('byod', 'artifacts'))
    parser.add_argument('--git-cache-dir',
                        default=save_cache_path('byod', 'gits'))
    parser.add_argument('--dependent-artifact', nargs='*')
Example #4
    def get(self, url, **kwargs):
        self.log.info("Downloading %s", url)

        h = hashlib.sha1()
        h.update(url.encode("utf-8"))
        for k, v in kwargs.items():
            h.update(k.encode("utf-8"))
            h.update(v.encode("utf-8"))

        filename = os.path.join(save_cache_path(self.NAME), h.hexdigest())

        if (
            os.path.exists(filename)
            and os.path.getmtime(filename) > self.epoch
        ):
            with open(filename, "rb") as f:
                return pickle.load(f)

        response = self.session.get(
            url, headers={"User-agent": "Mozilla/5.0"}, params=kwargs
        )
        response.raise_for_status()

        with open(filename, "wb") as f:
            pickle.dump(response, f)

        return response
Example #5
def main():
    global rpc
    APP_NAME = 'bch-interlinker'
    NEW_TIP_CHECK_INTERVAL_SECONDS = 5

    config_path = save_config_path(APP_NAME)
    cache_path = save_cache_path(APP_NAME)
    db_path = path.join(cache_path, 'db')
    config_file_path = path.join(config_path, 'config.ini')

    config = configparser.ConfigParser()
    config.read(config_file_path)

    rpc = AuthServiceProxy("http://%s:%s@%s:%s" %
            (config['daemon']['user'], config['daemon']['password'],
                config['daemon']['host'], config['daemon']['port']))

    logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', datefmt='%d/%m/%y %H:%M:%S %Z')
    logger = logging.getLogger(APP_NAME)
    logger.setLevel(logging.DEBUG)

    tip_id = ''
    while True:
        possibly_new_tip_id = rpc.getbestblockhash()
        if possibly_new_tip_id == tip_id:
            sleep(NEW_TIP_CHECK_INTERVAL_SECONDS)
            continue
        tip_id = possibly_new_tip_id
        logger.info('new block "%s"', tip_id)
        with shelve.open(db_path) as db:
            new_interlink = interlink(tip_id, db)
        logger.debug('new interlink "%s"', new_interlink)
        logger.info('mtr hash "%s"', new_interlink.hash().hex())
        logger.info('velvet tx "%s"', send_velvet_tx(new_interlink.hash()))
Example #6
def solve():

    distribution = request.args.get('distribution')
    if distribution is None:
        raise KeyError("missing distribution")

    if request.content_length > 2048:
        raise KeyError("request too large")

    data = request.get_data(as_text=True)
    if data is None:
        raise KeyError("missing data")

    d = Deptool.Deptool(context=distribution)
    app.logger.debug("got job %s", data)

    result = d.process_testcase(data.split('\n'))

    stamp = time.strftime("%Y%m%d-%H%M%S")
    path = save_cache_path('opensuse.org', 'deptool', 'solve')

    with open(os.path.join(path, 'job-{}.json'.format(stamp)), 'w') as fh:
        s = dict(distribution=distribution, job=data, version="1")
        fh.write(json.dumps(s))

    with open(os.path.join(path, 'result-{}.json'.format(stamp)), 'w') as fh:
        s = dict(result=result, version="1")
        fh.write(json.dumps(s))

    return jsonify(result)
Example #7
File: cache.py Project: kspi/agenda
def get(name, max_age, update_fn):
    filename = os.path.join(save_cache_path('agenda'), name)
    value = None

    def update_value():
        nonlocal value
        value = update_fn()
        if value is not None:
            with open(filename, 'wb') as f:
                f.write(value.encode('utf-8'))

    def read_cached():
        nonlocal value
        with open(filename, 'rb') as f:
            value = f.read().decode('utf-8')

    if not os.path.exists(filename):
        update_value()
    else:
        age = time.time() - os.path.getmtime(filename)
        if age > max_age:
            update_value()
            if not value:
                read_cached()
        else:
            read_cached()

    return value
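A hypothetical use of the helper above; the feed URL and the urllib-based fetcher are invented for illustration and are not part of the kspi/agenda project.

import urllib.request


def fetch_agenda():
    # Illustrative fetcher: return the remote text, or None on failure so
    # that get() keeps the stale cached copy when one exists.
    try:
        with urllib.request.urlopen('https://example.org/agenda.ics') as resp:
            return resp.read().decode('utf-8')
    except OSError:
        return None


# Reuse the cached copy if it is younger than an hour, otherwise refetch.
agenda_text = get('agenda.ics', 3600, fetch_agenda)
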
Example #8
    def jobs(self, args):
        jobs = self._get_jobs(args)

        # print jobs
        for job in jobs:
            formated_status = get_formated_status(job['color'])
            print(formated_status + " " + job['name'])

        # save job names to cache file
        our_cache_dir = save_cache_path('python-jenkins-cli')
        job_cache_file = os.path.join(our_cache_dir, self.JOB_CACHE_FILE_NAME)
        with open(job_cache_file, 'w') as f:
            f.write(' '.join(job['name'] for job in jobs))
Example #9
def load_data(namespace, key, default):
    dirpath = save_cache_path(__package__, namespace)
    filename = os.path.join(dirpath, key)

    try:
        f = open(filename, 'r')
        with f:
            return f.read()
    except IOError:
        data = default()

        with open(filename+".temp", 'w') as f:
            f.write(data)

        os.rename(filename+".temp", filename)

        return data
Example #10
    def load(self, filename):
        '''Load registered version database into this database'''
        assert (filename is not None)
        path = join(save_cache_path(XDG_DIRECTORY), filename)
        try:
            open(path, "a")
        except (IOError, OSError) as exp:
            raise BaseError("Create database filename failed; %s" % exp)
        logging.debug("Loading database %s" % path)
        try:
            fileobj = open(path, "r")
            dico = json.load(fileobj)
            self.update(dico)
        except Exception as exp:
            logging.error("Unable to load database %s: %s" % (path, exp))
        # because we use self._path in __del__, this should be done when
        # we are sure that the db is loaded
        self._path = path
Example #11
    def load(self, filename):
        '''Load registered version database into this database'''
        assert(filename is not None)
        path = join(save_cache_path(XDG_DIRECTORY), filename)
        try:
            open(path, "a")
        except (IOError, OSError) as exp:
            raise BaseError("Create database filename failed; %s" % exp)
        logging.debug("Loading database %s" % path)
        try:
            fileobj = open(path, "r")
            dico = json.load(fileobj)
            self.update(dico)
        except Exception as exp:
            logging.error("Unable to load database %s: %s" % (path, exp))
        # because we use self._path in __del__, this should be done when
        # we are sure that the db is loaded
        self._path = path
Example #12
    def __init__(self, *args, **kwargs):
        super(HistoryManager, self).__init__(*args, **kwargs)

        self._history = None
        self.last_cmd = None
        self.db = None

        if HAVE_SQLITE:
            dbpath = os.path.join(
                save_cache_path('graphcli'),
                'history.db'
            )
            self.db = sqlite3.connect(dbpath)
            self.cursor = self.db.cursor()

            self.cursor.execute('''
                CREATE TABLE IF NOT EXISTS graphcli_history (
                    ts INT,
                    cmd TEXT
                )
            ''')
Example #13
def extract(url, *, include_psl_private_domains=False):
  cache_dir = save_cache_path('tldextract')
  last_updated = os.path.join(cache_dir, 'last_updated')
  extractor = tldextract.TLDExtract(
    cache_dir = cache_dir,
    include_psl_private_domains = include_psl_private_domains,
  )

  update = False
  try:
    t = os.path.getmtime(last_updated)
    if time.time() - t > 86400 * 7:
      update = True
  except FileNotFoundError:
    update = True

  if update:
    extractor.update()
    with open(last_updated, 'w'): pass

  return extractor(url)
Example #14
    def migrate_paths():
        path_map = {
            '{}-access': 'metrics-access',
            '{}-clone': 'request/clone',
            '{}-metrics': 'request/metrics',
            '{}-main': 'request/main',
            '{}-test': None,
            'opensuse-packagelists': 'pkglistgen',
            'opensuse-repo-checker': 'repository-meta',
            'opensuse-repo-checker-http': None,
            'osc-plugin-factory': None,
        }
        bases = [NAME, 'osc-plugin-factory']
        cache_root = save_cache_path('')
        for base in bases:
            for source, destination in path_map.items():
                source = os.path.join(cache_root, source.format(base))
                if destination:
                    destination = os.path.join(CacheManager.directory(), destination)

                yield source, destination
    def migrate(first=True):
        # If an old path exists, then perform migration.
        cache_root = save_cache_path('')
        for source, destination in CacheManager.migrate_paths():
            if not os.path.exists(source):
                continue

            if first:
                print('> migrating caches', file=sys.stderr)

                # Move existing dir out of the way in order to nest.
                cache_moved = CacheManager.directory() + '-main'
                if not os.path.exists(cache_moved):
                    os.rename(CacheManager.directory(), cache_moved)

                # Detected need to migrate, but may have already passed -main.
                CacheManager.migrate(False)
                return

            # If either incompatible format, explicit removal, or newer source
            # was already migrated, remove the cache entirely.
            if destination and os.path.exists(destination):
                # Set to None to make clear in message.
                destination = None

            print('> - {} -> {}'.format(
                os.path.relpath(source, cache_root),
                os.path.relpath(destination, cache_root)
                if destination else None),
                  file=sys.stderr)

            if not destination:
                shutil.rmtree(source)
                continue

            # Ensure parent directory exists and then move within.
            destination_parent = os.path.dirname(destination)
            if not os.path.exists(destination_parent):
                os.makedirs(destination_parent)
            os.rename(source, destination)
Example #16
    def migrate(first=True):
        # If an old path exists, then perform migration.
        cache_root = save_cache_path('')
        for source, destination in CacheManager.migrate_paths():
            if not os.path.exists(source):
                continue

            if first:
                print('> migrating caches', file=sys.stderr)

                # Move existing dir out of the way in order to nest.
                cache_moved = CacheManager.directory() + '-main'
                if not os.path.exists(cache_moved):
                    os.rename(CacheManager.directory(), cache_moved)

                # Detected need to migrate, but may have already passed -main.
                CacheManager.migrate(False)
                return

            # If either incompatible format, explicit removal, or newer source
            # was already migrated, remove the cache entirely.
            if destination and os.path.exists(destination):
                # Set to None to make clear in message.
                destination = None

            print(
                '> - {} -> {}'.format(
                    os.path.relpath(source, cache_root),
                    os.path.relpath(destination, cache_root) if destination else None),
                file=sys.stderr)

            if not destination:
                shutil.rmtree(source)
                continue

            # Ensure parent directory exists and then move within.
            destination_parent = os.path.dirname(destination)
            if not os.path.exists(destination_parent):
                os.makedirs(destination_parent)
            os.rename(source, destination)
    def migrate_paths():
        path_map = {
            '{}-access': 'metrics-access',
            '{}-clone': 'request/clone',
            '{}-metrics': 'request/metrics',
            '{}-main': 'request/main',
            '{}-test': None,
            'opensuse-packagelists': 'pkglistgen',
            'opensuse-repo-checker': 'repository-meta',
            'opensuse-repo-checker-http': None,
            'osc-plugin-factory': None,
        }
        bases = [NAME, 'osc-plugin-factory']
        cache_root = save_cache_path('')
        for base in bases:
            for source, destination in path_map.items():
                source = os.path.join(cache_root, source.format(base))
                if destination:
                    destination = os.path.join(CacheManager.directory(),
                                               destination)

                yield source, destination
Example #18
def cache_directory(container_name):
    """
    A directory to cache the container data in.
    """

    return os.path.join(save_cache_path('forklift'), container_name)
        # assume it's a broadcast
        logging.debug("Broadcast; forking.")
        if os.fork() == 0:
            os.setsid()
            os.closerange(0, 3)
            logging.debug("Forked.")
            s = SystemImage()
            s.setup()
            s.run()
            return

    json.dump(obj, open(f2, "w"))


if __name__ == '__main__':
    logdir = save_cache_path("ubuntu-system-settings")
    logfile = os.path.join(logdir, "software_updates_helper.log")
    rothandler = logging.handlers.TimedRotatingFileHandler(logfile,
                                                           when="D",
                                                           backupCount=10)
    logging.basicConfig(
        format="%(asctime)s %(levelname)8s [%(process)04x] %(message)s",
        datefmt="%Y-%m-%dT%H:%M:%S",
        level=logging.DEBUG,
        handlers=(rothandler, ))
    logging.debug("Starting.")
    try:
        main()
    except Exception:
        logging.exception("Died with exception:")
    else:
Example #20
import os
from os import path
import subprocess
from xdg.BaseDirectory import save_cache_path

CACHE_DIR = save_cache_path('osc-plugin-factory', 'git')


def clone(url, directory):
    return_code = subprocess.call(['git', 'clone', url, directory])
    if return_code != 0:
        raise Exception('Failed to clone {}'.format(url))


def sync(cache_dir, repo_url, message=None):
    cwd = os.getcwd()
    devnull = open(os.devnull, 'wb')

    # Ensure git-sync tool is available.
    git_sync_dir = path.join(cache_dir, 'git-sync')
    git_sync_exec = path.join(git_sync_dir, 'git-sync')
    if not path.exists(git_sync_dir):
        os.makedirs(git_sync_dir)
        clone('https://github.com/simonthum/git-sync.git', git_sync_dir)
    else:
        os.chdir(git_sync_dir)
        subprocess.call(['git', 'pull', 'origin', 'master'],
                        stdout=devnull,
                        stderr=devnull)

    repo_name = path.basename(path.normpath(repo_url))
Example #21
import os
from os import path
import subprocess
from xdg.BaseDirectory import save_cache_path

CACHE_DIR = save_cache_path('openSUSE-release-tools', 'git')


def clone(url, directory):
    return_code = subprocess.call(['git', 'clone', url, directory])
    if return_code != 0:
        raise Exception('Failed to clone {}'.format(url))


def sync(cache_dir, repo_url, message=None):
    cwd = os.getcwd()
    devnull = open(os.devnull, 'wb')

    # Ensure git-sync tool is available.
    git_sync_dir = path.join(cache_dir, 'git-sync')
    git_sync_exec = path.join(git_sync_dir, 'git-sync')
    if not path.exists(git_sync_dir):
        os.makedirs(git_sync_dir)
        clone('https://github.com/simonthum/git-sync.git', git_sync_dir)
    else:
        os.chdir(git_sync_dir)
        subprocess.call(['git', 'pull', 'origin', 'master'],
                        stdout=devnull,
                        stderr=devnull)

    repo_name = path.basename(path.normpath(repo_url))
Example #22
}

SPIDER_MIDDLEWARES = {
    "feeds.spidermiddlewares.FeedsHttpErrorMiddleware": 51,
    "feeds.spidermiddlewares.FeedsHttpCacheMiddleware": 1000,
}

DOWNLOADER_MIDDLEWARES = {
    "feeds.downloadermiddlewares.FeedsHttpCacheMiddleware": 900,
    "scrapy.downloadermiddlewares.httpcache.HttpCacheMiddleware": None,
}

HTTPCACHE_ENABLED = True
HTTPCACHE_STORAGE = "feeds.cache.FeedsCacheStorage"
HTTPCACHE_POLICY = "feeds.cache.FeedsCachePolicy"
HTTPCACHE_DIR = save_cache_path("feeds")
HTTPCACHE_EXPIRATION_SECS = FEEDS_CONFIG_CACHE_EXPIRES * 24 * 60 * 60
HTTPCACHE_IGNORE_HTTP_CODES = list(range(400, 600))

# Do not enable cookies by default to make better use of the cache.
COOKIES_ENABLED = False

RETRY_ENABLED = True
# equals 5 requests in total
RETRY_TIMES = 4

# Don't filter duplicates.
# Spiders sometimes produce feeds with potentially overlapping items.
DUPEFILTER_CLASS = "scrapy.dupefilters.BaseDupeFilter"

# Default user agent. Can be overridden in feeds.cfg.
Example #23
#!/usr/bin/python

from argparse import ArgumentParser

from xdg.BaseDirectory import save_cache_path

from . import __version__

parser = ArgumentParser(description=__doc__, prog=__package__)
parser.add_argument('--version',
                    action='version',
                    version=('%(prog)s ' + __version__))
parser.add_argument('--base-directory',
                    type=str,
                    default=save_cache_path('artifactcache'))
subparsers = parser.add_subparsers()


# commands: compose, save, list, delete
def compose():
    pass


# acache compose -a foo -a bar -E COMPOSE \
# -x acache save -a baz '*' -E SAVE \
# -x sh -c \
#    'git-cache clone-temporary --describeable "$REPO" --checkout "$SHA1" --ref "$ANCHOR_REF" "$COMPOSE/baz.build" "$@" ' - \
# -x git-chroot-safe --binds-env BIND --protects-env PROTECT \
# -x sh -c \
#    '# Note: This command is generated, filling in the chroot, commands and env vars where possible
#     # BIND and PROTECT are newline delimited, shell argv style word lists
Example #24
    def __init__(self):
        self.cacheFolder = save_cache_path(u"hunspell-gl")
        self.pickleFolder = os.path.join(self.cacheFolder, u"pickle")
Example #25
    def directory(*args):
        CacheManager.prune_all()
        if CacheManager.test:
            return save_cache_path(NAME, '.test', *args)
        return save_cache_path(NAME, *args)
Example #26
#!/usr/bin/python

from argparse import ArgumentParser

from xdg.BaseDirectory import save_cache_path

from . import __version__


parser = ArgumentParser(description=__doc__, prog=__package__)
parser.add_argument("--version", action="version", version=("%(prog)s " + __version__))
parser.add_argument("--base-directory", type=str, default=save_cache_path("artifactcache"))
subparsers = parser.add_subparsers()

# commands: compose, save, list, delete
def compose():
    pass


# acache compose -a foo -a bar -E COMPOSE \
# -x acache save -a baz '*' -E SAVE \
# -x sh -c \
#    'git-cache clone-temporary --describeable "$REPO" --checkout "$SHA1" --ref "$ANCHOR_REF" "$COMPOSE/baz.build" "$@" ' - \
# -x git-chroot-safe --binds-env BIND --protects-env PROTECT \
# -x sh -c \
#    '# Note: This command is generated, filling in the chroot, commands and env vars where possible
#     # BIND and PROTECT are newline delimited, shell argv style word lists
#     set --
#     while read -r; do
#         eval set -- "$REPLY"
#     done <<<"$BIND" #TODO: posix sh compat
    def directory(*args):
        CacheManager.prune_all()
        if CacheManager.test:
            return save_cache_path(NAME, '.test', *args)
        return save_cache_path(NAME, *args)
Example #28
    def update_and_solve_target(self,
                                apiurl,
                                target_project,
                                target_config,
                                main_repo,
                                opts,
                                skip_release=False):
        print('[{}] {}/{}: update and solve'.format(opts.scope, opts.project,
                                                    main_repo))

        group = target_config.get('pkglistgen-group', '000package-groups')
        product = target_config.get('pkglistgen-product', '000product')
        release = target_config.get('pkglistgen-release',
                                    '000release-packages')

        url = makeurl(apiurl, ['source', opts.project])
        packages = ET.parse(http_GET(url)).getroot()
        if packages.find('entry[@name="{}"]'.format(product)) is None:
            if not self.options.dry:
                undelete_package(apiurl, opts.project, product, 'revive')
            # TODO disable build.
            print('{} undeleted, skip dvd until next cycle'.format(product))
            return
        elif not opts.force:
            root = ET.fromstringlist(
                show_results_meta(apiurl,
                                  opts.project,
                                  product,
                                  repository=[main_repo],
                                  multibuild=True))
            if len(root.xpath('result[@state="building"]')) or len(
                    root.xpath('result[@state="dirty"]')):
                print('{}/{} build in progress'.format(opts.project, product))
                return

        checkout_list = [group, product]
        if not skip_release:
            checkout_list.append(release)

            if packages.find('entry[@name="{}"]'.format(release)) is None:
                if not self.options.dry:
                    undelete_package(apiurl, opts.project, product, 'revive')
                print(
                    '{} undeleted, skip dvd until next cycle'.format(release))
                return

        # Cache dir specific to hostname and project.
        host = urlparse.urlparse(apiurl).hostname
        cache_dir = save_cache_path('opensuse-packagelists', host,
                                    opts.project)

        if os.path.exists(cache_dir):
            shutil.rmtree(cache_dir)
        os.makedirs(cache_dir)

        group_dir = os.path.join(cache_dir, group)
        product_dir = os.path.join(cache_dir, product)
        release_dir = os.path.join(cache_dir, release)

        for package in checkout_list:
            checkout_package(apiurl,
                             opts.project,
                             package,
                             expand_link=True,
                             prj_dir=cache_dir)

        if not skip_release:
            self.unlink_all_except(release_dir)
        self.unlink_all_except(product_dir)
        self.copy_directory_contents(
            group_dir, product_dir,
            ['supportstatus.txt', 'groups.yml', 'package-groups.changes'])
        self.change_extension(product_dir, '.spec.in', '.spec')

        self.options.input_dir = group_dir
        self.options.output_dir = product_dir
        self.postoptparse()

        print('-> do_update')
        self.do_update('update', opts)

        print('-> do_solve')
        opts.ignore_unresolvable = bool(
            target_config.get('pkglistgen-ignore-unresolvable'))
        opts.ignore_recommended = bool(
            target_config.get('pkglistgen-ignore-recommended'))
        opts.include_suggested = bool(
            target_config.get('pkglistgen-include-suggested'))
        opts.locale = target_config.get('pkglistgen-local')
        opts.locales_from = target_config.get('pkglistgen-locales-from')
        self.do_solve('solve', opts)

        delete_products = target_config.get('pkglistgen-delete-products',
                                            '').split(' ')
        self.unlink_list(product_dir, delete_products)

        print('-> product service')
        for product_file in glob.glob(os.path.join(product_dir, '*.product')):
            print(
                subprocess.check_output(
                    [PRODUCT_SERVICE, product_file, product_dir,
                     opts.project]))

        delete_kiwis = target_config.get(
            'pkglistgen-delete-kiwis-{}'.format(opts.scope), '').split(' ')
        self.unlink_list(product_dir, delete_kiwis)

        spec_files = glob.glob(os.path.join(product_dir, '*.spec'))
        if skip_release:
            self.unlink_list(None, spec_files)
        else:
            self.move_list(spec_files, release_dir)

        self.multibuild_from_glob(product_dir, '*.kiwi')
        self.build_stub(product_dir, 'kiwi')
        self.commit_package(product_dir)

        if not skip_release:
            self.multibuild_from_glob(release_dir, '*.spec')
            self.build_stub(release_dir, 'spec')
            self.commit_package(release_dir)
Example #29
#!/usr/bin/python

from xdg.BaseDirectory import save_cache_path, save_data_path

CACHEDIR = save_cache_path('opensuse.org', 'abi-checker')
DATADIR = save_data_path('opensuse.org', 'abi-checker')

import abichecker_dbmodel as DB
import sqlalchemy.orm.exc


class Config(object):
    def __init__(self, session):
        self.session = session
        if self.session is None:
            self.session = DB.db_session()

    def set(self, key, value):
        try:
            entry = self.session.query(
                DB.Config).filter(DB.Config.key == key).one()
            entry.value = value
        except sqlalchemy.orm.exc.NoResultFound as e:
            entry = DB.Config(key=key, value=value)
        self.session.add(entry)
        self.session.commit()

    def get(self, key, default=None):
        try:
            entry = self.session.query(
                DB.Config).filter(DB.Config.key == key).one()
Example #30
#!/usr/bin/env python
# (c) Stefan Countryman, 2019
"""
Browse NDS2 channels for a given time frame. Select channels and time windows
and open an `ndscope` window to view interactive plots of timeseries data (look
up `ndscope` documentation for details).
"""

import sys
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtWidgets import QTreeWidget, QTreeWidgetItem, QApplication, QWidget
from window import Ui_Dialog
from xdg.BaseDirectory import save_cache_path

CACHE_ROOT = save_cache_path('ndselect')

TOP_LEVEL_CHANNELS = {
    'LLO': (
        'L0:ACM',
        'L0:FMC',
        'L0:PEM',
        'L1:ALS',
        'L1:AOS',
        'L1:ASC',
        'L1:AWC',
        'L1:CAL',
        'L1:CAM',
        'L1:CDS',
        'L1:DAQ',
        'L1:DMT',
        'L1:FEC',
Example #31
    import pickle


try:
    from xdg.BaseDirectory import save_cache_path
except ImportError:
    from xdg.BaseDirectory import xdg_cache_home

    def save_cache_path(*name):
        path = os.path.join(xdg_cache_home, *name)
        if not os.path.isdir(path):
            os.makedirs(path)
        return path

# Where the cache files are stored
CACHEDIR = save_cache_path('opensuse-repo-checker')


def memoize(ttl=None, session=False, add_invalidate=False):
    """Decorator function to implement a persistent cache.

    >>> @memoize()
    ... def test_func(a):
    ...     return a

    Internally, the memoized function has a cache:

    >>> cache = [c.cell_contents for c in test_func.func_closure if 'sync' in dir(c.cell_contents)][0]
    >>> 'sync' in dir(cache)
    True
Example #32
    def __init__(self):
        self.cacheFolder = save_cache_path(u"hunspell-gl")
        self.pickleFolder = os.path.join(self.cacheFolder, u"pickle")
Example #33
    if path.isdir(string):
        return string

    raise argparse.ArgumentTypeError('{} is not a directory'.format(string))


if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description=
        'Tumbleweed snapshot review data ingest and formatting tool.',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.set_defaults(func=main)

    parser.add_argument('--cache-dir',
                        type=directory_type,
                        default=save_cache_path(CACHE_ROOT_DIR),
                        help='cache directory')
    parser.add_argument('-d',
                        '--debug',
                        action='store_true',
                        help='print debugging information')
    parser.add_argument('-o',
                        '--output-dir',
                        type=directory_type,
                        help='output directory')
    parser.add_argument('--read-only',
                        action='store_true',
                        help='operate on site in read-only mode')

    subparsers = parser.add_subparsers(title='subcommands')
    bug.argparse_configure(subparsers)
#!/usr/bin/python

from xdg.BaseDirectory import save_cache_path, save_data_path

CACHEDIR = save_cache_path('opensuse.org', 'abi-checker')
DATADIR = save_data_path('opensuse.org', 'abi-checker')

import abichecker_dbmodel as DB
import sqlalchemy.orm.exc

class Config(object):
    def __init__(self, session):
        self.session = session
        if self.session is None:
            self.session = DB.db_session()

    def set(self, key, value):
        try:
            entry = self.session.query(DB.Config).filter(DB.Config.key == key).one()
            entry.value = value
        except sqlalchemy.orm.exc.NoResultFound as e:
            entry = DB.Config(key=key, value=value)
        self.session.add(entry)
        self.session.commit()

    def get(self, key, default = None):
        try:
            entry = self.session.query(DB.Config).filter(DB.Config.key == key).one()
            return entry.value
        except sqlalchemy.orm.exc.NoResultFound as e:
            pass
Example #35
    import pickle


try:
    from xdg.BaseDirectory import save_cache_path
except ImportError:
    from xdg.BaseDirectory import xdg_cache_home

    def save_cache_path(*name):
        path = os.path.join(xdg_cache_home, *name)
        if not os.path.isdir(path):
            os.makedirs(path)
        return path

# Where the cache files are stored
CACHEDIR = save_cache_path('opensuse-repo-checker')


def memoize(ttl=None, session=False, is_method=False):
    """Decorator function to implement a persistent cache.

    >>> @memoize()
    ... def test_func(a):
    ...     return a

    Internally, the memoized function has a cache:

    >>> cache = [c.cell_contents for c in test_func.func_closure if 'sync' in dir(c.cell_contents)][0]
    >>> 'sync' in dir(cache)
    True
Example #36
}

SPIDER_MIDDLEWARES = {
    "feeds.spidermiddlewares.FeedsHttpErrorMiddleware": 51,
    "feeds.spidermiddlewares.FeedsHttpCacheMiddleware": 1000,
}

DOWNLOADER_MIDDLEWARES = {
    "feeds.downloadermiddlewares.FeedsHttpCacheMiddleware": 900,
    "scrapy.downloadermiddlewares.httpcache.HttpCacheMiddleware": None,
}

HTTPCACHE_ENABLED = True
HTTPCACHE_STORAGE = "feeds.cache.FeedsCacheStorage"
HTTPCACHE_POLICY = "feeds.cache.FeedsCachePolicy"
HTTPCACHE_DIR = save_cache_path("feeds")
HTTPCACHE_EXPIRATION_SECS = FEEDS_CONFIG_CACHE_EXPIRES * 24 * 60 * 60
HTTPCACHE_IGNORE_HTTP_CODES = list(range(400, 600))

# Do not enable cookies by default to make better use of the cache.
COOKIES_ENABLED = False

RETRY_ENABLED = True
# equals 5 requests in total
RETRY_TIMES = 4

# Don't filter duplicates.
# Spiders sometimes produce feeds with potentially overlapping items.
DUPEFILTER_CLASS = "scrapy.dupefilters.BaseDupeFilter"

# Default user agent. Can be overridden in feeds.cfg.
Example #37
	    chain(keep_fds, (rpipe,)))
	with DaemonContext(detach_process=True,
	                   files_preserve=preserve_fds):
		# Blocking drain pipe until EOF
		while True:
			select((rpipe,), (), ())
			if not rpipe.read(1024):
				break
		# Resource cleanup will happen
		# through context manager exits.

parser = ArgumentParser(description=__doc__, prog=__package__)
parser.add_argument('--version', action='version',
                    version=('%(prog)s ' + __version__))
parser.add_argument('--base-directory', type=str,
                    default=save_cache_path('gitcache'))
parser.add_argument('--allow-update', type=bool, nargs='?',
                    default=True, const=True)
parser.add_argument('--disable-update',
                    dest='allow_update', action='store_false')
parser.add_argument('--setup-timeout', type=int)
parser.add_argument('--allow-autogc', type=bool, nargs='?',
                    default=False, const=True)
parser.add_argument('--disable-autogc',
                    dest='allow_autogc', action='store_false')
parser.add_argument('--allow-hardlinks', type=bool, nargs='?',
                    default=False, const=True)
parser.add_argument('--disable-hardlinks',
                    dest='allow_hardlinks', action='store_false')
subparsers = parser.add_subparsers()