Example #1
def extract_config():
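    """Collect library versions, git commit SHAs, and host metadata into one dict."""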

    def git_sha(base=''):
        try:
            return str(sh.git('rev-parse', 'HEAD', _cwd=base)).strip()
        except Exception:
            return 'NA'

    config = c = {}

    versions = v = {}
    v['bloscpack'] = bp.__version__
    v['blosc']     = blosc.__version__
    v['numpy']     = np.__version__
    v['joblib']    = jb.__version__
    v['tables']    = tables.__version__
    v['conda']     = str(sh.conda('--version', _tty_in=True)).strip()
    v['python']    = str(sh.python('--version', _tty_in=True)).strip()

    hashes = h = {}
    h['bloscpack'] = git_sha(os.path.dirname(bp.__file__))
    h['joblib'] = git_sha(jb.__path__[0])
    h['blosc']  = git_sha(blosc.__path__[0])
    h['numpy'] = git_sha(np.__path__[0])
    h['tables']  = git_sha(tables.__path__[0])
    h['benchmark'] = git_sha()

    c['uname'] = str(sh.uname('-a')).strip()
    c['hostname'] = str(sh.hostname()).strip()
    c['whoami'] = str(sh.whoami()).strip()
    c['date'] = str(sh.date()).strip()

    c['versions'] = versions
    c['hashes'] = hashes
    return config
Example #2
def extract_config():

    def git_sha(base=''):
        try:
            return str(sh.git('rev-parse', 'HEAD', _cwd=base)).strip()
        except Exception:
            return 'NA'

    config = c = {}

    versions = v = {}
    v['bloscpack'] = bp.__version__
    v['numpy']     = np.__version__
    v['joblib']    = jb.__version__
    v['conda']     = str(sh.conda('--version', _tty_in=True)).strip()
    v['python']    = str(sh.python('--version', _tty_in=True)).strip()

    hashes = h = {}
    h['bloscpack'] = git_sha(os.path.dirname(bp.__file__))
    h['joblib'] = git_sha(jb.__path__[0])
    h['numpy'] = git_sha(np.__path__[0])
    h['benchmark'] = git_sha()

    c['uname'] = str(sh.uname('-a')).strip()
    c['hostname'] = str(sh.hostname()).strip()
    c['whoami'] = str(sh.whoami()).strip()
    c['date'] = str(sh.date()).strip()

    c['versions'] = versions
    c['hashes'] = hashes
    return config
Example #3
def __quit_handler(signum, frame):
    log("something wrong happened %d" % signum)
    os.unlink(pidfile)
    f = open(outputfile, "wb+")
    f.write(TmuxBar.parse("red", sh.date().strip(), "✗").encode('utf-8'))
    f.close()

    sys.exit()
Example #4
    def do(self, id=None):
        """Actually do the task."""

        with self.redirected_output(append=False):
            self._pre_do(id=id)
            print_(self)
            print_("TASK STARTED")
            # Add a timestamp to the beginning of the output
            sh.date(_out=sys.stdout)

            try:
                success = self._do()
            except Exception as e:
                # Something went wrong
                self._post_do(state='FAILED', id=id)
                print_("TASK FAILED")
                # Add a timestamp to the end of the output
                sh.date(_out=sys.stdout)
                print_(e)
                raise

            if success:
                self._post_do(state='DONE', id=id)
                print_("TASK DONE")
            else:
                self._post_do(state='FAILED', id=id)
                print_("TASK FAILED")
            # Add a timestamp to the end of the output
            sh.date(_out=sys.stdout)

        return success
Example #5
def speedoptimize(iterations, perturb_count, fen_count, sigma, pgn_file):
    """Optimizes the search parameters for speed only.
    Will overwrite params.h--that's why we have version control!
    """

    random_fens = init_fen_file(pgn_file, iterations * fen_count)

    paramset = ParamSet("../src/params.h")
    for n in range(iterations):
        print("Iteration %d: %s" % (n, sh.date()))
        with open(".temp_fens", "w") as f:
            for line in random_fens[n * fen_count : (n+1) * fen_count]:
                f.write(line + '\n')
        paramset = speed_iteration(paramset, perturb_count, sigma)

    cleanup()
Example #6
def gen_sxhkdrc(src):
    if not os.path.exists(sxhkdrc):
        return
    with open(sxhkdrc, "w") as outfile:
        outfile.write("# DO NOT EDIT!\n")
        outfile.write(f"# Generated by %s at %s\n" %
                      (__file__, sh.date().strip()))
        for b in src:
            b = Binding(b)
            if b.doc is not None:
                outfile.write("# %s\n" % b.doc)
            outfile.write("%s\n\t%s\n\n" % (b.keys, b.cmd))
    try:
        sh.pkill("-USR1", "-x", "sxhkd")
    except Exception:
        pass
Example #7
def get_snapdate(snap_naming_date_format='', snap_date=''):
    """get todays date in iso format, this can run on either node
    """
    logger = logs.get_logger()
    if not snap_naming_date_format:
        snap_naming_date_format = settings.SNAP_NAMING_DATE_FORMAT
    if not snap_date:
        snap_date = settings.SNAP_DATE
    try:
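        # GNU date renders snap_date in the requested format; sh turns the
        # date=... keyword argument into a --date long option.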
        converted_snap_date = sh.date('+%s' % snap_naming_date_format,
                                      date=snap_date).strip('\n')
    except sh.ErrorReturnCode as e:
        if e.exit_code == 1:
            raise (exceptions.SnapDateNotValidDateError(
                snap_date=snap_date, date_format=snap_naming_date_format, e=e))
        else:
            raise
    return converted_snap_date
Example #8
def make_feed(feed: FeedGenerator, blogs: list[Path]):
    feed.title("Anurudh's Blog")
    feed.description("Navigating the manifold of computing")
    feed.author(name='Anurudh Peduri')
    feed.language('en')

    SITE_PATH = "https://anurudhp.github.io/"
    feed.link(href=SITE_PATH)
    feed.id(SITE_PATH)

    for blog in blogs:
        metadata = read_metadata(blog)
        if metadata is not None:
            logging.info(f'Adding: {metadata["title"]}')
            entry = feed.add_entry()
            loc = SITE_PATH + str(blog).replace('.md', '.html')
            entry.id(loc)
            entry.title(metadata['title'])
            entry.link(href=loc)
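            # GNU date -R emits an RFC 2822 timestamp, the format RSS feeds expect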
            pubDate = sh.date('-R', '-d', metadata['created']).strip()
            entry.pubDate(pubDate)
            entry.author(email='Anurudh Peduri')
Example #9
def args_process_show(in_args):
    global g_conn
    password_name = str(getattr(in_args, "pass-name"))
    if not store_password_exists(password_name):
        stderr_out("Error: %s is not in the password store.\n" % password_name)
    else:

        def _str(val):
            return val if val is not None else ""

        if in_args.history:
            op_cnt = int(
                sqlite_get_one_value(
                    """SELECT count(*)
                       FROM passwords
                       WHERE password_name = ? AND
                             group_id = 1""",
                    [password_name]))
            if op_cnt > 1:
                print("Decrypting passwords...")
            table = PrettyTable(
                ["Current", "Password", "Created at", "Login", "Description"])
            console_progress_bar(0, op_cnt)
            for rec in enumerate(g_conn.execute(
                    """SELECT deleted_bool, created_at, login, description, encrypted_value
                       FROM passwords
                       WHERE password_name = ? AND
                             group_id = 1
                       ORDER BY password_id DESC""",
                    [password_name]),
                    start=1):
                row = rec[1]
                table.add_row([
                    "[x]" if row[0] == 0 else "",
                    gpg_decrypt(row[4]),
                    _str(row[1]),
                    _str(row[2]),
                    _str(row[3])
                ])
                console_progress_bar(rec[0], op_cnt)
            print(table)
        elif in_args.full:
            table = PrettyTable(
                ["Password", "Created at", "Login", "Description"])
            for row in g_conn.execute(
                    """SELECT created_at, login, description, encrypted_value
                       FROM passwords
                       WHERE password_name = ? AND
                             deleted_bool = 0 AND
                             group_id = 1""",
                    [password_name]):
                table.add_row([
                    gpg_decrypt(row[3]),
                    _str(row[0]),
                    _str(row[1]),
                    _str(row[2])
                ])
            print(table)
        else:
            decrypted_password = store_get_password(password_name)
            if in_args.clip:
                p = subprocess.Popen(["xclip", "-d", ":0", "-selection", "c"],
                                     stdin=subprocess.PIPE,
                                     close_fds=True)
                p.communicate(input=decrypted_password.encode("utf-8"))
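                # schedule an at(1) job 45 seconds from now to clear the clipboard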
                exec_at = str(date("+%Y%m%d%H%M.%S",
                                   date="now +45 seconds")).strip("\n")
                at(printf("printf '' | xclip -d :0 -selection c"), "-t",
                   exec_at)
                print("Copied %s to clipboard. Will clear in 45 seconds." %
                      password_name)
            else:
                print(decrypted_password)
Example #10
def test_sh_date():
    '''Some lame test that nose can pick up.'''
    date_out = date()
    assert date_out.exit_code == 0
Example #11
        for filename in os.listdir(os.getcwd()):
            file_path = os.path.join(os.getcwd(), filename)
            if os.path.isfile(file_path) or os.path.islink(file_path):
                os.unlink(file_path)
            elif os.path.isdir(file_path):
                rmtree(file_path)
    else:
        error(f"""
You requested a clean build, but this script thinks that {os.getcwd()} isn't
the build directory. Please contact John Freeman at [email protected] and notify him of this message.
        """)


stringio_obj2 = io.StringIO()
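# sh writes the command's stdout into the buffer passed via _out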
sh.date(_out=stringio_obj2)
datestring = re.sub("[: ]+", "_", stringio_obj2.getvalue().strip())

build_log = f"{LOGDIR}/build_attempt_{datestring}.log"

cmake="cmake"
if args.cmake_trace:
    cmake = f"{cmake} --trace"

# We usually only need to explicitly run the CMake configure+generate
# makefiles stages when it hasn't already been successfully run;
# otherwise we can skip to the compilation. We use the existence of
# CMakeCache.txt to tell us whether this has happened; notice that it
# gets renamed if it's produced but there's a failure.

running_config_and_generate = False
Example #12
HOME = Path.home()
HOME = Path(os.path.expanduser("~bn"))
DATA_DIR = Path.joinpath(Path(HOME), 'data', 'backup', 'mysql', 'data')
LOG_DIR = Path.joinpath(Path(HOME), 'data', 'backup', 'mysql', 'logs')
print(DATA_DIR)

#cmd = "find ${DATA_DIR} -maxdepth 1 -type d -regextype sed -regex \"^\/.*${DB}\-[0-9].*\" -printf '%Ts\t%p\n' | sort -n | head -n -2 | cut -f 2- | xargs rm -rf"

# a =`echo "show databases;" | mysql `
DATA_BASES = sh.mysql(sh.echo('show databases;'))
DATA_BASES = [el.strip() for el in DATA_BASES]
DATA_BASES = DATA_BASES[
    1:]  # first entry is 'Database' which is not a Database
DATA_BASES += ['All-Databases']
DATA_BASES = ['trading_oanda_d1']  # hard-coded override of the list built above
DATESTAMP = sh.date("+%Y-%m-%d_%H:%M").strip()

for DB in DATA_BASES:
    for DD in [DATA_DIR, LOG_DIR]:
        # step a): delete all except the latest two files for each database
        print(f'database: {DB}; dir: {DD}')
        a = sh.find(DD, '-maxdepth', '1', '-type', 'f', '-regextype',
                    'sed', '-regex', rf'^/.*{DB}\-[0-9].*', '-printf',
                    '%Ts\t%p\n')
        b = sh.sort(a, '-n')
        c = sh.head(b, '-n', '-2')
        d = sh.cut(c, '-f', '2-')
        print(d.strip())
        e = sh.xargs(d, 'rm', '-rf')

    # step b): export the databases
Example #13
#!/usr/bin/python3

import sh

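# sh.date() returns a RunningCommand whose string methods
# (split, strip, ...) act on the command's stdout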
date = sh.date()

date_split = date.split(' ')
before = date_split[0:3]
time = date_split[3]
after = date_split[4:]

time_split = time.split(':')
hour = int(time_split[0])
if hour > 12:
    time_split = time_split[1:]
    time_split.insert(0, str(hour - 12))
    new_time = ':'.join(time_split)
    date_split[3] = new_time
    print(' '.join(date_split))
else:
    print(' '.join(date_split))
Example #14
# Get a push code following this: https://jokerqyou.github.io/ethbot
PUSH_CODE = 'xxxxxx'
# Add the directories you want to backup
DIRECTORIES = (
    '/important/data',
    '/home/user/secret/data',
)
# Add the directories or patterns you want to exclude
EXCLUDES = (
    '*.pyc',
    '*.swp',
    '/i/dont/care/about/this/data',
    '/home/Ben/Music/Justin\ Bieber',
)
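# archive name components, produced by sh's hostname and date commands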
HOSTNAME = hostname().strip()
DATE = date('+%Y-%m-%d').strip()


def backup(*directories, **kwargs):
    '''
    Backup a directory using borg
    '''
    directories = [d for d in directories if os.path.exists(d)]
    repository = '{}::{}-{}'.format(BORG_REPOSITORY, HOSTNAME, DATE)
    excludes = kwargs.pop('excludes', [])
    excludes = [excludes, ]\
        if not isinstance(excludes, (list, tuple, ))\
        else excludes
    arguments = ['--stats', '--compression', 'zlib,5', repository, ]
    arguments.extend(directories)
    for ex in excludes:
        arguments.extend(['--exclude', ex])
Example #15
def ceph_rsnapshot():
    # if __name__=='__main__':
    parser = argparse.ArgumentParser(
        description='wrapper script to back up a ceph pool of rbd images to qcow',
        argument_default=argparse.SUPPRESS)
    parser.add_argument("-c",
                        "--config",
                        required=False,
                        help="path to alternate config file")
    parser.add_argument("--host",
                        required=False,
                        help="ceph node to backup from")
    parser.add_argument('-p',
                        '--pools',
                        help='comma separated list of '
                        'ceph pools to back up (can be a single pool)',
                        required=False)
    parser.add_argument('--image_re',
                        required=False,
                        help='RE to match images to back up')
    parser.add_argument("-v",
                        "--verbose",
                        action='store_true',
                        required=False,
                        help="verbose logging output")
    parser.add_argument(
        "--noop",
        action='store_true',
        required=False,
        help="noop - don't make any directories or do any actions. logging only to stdout")
    parser.add_argument("--no_rotate_orphans",
                        action='store_true',
                        required=False,
                        help="don't rotate the orphans on the dest")
    parser.add_argument("--printsettings",
                        action='store_true',
                        required=False,
                        help="print out settings using and exit")
    parser.add_argument("-k",
                        "--keepconf",
                        action='store_true',
                        required=False,
                        help="keep conf files after run")
    parser.add_argument(
        "-e",
        "--extralongargs",
        required=False,
        help="extra long args for rsync, of the form foo,bar for --foo --bar")
    # TODO add param options:
    # to show names on source only
    args = parser.parse_args()

    # if we got passed an alt config file path, use that
    if 'config' in args:
        config_file = args.config
        settings.load_settings(config_file)
    else:
        settings.load_settings()

    # override global settings with cli args
    # TODO get this working this way
    # for key in args.__dict__.keys():
    #   etc
    if 'host' in args:
        settings.CEPH_HOST = args.host
    if 'pools' in args:
        settings.POOLS = args.pools
    if 'verbose' in args:
        settings.VERBOSE = args.verbose
    if 'noop' in args:
        settings.NOOP = args.noop
    if 'keepconf' in args:
        settings.KEEPCONF = args.keepconf
    if 'extralongargs' in args:
        settings.EXTRA_ARGS = ' '.join(
            ['--' + x for x in args.extralongargs.split(',')])
        # FIXME not working correctly
    # image_filter = args.image_filter
    if 'image_re' in args:
        settings.IMAGE_RE = args.image_re
    if 'no_rotate_orphans' in args:
        settings.NO_ROTATE_ORPHANS = args.no_rotate_orphans

    logger = logs.setup_logging()
    logger.info("starting ceph_rsnapshot")
    logger.debug("launched with cli args: " + " ".join(sys.argv))

    try:
        helpers.validate_settings_strings()
    except NameError as e:
        logger.error('error with settings strings: %s' % e)
        sys.exit(1)

    # print out the settings in use and exit
    if 'printsettings' in args:
        # generate SNAP_DATE for printsettings
        if settings.USE_SNAP_STATUS_FILE:
            settings.SNAP_DATE = 'TBD from SNAP_STATUS_FILE'
        else:
            # convert snap_date (might be relative) to an absolute date
            # so that it's only computed once for this entire run
            settings.SNAP_DATE = sh.date(date=settings.SNAP_DATE).strip('\n')
        # if it's there it's true
        logger.info('settings would have been:\n')
        logger.info(json.dumps(helpers.get_current_settings(), indent=2))
        logger.info('exiting')
        sys.exit(0)

    # write lockfile
    # TODO do this per ceph host or per ceph cluster
    # http://stackoverflow.com/a/789383/5928049
    pid = str(os.getpid())
    pidfile = "/var/run/ceph_rsnapshot_cephhost_%s.pid" % settings.CEPH_HOST
    if os.path.isfile(pidfile):
        logger.error("pidfile %s already exists, exiting" % pidfile)
        sys.exit(1)
    logger.info("writing lockfile at %s" % pidfile)
    with open(pidfile, 'w') as f:
        f.write(pid)

    logger.debug('running with settings:\n')
    logger.debug(json.dumps(helpers.get_current_settings(), indent=2))

    try:
        # we've made the lockfile, so rsnap the pools
        # clear this so we know if run worked or not
        all_result = {}

        # check if we have been passed SNAP_STATUS_FILE
        if settings.USE_SNAP_STATUS_FILE:
            try:
                settings.SNAP_DATE = ceph.check_snap_status_file()
                logger.info('using snap date %s' % settings.SNAP_DATE)
            except exceptions.NoSnapStatusFilesFoundError as e:
                e.log(warn=True)
                raise
            except exceptions.SnapDateNotValidDateError as e:
                e.log()
                raise
            except exceptions.SnapDateFormatMismatchError as e:
                e.log()
                raise
            except Exception as e:
                logger.exception(e)
                raise
        # convert snap_date (might be relative) to an absolute date
        # so that it's only computed once for this entire run
        # FIXME does this need snap naming format
        settings.SNAP_DATE = sh.date(date=settings.SNAP_DATE).strip('\n')

        # iterate over pools
        pools_csv = settings.POOLS
        pools_arr = pools_csv.split(',')
        for pool in pools_arr:
            logger.info('working on pool "%s"' % pool)
            if len(pool) == 0:
                logger.error('empty pool name, skipping')
                continue

            # store this pool in settings for other functions to access it
            settings.POOL = pool

            # setup directories for this pool
            dirs.setup_log_dirs_for_pool(pool)
            dirs.setup_temp_conf_dir_for_pool(pool)
            dirs.setup_backup_dirs_for_pool(pool)

            # connect to ceph node and setup qcow export path
            dirs.setup_qcow_temp_path(pool)

            try:
                # TODO pass args here instead of in settings?
                pool_result = rsnap_pool(pool)
                # now append to all_result
                for key in pool_result:
                    # they are all arrays so append
                    # but we need to make the array first if not yet there
                    if key not in all_result:
                        all_result[key] = []
                    # now append
                    all_result[key].extend(pool_result[key])
            except NameError as e:
                # TODO get some way to still have the list of images that
                # it completed before failing
                logger.error('rsnap pool %s failed error: %s' % (pool, e))
                logger.exception(e)
            except Exception as e:
                logger.error('error with pool %s' % pool)
                logger.exception(e)
            logger.info('done with pool %s' % pool)
            if not settings.KEEPCONF:
                dirs.remove_temp_conf_dir()

        # successful, so clean out snap dir
        snap_date = ceph.get_snapdate(snap_date=settings.SNAP_DATE)
        logger.info('removing snap_status file for snap_date %s on ceph host' %
                    snap_date)
        ceph.remove_snap_status_file(snap_date=snap_date)

        # write output
        successful_images = [('%s/%s' % (image['pool'], image['image']))
                             for image in all_result['successful']]
        logger.info('Successfully backed up images: ' +
                    ', '.join(successful_images))
        if all_result['failed']:
            logger.error("Images failed to back up:")
            failed_images_status = [{
                "%s/%s" % (image_hash['pool'], image_hash['image']):
                [{
                    check: False
                } for check in image_hash['status']
                 if image_hash['status'][check] is False]
            } for image_hash in all_result['failed']]
            logger.error(failed_images_status)
        if all_result['orphans_rotated']:
            logger.info("orphans rotated:")
            logger.info(all_result['orphans_rotated'])
        if all_result['orphans_failed_to_rotate']:
            logger.error("orphans failed to rotate:")
            logger.error(all_result['orphans_failed_to_rotate'])
        write_status(all_result)
        logger.info("done")
    finally:
        # done with this pool so clear the pidfile
        logger.info("removing lockfile at %s" % pidfile)
        os.unlink(pidfile)

        if settings.NOOP:
            logger.info("end of NOOP run")

        # TODO should these still sys.exit or should they let the exceptions
        # go?
        if all_result:
            if all_result['failed']:
                sys.exit(1)
            elif all_result['orphans_failed_to_rotate']:
                sys.exit(2)
            else:
                sys.exit(0)
        else:
            sys.exit(3)
Example #16
def createFilename(delta=0):
    # --iso-8601=seconds gives a full timestamp; the date= kwarg becomes --date "<delta> seconds"
    timestamp = sh.date("--iso-8601=seconds",
                        date="%s seconds" % delta).stdout.decode().strip()
    return "yousee-epg_%s.xml" % timestamp