Example #1
def list_all(config):
    '''
    This presents all the currently created restore points and snapshots.
    '''
    rps = sorted(m for m in paf.scan_dir(config['rp_paths'])[0]
                 if m.endswith('.meta'))
    sss = sorted(m for m in paf.scan_dir(config['ss_paths'])[0]
                 if m.endswith('.meta'))

    # Get Restore Point Data
    rps_data = []
    for m in rps:
        num = m[-7:-5]  # two-digit ID from a filename like 'rp01.meta'
        d = meta.read(config, m)
        rps_data.append(num + ' - Pkgs: ' + d['pkgs_installed'] +
                        ' Created: ' + d['date'])
    if not rps_data:
        rps_data.append('NONE')

    # Get Snapshot Data
    sss_data = []
    for m in sss:
        num = m[-7:-5]  # two-digit ID from a filename like 'ss01.meta'
        d = meta.read(config, m)
        sss_data.append(num + ' - Pkgs: ' + d['pkgs_installed'] +
                        ' Created: ' + d['date'])
    if not sss_data:
        sss_data.append('NONE')

    # Build Table
    t = Table(title='Pacback Restore Points and Snapshots')
    t.add_column('Restore Points', justify='left', style='white', no_wrap=True)
    t.add_column('Snapshots', justify='right', style='blue', no_wrap=True)

    # Build the table row by row, padding the shorter column with blanks
    for x in range(max(len(rps_data), len(sss_data))):
        a = rps_data[x] if x < len(rps_data) else ''
        b = sss_data[x] if x < len(sss_data) else ''
        t.add_row(a, b)

    console = Console()
    console.print(t)
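A minimal invocation sketch, assuming a pacback-style config dict. Only the two path keys used directly above are shown (meta.read() may require more), and the values here are placeholders:

# Hypothetical config, for illustration only
config = {
    'rp_paths': '/var/lib/pacback/restore-points',
    'ss_paths': '/var/lib/pacback/snapshots'
}
list_all(config)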
Example #2
	def get(self):
		# test_user() returns (template path, user url, user link text)
		t = test_user(self, VIEW)

		storage_name = self.request.get('storage_name')
		game_name = self.request.get('game_name')

		# Look up the requested game's base record in the datastore
		desired = storage_key(storage_name)
		bases = Database.all().ancestor(desired)
		r = m.read(game_name)

		b = get_database(bases, r[0], desired)

		game = f.to_html(m.game(str(b.content), r[1]))

		var_url = urllib.urlencode({'base_name': r[0], 'index': str(r[1])})
		board_url = "/board/?" + var_url
		edit_url = "/edit/?" + var_url

		template_values = {'user_url': t[1], 'user_url_linktext': t[2],
				'main_page_url': MAIN_PAGE_URL, 'game': game,
				'board_url': board_url, 'edit_url': edit_url}
		path = os.path.join(os.path.dirname(__file__), t[0])

		self.response.out.write(template.render(path, template_values))
def make_html_page(metagamelist, bases):
	"""Turns a list of games (in their meta
	format) into an html page showing all
	of the games."""
	result = set([])  # a set, so duplicate games are dropped and order is not preserved
	for metagame in metagamelist:
		new = meta.read(metagame)
		# Find the base this game refers to; if none matches, the last
		# base examined is silently reused
		for base in bases:
			if str(base.name) == new[0]:
				break
		result.add(meta.game(str(base.content), new[1]))
	return to_html(''.join(result))
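Both make_html_page() above and searchgames() in Example #5 resolve a base by name with a bare for/break, which silently reuses the last base examined when nothing matches. A safer lookup, as a sketch (find_base is a hypothetical helper, not part of the original code):

def find_base(bases, name):
	"""Return the base whose name matches, or None if absent."""
	for base in bases:
		if str(base.name) == name:
			return base
	return None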
Example #4
def restore_point(config, id_num):
    '''
    This preps the system for a restoration then hands off to restore.main()
    '''
    id_num = str(id_num).zfill(2)
    fname = 'restore.restore_point(' + id_num + ')'
    paf.write_to_log(fname, 'Started Restoring Restore Point ID:' + id_num,
                     config['log'])

    info = {
        'id': id_num,
        'type': 'rp',
        'TYPE': 'Restore Point',
        'meta': config['rp_paths'] + '/rp' + id_num + '.meta',
        'meta_md5': config['rp_paths'] + '/.rp' + id_num + '.md5',
        'path': config['rp_paths'] + '/rp' + id_num,
        'pkgcache': config['rp_paths'] + '/rp' + id_num + '/pkg-cache',
        'tar': config['rp_paths'] + '/rp' + id_num + '/rp' + id_num + '_dirs.tar',
        'tar.gz': config['rp_paths'] + '/rp' + id_num + '/rp' + id_num + '_dirs.tar.gz'
    }

    # Read Meta File, Check Version, Compare Results
    meta.validate(config, info)
    rp_dict = meta.read(config, info['meta'])
    version.compare(config, rp_dict['version'])
    main(config, info, meta.compare_now(config, rp_dict))

    # Unpack and Compare Directories Stored By User
    if rp_dict['dir_list']:
        custom_dirs.restore(config, info, rp_dict['dir_list'],
                            rp_dict['tar_csum'])

    # Finish Last Checks and Exit
    utils.reboot_check(config)
    paf.write_to_log(fname, 'Finished Restoring Restore Point ID:' + id_num,
                     config['log'])
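The info dict above hard-codes the restore point layout under config['rp_paths']. A sketch of a helper that derives the same paths from the two-digit ID (build_rp_info is a hypothetical name, not part of pacback):

def build_rp_info(config, id_num):
    # Every path hangs off the restore point's base directory
    base = config['rp_paths'] + '/rp' + id_num
    return {
        'id': id_num,
        'type': 'rp',
        'TYPE': 'Restore Point',
        'meta': base + '.meta',
        'meta_md5': config['rp_paths'] + '/.rp' + id_num + '.md5',
        'path': base,
        'pkgcache': base + '/pkg-cache',
        'tar': base + '/rp' + id_num + '_dirs.tar',
        'tar.gz': base + '/rp' + id_num + '_dirs.tar.gz'
    }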
Example #5
def read(filename, file=None):
    if filename.endswith('.zip'):
        import native
        return native.read(filename, file)
    elif filename.endswith('.mcl'):
        import mcell
        return mcell.read(filename, file)
    elif filename.endswith('.png'):
        import png
        return png.read(filename, file)
    elif filename.endswith('.fits'):
        import fits
        return fits.read(filename, file)
    elif filename.startswith('meta') and filename.endswith('txt'):
        import meta
        return meta.read(filename, file)
    else:
        raise ValueError('Do not understand how to read file "%s"' % filename)
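The if/elif chain above dispatches on the file extension. An equivalent table-driven sketch (the reader modules are the ones named above; only the dispatch structure is new):

_READERS = {'.zip': 'native', '.mcl': 'mcell', '.png': 'png', '.fits': 'fits'}

def read_dispatch(filename, file=None):
    # Pick the reader module by extension, falling back to the meta-txt case
    for ext, modname in _READERS.items():
        if filename.endswith(ext):
            return __import__(modname).read(filename, file)
    if filename.startswith('meta') and filename.endswith('txt'):
        import meta
        return meta.read(filename, file)
    raise ValueError('Do not understand how to read file "%s"' % filename)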
def searchgames(variablelist, valuelist, gamelist, bases):
	"""A generator for all the games in a list of files
	in which any given variable has given value
	(e.g. "White", "Dennis Bolshakov")"""
	desired = permute(variablelist, valuelist, format)
	result = []
	for metagame in gamelist:
		new = meta.read(metagame)
		for base in bases:
			if str(base.name) == new[0]:
				break
		game = meta.game(str(base.content), new[1])
		if test(game, desired):
			result.append(metagame)
	return result
Example #7
def snapshot(config, id_num):
    '''
    This handles the process of restoring snapshots. This is pretty much the same as a
    standard restore point but requires post-processing after the restoration to maintain
    the order of changes made to the system.
    '''
    id_num = str(id_num).zfill(2)
    fname = 'restore.snapshot(' + id_num + ')'
    paf.write_to_log(fname, 'Started Restoring Snapshot ID:' + id_num,
                     config['log'])

    info = {
        'id': id_num,
        'type': 'ss',
        'TYPE': 'Snapshot',
        'meta': config['ss_paths'] + '/ss' + id_num + '.meta',
        'meta_md5': config['ss_paths'] + '/.ss' + id_num + '.md5',
        'path': config['ss_paths'] + '/ss' + id_num,
        'pkgcache': config['ss_paths'] + '/ss' + id_num + '/pkg-cache'
    }

    # Read Meta Data File, Check Version, Compare Results, Restore
    meta.validate(config, info)
    ss_dict = meta.read(config, info['meta'])
    version.compare(config, ss_dict['version'])
    main(config, info, meta.compare_now(config, ss_dict))

    # Resets Order So The Restored Version is Zero
    paf.write_to_log(fname, 'Started Rewinding Snapshots Back to Zero',
                     config['log'])

    # Removes Snapshots From Zero to Restored Snapshot ID
    for n in range(0, int(info['id'])):
        rm_info = {
            'id': str(n).zfill(2),
            'type': 'ss',
            'TYPE': 'Snapshot',
            'meta': config['ss_paths'] + '/ss' + str(n).zfill(2) + '.meta',
            'meta_md5': config['ss_paths'] + '/.ss' + str(n).zfill(2) + '.md5'
        }
        utils.remove_id(config, rm_info)

    # Shift Snapshots Back So The Restored Snapshot Becomes The New Zero
    id_counter = 0
    for n in range(int(info['id']), (config['max_ss'] + 1)):
        meta_path_old = config['ss_paths'] + '/ss' + str(n).zfill(2) + '.meta'
        meta_path_new = config['ss_paths'] + '/ss' + str(id_counter).zfill(2) + '.meta'
        hash_path_old = config['ss_paths'] + '/.ss' + str(n).zfill(2) + '.md5'
        hash_path_new = config['ss_paths'] + '/.ss' + str(id_counter).zfill(2) + '.md5'
        meta_found = os.path.exists(meta_path_old)
        csum_found = os.path.exists(hash_path_old)

        if meta_found and csum_found:
            os.rename(meta_path_old, meta_path_new)
            os.rename(hash_path_old, hash_path_new)
            id_counter += 1
        elif meta_found and not csum_found:
            paf.write_to_log(
                fname, 'Snapshot ' + str(n).zfill(2) +
                ' is Missing its Checksum File!', config['log'])
            paf.rm_file(meta_path_old, sudo=False)
            paf.write_to_log(fname, 'Removed Snapshot ID:' + str(n).zfill(2),
                             config['log'])
        elif not meta_found and csum_found:
            paf.write_to_log(fname, hash_path_old + ' is an Orphaned Checksum',
                             config['log'])
            paf.rm_file(hash_path_old, sudo=False)
            paf.write_to_log(fname, 'Removed Orphaned Checksum', config['log'])

    paf.write_to_log(fname, 'Finished Rewinding Snapshots Back to Zero',
                     config['log'])

    # Finish Last Checks and Exit
    utils.reboot_check(config)
    paf.write_to_log(fname, 'Finished Restoring Snapshot ID:' + id_num,
                     config['log'])
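Stripped of logging and file handling, the rewind step is just a renumbering: every surviving snapshot at or after the restored ID is mapped onto 0, 1, 2, ... in order. A standalone sketch (shift_ids is hypothetical):

def shift_ids(present_ids, restored_id):
    # Map each surviving snapshot ID >= restored_id onto 0, 1, 2, ...
    new_ids = {}
    counter = 0
    for n in sorted(present_ids):
        if n >= restored_id:
            new_ids[n] = counter
            counter += 1
    return new_ids

# e.g. shift_ids([3, 4, 7], restored_id=3) -> {3: 0, 4: 1, 7: 2}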
Example #8
    source.generate(archives)
  return archives

def discover(meta, archives):
  for archive in archives.archives: 
    if not meta.contains(archive.meta_hash()):
      log.info("Adding archive meta (%s)" % archive)
      meta.add(archive.paths)

if __name__ == "__main__":
  top = boto.config.get('Ice', 'top')
  password = boto.config.get('Ice', 'password')
  bucket_name = boto.config.get('Ice', 'bucket')
  vault_name = boto.config.get('Ice', 'vault')

  s3 = meta.S3(password, bucket_name)
  meta = meta.Meta(s3)  # rebinds the module name 'meta' to a Meta instance
  meta.read()
  archives = get_archive_set(top)
  discover(meta, archives)
  glacier = boto.connect_glacier()
  vault = glacier.create_vault(vault_name)
  for archive in archives.archives:
    log.info("Archive: %s" % archive)
    meta_hash = archive.meta_hash()
    sha1 = meta.get_sha1(meta_hash)
    archiver.upload_archive(vault, sha1, archive.paths, upload=True, dryRun=True)
  meta.write()

# EOF
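The __main__ block reads its settings from boto's config file (typically ~/.boto or /etc/boto.cfg in boto 2). A sketch of the matching section; the keys are the ones read above, the values are placeholders:

[Ice]
top = /home/user/archives
password = change-me
bucket = my-ice-bucket
vault = my-ice-vault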
Example #9
def clean_cache(config, nc):
    '''
    This provides automated cache cleaning using pacman, paccache, and pacback.
    '''
    fname = 'utils.clean_cache()'
    paf.prBold('Starting Advanced Cache Cleaning...')
    paf.write_to_log(fname, 'Starting Advanced Cache Cleaning...',
                     config['log'])
    print('')

    if nc is True or paf.yn_frame(
            'Do You Want To Uninstall Orphaned Packages?') is True:
        os.system('/usr/bin/pacman -R $(/usr/bin/pacman -Qtdq)')
        paf.write_to_log(fname, 'Removed Orphaned Packages', config['log'])

    if nc is True or paf.yn_frame(
            'Do You Want To Remove Old Versions of Installed Packages?') is True:
        os.system('/usr/bin/paccache -rk ' + str(config['keep_versions']))
        paf.write_to_log(fname, 'Removed Old Package Versions', config['log'])

    if nc is True or paf.yn_frame(
            'Do You Want To Remove Cached Orphans?') is True:
        os.system('/usr/bin/paccache -ruk0')
        paf.write_to_log(fname, 'Removed Cached Orphans', config['log'])

    if nc is True or paf.yn_frame(
            'Do You Want To Check For Old Pacback Restore Points?') is True:
        paf.write_to_log(fname, 'Starting Search For Old Restore Points...',
                         config['log'])
        meta_paths = sorted(f for f in paf.find_files(config['rp_paths'])
                            if f.endswith(".meta"))

        today_dt = dt.date.today()

        for m in meta_paths:
            num = m[-7:-5]  # two-digit ID from a filename like 'rp01.meta'
            rp_info = {
                'id': num,
                'type': 'rp',
                'TYPE': 'Restore Point',
                'meta': m,
                'meta_md5': config['rp_paths'] + '/.rp' + num + '.md5',
                'path': config['rp_paths'] + '/rp' + num,
                'pkgcache': config['rp_paths'] + '/rp' + num + '/pkg-cache'
            }

            # Format Dates for Compare
            m_dict = meta.read(config, m)
            o_split = (m_dict['date'].split('/'))
            old_dt = dt.date(int(o_split[0]), int(o_split[1]), int(o_split[2]))

            # Check How Old Restore Point Is
            days = (today_dt - old_dt).days
            if days > config['old_rp']:
                paf.prWarning('Failed: ' + rp_info['TYPE'] + ' ' +
                              rp_info['id'] + ' Is ' + str(days) +
                              ' Days Old!')
                paf.write_to_log(
                    fname, rp_info['TYPE'] + ' ' + rp_info['id'] + ' Is ' +
                    str(days) + ' Days Old!', config['log'])
                if paf.yn_frame('Do You Want to Remove This ' +
                                rp_info['TYPE'] + '?') is True:
                    utils.remove_id(config, rp_info)
                    paf.prSuccess('Restore Point Removed!')
                else:
                    paf.write_to_log(
                        fname, 'User Declined Removal of ' + rp_info['TYPE'] +
                        ' ' + rp_info['id'], config['log'])

            else:
                paf.prSuccess('Passed: ' + rp_info['TYPE'] + ' ' +
                              rp_info['id'] + ' Is ' + str(days) + ' Days Old')
                paf.write_to_log(
                    fname, rp_info['TYPE'] + ' ' + rp_info['id'] + ' Is ' +
                    str(days) + ' Days Old', config['log'])

    paf.write_to_log(fname, 'Finished Advanced Cache Cleaning', config['log'])
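The age check above boils down to a date subtraction. A compact sketch of just that test, assuming the 'YYYY/MM/DD' date format the meta files use:

import datetime as dt

def days_old(meta_date):
    # meta_date is a 'YYYY/MM/DD' string, as stored in the meta file
    y, m, d = (int(p) for p in meta_date.split('/'))
    return (dt.date.today() - dt.date(y, m, d)).days

# e.g. a restore point is stale when days_old(m_dict['date']) > config['old_rp']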
Example #10
def diff_meta(config, meta1, meta2):
    '''
    This function processes two meta data files without validating either.
    It compares meta1, as the base, against meta2, then presents the results
    in a table. The code is kind of gross but I'm not inclined to fix it.
    '''
    # Build Base Vars
    m1_num = meta1[2:].zfill(2)
    m2_num = meta2[2:].zfill(2)

    if meta1.startswith('rp'):
        m1_path = config['rp_paths'] + '/rp' + m1_num + '.meta'
    elif meta1.startswith('ss'):
        m1_path = config['ss_paths'] + '/ss' + m1_num + '.meta'
    else:
        return paf.prError('Invalid Selection: ' + meta1.upper())

    if meta2.startswith('rp'):
        m2_path = config['rp_paths'] + '/rp' + m2_num + '.meta'
    elif meta2.startswith('ss'):
        m2_path = config['ss_paths'] + '/ss' + m2_num + '.meta'
    else:
        return paf.prError('Invalid Selection: ' + meta2.upper())

    # Return if Missing
    if not os.path.exists(m1_path):
        return paf.prError(meta1.upper() + ' Was NOT Found!')

    if not os.path.exists(m2_path):
        return paf.prError(meta2.upper() + ' Was NOT Found!')

    # Read Meta Data
    m1 = meta.read(config, m1_path)
    m2 = meta.read(config, m2_path)
    compare = meta.compare_meta(config, m1, m2)

    # Build Info For Table
    c1 = [
        'Installed Packages: ' + m1['pkgs_installed'], 'Date: ' + m1['date'],
        'Time: ' + m1['time'], 'Pacback Version: ' + m1['version'],
        'User Label: ' + m1['label']
    ]

    if m1['stype'] == 'Full':
        c1.append('Packages Cached: ' + m1['pkgs_cached'])
        c1.append('Cache Size: ' + m1['cache_size'])

    if m1['dir_list']:
        c1.append('')
        c1.append('File Count: ' + m1['file_count'])
        c1.append('Raw File Size: ' + m1['file_raw_size'])
        c1.append('Compressed Size: ' + m1['tar_size'])
        c1.append('')
        c1.append('Directory List')
        c1.append('--------------')
        for d in m1['dir_list']:
            c1.append(d)

    c2 = list(compare['c_pkgs'])
    if not c2:
        c2.append('NONE')

    c3 = list(compare['a_pkgs'])
    if not c3:
        c3.append('NONE')

    c4 = list(compare['r_pkgs'])
    if not c4:
        c4.append('NONE')

    c5 = [
        'Installed Packages: ' + m2['pkgs_installed'], 'Date: ' + m2['date'],
        'Time: ' + m2['time'], 'Pacback Version: ' + m2['version'],
        'User Label: ' + m2['label']
    ]

    if m2['stype'] == 'Full':
        c5.append('Packages Cached: ' + m2['pkgs_cached'])
        c5.append('Cache Size: ' + m2['cache_size'])

    if m2['dir_list']:
        c5.append('')
        c5.append('File Count: ' + m2['file_count'])
        c5.append('Raw File Size: ' + m2['file_raw_size'])
        c5.append('Compressed Size: ' + m2['tar_size'])
        c5.append('')
        c5.append('Directory List')
        c5.append('--------------')
        for d in m2['dir_list']:
            c5.append(d)

    # Build Table
    t = Table(title=m1['type'] + ' #' + m1_num + ' --------> ' + m2['type'] +
              ' #' + m2_num)
    t.add_column(meta1.upper() + ' Meta Info',
                 justify='left',
                 style='bold white',
                 no_wrap=True)
    t.add_column('Changed Since Creation',
                 justify='center',
                 style='yellow',
                 no_wrap=True)
    t.add_column('Added Since Creation',
                 justify='center',
                 style='green',
                 no_wrap=True)
    t.add_column('Removed Since Creation',
                 justify='center',
                 style='red',
                 no_wrap=True)
    t.add_column(meta2.upper() + ' Meta Info',
                 justify='right',
                 style='bold white',
                 no_wrap=True)

    # Build the table row by row, padding shorter columns with blanks;
    # the first column comes from c1 (meta1 info), the last from c5
    for x in range(max(len(col) for col in (c1, c2, c3, c4, c5))):
        row = [col[x] if x < len(col) else '' for col in (c1, c2, c3, c4, c5)]
        t.add_row(*row)

    console = Console()
    console.print(t)
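The row-padding loop above (and the one in print_info() below) is the standard 'zip to the longest column' pattern; itertools.zip_longest expresses it directly, as a sketch:

from itertools import zip_longest

for a, b, c, d, e in zip_longest(c1, c2, c3, c4, c5, fillvalue=''):
    t.add_row(a, b, c, d, e)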
Example #11
def print_info(config, selection):
    '''
    This function processes a meta data file without validating it,
    then compares the file against the system now and presents the results
    in a table. This acts as a 'dry run' of sorts, showing not only the
    info in the meta data file but also what would change if it were
    actually restored. The code is kind of gross but I'm not inclined to fix it.
    '''
    # Build Base Vars
    m_num = selection[2:].zfill(2)

    if selection.startswith('rp'):
        m_path = config['rp_paths'] + '/rp' + m_num + '.meta'
    elif selection.startswith('ss'):
        m_path = config['ss_paths'] + '/ss' + m_num + '.meta'
    else:
        return paf.prError('Invalid Selection: ' + selection.upper())

    # Return if Missing
    if not os.path.exists(m_path):
        return paf.prError(selection.upper() + ' Was NOT Found!')

    # Load Meta and Compare
    m = meta.read(config, m_path)
    compare = meta.compare_now(config, m)

    # Build Data For Table
    c1 = [
        'Installed Packages: ' + m['pkgs_installed'], 'Date: ' + m['date'],
        'Time: ' + m['time'], 'Pacback Version: ' + m['version'],
        'User Label: ' + m['label']
    ]

    if m['stype'] == 'Full':
        c1.append('Packages Cached: ' + m['pkgs_cached'])
        c1.append('Cache Size: ' + m['cache_size'])

    if m['dir_list']:
        c1.append('')
        c1.append('File Count: ' + m['file_count'])
        c1.append('Raw File Size: ' + m['file_raw_size'])
        c1.append('Compressed Size: ' + m['tar_size'])
        c1.append('')
        c1.append('Directory List')
        c1.append('--------------')
        for d in m['dir_list']:
            c1.append(d)

    c2 = list(compare['c_pkgs'])
    if not c2:
        c2.append('NONE')

    c3 = list(compare['a_pkgs'])
    if not c3:
        c3.append('NONE')

    c4 = list(compare['r_pkgs'])
    if not c4:
        c4.append('NONE')

    # Build Table
    t = Table(title=m['type'] + ' #' + m_num)
    t.add_column('Meta Info', justify='left', style='bold white', no_wrap=True)
    t.add_column('Changed Since Creation',
                 justify='center',
                 style='yellow',
                 no_wrap=True)
    t.add_column('Added Since Creation',
                 justify='center',
                 style='green',
                 no_wrap=True)
    t.add_column('Removed Since Creation',
                 justify='center',
                 style='red',
                 no_wrap=True)

    # Build the table row by row, padding shorter columns with blanks
    for x in range(max(len(col) for col in (c1, c2, c3, c4))):
        row = [col[x] if x < len(col) else '' for col in (c1, c2, c3, c4)]
        t.add_row(*row)

    console = Console()
    console.print(t)