Example #1
def call_repo_sync():
    # download packages only when a local catalog URL base is configured;
    # otherwise just refresh the catalogs themselves
    download_packages = bool(reposadocommon.pref('LocalCatalogURLBase'))
    fast_scan = True
    repo_sync.sync(fast_scan, download_packages)
    return jsonify(result='success')
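The jsonify return value suggests this handler is meant to run inside a Flask app. A minimal wiring sketch, assuming the modules referenced above (reposadocommon, repo_sync) are already imported; the app object and the /sync URL are assumptions, not part of the original snippet:

from flask import Flask, jsonify

app = Flask(__name__)
# register the function above as a POST endpoint (URL name is an assumption)
app.add_url_rule('/sync', 'call_repo_sync', call_repo_sync, methods=['POST'])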
Example #2
def get_packages_urls(p):
    # build an HTML snippet that links to every package of a product dict
    if 'Packages' not in p:
        return None
    packages = p['Packages']
    if len(packages) == 0:
        return None

    out = ''
    for pkg in packages:
        # rewrite the Apple URL to point at the local catalog host and
        # report the package size in megabytes
        out += "<a href='%s/%s'>%s</a> (%.2f Mb), " % (
            reposadocommon.pref('LocalCatalogURLBase'),
            pkg['URL'][pkg['URL'].index('/', 9) + 1:],
            pkg['URL'].split('/')[-1],
            pkg['Size'] / 1024. / 1024)

    # drop the trailing ", " separator
    return out[:-2]
Example #3
def get_packages_urls(p):
    if 'Packages' not in p:
        return None
    packages = p['Packages']
    if len(packages) == 0:
        return None

    out = ''
    for pkg in packages:
        out += "<a href='%s/%s'>%s</a> (%.2f Mb), " % (
            reposadocommon.pref('LocalCatalogURLBase'),
            pkg['URL'][pkg['URL'].index('/', 9) + 1:], pkg['URL'].split('/')[-1],
            pkg['Size'] / 1024. / 1024)

    return out[:-2]
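For reference, a hypothetical call with made-up data (the package URL, size, and the LocalCatalogURLBase value are all invented) shows the kind of HTML fragment the function returns:

# Assume reposadocommon.pref('LocalCatalogURLBase') returns
# 'http://updates.example.com' (invented value).
product = {
    'Packages': [
        {'URL': 'http://swcdn.apple.com/content/downloads/ab/cd/Safari.pkg',
         'Size': 75 * 1024 * 1024},
    ],
}
print(get_packages_urls(product))
# -> <a href='http://updates.example.com/content/downloads/ab/cd/Safari.pkg'>Safari.pkg</a> (75.00 Mb)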
Example #4
def delete_branch(branchname):
    catalog_branches = reposadocommon.getCatalogBranches()
    if branchname not in catalog_branches:
        reposadocommon.print_stderr("Branch %s does not exist!", branchname)
        return

    del catalog_branches[branchname]

    # this is not in the common library, so we have to duplicate code
    # from repoutil
    for catalog_URL in reposadocommon.pref("AppleCatalogURLs"):
        localcatalogpath = reposadocommon.getLocalPathNameFromURL(catalog_URL)
        # now strip the '.sucatalog' bit from the name
        if localcatalogpath.endswith(".sucatalog"):
            localcatalogpath = localcatalogpath[0:-10]
        branchcatalogpath = localcatalogpath + "_" + branchname + ".sucatalog"
        if os.path.exists(branchcatalogpath):
            reposadocommon.print_stdout("Removing %s", os.path.basename(branchcatalogpath))
            os.remove(branchcatalogpath)

    reposadocommon.writeCatalogBranches(catalog_branches)

    return jsonify(result=True)
Example #5
def delete_branch(branchname):
    catalog_branches = reposadocommon.getCatalogBranches()
    if branchname not in catalog_branches:
        reposadocommon.print_stderr('Branch %s does not exist!', branchname)
        return

    del catalog_branches[branchname]

    # this is not in the common library, so we have to duplicate code
    # from repoutil
    for catalog_URL in reposadocommon.pref('AppleCatalogURLs'):
        localcatalogpath = reposadocommon.getLocalPathNameFromURL(catalog_URL)
        # now strip the '.sucatalog' bit from the name
        if localcatalogpath.endswith('.sucatalog'):
            localcatalogpath = localcatalogpath[0:-10]
        branchcatalogpath = localcatalogpath + '_' + branchname + '.sucatalog'
        if os.path.exists(branchcatalogpath):
            reposadocommon.print_stdout(
                'Removing %s', os.path.basename(branchcatalogpath))
            os.remove(branchcatalogpath)

    reposadocommon.writeCatalogBranches(catalog_branches)
    
    return jsonify(result=True)
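To make the catalog-path handling concrete, here is a small standalone sketch of the filename derivation with invented values (the local path and branch name are hypothetical):

localcatalogpath = '/srv/reposado/html/content/catalogs/index-10.13.sucatalog'
branchname = 'testing'
# strip the '.sucatalog' suffix, then append '_<branch>.sucatalog'
if localcatalogpath.endswith('.sucatalog'):
    localcatalogpath = localcatalogpath[0:-10]
branchcatalogpath = localcatalogpath + '_' + branchname + '.sucatalog'
# branchcatalogpath == '/srv/reposado/html/content/catalogs/index-10.13_testing.sucatalog'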
Example #6
Note that this script requires write permission in the UpdatesRootDir
directory of your reposado. This is so that it can write the tracking file
that is used between runs to compute the difference in update lists.

Note also that it needs access to the "reposadocommon" Python library. This means
you probably want to place it in the "code" directory of your reposado
installation.
'''

import os
import pickle
from sys import exit

from reposadolib import reposadocommon

root_dir = reposadocommon.pref('UpdatesRootDir')
state_file = os.path.join(root_dir, 'reponotify.pickle')

products = reposadocommon.getProductInfo()

cur_apple_prods = []

# a product is "current" if Apple still lists it in at least one catalog
for prod_id, info in products.items():
    if info['AppleCatalogs']:
        cur_apple_prods.append(prod_id)

try:
    with open(state_file, 'rb') as pF:
        prev_apple_prods = pickle.load(pF)
except Exception:
    # if there's a problem reading the file then assume we're "up to date" by