Esempio n. 1
0
 def resolve(self, name):
     """Resolve *name* against the known application list or a custom config.

     Side effects: sets self.name/self.version/self.url (and possibly
     self.cmd, self.extras, self.is_custom, self.custom_conf) and reports
     the resolved software to the remote endpoint via HTTP GET.
     """
     # Endpoint pieces come from module-level configuration.
     global endpoint_url, put
     endpoint = endpoint_url + put
     # Known applications whose name occurs inside the requested *name*.
     a = list(filter(lambda x: x in name, self.applist))
     if len(a) > 0:
         self.name = a[0]
         self.resolve_version(name)
         if self.name == "eclipse":
             # Eclipse gets a pinned download URL plus optional plugin URLs.
             self.url = "http://archive.eclipse.org/technology/epp/downloads/release/helios/SR2/eclipse-jee-helios-SR2-macosx-cocoa-x86_64.tar.gz"
             if str(raw_input(
                     'Do you want to add plugins to the eclipse folder?(Y/n)')).lower() == "y":
                 s = str(raw_input(
                     "Enter the plugin zip URLs separated by commas (,):"))
                 if len(s) > 0:
                     self.extras["plugins"] = s
         params = {"name": self.name, "version": self.version,
                   "os": utils.get_os(), "arch": utils.get_arch(),
                   "url": self.url}
         # Drop placeholder 'n' values before reporting to the service.
         params = dict((k, v) for k, v in params.items() if v.lower() != 'n')
         response = requests.get(
             "%s%s" % (endpoint, urllib.urlencode(params)))
     else:
         # Unknown software: look for a per-directory config file first.
         conf_file = os.path.join(os.getcwd(), name, SOFTWARE_CONFIG_NAME)
         if os.path.isfile(conf_file):
             con = fetch_conf_dict(conf_file)
             self.is_custom = True
             self.custom_conf = con
             self.name = con['name']
             self.version = con['version']
             self.url = con['url']
             self.cmd = con['cmd']
             add_custom_soft_conf(con)
             params = {"name": self.name, "version": self.version,
                       "os": utils.get_os(), "arch": utils.get_arch(),
                       "command": self.cmd, "url": self.url,
                       "tag": con.get('tag', 'n')}
             params = dict(
                 (k, v) for k, v in params.items() if v.lower() != 'n')
             response = requests.get(
                 "%s%s" % (endpoint, urllib.urlencode(params)))
         else:
             # No config file: interactively create one via 'addsoftware',
             # then read the resulting JSON description back.
             oscm.call_command('addsoftware')
             result = None
             with open("my_file", 'r') as f:
                 try:
                     result = json.load(f)
                 # if the file is empty the ValueError will be thrown
                 except ValueError:
                     result = {}
             self.is_custom = True
             self.name = result['name']
             self.version = result['version']
             self.url = result.get('url', '')
             self.cmd = result.get('cmd', '')
Esempio n. 2
0
    def __init__(self, executable):
        """Set up analysis state for *executable*.

        Args:
            executable: target executable file path
        """
        self.logger = LogUtil.get_logger()
        self.executable = executable
        self.elf = getELF(executable)
        self.arch = get_arch(self.elf)

        # Function tables, populated by get_func().
        self.functions = {}
        self.addr2func = {}
        self.get_func()

        # Signature-matching state.
        self.signs = {}
        self.explored_addr = []
        self.matched = {}

        # Reload previously computed signatures from the per-binary cache.
        self.sign_cache_file = "/tmp/{}_sign.cache".format(md5(executable))
        if os.path.exists(self.sign_cache_file):
            with open(self.sign_cache_file) as cache:
                raw = cache.read()
            try:
                self.signs = json.loads(raw)
            except Exception as err:
                self.logger.error(str(err))
Esempio n. 3
0
def dpkg_infofunc():
    """Return a short architecture description block for bug reports.

    Uses the dpkg architecture (via utils.get_arch()) when available,
    falling back to the kernel machine name from os.uname().
    """
    debarch = utils.get_arch()
    # uname()[4] is the machine hardware name; compute it unconditionally —
    # the original only assigned it inside `if debarch:` but also referenced
    # it in the else-branch, raising NameError when get_arch() is falsy.
    utsmachine = os.uname()[4]
    if debarch:
        if utsmachine == debarch:
            debinfo = u'Architecture: %s\n\n' % debarch
        else:
            debinfo = u'Architecture: %s (%s)\n\n' % (debarch, utsmachine)
    else:
        debinfo = u'Architecture: ? (%s)\n\n' % utsmachine
    return debinfo
Esempio n. 4
0
def dpkg_infofunc():
    """Describe the dpkg/host architecture (plus foreign architectures)."""
    debarch = utils.get_arch()
    utsmachine = os.uname()[4]
    multiarch = utils.get_multiarch()
    # Prefer the dpkg architecture; annotate with the kernel machine name
    # when they disagree, and fall back to '?' when dpkg gives nothing.
    if not debarch:
        debinfo = u'Architecture: ? (%s)\n' % utsmachine
    elif utsmachine == debarch:
        debinfo = u'Architecture: %s\n' % debarch
    else:
        debinfo = u'Architecture: %s (%s)\n' % (debarch, utsmachine)
    if multiarch:
        debinfo += u'Foreign Architectures: %s\n' % multiarch
    return debinfo + '\n'
Esempio n. 5
0
def dpkg_infofunc():
    """Build a text block describing the dpkg/host architecture.

    Includes foreign (multiarch) architectures when configured.
    """
    debarch = utils.get_arch()
    utsmachine = os.uname()[4]
    multiarch = utils.get_multiarch()
    if debarch:
        if utsmachine == debarch:
            debinfo = u'Architecture: %s\n' % debarch
        else:
            # Kernel machine differs from the dpkg architecture; show both.
            debinfo = u'Architecture: %s (%s)\n' % (debarch, utsmachine)
    else:
        debinfo = u'Architecture: ? (%s)\n' % utsmachine
    if multiarch:
        debinfo += u'Foreign Architectures: %s\n' % multiarch
    debinfo += '\n'
    return debinfo
Esempio n. 6
0
def get_versions_available(package, timeout, dists=None, http_proxy=None, arch='i386'):
    """Query rmadison for the versions of *package* available per dist.

    Returns {} on network or HTTP errors.  (Python 2 code.)
    NOTE(review): the success path visibly falls through without a return —
    the remainder of this function may be elsewhere; confirm.
    """
    if not dists:
        dists = ('oldstable', 'stable', 'testing', 'unstable', 'experimental')

    # NOTE(review): the `arch` parameter is unconditionally overwritten with
    # the host architecture, so the 'i386' default is never used — confirm
    # whether callers rely on passing an explicit arch.
    arch = utils.get_arch()

    url = RMADISON_URL % package
    url += '&s=' + ','.join(dists)
    # select only those lines that refers to source pkg
    # or to binary packages available on the current arch
    url += '&a=source,all,' + arch
    try:
        page = open_url(url)
    except NoNetwork:
        return {}
    except urllib2.HTTPError, x:
        print >> sys.stderr, "Warning:", x
        return {}
Esempio n. 7
0
def main(args):
    """Compute and pickle adversarial gradient statistics for a checkpoint.

    args.grad_opt selects the mode: 'get-mu' stores the mean gradient,
    'get-noise' stores gradient noise relative to a precomputed mean.
    """
    model = utils.get_arch(arch=args.arch, dataset=args.dataset)
    criterion = torch.nn.CrossEntropyLoss()

    # Load weights onto CPU first; moved to GPU below unless args.cpu.
    state_dict = torch.load(args.resume_path, map_location=torch.device('cpu'))
    model.load_state_dict( state_dict['model_state_dict'] )
    del state_dict

    if not args.cpu:
        model.cuda()
        criterion = criterion.cuda()

    attacker = utils.PGDAttacker(
        radius = args.pgd_radius,
        steps = args.pgd_steps,
        step_size = args.pgd_step_size,
        random_start = True,
        norm_type = args.pgd_norm_type,
    )

    if args.grad_opt == 'get-mu':
        ''' calculate grad_mu '''
        # Deterministic pass over the training set (training=False).
        train_loader = utils.get_loader(
            args.dataset, args.batch_size, train=True, training=False)

        grad_mu = get_grad_mu(
            model, criterion, train_loader, attacker, args.cpu)

        with open('{}/{}-mu.pkl'.format(args.save_dir, args.eval_save_name), 'wb') as f:
            pickle.dump(grad_mu, f)

    elif args.grad_opt == 'get-noise':
        ''' calculate grad noise '''
        # Requires a previously computed grad_mu pickle.
        with open(args.resume_grad_mu_path, 'rb') as f:
            grad_mu = pickle.load(f)
        train_loader = utils.get_loader(
            args.dataset, args.batch_size, train=True, training=True)

        grad_noise = get_grad_noise(
            model, criterion, train_loader, attacker, grad_mu,
            args.grad_rep_T, args.grad_samp_T, args.cpu)

        with open('{}/{}-noise.pkl'.format(args.save_dir, args.eval_save_name), 'wb') as f:
            pickle.dump(grad_noise, f)
Esempio n. 8
0
def check_built(src_package, timeout, arch=None, http_proxy=None):
    """Return True if built in the past, False otherwise (even error)"""
    # Default to the host architecture when none was requested.
    arch = arch or utils.get_arch()

    try:
        page = open_url(BUILDD_URL % (arch, src_package), http_proxy, timeout)
    except NoNetwork:
        page = None

    if not page:
        return False

    # Feed the whole buildd status page through the HTML parser.
    buildd_parser = BuilddParser()
    buildd_parser.feed(page.read())
    buildd_parser.close()
    page.close()
    return buildd_parser.found_succeeded
Esempio n. 9
0
def check_built(src_package, timeout, arch=None, http_proxy=None):
    """Return True if built in the past, False otherwise (even error)"""
    # Default to the host architecture when none is given.
    if not arch:
        arch = utils.get_arch()

    try:
        page = open_url(BUILDD_URL % (arch, src_package), http_proxy, timeout)
    except NoNetwork:
        return False

    if not page:
        return False

    # Scan the buildd status page for a successful build entry.
    parser = BuilddParser()
    parser.feed(page.read())
    parser.close()
    page.close()

    return parser.found_succeeded
Esempio n. 10
0
def get_versions_available(package,
                           timeout,
                           dists=None,
                           http_proxy=None,
                           arch='i386'):
    """Query rmadison for the versions of *package* available per dist.

    Returns {} on network or HTTP errors.  (Python 2 code.)
    NOTE(review): the success path visibly falls through without a return —
    the remainder of this function may be elsewhere; confirm.
    """
    if not dists:
        dists = ('oldstable', 'stable', 'testing', 'unstable', 'experimental')

    # NOTE(review): the `arch` parameter is unconditionally overwritten with
    # the host architecture, so the 'i386' default is never used — confirm.
    arch = utils.get_arch()

    url = RMADISON_URL % package
    url += '&s=' + ','.join(dists)
    # select only those lines that refers to source pkg
    # or to binary packages available on the current arch
    url += '&a=source,all,' + arch
    try:
        page = open_url(url)
    except NoNetwork:
        return {}
    except urllib2.HTTPError, x:
        print >> sys.stderr, "Warning:", x
        return {}
Esempio n. 11
0
def download_sec(sec, config_file):
    v = "0"
    if config_file.has_option(sec, "version"):
        v = config_file.get(sec, "version")
    OS = utils.get_os()
    arch = utils.get_arch()
    d = utils.get_info({
        "name": sec,
        "os": OS,
        "version": v,
        "arch": arch
    })
    for software in d:
        if len(software['command']) > 0:
            subprocess.call(software['command'], shell=True)
        elif len(software["url"]) > 0:
            if os.path.isfile(os.path.basename(software["url"])):
                print 'Downloading ' + sec + '...'
                r = requests.get(software["url"], stream=True)
                if r.status_code == 200:
                    with open(os.path.basename(software["url"]), 'wb') as f:
                        for chunk in r:
                            f.write(chunk)
                        if tarfile.is_tarfile(f.name):
                            tfile = tarfile.open(
                                os.path.basename(software["url"]), "r:gz")
                            tfile.extractall(sec)
                        elif zipfile.is_zipfile(f.name):
                            z = zipfile.ZipFile(f)
                            z.extractall(sec)
                else:
                    print 'Error downloading package, Please download ' + sec + ' on your own!'
            else:
                print sec + ' already present in folder, extracting...'
            print 'Running command ' + str(
                ['tar', '-xvf', os.path.basename(software["url"])])
            subprocess.call(['tar', '-xvf', os.path.basename(software["url"])])
Esempio n. 12
0
#  along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
############################################################################
import sys

import xbmcgui, xbmcaddon

import utils

# Script entry: "dump_edid" argument dumps the display EDID to /flash;
# any other invocation opens the add-on settings (Raspberry Pi only).
if len(sys.argv) > 1 and sys.argv[1] == "dump_edid":
    with utils.remount():
        utils.dump_edid()
        xbmcgui.Dialog().notification(utils.ADDON_NAME,
                                      "Dumped edid to /flash/edid.dat",
                                      xbmcaddon.Addon().getAddonInfo('icon'),
                                      sound=False)
else:
    utils.log("Started script")
    if utils.get_arch().startswith('RPi'):
        with utils.busy():
            try:
                utils.maybe_init_settings()
            except IOError as e:
                # Config could not be read; surface the error to the user.
                utils.read_error(utils.CONFIG_PATH, str(e))
        utils.log("Opening settings")
        xbmcaddon.Addon().openSettings()
    else:
        utils.log("Not a Raspberry Pi")
        xbmcgui.Dialog().ok(utils.ADDON_NAME,
                            "This add-on only works on a Raspberry Pi")
Esempio n. 13
0
import sys
import json
import time
import struct

from unicorn import *
from unicorn.x86_const import *
from capstone import *
from capstone.x86 import *

import utils
import x64utils

import config

# Capstone disassembler for the configured target architecture; shared by
# the debug hooks below.
cs = utils.init_capstone(utils.get_arch(config.ARCH))


def unicorn_debug_instruction(uc, address, size, user_data):
    """Unicorn code hook: print the disassembly of each executed instruction."""
    fmt = "    Instr: {:#016x}:\t{}\t{}"
    try:
        code = uc.mem_read(address, size)
        for _addr, _sz, mnemonic, op_str in cs.disasm_lite(bytes(code), size):
            print(fmt.format(address, mnemonic, op_str))
    except Exception as exc:
        # Disassembly failed: dump context, then retry on a fixed 30-byte window.
        print(hex(address))
        print("e: {}".format(exc))
        print("size={}".format(size))
        window = bytes(uc.mem_read(address, 30))
        for _addr, _sz, mnemonic, op_str in cs.disasm_lite(window, 30):
            print(fmt.format(address, mnemonic, op_str))
Esempio n. 14
0
def main(input_file, debug=False, trace=False):
    """Emulate the target with Unicorn on afl input *input_file*.

    Args:
        input_file: path of the afl-generated testcase to place in memory.
        debug: start the uDdbg debugger instead of running to completion.
        trace: install instruction/block/memory tracing hooks.
    """
    arch = utils.get_arch(config.ARCH)
    uc = Uc(arch.unicorn_arch, arch.unicorn_mode)

    if debug:
        # Try to load udbg
        sys.path.append(os.path.join(os.path.dirname(
            os.path.realpath(__file__)), "uDdbg"))
        try:
            from udbg import UnicornDbg
            print("[+] uDdbg debugger loaded.")
        except Exception as ex:
            # Fixed: the original bare `except:` did not bind `ex`, so the
            # message below raised NameError instead of reporting the error.
            debug = False
            trace = True
            raise Exception(
                "[!] Could not load uDdbg (install with ./setupdebug.sh), falling back to trace output: {}".format(ex))
    if trace:
        print("[+] Settings trace hooks")
        uc.hook_add(UC_HOOK_BLOCK, unicorn_debug_block)
        uc.hook_add(UC_HOOK_CODE, unicorn_debug_instruction)
        uc.hook_add(UC_HOOK_MEM_WRITE | UC_HOOK_MEM_READ |
                    UC_HOOK_MEM_FETCH, unicorn_debug_mem_access)

    # On error: map memory.
    uc.hook_add(UC_HOOK_MEM_UNMAPPED, unicorn_debug_mem_invalid_access)

    utils.map_known_mem(uc)

    if debug or trace:
        print("[*] Reading from file {}".format(input_file))

    # we leave out gs_base and fs_base on x64 since they start the forkserver
    utils.uc_load_registers(uc)

    # let's see if the user wants a change.
    config.init_func(uc)

    # get pc from unicorn state since init_func may have altered it.
    pc = utils.uc_get_pc(uc, arch)

    # if we only have a single exit, there is no need to potentially slow down execution with an insn hook.
    if len(config.EXITS) or len(config.ENTRY_RELATIVE_EXITS):

        # add MODULE_EXITS to EXITS
        config.EXITS += [x + pc for x in config.ENTRY_RELATIVE_EXITS]
        # add final exit to EXITS
        config.EXITS.append(pc+config.LENGTH)

        if arch == utils.X64:
            exit_hook = x64utils.init_syscall_hook(config.EXITS, os._exit)
            uc.hook_add(UC_HOOK_INSN, exit_hook, None,
                        1, 0, UC_X86_INS_SYSCALL)
        else:
            # TODO: (Fast) solution for X86, ARM, ...
            raise Exception("Multiple exits not yet suppored for arch {}".format(arch))

    # starts the afl forkserver
    utils.uc_start_forkserver(uc)

    input_file = open(input_file, 'rb')  # load afl's input
    input = input_file.read()
    input_file.close()

    try:
        config.place_input(uc, input)
    except Exception as ex:
        print("[!] Error setting testcase for input {}: {}".format(input, ex))
        os._exit(1)

    if not debug:
        try:
            uc.emu_start(pc, pc + config.LENGTH, timeout=0, count=0)
        except UcError as e:
            print("[!] Execution failed with error: {} at address {:x}".format(
                e, utils.uc_get_pc(uc, arch)))
            force_crash(e)
        # Exit without clean python vm shutdown: "The os._exit() function can be used if it is absolutely positively necessary to exit immediately"
        os._exit(0)
    else:
        print("[*] Starting debugger...")
        udbg = UnicornDbg()

        # TODO: Handle mappings differently? Update them at some point? + Proper exit after run?
        udbg.initialize(emu_instance=uc, entry_point=pc, exit_point=pc+config.LENGTH,
                        hide_binary_loader=True, mappings=[(hex(x), x, utils.PAGE_SIZE) for x in utils.MAPPED_PAGES])

        def dbg_except(x, y):
            raise Exception(y)
        os.kill = dbg_except
        udbg.start()
        # TODO will never reach done, probably.
        print("[*] Done.")
def main(argv=None):
    """Download a SapMachine boot JDK into --destination.

    Accepts releases of the requested major version or one older (widened by
    one more on the retry pass) and explodes the archive into
    <destination>/boot_jdk.  Returns 0 in all cases.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-m',
                        '--major',
                        help='the SapMachine major version to build',
                        metavar='MAJOR',
                        required=True)
    parser.add_argument('-d',
                        '--destination',
                        help='the download destination',
                        metavar='DIR',
                        required=True)
    args = parser.parse_args()

    # Accept a boot JDK of the same major version or one release older.
    boot_jdk_major_max = int(args.major)
    boot_jdk_major_min = boot_jdk_major_max - 1
    destination = os.path.realpath(args.destination)
    releases = utils.get_github_releases()
    platform = str.format('{0}-{1}_bin', utils.get_system(), utils.get_arch())
    retries = 2

    releases = extra_bootjdks + releases

    while retries > 0:
        for release in releases:

            if release['prerelease']:
                continue

            tag = SapMachineTag.from_string(release['name'])

            if tag is None:
                print(
                    str.format("SapMachine release {0} not recognized",
                               release['name']))
                continue
            major = tag.get_major()

            if major <= boot_jdk_major_max and major >= boot_jdk_major_min:
                assets = release['assets']

                for asset in assets:
                    asset_name = asset['name']
                    asset_url = asset['browser_download_url']

                    # Pick the JDK archive for this platform; skip symbol bundles.
                    if 'jdk' in asset_name and platform in asset_name and (
                            asset_name.endswith('.tar.gz')
                            or asset_name.endswith('.zip')
                    ) and 'symbols' not in asset_name:
                        archive_path = join(destination, asset_name)
                        utils.remove_if_exists(archive_path)
                        utils.download_artifact(asset_url, archive_path)
                        boot_jdk_exploded = join(destination, 'boot_jdk')
                        utils.remove_if_exists(boot_jdk_exploded)
                        os.makedirs(boot_jdk_exploded)
                        utils.extract_archive(archive_path, boot_jdk_exploded)

                        # The archive unpacks into a single sapmachine*/jdk*
                        # folder; flatten it into boot_jdk_exploded.
                        sapmachine_folder = [
                            f for f_ in [
                                glob.glob(join(boot_jdk_exploded, e))
                                for e in ('sapmachine*', 'jdk*')
                            ] for f in f_
                        ]

                        if sapmachine_folder is not None:
                            sapmachine_folder = sapmachine_folder[0]
                            files = os.listdir(sapmachine_folder)

                            for f in files:
                                shutil.move(join(sapmachine_folder, f),
                                            boot_jdk_exploded)

                            utils.remove_if_exists(sapmachine_folder)

                            # macOS bundles nest the JDK under Contents/Home.
                            if utils.get_system() == 'osx':
                                files = os.listdir(
                                    join(boot_jdk_exploded, 'Contents',
                                         'Home'))

                                for f in files:
                                    shutil.move(
                                        join(boot_jdk_exploded, 'Contents',
                                             'Home', f), boot_jdk_exploded)

                                utils.remove_if_exists(
                                    join(boot_jdk_exploded, 'Contents'))

                        return 0
        retries -= 1
        # Second pass: widen the accepted range by one more major version.
        if retries == 1:
            boot_jdk_major_min = boot_jdk_major_max - 2

    return 0
def main(argv=None):
    """Download a SapMachine boot JDK (major or major-1) into --destination.

    Explodes the archive into <destination>/boot_jdk.  Returns 0 always.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-m', '--major', help='the SapMachine major version to build', metavar='MAJOR', required=True)
    parser.add_argument('-d', '--destination', help='the download destination', metavar='DIR', required=True)
    args = parser.parse_args()

    # Accept a boot JDK of the requested major version or one release older.
    boot_jdk_major_max = int(args.major)
    boot_jdk_major_min = boot_jdk_major_max - 1
    destination = os.path.realpath(args.destination)
    releases = utils.github_api_request('releases', per_page=100)
    platform = str.format('{0}-{1}_bin', utils.get_system(), utils.get_arch())

    for release in releases:

        if release['prerelease']:
            continue

        version, version_part, major, build_number, sap_build_number, os_ext = utils.sapmachine_tag_components(release['name'])

        if major is None:
            continue

        major = int(major)

        if major <= boot_jdk_major_max and major >= boot_jdk_major_min:
            assets = release['assets']

            for asset in assets:
                asset_name = asset['name']
                asset_url = asset['browser_download_url']

                # Pick this platform's JDK asset (skip the .txt checksum file).
                if 'jdk' in asset_name and platform in asset_name and not asset_name.endswith('.txt'):
                    archive_path = join(destination, asset_name)
                    utils.remove_if_exists(archive_path)
                    utils.download_artifact(asset_url, archive_path)
                    boot_jdk_exploded = join(destination, 'boot_jdk')
                    utils.remove_if_exists(boot_jdk_exploded)
                    os.makedirs(boot_jdk_exploded)
                    utils.extract_archive(archive_path, boot_jdk_exploded)

                    # Flatten the single sapmachine* folder from the archive.
                    sapmachine_folder = glob.glob(join(boot_jdk_exploded, 'sapmachine*'))

                    if sapmachine_folder is not None:
                        sapmachine_folder = sapmachine_folder[0]
                        files = os.listdir(sapmachine_folder)

                        for f in files:
                            shutil.move(join(sapmachine_folder, f), boot_jdk_exploded)

                        utils.remove_if_exists(sapmachine_folder)

                        # macOS bundles nest the JDK under Contents/Home.
                        if utils.get_system() == 'osx':
                            files = os.listdir(join(boot_jdk_exploded, 'Contents', 'Home'))

                            for f in files:
                                shutil.move(join(boot_jdk_exploded, 'Contents', 'Home', f), boot_jdk_exploded)

                            utils.remove_if_exists(join(boot_jdk_exploded, 'Contents'))

                    return 0

    return 0
Esempio n. 17
0
def main(workdir, module=None, breakoffset=None, breakaddress=None, reset_state=True, arch="x64", gdb_port=1234):
    """Dump target state via gdb for emulation, then serve memory requests.

    Attaches to a gdb stub on *gdb_port*, breaks at *breakaddress* (or at
    *module* base + *breakoffset*), dumps all readable registers into the
    state folder, and then forwards ./harness.py requests arriving in the
    request folder via inotify.

    Raises:
        ValueError: for an unsupported arch or inconsistent break arguments.
    """
    request_path = os.path.join(workdir, REQUEST_FOLDER)
    output_path = os.path.join(workdir, STATE_FOLDER)

    if arch != "x64":
        # Fixed: `raise("...")` raised TypeError (a str is not an exception);
        # use a real exception type here and below.
        raise ValueError("Unsupported arch")
    if reset_state:
        # Best-effort removal of a previous state dump.
        try:
            shutil.rmtree(output_path)
        except OSError:
            pass
    try:
        os.makedirs(output_path, exist_ok=True)
    except OSError:
        pass

    if module:
        if breakaddress is not None:
            raise ValueError(
                "Breakaddress and module supplied. They are not compatible.")
        if breakoffset is None:
            raise ValueError(
                "Module but no breakoffset specified. Don't know where to break.")

        # Resolve the module's load address on the target via the ssh helper.
        mem_addr = os.popen("./get_mod_addr.sh " + module).readlines()
        try:
            mem_addr = int(mem_addr[0], 16)
        except ValueError as ex:
            print("Error decoding module addr. Either module {} has not been loaded or something went wrong with ssh ({})".format(module, ex))
            exit(-1)
        print("Module " + module + " is at memory address " + hex(mem_addr))
        breakaddress = hex(mem_addr + breakoffset)
    else:
        breakaddress = hex(breakaddress)

    avatar = Avatar(arch=get_arch(arch),
                    output_directory=os.path.join(workdir, "avatar"))
    target = avatar.add_target(
        GDBTarget, gdb_port=gdb_port, gdb_executable=GDB_PATH)
    target.init()

    target.set_breakpoint("*{}".format(breakaddress))
    print("[*] Breakpoint set at {}".format(breakaddress))
    print("[+] waiting for bp hit...")
    target.cont()
    target.wait()

    print("[+] hit! dumping registers and memory")

    # dump registers
    for reg in all_regs(get_arch(arch)):
        written = True
        reg_file = os.path.join(output_path, reg)
        with open(reg_file, "w") as f:
            try:
                val = target.read_register(reg)
                if isinstance(val, list):
                    # Avatar special registers (xmm, ...): combine the 32-bit
                    # chunks into one integer.
                    i32list = val
                    val = 0
                    for shift, i32 in enumerate(i32list):
                        val += (i32 << (shift * 32))
                f.write(str(val))
            except Exception as ex:
                #print("Ignoring {}: {}".format(reg, ex))
                written = False
        if not written:
            # Drop the empty file for registers gdb could not read.
            os.unlink(reg_file)

    try:
        os.mkdir(request_path)
    except OSError:
        pass

    forward_requests(target, workdir, request_path, output_path)
    print("[*] Initial dump complete. Listening for requests from ./harness.py.")

    i = inotify.adapters.Inotify()
    # only readily written files
    i.add_watch(request_path, mask=inotify.constants.IN_CLOSE_WRITE)
    for event in i.event_gen(yield_nones=False):
        #print("Request: ", event)
        forward_requests(target, workdir, request_path, output_path)

    print("[*] Exiting probe_wrapper (keyboard interrupt)")
Esempio n. 18
0
#  You should have received a copy of the GNU General Public License
#  along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
############################################################################

import os
import sys
import re
from collections import OrderedDict
from functools import partial

import xbmc, xbmcgui

import utils

# This service only applies to Raspberry Pi builds; exit early elsewhere.
ARCH = utils.get_arch()

if not ARCH.startswith('RPi'):
    sys.exit(1)


class Main(object):
    """Service startup: log board info and publish hardware properties."""

    def __init__(self):
        utils.log("Started service")

        # Publish the board revision as an add-on property when detectable.
        revision = utils.get_revision()
        utils.log("Board revision: {}".format(revision))
        if revision is not None:
            utils.set_property_setting('revision', revision)

        # NOTE(review): max_ram is not used further here — confirm whether
        # the rest of the constructor was lost.
        max_ram = utils.get_max_ram()
Esempio n. 19
0
def main(args):
    """Adversarial (PGD) training loop with periodic gradient-norm logging.

    Trains the model chosen by args.arch/args.dataset, logging natural and
    adversarial loss/accuracy each step and saving checkpoints every
    args.save_freq steps.  Supports resuming via args.resume.
    """
    model = utils.get_arch(arch=args.arch, dataset=args.dataset)

    if args.optim == 'sgd':
        optim = torch.optim.SGD(model.parameters(),
                                lr=args.lr,
                                weight_decay=args.weight_decay,
                                momentum=args.momentum)
    elif args.optim == 'adam':
        optim = torch.optim.Adam(model.parameters(),
                                 lr=args.lr,
                                 weight_decay=args.weight_decay)

    criterion = torch.nn.CrossEntropyLoss()
    train_loader = utils.get_loader(args.dataset,
                                    args.batch_size,
                                    train=True,
                                    training=True)

    attacker = utils.PGDAttacker(
        radius=args.pgd_radius,
        steps=args.pgd_steps,
        step_size=args.pgd_step_size,
        random_start=args.pgd_random_start,
        norm_type=args.pgd_norm_type,
    )

    log = dict()

    if not args.cpu:
        model.cuda()
        criterion = criterion.cuda()

    if args.resume:
        # raise NotImplementedError
        state_dict = torch.load('{}-model.pkl'.format(args.resume_path))
        model.load_state_dict(state_dict['model_state_dict'])
        optim.load_state_dict(state_dict['optim_state_dict'])

        with open('{}-log.pkl'.format(args.resume_path), 'rb') as f:
            log = pickle.load(f)

    # x, y = next(train_loader)
    # if not args.cpu: x, y = x.cuda(), y.cuda()
    # adv_x = attacker.perturb(model, criterion, x, y)
    # max_grad_norm, grad_norm_list = get_grad_norm(model,criterion,adv_x,y)
    # utils.add_log(log, 'max_grad_norm', max_grad_norm)
    # utils.add_log(log, 'grad_norm_list', grad_norm_list)
    # logger.info('step [{}/{}]: max_grad_norm {:.3e}'
    #             .format(0, args.train_steps, max_grad_norm))
    # logger.info('')

    if not args.resume:
        ''' save the initial model parameter '''
        save_checkpoint('ckpt-{}'.format(0), '{}/ckpts/'.format(args.save_dir),
                        model, optim, log)

    start_step = args.resume_train_step if args.resume else 0
    for step in range(start_step, args.train_steps, 1):
        # Step-wise learning-rate decay.
        lr = args.lr * (args.lr_decay_rate**(step // args.lr_decay_freq))
        for group in optim.param_groups:
            group['lr'] = lr

        x, y = next(train_loader)
        if not args.cpu:
            x, y = x.cuda(), y.cuda()
        adv_x = attacker.perturb(model, criterion, x, y)

        if (step + 1) % args.calc_mg_freq == 0:
            max_grad_norm, grad_norm_list = get_grad_norm(
                model, criterion, adv_x, y)
            utils.add_log(log, 'max_grad_norm', max_grad_norm)
            utils.add_log(log, 'grad_norm_list', grad_norm_list)
            logger.info('step [{}/{}]: max_grad_norm {:.3e}'.format(
                step + 1, args.train_steps, max_grad_norm))
            logger.info('')

        # Natural-sample metrics (no gradient update).
        with torch.no_grad():
            model.eval()
            _y = model(x)
            nat_loss = criterion(_y, y)
            nat_acc = (_y.argmax(dim=1) == y).sum().item() / len(y)
            utils.add_log(log, 'nat_loss', nat_loss.item())
            utils.add_log(log, 'nat_acc', nat_acc)

        # ''' ERM begin '''
        # model.train()
        # _y = model(x)
        # nat_loss = criterion(_y, y)
        # nat_acc = (_y.argmax(dim=1) == y).sum().item() / len(y)
        # utils.add_log(log, 'nat_loss', nat_loss.item())
        # utils.add_log(log, 'nat_acc', nat_acc)

        # model.zero_grad()
        # nat_loss.backward()

        # nat_grad_norm = 0
        # for pp in model.parameters():
        #     nat_grad_norm += (pp.grad.data**2).sum().item()
        # nat_grad_norm = np.sqrt(nat_grad_norm)
        # utils.add_log(log, 'nat_grad_norm', nat_grad_norm)
        # ''' ERM end '''
        ''' adv begin (includes gradient descent) '''
        model.train()
        _y = model(adv_x)
        adv_loss = criterion(_y, y)
        adv_acc = (_y.argmax(dim=1) == y).sum().item() / len(y)
        utils.add_log(log, 'adv_loss', adv_loss.item())
        utils.add_log(log, 'adv_acc', adv_acc)

        optim.zero_grad()
        adv_loss.backward()
        optim.step()

        # L2 norm of the parameter gradient after the adversarial step.
        adv_grad_norm = 0
        for pp in model.parameters():
            adv_grad_norm += (pp.grad.data**2).sum().item()
        adv_grad_norm = np.sqrt(adv_grad_norm)
        utils.add_log(log, 'adv_grad_norm', adv_grad_norm)
        ''' adv end '''

        # xjb_rate = batch_grad_norm / old_batch_grad_norm
        # logger.info('RI??? {:.3e}'.format(xjb_rate))

        if (step + 1) % args.report_freq == 0:
            logger.info('step [{}/{}]:'.format(step + 1, args.train_steps))
            logger.info('nat_acc {:.2%} \t nat_loss {:.3e}'.format(
                nat_acc, nat_loss.item()))
            logger.info('adv_acc {:.2%} \t adv_loss {:.3e}'.format(
                adv_acc, adv_loss.item()))
            # logger.info('nat_grad_norm {:.3e} \t adv_grad_norm {:.3e} \t rate {:.3e}'
            #             .format( nat_grad_norm, adv_grad_norm, adv_grad_norm/nat_grad_norm ))
            logger.info('')

        if (step+1) % args.save_freq == 0 \
            or (step+1) == args.train_steps:
            save_checkpoint('ckpt-{}'.format(step + 1),
                            '{}/ckpts/'.format(args.save_dir), model, optim,
                            log)
Esempio n. 20
0
            usage()

        if opt == "--arch":
            arch = val

        if opt == "--region":
            region = val

        if opt == "--size":
            kwargs["size"] = int(val)

        if opt == "--name":
            kwargs["name"] = val

        if opt == "--desc":
            kwargs["desc"] = val

    if len(args) != 1:
        usage("incorrect number of arguments")

    snapshot_id = args[0]
    arch = arch if arch else utils.get_arch()
    region = region if region else utils.get_region()

    ami_id, ami_name = register(snapshot_id, region, arch, **kwargs)
    print ami_id, ami_name


# Script entry point.
if __name__ == "__main__":
    main()
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
# 
#  You should have received a copy of the GNU General Public License
#  along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
############################################################################

import xbmcgui, xbmcaddon

import utils

utils.log("Started script")
if utils.get_arch() == 'RPi.arm':
    with utils.busy():
        try:
            utils.maybe_init_settings()
        except IOError as e:
            utils.read_error(utils.CONFIG_PATH, str(e))
    utils.log("Opening settings")
    xbmcaddon.Addon().openSettings()
else:
    utils.log("Not a Raspberry Pi")
    xbmcgui.Dialog().ok(utils.ADDON_NAME,
                        "This add-on only works on a Raspberry Pi")



Esempio n. 22
0
File: eval.py Progetto: fshp971/RPG
def main(args):
    """Evaluate a saved checkpoint and pickle the results.

    Measures membership-inference attack accuracy plus natural and
    adversarial (PGD) accuracy/loss on both the train and test splits,
    then dumps everything to '{save_dir}/{eval_save_name}.pkl'.
    """
    model = utils.get_arch(arch=args.arch, dataset=args.dataset)
    criterion = torch.nn.CrossEntropyLoss()

    # Load on CPU first so the checkpoint opens even without a GPU.
    state_dict = torch.load(args.resume_path, map_location=torch.device('cpu'))
    model.load_state_dict(state_dict['model_state_dict'])
    del state_dict

    if not args.cpu:
        model.cuda()
        criterion = criterion.cuda()

    # Deterministic attack (no random start) for reproducible evaluation.
    attacker = utils.PGDAttacker(
        radius=args.pgd_radius,
        steps=args.pgd_steps,
        step_size=args.pgd_step_size,
        random_start=False,
        norm_type=args.pgd_norm_type,
    )

    loaders = {
        'train': utils.get_loader(args.dataset, args.batch_size,
                                  train=True, training=False),
        'test': utils.get_loader(args.dataset, args.batch_size,
                                 train=False, training=False),
    }

    log = dict()

    log['atk_acc'] = utils.membership_inference_attack(
        model, loaders['train'], loaders['test'], args.cpu)

    # Every (split, natural/adversarial) combination, in the same order as
    # before: nat/adv on train, then nat/adv on test.
    for split in ('train', 'test'):
        for prefix, atk in (('nat', None), ('adv', attacker)):
            eval_kwargs = {'cpu': args.cpu}
            if atk is not None:
                eval_kwargs['attacker'] = atk
            acc, loss = evaluate(model, criterion, loaders[split],
                                 **eval_kwargs)
            log['{}_{}_acc'.format(prefix, split)] = acc
            log['{}_{}_loss'.format(prefix, split)] = loss

    with open('{}/{}.pkl'.format(args.save_dir, args.eval_save_name),
              'wb') as f:
        pickle.dump(log, f)
Esempio n. 23
0
#  You should have received a copy of the GNU General Public License
#  along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
############################################################################

import os
import sys
import re
from collections import OrderedDict
from functools import partial

import xbmc, xbmcgui

import utils

# Architecture string of the host, e.g. 'RPi.arm' (see utils.get_arch).
ARCH = utils.get_arch()

# This service only makes sense on a Raspberry Pi; exit quietly elsewhere.
if not ARCH.startswith('RPi'):
    sys.exit(1)


class Main(object):
    def __init__(self):
        utils.log("Started service")

        revision = utils.get_revision()
        utils.log("Board revision: {}".format(revision))
        if revision is not None:
            utils.set_property_setting('revision', revision)

        max_ram = utils.get_max_ram()
Esempio n. 24
0
def parse_config(config_file):
    secs = config_file.sections()
    v = "0"
    for sec in secs:
        if config_file.has_option(sec, "version"):
            v = config_file.get(sec, "version")
        OS = utils.get_os()
        arch = utils.get_arch()
        d = utils.get_info({
            "name": sec,
            "os": OS,
            "version": v,
            "arch": arch
        })
        print d, str({"name": sec, "os": OS, "version": v, "arch": arch})
        for software in d:
            if len(software.get('command', '')) > 0:
                subprocess.call(software['command'], shell=True)
            elif len(software["url"]) > 0:
                print software["url"]
                if not os.path.isfile(os.path.basename(software["url"])):
                    print 'Downloading ' + sec + '...'
                    r = requests.get(software["url"], stream=True)
                    if r.status_code == 200:
                        with open(os.path.basename(software["url"]), 'wb') as f:
                            for chunk in r:
                                f.write(chunk)
                            # if tarfile.is_tarfile(f.name):
                            #     tfile = tarfile.open(os.path.basename(software["url"]), "r:gz")
                            #     tfile.extractall(sec)
                            # elif zipfile.is_zipfile(f.name):
                            #     z = zipfile.ZipFile(f)
                            #     z.extractall(sec)
                            subprocess.call('tar -xvf ' + f.name, shell=True)
                    else:
                        print 'Error downloading package, Please download ' + sec + ' on your own!'
                else:
                    print sec + ' already present in folder, extracting...'
                subprocess.call('tar -xvf ' + os.path.basename(software["url"]),
                                shell=True)
                # if tarfile.is_tarfile(os.path.basename(software["url"])):
                #     tfile = tarfile.open(os.path.basename(software["url"]), "r:gz")
                #     tfile.extractall(sec)
                # elif zipfile.is_zipfile(os.path.basename(software["url"])):
                #     z = zipfile.ZipFile(os.path.basename(software["url"]))
                #     z.extractall(sec)
        if not os.path.exists(sec):
            os.mkdir(sec)
        if sec == 'eclipse':
            if config_file.has_option(sec, "plugins"):
                plugins = config_file.get(sec, "plugins").split(",")
                if os.path.isdir(os.path.join(os.getcwd(), sec, "dropins")):
                    for plugin in plugins:
                        f = download_file(plugin)
                        if len(f) > 0 and zipfile.is_zipfile(f):
                            z = zipfile.ZipFile(open(f, "rb"))
                            path = os.path.join(os.getcwd(), "eclipse",
                                                "dropins",
                                                os.path.splitext(f)[0])
                            if not os.path.exists(path):
                                os.makedirs(path)
                            z.extractall(path)
Esempio n. 25
0
        if opt == "--pvmregister":
            pvmregister = True

    if len(args) != 1:
        usage("incorrect number of arguments")

    rootfs = args[0]
    if not os.path.exists(rootfs):
        fatal("rootfs path does not exist: %s" % rootfs)

    if not name:
        turnkey_version = utils.get_turnkey_version(rootfs)
        name = '_'.join([turnkey_version, str(int(time.time()))])

    arch = utils.get_arch()
    region = utils.get_region()
    snapshot_id, snapshot_name = bundle(rootfs, name)
    log.important(' '.join([snapshot_id, arch, region]))

    if marketplace:
        share_marketplace(snapshot_id, region)

    ami_id, ami_name = register(snapshot_id, region, arch)

    log.info(ami_name)
    log.important(' '.join([ami_id, arch, region]))

    if pvmregister:
        ami_id, ami_name = register(snapshot_id, region, arch, pvm=True)
def main(argv=None):
    """Print the SapMachine configure options (version, vendor and gtest
    flags) for a build as one space-separated line on stdout.

    Diagnostics go to stderr so stdout stays machine-readable for the
    calling build script.  Returns 0.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-t',
                        '--tag',
                        help='the SapMachine git tag',
                        metavar='TAG')
    parser.add_argument(
        '-m',
        '--major',
        help='the SapMachine major version, overrules any value from tag(s)',
        metavar='MAJOR')
    parser.add_argument(
        '-b',
        '--build',
        help='the build number, overrules any value from tag(s)',
        metavar='BUILD_NR')
    parser.add_argument('-r',
                        '--release',
                        help='set if this is a release build',
                        action='store_true',
                        default=False)
    args = parser.parse_args()

    configure_opts = []

    # initialize major from args
    major = args.major
    if major is not None:
        major = int(major)

    # initialize build number from args
    build_number = args.build
    if args.build is not None:
        print(str.format("Set build number from parameter: {0}", build_number),
              file=sys.stderr)

    # parse tag, if given, and determine major (a -m parameter wins over
    # the tag's major, with a warning when they disagree)
    tag = None
    if args.tag:
        tag = SapMachineTag.from_string(args.tag)
        if tag is None:
            print(str.format("Tag {0} not recognized as SapMachine tag",
                             args.tag),
                  file=sys.stderr)
        else:
            if (major is not None and tag.get_major() != major):
                print(str.format(
                    "Warning: Using major version {0}, given by parameter. Major version from tag would be {1}.",
                    major, tag.get_major()),
                      file=sys.stderr)
            else:
                major = tag.get_major()

            # determine build number from tag; a ga tag has none, so fall
            # back to the latest non-ga tag's build number
            if build_number is None:
                build_number = tag.get_build_number()
                if build_number is not None:
                    print(str.format("Set build number from tag: {0}",
                                     build_number),
                          file=sys.stderr)
                else:
                    latest_non_ga_tag = tag.get_latest_non_ga_tag()
                    if latest_non_ga_tag is not None:
                        build_number = latest_non_ga_tag.get_build_number()
                        if build_number is not None:
                            print(str.format(
                                "Tag seems to be a ga tag, using build number from latest non-ga tag {0}: {1}",
                                latest_non_ga_tag.as_string(), build_number),
                                  file=sys.stderr)

    # if major could not be determined, use default
    if major is None:
        major = utils.sapmachine_default_major()

    # set build number
    if build_number is not None:
        configure_opts.append(VERSION_BUILD_ARG.format(build_number))

    # determine and set version date in non-release builds
    # in release builds we rely on DEFAULT_VERSION_DATE in version-numbers.conf
    # NOTE: release_date is initialized unconditionally because the
    # version-opt logic below also needs it in release builds (bug fix:
    # previously a release build without a tag raised a NameError there).
    release_date = None
    if not args.release:
        if tag is not None:
            releases = utils.get_github_releases()
            if releases is not None:
                for release in releases:
                    if release['tag_name'] == tag.as_string():
                        release_date = release['created_at'].split('T')[0]
                        print(str.format(
                            "Set date to release date of {0}: {1}",
                            tag.as_string(), release_date),
                              file=sys.stderr)
                        break
            if release_date is None:
                print(str.format(
                    "Tag {0} does not seem to exist or data could not be loaded from GitHub",
                    tag.as_string()),
                      file=sys.stderr)

        if release_date is None:
            release_date = date.today().strftime("%Y-%m-%d")
            print(str.format("Set date to today: {0}", release_date),
                  file=sys.stderr)

        configure_opts.append(VERSION_DATE_ARG.format(release_date))

    # set version pre: snapshot/ea for non-release builds
    version_pre = ''
    if not args.release:
        if tag is None:
            version_pre = 'snapshot'
        else:
            version_pre = 'ea'

    # linux/aarch64 builds are additionally flagged as beta
    if utils.get_system(major) == 'linux' and utils.get_arch().startswith(
            'aarch64'):
        if not version_pre:
            version_pre = 'beta'
        else:
            version_pre += '-beta'

    configure_opts.append(VERSION_PRE_ARG.format(version_pre))

    # set version opt
    if tag is None:
        if release_date is None:
            # Release build without a tag: no date was computed above,
            # so fall back to today to keep the option well-defined.
            release_date = date.today().strftime("%Y-%m-%d")
        configure_opts.append(VERSION_OPT_ARG.format(release_date))
    else:
        if args.release and utils.sapmachine_is_lts(major):
            if major < 15:
                configure_opts.append(VERSION_OPT_ARG.format('LTS-sapmachine'))
            else:
                configure_opts.append(VERSION_OPT_ARG.format('LTS'))
        else:
            if major < 15:
                configure_opts.append(VERSION_OPT_ARG.format('sapmachine'))
            else:
                configure_opts.append(VERSION_OPT_ARG.format(''))

    # set version extra1 arg (= sap version)
    if tag is not None and tag.get_version_sap() is not None:
        configure_opts.append(VERSION_EXTRA1_ARG.format(tag.get_version_sap()))

    # set vendor version string
    if (tag is None or (major > 14) or (major == 14 and tag.get_update() > 1)
            or (major == 11 and tag.get_update() > 7)):
        configure_opts.append(VENDOR_VERSION_STRING_ARG)

    # set other vendor options
    configure_opts.append(VENDOR_NAME_ARG)
    configure_opts.append(VENDOR_URL_ARG)
    configure_opts.append(VENDOR_BUG_URL_ARG)
    configure_opts.append(VENDOR_VM_BUG_URL_ARG)

    # set getest option
    if 'GTEST_DIR' in os.environ:
        if major >= 15:
            configure_opts.append(GTEST_OPT.format(os.environ['GTEST_DIR']))

    print(' '.join(configure_opts))

    return 0
Esempio n. 27
0
    if not os.path.exists(rootfs):
        fatal("rootfs path does not exist: %s" % rootfs)

    if not name:
        turnkey_version = utils.get_turnkey_version(rootfs)
        name = '_'.join([turnkey_version, str(int(time.time()))])

    if not virts:
        virts.add('hvm')
        virts.add('pvm')

    for virt in virts:
        if not virt in ('hvm', 'pvm'):
            fatal("virtualization type not supported: %s" % virt)

    arch = utils.get_arch()
    region = utils.get_region()
    snapshot_id, snapshot_name = bundle(rootfs, name)

    if marketplace:
        share_marketplace(snapshot_id, region)

    for virt in virts:
        ami_id, ami_name = register(snapshot_id, region, virt, arch)

        log.info(ami_name)
        log.important(' '.join([ami_id, arch, virt, region]))

        if publish:
            share_public(ami_id, region)
def main(argv=None):
    """Print the SapMachine configure options (version, vendor and gtest
    flags) for a build as one space-separated line on stdout.

    Diagnostics go to stderr so stdout stays machine-readable for the
    calling build script.  Returns 0.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-t', '--tag', help='the SapMachine git tag', metavar='TAG')
    parser.add_argument('-b', '--build', help='the build number to use, overrules any value from tag(s)', metavar='BUILD_NR')
    parser.add_argument('-r', '--release', help='set if this is a release build', action='store_true', default=False)
    parser.add_argument('-g', '--branch', help='the SapMachine git branch', metavar='BRANCH')
    args = parser.parse_args()

    configure_opts = []

    # parse tag, if given; an unrecognized tag degrades to a snapshot build
    tag = None
    if args.tag:
        tag = SapMachineTag.from_string(args.tag)
        if tag is None:
            print(str.format("Passed tag {0} not recognized as SapMachine tag, handling as snapshot build", args.tag), file=sys.stderr)

    # parse major from SapMachine branch, if given; used only for the
    # gtest option decision at the bottom
    major_from_branch = 0
    if args.branch:
        branch_pattern = re.compile(utils.sapmachine_branch_pattern())
        match = branch_pattern.match(args.branch)
        if match is not None:
            if match.group(1) is not None:
                major_from_branch = int(match.group(1))
            else:
                # branch matched without an explicit version number --
                # presumably the development head; treat as "newer than
                # any numbered release" (TODO confirm against the
                # branch pattern in utils)
                major_from_branch = 9999

    # determine and set version date: prefer the GitHub release date of
    # the tag, otherwise fall back to today's date
    release_date = None
    if tag is not None:
        releases = utils.get_github_releases()
        if releases is not None:
            for release in releases:
                if release['tag_name'] == tag.as_string():
                    release_date = release['published_at'].split('T')[0]
                    print(str.format("Set date to release date of {0}: {1}", tag.as_string(), release_date), file=sys.stderr)
                    break
        if release_date is None:
            print(str.format("Tag {0} does not seem to exist or data could not be loaded from GitHub", tag.as_string()), file=sys.stderr)

    if release_date is None:
        release_date = date.today().strftime("%Y-%m-%d")
        print(str.format("Set date to today: {0}", release_date), file=sys.stderr)

    configure_opts.append(VERSION_DATE_ARG.format(release_date))

    # determine and set build number: -b parameter wins, then the tag's
    # build number, then (for ga tags) the latest non-ga tag's number
    build_number = None
    if args.build is not None:
        build_number = args.build
        print(str.format("Set build number from parameter: {0}", build_number), file=sys.stderr)

    if build_number is None and tag is not None:
        build_number = tag.get_build_number()
        if build_number is not None:
            print(str.format("Set build number from tag: {0}", build_number), file=sys.stderr)
        else:
            latest_non_ga_tag = tag.get_latest_non_ga_tag()
            if latest_non_ga_tag is not None:
                build_number = latest_non_ga_tag.get_build_number()
                if build_number is not None:
                    print(str.format("Tag seems to be a ga tag, using build number from latest non-ga tag {0}: {1}",
                        latest_non_ga_tag.as_string(), build_number), file=sys.stderr)

    if build_number is not None:
        configure_opts.append(VERSION_BUILD_ARG.format(build_number))

    # set version pre: snapshot/ea for non-release builds
    version_pre = ''
    if not args.release:
        if tag is None:
            version_pre = 'snapshot'
        else:
            version_pre = 'ea'

    # aarch64 builds are additionally flagged as beta
    if utils.get_arch().startswith('aarch64'):
        if not version_pre:
            version_pre = 'beta'
        else:
            version_pre += '-beta'

    configure_opts.append(VERSION_PRE_ARG.format(version_pre))

    # set version opt: the build date for snapshots, LTS markers for
    # LTS release builds ('LTS-sapmachine' before major 15)
    if tag is None:
        configure_opts.append(VERSION_OPT_ARG.format(release_date))
    else:
        if args.release and utils.sapmachine_is_lts(tag.get_major()):
            if tag.get_major() < 15:
                configure_opts.append(VERSION_OPT_ARG.format('LTS-sapmachine'))
            else:
                configure_opts.append(VERSION_OPT_ARG.format('LTS'))
        else:
            if tag.get_major() < 15:
                configure_opts.append(VERSION_OPT_ARG.format('sapmachine'))
            else:
                configure_opts.append(VERSION_OPT_ARG.format(''))

    # set version extra1 arg (= sap version)
    if tag is not None and tag.get_version_sap() is not None:
        configure_opts.append(VERSION_EXTRA1_ARG.format(tag.get_version_sap()))

    # set vendor version string (only for versions where it is supported)
    if (tag is None or
        (tag.get_major() > 14) or
        (tag.get_major() == 14 and tag.get_update() > 1) or
        (tag.get_major() == 11 and tag.get_update() > 7)):
        configure_opts.append(VENDOR_VERSION_STRING_ARG)

    # set other vendor options
    configure_opts.append(VENDOR_NAME_ARG)
    configure_opts.append(VENDOR_URL_ARG)
    configure_opts.append(VENDOR_BUG_URL_ARG)
    configure_opts.append(VENDOR_VM_BUG_URL_ARG)

    # set getest option (gtest is only wired up from major 15 on)
    if 'GTEST_DIR' in os.environ:
        if (tag is not None and tag.get_major() >= 15) or major_from_branch >= 15:
            configure_opts.append(GTEST_OPT.format(os.environ['GTEST_DIR']))

    print(' '.join(configure_opts))

    return 0
Esempio n. 29
0
#  You should have received a copy of the GNU General Public License
#  along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
############################################################################
import sys

import xbmcgui, xbmcaddon

import utils


# Script entry: the "dump_edid" argument dumps the EDID block; any other
# invocation opens the add-on settings (Raspberry Pi only).
action = sys.argv[1] if len(sys.argv) > 1 else None

if action == "dump_edid":
    with utils.remount():
        utils.dump_edid()
        icon = xbmcaddon.Addon().getAddonInfo('icon')
        xbmcgui.Dialog().notification(utils.ADDON_NAME,
                                      "Dumped edid to /flash/edid.dat",
                                      icon, sound=False)
else:
    utils.log("Started script")
    if not utils.get_arch().startswith('RPi'):
        utils.log("Not a Raspberry Pi")
        xbmcgui.Dialog().ok(utils.ADDON_NAME,
                            "This add-on only works on a Raspberry Pi")
    else:
        # Initialise settings before the dialog reads them; report an
        # unreadable config file instead of crashing.
        with utils.busy():
            try:
                utils.maybe_init_settings()
            except IOError as e:
                utils.read_error(utils.CONFIG_PATH, str(e))
        utils.log("Opening settings")
        xbmcaddon.Addon().openSettings()
Esempio n. 30
0
        if opt == "--region":
            region = val

        if opt == "--size":
            kwargs['size'] = int(val)

        if opt == "--name":
            kwargs['name'] = val

        if opt == "--desc":
            kwargs['desc'] = val

    if len(args) != 2:
        usage("incorrect number of arguments")

    snapshot_id = args[0]
    virt = args[1]
    arch = arch if arch else utils.get_arch()
    region = region if region else utils.get_region()

    if not virt in ('hvm', 'pvm'):
        fatal("virtualization type not supported: %s" % virt)

    ami_id, ami_name = register(snapshot_id, region, virt, arch, **kwargs)

    print ami_id, ami_name


if __name__ == "__main__":
    main()
#  the Free Software Foundation, either version 3 of the License, or
#  (at your option) any later version.
#
#  This program is distributed in the hope that it will be useful,
#  but WITHOUT ANY WARRANTY; without even the implied warranty of
#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#  GNU General Public License for more details.
#
#  You should have received a copy of the GNU General Public License
#  along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
############################################################################

import xbmcgui, xbmcaddon

import utils

utils.log("Started script")
if utils.get_arch() == 'RPi.arm':
    with utils.busy():
        try:
            utils.maybe_init_settings()
        except IOError as e:
            utils.read_error(utils.CONFIG_PATH, str(e))
    utils.log("Opening settings")
    xbmcaddon.Addon().openSettings()
else:
    utils.log("Not a Raspberry Pi")
    xbmcgui.Dialog().ok(utils.ADDON_NAME,
                        "This add-on only works on a Raspberry Pi")