Example #1
class Config(object):
    SECRET_KEY = '33456@#$456@#$12%^&*('
    ROOT_PATH = dn(dn(dn(ap(__file__))))

    # The visibility timeout defines how many seconds the broker waits for a worker
    # to acknowledge a task before the message is redelivered to another worker.
    # Be sure to read the Caveats section below.
    # This option is set via BROKER_TRANSPORT_OPTIONS:

    BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 3600}  # 1 hour.

    BROKER_URL = 'redis://127.0.0.1:6379/6'
    CELERY_RESULT_BACKEND = 'redis://127.0.0.1:6379/5'

    CELERY_TIMEZONE = 'Asia/Shanghai'
    CELERY_ENABLE_UTC = True

    # Reading task results is rarely performance-critical, so the more readable JSON is used
    CELERY_RESULT_SERIALIZER = 'json'

    # Specify the accepted content types
    CELERY_ACCEPT_CONTENT = ['json', 'msgpack', 'yaml', 'pickle']

    # Tasks are serialized and deserialized with msgpack
    CELERY_TASK_SERIALIZER = 'msgpack'

    # Task result expiry time; don't hard-code 86400, make the magic number's meaning obvious
    CELERY_TASK_RESULT_EXPIRES = 60 * 60 * 24

    MAIL_HOST = os.getenv("MAIL_HOST")  # mail server
    MAIL_USER = os.getenv("MAIL_USER")  # username
    MAIL_PASS = os.getenv("MAIL_PASS")  # password
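A minimal added sketch (not part of the original example) of how a Config class like this is usually handed to Celery; the app name 'myapp' is a placeholder:

from celery import Celery

celery_app = Celery('myapp')
# config_from_object accepts a class, so Config can be passed directly; Celery
# then picks up BROKER_URL, CELERY_RESULT_BACKEND and the other CELERY_* keys.
celery_app.config_from_object(Config)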
Example #2
def load_presets():
    with open(join(dirname(ap(__file__)), "presets/obj_presets.json")) as f:
        data = json.loads(f.read())
        camel = (lambda dat: ''.join("{} ".format(x.title())
                                     for x in dat.split("_"))[:-1])
        return [(key, camel(key), "Load lk_{}".format(key))
                for key in data.keys()]
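A quick added illustration of the camel helper above in isolation; the returned tuples follow the (identifier, name, description) triple that Blender EnumProperty items use (an assumption based on the bpy usage in the later examples):

camel = lambda dat: ''.join("{} ".format(x.title()) for x in dat.split("_"))[:-1]
print(camel("ico_sphere"))                 # -> Ico Sphere
print("Load lk_{}".format("ico_sphere"))   # -> Load lk_ico_sphere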
Example #3
    def setUp(self):

        self.testhost = os.environ.get('EASYAVRO_TESTING_HOST', 'localhost')
        c = CachedSchemaRegistryClient(url='http://{}:4002'.format(self.testhost))

        self.topic = 'easyavro-testing-topic'

        rp = ap(dn(__file__))
        with open(opj(rp, 'key.avsc'), 'rt') as f:
            avro_key_schema = schema.Parse(f.read())
        with open(opj(rp, 'value.avsc'), 'rt') as f:
            avro_value_schema = schema.Parse(f.read())

        c.register(self.topic + '-key', avro_key_schema)
        c.register(self.topic + '-value', avro_value_schema)

        self.bp = EasyAvroProducer(
            schema_registry_url='http://{}:4002'.format(self.testhost),
            kafka_brokers=['{}:4001'.format(self.testhost)],
            kafka_topic=self.topic
        )

        self.bc = EasyAvroConsumer(
            schema_registry_url='http://{}:4002'.format(self.testhost),
            kafka_brokers=['{}:4001'.format(self.testhost)],
            consumer_group='easyavro.testing',
            kafka_topic=self.topic,
            offset='earliest'
        )

        def on_recieve(key: str, value: str) -> None:
            self.recieved.append((key, value))
            L.info("Recieved message")
        self.recieved = []
        self.on_recieve = on_recieve
Example #4
def main(argv):  # {{{
    hexagon_path = None
    elf_path = None

    i = 0
    while i < len(argv):
        arg = argv[i]

        if arg.lower() == '--hexagon-path':
            if not (i + 1 < len(argv)):
                return print_err(file_name,
                                 'Expecting argument after --hexagon-path')

            hexagon_path = argv[i + 1]

            i += 2
            continue

        elif arg.lower().startswith('--hexagon-path='):
            path = '='.join(arg.split('=')[1:])

            if len(path) < 1:
                return print_err(file_name,
                                 'Expecting argument after --hexagon-path=')

            hexagon_path = path

            i += 1
            continue

        elif arg.lower() == '-v' or arg.lower() == '--verbose':
            global verbose
            verbose = True

        else:
            elf_path = arg

        i += 1

    if not elf_path:
        return print_err(file_name, 'No ELF path given')

    if not os.path.exists(elf_path):
        return print_err(file_name, 'ELF does not exist: %s' % elf_path)

    print '%s\n' % (ps(ap(elf_path))[-1], )

    # get elf data
    mc = MemoryChecker(elf_path, hexagon_path=hexagon_path)

    mc.pull_info()

    dp('')  # separate verbose data from regular data
    ok = mc.print_info()

    if not ok:
        return 1

    else:
        return 0
Example #5
def update_enum_presets(self, context):
    print("Load pre-made {} data".format(self.presets))
    obj = context.object

    # Fetch data
    with open(join(dirname(ap(__file__)), "presets/obj_presets.json")) as f:
        data = json.loads(f.read())[self.presets]

    obj.liquidknot.params.clear()
    preset_to_lk(data, obj)
Example #6
def unpack():
    # Preparations
    genepool = {
        "mkdir": ("mkdir", "md"),
        "python": ("/bin/python3.7m", "/bin/python.exe"),
        "pip": ("bin/pip", "Scripts/pip.exe")
    }
    index = int(not sys.platform == 'linux')

    python_exec = sys.exec_prefix + genepool["python"][index]
    venv_path = join(dirname(ap(__file__)), '../openGL/.venv')
    temp_path = join(dirname(ap(__file__)), '../openGL/temp')

    # Nothing to do if the venv and temp directory already exist
    if isdir(venv_path) and isdir(temp_path):
        return

    call([
        python_exec,
        # Path to script
        "{}".format(join(dirname(ap(__file__)), 'get_pip.py'))
    ])

    call([python_exec, '-m', 'pip', 'install', 'virtualenv', '--user'])

    call([
        python_exec,
        '-m',
        'virtualenv',
        venv_path  # Path to venv
    ])

    call([
        join(venv_path, genepool["pip"][index]), "install", "-r",
        join(dirname(ap(__file__)), 'requirements.txt')
    ])

    print(temp_path)
    mkdir(temp_path)
Example #7
    def execute(self, context):
        if context.mode == 'OBJECT':  # Blender reports Object Mode as 'OBJECT'
            mesh = bpy.data.meshes.new('LK_{}'.format(camel))
            obj = bpy.data.objects.new('LK_{}'.format(camel), mesh)

            # Construct the bmesh
            bm = bmesh.new()
            try:
                meshtod = getattr(bmesh.ops, "create_{}".format(snake))
                meshtod(bm)
            except Exception:  # no built-in bmesh primitive; fall back to the bundled .obj file
                path_to_file = join(dirname(ap(__file__)),
                                    "{}.obj".format(snake))
                bpy.ops.import_scene.obj(filepath=path_to_file)
                mesh_host = context.selected_objects[0]
                bm.from_mesh(mesh_host.data)
                bpy.ops.object.delete()
            bm.to_mesh(mesh)
            bm.free()

            # Set location to cursor
            obj.location = context.scene.cursor.location

            # Set Liquidknot props
            obj.liquidknot.active = True
            preset_to_lk(data[CAPS_SNAKE], obj)
            context.scene.collection.objects.link(obj)

            # Finishing touches
            shade_s(obj, snake not in flats)

        else:
            self.report({'WARNING'},
                        "Liquidknot: Option only valid in Object mode")
            return {'CANCELLED'}

        return {'FINISHED'}
Example #8
from gevent import monkey

monkey.patch_all()

import os
import sys
from os.path import abspath as ap, dirname as dn

sys.path.append(dn(dn(ap(__file__))))

from kervice.utils import when
from kervice.utils.app import Application
from kervice.app.const import Env
from kervice.app.main import init_app

app = Application.instance()
app.env = os.getenv("SERVICE_ENV", Env.local)
app.root_path = dn(dn(ap(__file__)))
app.name = os.getenv("SERVICE_NAME", None) or "test"
print(app.name)
app.debug = when(app.env == Env.production, False, True)


from kervice.utils.log_util import KLog
from kervice.bussiness.log import log_callback

KLog(callback=log_callback).init_log()
init_app()

app.config_from_object(app.config)
Example #9
def main():
    # Use optparse to set up usage
    use = "Usage: python %prog [options] <Input ELF> <Output ELF>"
    use += "\n       python %prog -s symbolName <ELF>"
    parser = optparse.OptionParser(usage=use, version="%prog 1.11")
    parser.add_option("-d",
                      action="store_true",
                      dest="debug",
                      help="enable debug")
    parser.add_option("-r",
                      action="store",
                      dest="removeSection",
                      type="str",
                      help="Section to remove")
    parser.add_option("-t",
                      action="store_true",
                      dest="timing",
                      help="print program execution time")
    parser.add_option("-V",
                      action="store_true",
                      dest="verification",
                      help="enable ELF verification (currently none)")
    parser.add_option("-R",
                      action="store_true",
                      dest="ro_fatal",
                      help="Enable ro fatal section removal/compression")
    parser.add_option("-M",
                      action="store_true",
                      dest="overlay_mem_dump",
                      help="Enable memory dump overlay")
    parser.add_option("-c",
                      action="store",
                      dest="compressorRoot",
                      help="Specify the root folder of the Q6Zip compressor")
    parser.add_option("-s",
                      action="store",
                      dest="coprocMerge",
                      type="str",
                      help="Path of coprocessor (silver) ELF")
    options, arguments = parser.parse_args()

    # Check arguments
    if len(arguments) != 2:
        parser.error("Unexpected argument length")
        exit(const.RC_ERROR)

    baseELF = arguments[0]
    modifiedELF = arguments[1]
    if not pe(baseELF):
        parser.error("Specified ELF file does not exist.")
        exit(const.RC_ERROR)

    # Print configuration
    if not options.debug:
        print "================================================================"
        print " elfManipulator.py - Generic manipulator for ELF files"
        print "----------------------------------------------------------------"
        print " Base ELF: ".ljust(20) + baseELF
        print " Modified ELF: ".ljust(20) + modifiedELF
        print " Debug: ".ljust(20) + str(options.debug)
        print " Verification: ".ljust(20) + str(options.verification)
        print "================================================================"

    # Record the starting time
    if options.timing:
        start_time = time.time()

    # Get the elf file as an elfFile object
    if not options.debug:
        print "----------------------------------------------------------------"
        print " Getting ELF data from input ELF..."
        print "----------------------------------------------------------------"
    elf = elfFileClass.elfFile(baseELF)

    # If debug is enabled, show ELF contents similarly to readelf -a
    if options.debug:
        elf.printInfo()

    if not options.debug:
        print "----------------------------------------------------------------"
        print " Applying requested ELF modifications..."
        print "----------------------------------------------------------------"
    """
    HERE IS WHERE ALL THE ELF MODIFICATIONS SHOULD HAPPEN
    """
    # -r option: Remove specified section
    if options.removeSection:
        if not options.debug:
            print "Attempting to remove section '" + options.removeSection + "'"
        elf.removeSectionByName(options.removeSection)

    symbolDict = {}

    # -M option: Remove BSS section for 'overlay_mem_dump' feature
    overlay_mem_dumpSize = -1
    if options.overlay_mem_dump:
        print "OVERLAY_MEM_DUMP"
        overlay_mem_dumpSh = elf.getSectionByName("overlay_mem_dump")
        ro_fatalSh = elf.getSectionByName("ro_fatal")
        if overlay_mem_dumpSh != const.RC_ERROR and ro_fatalSh != const.RC_ERROR:
            # If an overlay_mem_dump section is found
            print "\t'overlay_mem_dump' section found"
            print "\tAttempting to remove 'overlay_mem_dump' BSS section."
            overlay_mem_dumpSize = overlay_mem_dumpSh.sh_size
            elf.removeBssOverlaySectionByName("overlay_mem_dump")
            if overlay_mem_dumpSize > ro_fatalSh.sh_size:
                # It is expected that size of ro_fatal section matches the
                # program segment size in subsequent steps. elfManipulator is
                # currently written to manipulate single section segments,
                # with the EXCEPTION of removing BSS sections.
                print "\t'overlay_mem_dump' exceeded 'ro_fatal' size. Re-sizing."
                elf.resizeSectionByName("ro_fatal", len(ro_fatalSh.contents))
        else:
            if overlay_mem_dumpSh == const.RC_ERROR:
                print "\tNo 'overlay_mem_dump' section found. No-op."
            if ro_fatalSh == const.RC_ERROR:
                print "\tNo 'ro_fatal' section found. No-op."

    # -R option: Remove or compress 'ro_fatal' section
    if options.ro_fatal:
        print
        print "RO_FATAL COMPRESSION"
        # Get the section in question
        sh = elf.getSectionByName("ro_fatal")
        if sh != const.RC_ERROR:
            # If an ro_fatal section is found
            print "\t'ro_fatal' section found"
            if sh.sh_size == 0:
                # If 0 size section is found, remove the section for mbn
                print "\tZero-sized 'ro_fatal' section. Attempting removal."
                elf.removeSectionByName("ro_fatal")
            else:
                # Move up the ro_fatal section and then compress it
                print "\tMoving up 'ro_fatal' to closest section"
                symbolDict["__ro_fatal_old_start"] = sh.sh_addr
                oldSize = sh.sh_size
                elf.moveupSectionByName("ro_fatal")
                print "\tCompressing 'ro_fatal' contents using zlib"
                sh = elf.compressSectionByName("ro_fatal")
                if (sh != const.RC_ERROR):
                    # If compression is successful, update symbols accordingly
                    symbolDict["__ro_fatal_new_start"] = sh.sh_addr
                    symbolDict["__ro_fatal_new_end"] = sh.sh_addr + len(
                        sh.contents)
                else:
                    utils.raiseElfManipulatorError(
                        "Failed to compress ro_fatal")
                # Print out statistics
                print "\tOld Size: " + str(oldSize)
                print "\tNew Size: " + str(sh.sh_size)

        else:
            # The last segment is still created due to linker script
            print "\tNo 'ro_fatal' section found, checking for zero-sized segment"
            if ((elf.programHeaderTable[-1].p_memsz == 0)
                    and (elf.programHeaderTable[-1].p_vaddr == 0)):
                print "\tRemoving zero-sized segment"
                elf.elfHeader.e_phnum -= 1
                elf.programHeaderTable.pop()

    #=============================================
    # START: Q6_ZIP CHANGES
    #=============================================
    q6_roSection = ".candidate_compress_section"
    q6_rwSection = ".rw_candidate_compress_section"
    pageSize = 4096

    # determine where the compressors reside
    my_d = ps(inspect.getframeinfo(inspect.currentframe()).filename)[0]

    compressorDirs = [
        ap(pj(my_d, "../../core/kernel/dlpager/compressor")),
        ap(pj(my_d, "../../../core/kernel/dlpager/compressor")),
        ap(pj(my_d, "./include")),
    ]

    if options.compressorRoot:
        compressorDirs.insert(0, options.compressorRoot)

    for d in compressorDirs:
        if os.path.isdir(d) and 'q6zip_compress.py' in os.listdir(d):
            sys.path.insert(0, d)
            break

    print
    print "Q6ZIP FEATURE (RO)"

    # Get the ELF section containing the code to compress
    sh = elf.getSectionByName(q6_roSection)
    # Check if the section exists
    if (sh != const.RC_ERROR):

        # Save the old VA before ANY modifications to the section/segment
        oldVA = sh.sh_addr
        # Save old size
        oldSize = sh.sh_size
        print "oldVA = " + hex(oldVA)
        print "Size of " + q6_roSection + " is " + str(sh.sh_size)
        symbolDict["start_va_uncompressed_text"] = sh.sh_addr
        symbolDict["end_va_uncompressed_text"] = sh.sh_addr + sh.sh_size

        # Move up the vaddr of the section, corresponding segment and mark all
        # its symbols as SHN_UNDEF. Will fail if q6_roSection is part of multi-
        # section segment in the ELF.
        # Save the new VA after moving up the section/segment
        print "Moving up " + q6_roSection + " to closest segment"
        elf.moveupSectionByName(q6_roSection)
        newVA = sh.sh_addr
        print "newVA = " + hex(newVA)

        print "Compress " + q6_roSection
        import q6zip_compress
        try:
            text_nonpartial_start = elf.getSymbolByName(
                "__swapped_range_text_partial_end__").st_value
            if text_nonpartial_start != 0:
                size_partial = text_nonpartial_start - oldVA
            print "text nonpartial start = " + hex(text_nonpartial_start)
            print "text partial size = " + str(size_partial)
            sh.contents = q6zip_compress.compress(pageSize, newVA, sh.contents,
                                                  size_partial)
        except:
            sh.contents = q6zip_compress.compress(pageSize, newVA, sh.contents)

        # Alignment needed for Q6ZIP RW in the next section
        alignedSize = pageSize * (len(sh.contents) / pageSize + 1)
        for i in xrange(alignedSize - len(sh.contents)):
            sh.contents += '\0'
        elf.resizeSectionByName(q6_roSection, alignedSize)

        print "Size of " + q6_roSection + " after compression " + str(
            sh.sh_size)
        print("Memory Savings Report: Q6Zip RO: Gross Memory Saved: %u\n" %
              (oldSize - sh.sh_size))

        symbolDict["start_va_compressed_text"] = newVA
        symbolDict["end_va_compressed_text"] = sh.sh_addr + sh.sh_size

        # Change permissions (remove execute)
        print "\tRemoving X from " + q6_roSection
        sh.sh_flags = sh.sh_flags & ~const.sectionFlags.SHF_EXECINSTR
        ph = elf.getProgramHeaderBySectionName(q6_roSection)
        if ph != const.RC_ERROR:
            ph.p_flags &= ~const.segmentFlags.PF_X
        else:
            utils.raiseElfManipulatorError(
                "Unexpected error while changing permissions for " +
                q6_roSection)

        if (sh != const.RC_ERROR):
            print "Success compressing " + q6_roSection
        else:
            utils.raiseElfManipulatorError("Failed to compress " +
                                           q6_roSection)
    else:
        # Like ro_fatal, need to check for zero-sized segment
        print "No " + q6_roSection + " section found, checking for zero-sized segment"
        if ((elf.programHeaderTable[-1].p_memsz == 0)
                and (elf.programHeaderTable[-1].p_vaddr == 0)):
            print "Removing zero-sized segment"
            elf.elfHeader.e_phnum -= 1
            elf.programHeaderTable.pop()

    print
    print "Q6ZIP FEATURE (RW)"
    # Get the ELF section containing the code to compress
    sh = elf.getSectionByName(q6_rwSection)
    # Check if the section exists
    if (sh != const.RC_ERROR):
        print "\t'" + q6_rwSection + " section found"
        print "Size of " + q6_rwSection + " is " + str(sh.sh_size)
        # Save the old VA before ANY modifications to the section/segment
        oldSize = sh.sh_size
        bss_common_start = elf.getSymbolByName(
            "__swapped_segments_bss_start__").st_value
        print "\bss_common_start = " + hex(bss_common_start)
        symbolDict["start_va_uncompressed_rw"] = sh.sh_addr
        symbolDict["end_va_uncompressed_rw"] = sh.sh_addr + sh.sh_size
        rw_contents = bss_common_start - sh.sh_addr
        print "\t rw_contents = " + hex(rw_contents)

        # Move up the section
        print "\tMoving up " + q6_rwSection + " to closest section"
        elf.moveupSectionByName(q6_rwSection)
        newVA = sh.sh_addr

        print "\tCompressing " + q6_rwSection + " contents using q6 compressor"
        import rw_py_compress
        sh.contents = rw_py_compress.rw_py_compress(pageSize, newVA,
                                                    sh.contents[0:rw_contents])
        # Pad with '0' characters until the length is a multiple of pageSize
        rem = len(sh.contents) % pageSize
        if rem != 0:
            sh.contents = sh.contents.ljust(
                len(sh.contents) + pageSize - rem, '0')

        # Resize the section
        elf.resizeSectionByName(q6_rwSection, len(sh.contents))

        symbolDict["start_va_compressed_rw"] = newVA
        symbolDict["end_va_compressed_rw"] = sh.sh_addr + sh.sh_size

        # Change permissions (remove execute)
        print "\tRemoving X from " + q6_rwSection
        sh.sh_flags = sh.sh_flags & ~const.sectionFlags.SHF_EXECINSTR
        ph = elf.getProgramHeaderBySectionName(q6_rwSection)
        if ph != const.RC_ERROR:
            ph.p_flags &= ~const.segmentFlags.PF_X
        else:
            utils.raiseElfManipulatorError(
                "Unexpected error while changing permissions for " +
                q6_rwSection)

        # Print out statistics
        print "Size of " + q6_rwSection + " after compression " + str(
            sh.sh_size)
        print("Memory Savings Report: Q6Zip RW: Gross Memory Saved: %u" %
              (oldSize - sh.sh_size))

        # Moving up sections
        elf.moveupElfOffsetSectionByName(q6_roSection)
        elf.moveupElfOffsetSectionByName(q6_rwSection)
        # moving this part to pplkcmd.py for prototyping.
        #elf.moveupElfOffsetSectionByName(dynrec_section)
        #elf.moveupElfOffsetSectionByName("QSR_STRING")
    else:
        # Like ro_fatal, need to check for zero-sized segment
        print "\tNo " + q6_rwSection + " section found, checking for zero-sized segment"
        if ((elf.programHeaderTable[-1].p_memsz == 0)
                and (elf.programHeaderTable[-1].p_vaddr == 0)):
            print "\tRemoving zero-sized segment"
            elf.elfHeader.e_phnum -= 1
            elf.programHeaderTable.pop()

    #=============================================
    # END: Q6_ZIP CHANGES
    #=============================================

    # -M option: after all modifications, check that the 'overlay_mem_dump' overlay still fits
    if options.overlay_mem_dump and overlay_mem_dumpSize >= 0:
        # Need to check that the overlay_memdump does not exceed ro_fatal +
        # candidate_compress_section AFTER manipulations.
        roFatalSh = elf.getSectionByName("ro_fatal")
        roCompressSh = elf.getSectionByName(q6_roSection)
        rwCompressSh = elf.getSectionByName(q6_rwSection)

        roFatalSz = 0
        roCompressSz = 0
        rwCompressSz = 0

        if roFatalSh != const.RC_ERROR:
            roFatalSz = roFatalSh.sh_size

        if roCompressSh != const.RC_ERROR:
            roCompressSz = roCompressSh.sh_size

        if rwCompressSh != const.RC_ERROR:
            rwCompressSz = rwCompressSh.sh_size

        if overlay_mem_dumpSize > (roFatalSz + roCompressSz + rwCompressSz):
            print "FATAL ERROR: memdump exceeds ro_fatal + ro/rw candidate_compress_sections."
            print "ro_fatal size: %s bytes" % str(roFatalSz)
            print "ro candidate_compress_section size: %s bytes" % str(
                roCompressSz)
            print "rw candidate_compress_section size: %s bytes" % str(
                rwCompressSz)
            print "FW memdump overlay size: %s bytes" % str(
                overlay_mem_dumpSize)
            print "Short by: %s bytes" % (
                (roFatalSz + roCompressSz + rwCompressSz) -
                overlay_mem_dumpSize)
            print "Aborting"
            exit(const.RC_ERROR)

    # After all modifications, update image_vend pointing to the end of the
    # last program segment, aligned using integer division
    lastPh = elf.programHeaderTable[-1]
    for ph in elf.programHeaderTable:
        if ph.p_vaddr > lastPh.p_vaddr:
            lastPh = ph
    endAddress = lastPh.p_vaddr + lastPh.p_align * (
        lastPh.p_memsz / lastPh.p_align + 1)
    symbolDict["image_vend"] = endAddress

    #===========================================================================
    # Option:       -s <COPROC_ELF>
    # Description:  Combine silver coprocessor image with primary ELF. No debug
    #               information will be preserved.
    # Requirements:
    #   [1] Coprocessor image only has a single segment
    #   [2] Primary image has a single section segment (COPROC_IMAGE)
    #   [3] Coprocessor segment does not exceed the original size of COPROC_IMAGE
    #   [4] Coprocessor segment contents becomes the contents of COPROC_IMAGE
    #   [5] __coproc_image_start__ must point to the start of the section
    #===========================================================================
    if options.coprocMerge:
        print "Combining coprocessor image with primary image..."
        # Ensure that coprocessor segment does not exceed COPROC_IMAGE size
        coproc_imageSh = elf.getSectionByName("COPROC_IMAGE")
        coproc_imagePh = elf.getProgramHeaderBySectionName("COPROC_IMAGE")
        coproc_align = 256 * (1 << 10)
        print "\tLooking for COPROC_IMAGE section in primary image"
        if coproc_imageSh != const.RC_ERROR and coproc_imagePh != const.RC_ERROR:
            print "\tCOPROC_IMAGE section found in primary image"
            print "\tValidating coprocessor ELF"
            coprocELF = options.coprocMerge
            # Read in the coprocessor ELF and verify [1]
            coproc = elfFileClass.elfFile(coprocELF)
            if len(coproc.programHeaderTable) != 1:
                utils.raiseElfManipulatorError(
                    "Coproc image has more than 1 segment")
            print "\tComparing size of coprocessor image with reserved COPROC_IMAGE"
            if coproc_imagePh.p_filesz >= coproc.programHeaderTable[0].p_filesz:
                # Move up COPROC_IMAGE, if required (both VA/PA and ELF offsets)
                print "\tMoving up COPROC_IMAGE (address and ELF offset)"
                elf.moveupSectionByName("COPROC_IMAGE", align=coproc_align)
                elf.moveupElfOffsetSectionByName("COPROC_IMAGE")

                # Read in the coprocessor image
                coprocImage = utils.getDataFromELF(
                    coprocELF, coproc.programHeaderTable[0].p_offset,
                    coproc.programHeaderTable[0].p_filesz)

                # Replace the contents of COPROC_IMAGE
                print "\tReplacing contents of COPROC_IMAGE and resizing"
                coproc_imageSh.contents = coprocImage
                coprocFinalSize = coproc_imagePh.p_align * (
                    len(coproc_imageSh.contents) / coproc_imagePh.p_align + 1)
                for i in xrange(coprocFinalSize -
                                len(coproc_imageSh.contents)):
                    coproc_imageSh.contents += '\0'
                elf.resizeSectionByName("COPROC_IMAGE",
                                        len(coproc_imageSh.contents))

                print "\tUpdating __coproc_image_start__"
                symbolDict["__coproc_image_start__"] = coproc_imagePh.p_vaddr
                symbolDict["fw_coproc_image_start"] = coproc_imagePh.p_vaddr

                print "\tUpdating MMU entries surrounding the coprocessor"
                symbolDict[
                    "__MMU_region_unmapped_align_padding_start_coproc"] = coproc_imagePh.p_vaddr

                coproc_end = coproc_imagePh.p_vaddr + len(
                    coproc_imageSh.contents)
                coproc_end = (coproc_end + coproc_imagePh.p_align -
                              1) & ~(coproc_imagePh.p_align - 1)

                symbolDict[
                    "__MMU_region_start_name_qsr_A0_1_R_1_W_1_X_0_lock_1"] = coproc_end

                # move this step to pplkcmd.py for CR800980.
                #print "\tMoving up QSR_STRING"
                #elf.moveupElfOffsetSectionByName("QSR_STRING")
            else:
                # If coprocessor segment exists but is larger than reserved size, need to catch this immediately
                utils.raiseElfManipulatorError(
                    "Coprocessor segment > COPROC_IMAGE")
        else:
            print "\tPrimary image does not have COPROC_IMAGE (no-op)"

    # Update all symbols
    if symbolDict:
        print "----------------------------------------------------------------"
        print " Updating symbol values from all ELF modifications..."
        print "----------------------------------------------------------------"
        elf.updateSymbolValuesByDict(symbolDict)

    # If verification is enabled, verify the modified ELF data
    if options.verification:
        print "----------------------------------------------------------------"
        print " Verifying modified ELF data..."
        print "----------------------------------------------------------------"
        elf.verify()

    # Write out the ELF file based on the elfFile object
    if not options.debug:
        print "----------------------------------------------------------------"
        print " Writing out modified ELF..."
        print "----------------------------------------------------------------"
    elf.writeOutELF(modifiedELF)

    # Report the total execution time
    if options.timing:
        print("Execution time: %.2f seconds" % (time.time() - start_time))

    # elfManipulator ran to completion, exit with return code 0
    exit(const.RC_SUCCESS)
Example #10
File: spm12.py  Project: xgrg/alfa
    parser.add_argument('input', type=str,
            help='NIfTI image to run SPM12 segmentation pipeline on')
    parser.add_argument('--subject', type=str,
            help='subject identifier to name Nipype nodes after')
    parser.add_argument('--destdir', type=str,
            help='destination folder where to write/run the jobs.')
    args = parser.parse_args()
    source = args.input
    basedir = args.destdir
    log.info('Workflow base directory set to: %s'%basedir)

    if not args.subject:
        from brainvisa import axon
        axon.initializeProcesses()
        subject = _get_subject_(ap(source))
    else:
        subject = args.subject

    log.info('Processing file %s (subject: %s)'%(ap(source), subject))
    w = create_workflow([ap(source)], [subject], basedir=basedir)
    res = w.run()

    if not args.subject:
        spm_node_name = [e for e in w.list_node_names() if 'spm12' in e][0]
        log.info('SPM node name: %s'%spm_node_name)
        spm_node = w.get_node(spm_node_name)
        import gzip, pickle
        import os.path as osp
        pp = pickle.load(gzip.open(osp.join(w.base_dir, w.name, spm_node_name,
                'result_%s.pklz'%spm_node_name), 'rb'))
Example #11
  def __init__(self, target, source, env, debug=False, keeplog=True):  # {{{
    '''Take everything we need from 'env' and prep it for our subroutines.

    Note: to find env items:
    print [i for i in env._dict.keys() if 'search_string' in i.lower()]
    '''
    self.elfpath = ap(str(source[0]))
    self.outdir = ap(ps(self.elfpath)[0])
    if target is not None:
      self.outpath = ap(str(target[0]))
      if len(target) == 1:
        self.qdbpath = ""
      elif len(target) == 2:
        self.qdbpath = ap(str(target[1]))
    else:
      self.outpath = None
      self.qdbpath = None
    self.buildms = ap(env.subst('${BUILD_MS_ROOT}'))
    self.buildpath = ap(env.subst('${BUILD_ROOT}'))
    self.memreport = (env.subst('${MEMREPORT}') == '1' or
                      ('USES_INTERNAL_BUILD' in env and
                       # last dir of $TARGET_ROOT is 'b'
                       ps(env.subst('${TARGET_ROOT}'))[-1] == 'b'))
    self.buildroot = env.subst('${TARGET_ROOT}')
    self.objcopy = env.subst('${OBJCOPY}')
    self.threads = []
    self.python = env.subst('${PYTHONCMD}')
    self.short_buildpath = env.subst('${SHORT_BUILDPATH}')
    self.cust_config = env.FindConfigFiles('cust_config.xml')[0]
    self.core_root = env.subst('${COREBSP_ROOT}')
    self.debug = debug

    if len(source) > 1 and source[1] is not None:
      self.coproc_path = ap(str(source[1]))

    else:
      self.coproc_path = None

    # BYO OrderedDict implementation: [0]: order list, [1]: dict
    self.savings_report = [[], {}]

    self.mypspath = ap(file_dir)

    if env.subst('${FAKE_ENV}') == '1':
      self.replay = True

    else:
      self.replay = False

    self.executed_manips = []
    self.finalized = False

    if keeplog:
      self.log_handle = open(
        ap(pj(self.buildms, 'pplk-%s.log' % self.short_buildpath)),
        'wb',
      )

    else:
      self.log_handle = tempfile.TemporaryFile(
        prefix='pplk-tmp-%s-' % (self.short_buildpath,),
        suffix='.log',
        dir=self.buildms,
      )

    self.log('pplkcmd log version: %i' % (self.log_version,))
    self.log('pplkcmd __dict__: %s' % (base64.b64encode(repr(self.__dict__)),))
Example #12
        if type(v) is not float:
            exp = re.search(r"([A-Z]+_[A-Z]{1,3}) ?(.*)", v).groups()
            add_driver(obj.liquidknot.params[i], 'value', -1, obj, exp[0],
                       exp[1])


def preset_to_lk(data, obj):
    add_params(data["params"], obj)
    obj.liquidknot.de = data["de"]


# List the Flat Shaded primitives
flats = ["cube", "octahedron", "cylinder"]

# Fetch data
with open(join(dirname(ap(__file__)), "obj_presets.json")) as f:
    data = json.loads(f.read())


def shade_s(obj, mode=True):
    if obj.data.polygons[0].use_smooth != mode:  # skip if shading already matches
        for poly in obj.data.polygons:
            poly.use_smooth = mode


def OperatorFactory(name):
    snake = re.sub(" ", "_", name.lower())
    camel = '_'.join([x.title() for x in re.sub("_", " ", name).split()])
    CAPS_SNAKE = snake.upper()

    def execute(self, context):
Example #13
#pylint: disable=too-many-locals,no-self-use
#pylint: disable=too-many-public-methods,too-many-statements
"""
General unit tests.

Run 'python3 -m unittest -v' from the ebml directory.
"""

import unittest
from io import BytesIO
import os
import sys
import random

from os.path import dirname as dn, abspath as ap
sys.path.append(dn(dn(dn(ap(__file__)))))

TEST_DATA_DIR = os.path.dirname(__file__)
TEST_FILE = os.path.join(TEST_DATA_DIR, 'test.mkv')

__all__ = ['EbmlTest', 'UtilityTest', 'HeaderTest', 'TagsTest', 'ParsedTest',
           'FilesTest', 'UNK_ID', 'TEST_FILE_DATA']

import logging
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.INFO)

UNK_ID = 0x01223344

with open(TEST_FILE, 'rb') as mkv_file:
    TEST_FILE_DATA = mkv_file.read()
Example #14
from os.path import abspath as ap, dirname as dn

from sqlalchemy import Column, ForeignKey, Integer, String, Date, Float, Text, DateTime
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from sqlalchemy import create_engine


Base = declarative_base()
DB_PATH = '{0}/d2modeling.db'.format(dn(dn(ap(__file__))))
DB_NAME = 'sqlite:///{0}'.format(DB_PATH)
engine = create_engine(DB_NAME)
Base.metadata.bind = engine


class Team(Base):
    __tablename__ = 'team'
    name = Column("name", String(250), primary_key=True)
    elo = Column("elo", Float, default=1200)


class Match(Base):
    __tablename__ = 'match'

    id = Column("id", Integer, primary_key=True)
    dire_score = Column("dire_score", Integer)
    radiant_score = Column("radiant_score", Integer)
    time = Column("time", Float)
    date = Column("date", Date, index=True)
    winner = Column("winner", String(250))
    dire_name = Column("dire_name", String(250), ForeignKey('team.name'), index=True)
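A short added sketch (assumed usage, not from the original snippet) showing how these models would typically be materialized and queried; the team name is made up:

from sqlalchemy.orm import sessionmaker

Base.metadata.create_all(engine)       # create the 'team' and 'match' tables if missing
Session = sessionmaker(bind=engine)
session = Session()
session.add(Team(name="Example Team", elo=1200))
session.commit()
print(session.query(Team).count())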
Example #15
if __name__ == "__main__":
    import sys
    from os.path import abspath as ap, dirname as dn

    sys.path.append(dn(dn(ap(__file__))))

    from kservices.main import app

    app.run(host="0.0.0.0", port=8110, workers=1, debug=True)
Example #16
class Config(object):
    SECRET_KEY = '33456@#$456@#$12%^&*('
    ROOT_PATH = dn(dn(dn(ap(__file__))))
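An added illustration of what the triple-dirname idiom resolves to, using a made-up path:

from os.path import abspath as ap, dirname as dn

# If this settings module lived at /srv/project/app/config/settings.py,
# three dirname() calls walk up to the project root.
example_file = '/srv/project/app/config/settings.py'
print(dn(dn(dn(ap(example_file)))))    # -> /srv/project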
Example #17
  def gen_memreport(self):  # {{{
    '''Call mpssmem.py and QMAT'''
    t1 = timer()
    base_outdir = ap(pj(self.buildms, 'reports'))

    self.p('\nGenerating MPSS memory reports in %s' % base_outdir)

    memreport = {
      'mpssmem': {
        'paths': [
          'mpssmem.py',
        ],

        'arguments': [
          '-b',
          ap(self.buildroot),
          '-f',
          '-v',
          self.short_buildpath],

        'pyscript': True},

      'qcmat': {
        'paths': [
          pj('reports', 'QMAT', 'rel', 'QMemoryAnalyzer_unix_cmd'),
          pj('reports', 'QMAT', 'rel', 'QMemoryAnalyser_cmd.exe')],

        'arguments': [
          '-b',
          ap(self.buildroot)],

        'pyscript': False}}

    for report_tool, items in memreport.iteritems():
      self.p('  Generating the "%s" memory report' % report_tool)

      command = None

      for path in items['paths']:
        if items['pyscript']:
          potential_command = self.python

        else:
          potential_command = ap(pj(self.mypspath, path))

        if self.test_executable(potential_command):
          command = [potential_command]
          break

      if command:
        outdir = ap(pj(base_outdir, report_tool))

        if not os.path.exists(outdir):
          os.makedirs(outdir)

        if memreport[report_tool]['pyscript']:
          retcode, stdout, stderr = self.call(
            command +
            [ap(pj(self.mypspath, memreport[report_tool]['paths'][0]))] +
            memreport[report_tool]['arguments'] +
            ['-o', outdir])

        else:
          retcode, stdout, stderr = self.call(
            command +
            memreport[report_tool]['arguments'] +
            ['-o', outdir])

        self.log('memreport - %s:' % report_tool)
        self.log('return code: %s' % repr(retcode))
        self.log('stdout:\n%s' % stdout)
        self.log('stderr:\n%s' % stderr)
        self.log()

      else:
        self.p('  Could not execute the "%s" tool' % report_tool)

    t2 = timer()
    t_diff = t2 - t1

    self.p('  Memory report execution time: %s' % t_str(t_diff))