Example #1
    def _compile(self, md, ars):
        # Compile the module and record a per-module status line in ars.
        mcp = md.Compile()
        smsg = mcp.get_name()
        try:
            # Apply C++ compiler flags for the current platform, if configured.
            cflgs = self._cpp_compiler_flags
            if cflgs is not None:
                if utils.is_windows():
                    mcp.set_cpp_compiler_flags("windows", cflgs)
                elif utils.is_linux():
                    mcp.set_cpp_compiler_flags("linux", cflgs)
                elif utils.is_mac():
                    mcp.set_cpp_compiler_flags("mac", cflgs)

            # Apply linker flags for the current platform, if configured.
            lflgs = self._linker_flags
            if lflgs is not None:
                if utils.is_windows():
                    mcp.set_linker_flags("windows", lflgs)
                elif utils.is_linux():
                    mcp.set_linker_flags("linux", lflgs)
                elif utils.is_mac():
                    mcp.set_linker_flags("mac", lflgs)

            mcp.run()
            smsg += " - OK!"
            ars.append(smsg)
        except Exception as e:
            smsg += " - ERROR: " + utils.exception_to_string(e)
            ars.append(smsg)
            raise
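
Every example here reports failures through utils.exception_to_string(e). The utils module itself is not part of these snippets, so the following is only a minimal sketch, assuming the helper simply renders the exception together with its traceback via the standard traceback module:

import traceback

def exception_to_string(e):
    # Hypothetical stand-in for utils.exception_to_string:
    # format the exception type, message and traceback as a single string.
    return "".join(traceback.format_exception(type(e), e, e.__traceback__))
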
Example #2
    def _compile(self, md, ars):
        mcp = md.Compile()
        smsg = mcp.get_name()
        try:
            mcp.run()
            smsg += " - OK!"
            ars.append(smsg)
        except Exception as e:
            smsg += " - ERROR: " + utils.exception_to_string(e)
            ars.append(smsg)
            raise
Example #3
def lambda_handler(event, context):
    """ Entrypoint for lambda
    """

    for record in event["Records"]:
        job = json.loads(record["body"])
        # At this point we never want to rerun jobs on failure, but we
        # do want to see the error.
        try:
            run_job(job)

        except Exception as e:
            print(utils.exception_to_string(e))

    return {'statusCode': 200, 'body': json.dumps('Hello from Lambda!')}
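
The handler above walks event["Records"] and JSON-decodes each record's body, which is the shape of an SQS-triggered Lambda event. A quick local smoke test, assuming run_job and utils are importable and using a made-up "job_id" payload key purely for illustration, could look like this:

import json

# Hypothetical SQS-shaped event; the "job_id" key is only an illustration.
fake_event = {
    "Records": [
        {"body": json.dumps({"job_id": 123})},
    ]
}
print(lambda_handler(fake_event, context=None))
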
Example #4
            if not PROXY_PASSWORD == "":
                prx.set_host(PROXY_PASSWORD)
            print "Check installation code..."
            prop = get_url_prop(url, prx)
            if 'error' in prop:
                print "installation code error: " + prop['error']
            else:
                config = {}
                config['url_primary'] = URL
                config['key'] = prop['key']
                config['password'] = obfuscate_password(prop['password'])
                config['enabled'] = True
                config['debug_indentation_max'] = 0
                config['debug_mode'] = True
                config['develop_mode'] = True
                config['proxy_type'] = PROXY_TYPE
                config['proxy_host'] = PROXY_HOST
                config['proxy_port'] = PROXY_PORT
                config['proxy_user'] = PROXY_USER
                config['proxy_password'] = PROXY_PASSWORD
                s = json.dumps(config, sort_keys=True, indent=1)
                f = codecs.open(pthconfig, 'wb', 'utf-8')
                f.write(s)
                f.close()
                print "file core/config.json generated."
        except Exception as e:
            print "Connection error: " + utils.exception_to_string(e)

    print ""
    print "END."
Example #5

if __name__ == "__main__":
    # Before running this, revise the following parameters in config.py:
    # 1. LEARNING_RATE: tune this to obtain better accuracy and less overfitting.
    # 2. MAX_EPOCH: the maximum number of epochs for training. Remember each epoch writes a >500MB model checkpoint.
    # 3. MODEL_PROCESS_CONTEXT: the list of hardware contexts (CPU, GPU) used to train the model. Training is faster on a GPU,
    #    and MXNet supports multiple GPUs, so you can put a list of your GPUs into this array.
    # Example: python train.py -s 0

    # Init the argument parser
    ap = argparse.ArgumentParser()
    ap.add_argument("-s",
                    "--start_epoch",
                    type=int,
                    default=0,
                    help="resume the training from this input epoch")
    args = vars(ap.parse_args())

    # Specify the epoch number to start/resume the training from last checkpoint
    start_epoch = args["start_epoch"]

    # Init logger
    logger = utils.get_logger(f"./logs/training_{start_epoch}.log")

    try:
        run_train(logger)
    except Exception as ex:
        error_message = utils.exception_to_string(ex)
        logger.error(error_message)
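
utils.get_logger is not shown in the snippet. A minimal sketch, assuming it simply returns a standard logging.Logger that writes to the given file path (the format string is illustrative, and the ./logs directory is assumed to exist):

import logging

def get_logger(log_path):
    # Hypothetical equivalent of utils.get_logger: a file-backed logger.
    logger = logging.getLogger(log_path)
    logger.setLevel(logging.INFO)
    handler = logging.FileHandler(log_path)
    handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
    logger.addHandler(handler)
    return logger
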
Example #6
    def _dependency(self, snm, sver, ars):
        # Download/update the dependency snm at version sver and copy its libraries into the native path.
        spth = self.get_path_tmp() + os.sep + snm
        smsg = snm + " " + sver
        utils.info("BEGIN " + snm)
        try:
            conf = utils.read_json_file(spth + os.sep + snm + ".json")
            bupd = True
            if conf is not None:
                if "version" in conf:
                    if conf["version"] == sver:
                        bupd = False
                    else:
                        utils.info("incorrect version.")
                else:
                    utils.info("version not found.")
            else:
                utils.info("version not found.")
            # Version missing or mismatched: (re)download the headers and the native library.
            if bupd:
                sfx = detectinfo.get_native_suffix()
                if sfx is None or "generic" in sfx:
                    utils.info("os not detected.")
                    raise Exception("You have to compile it manually.")
                if self._b32bit:
                    sfx = sfx.replace("64", "32")
                utils.init_path(spth)
                utils.info("download headers and library ...")
                nurl = utils.get_node_url()

                if snm is not "lib_gcc" and snm is not "lib_stdcpp":
                    appnm = "headers_" + snm + ".zip"
                    utils.download_file(nurl + "getAgentFile.dw?name=" + appnm,
                                        spth + os.sep + appnm)
                    utils.unzip_file(spth + os.sep + appnm, spth + os.sep)
                    utils.remove_file(spth + os.sep + appnm)

                appnm = snm + "_" + sfx + ".zip"
                utils.download_file(nurl + "getAgentFile.dw?name=" + appnm,
                                    spth + os.sep + appnm)
                utils.unzip_file(spth + os.sep + appnm, spth + os.sep,
                                 "native/")
                utils.remove_file(spth + os.sep + appnm)
                #FIX Version
                conf = utils.read_json_file(spth + os.sep + snm + ".json")
                if conf is not None:
                    if "version" not in conf:
                        conf["version"] = sver
                        utils.write_json_file(conf,
                                              spth + os.sep + snm + ".json")

            #COPY LIB TO NATIVE
            for f in os.listdir(spth):
                if f.endswith(('.dll', '.so', '.dylib')):
                    shutil.copy2(spth + os.sep + f,
                                 self.get_path_native() + os.sep + f)

            #POST FIX
            self._dependency_post_fix(snm, sver)

            smsg += " - OK!"
            ars.append(smsg)
            utils.info("END " + snm)
        except Exception as e:
            smsg += " - ERROR: " + utils.exception_to_string(e)
            ars.append(smsg)
            raise
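
Both _compile (Example #1) and _dependency append one status line per item to the shared ars list and re-raise on failure, so a caller can still print a summary when a step aborts. A hypothetical driver (the instance name, dependency names and versions below are placeholders) might look like:

# Hypothetical driver: process each dependency, then print the collected report.
ars = []
try:
    for name, version in [("lib_z", "1.2.11"), ("lib_gcc", "1.0.0")]:
        compiler._dependency(name, version, ars)
finally:
    for line in ars:
        print(line)
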