    pca_vector = pca.transform(scaled_vector.reshape(1, -1))

    # predict() returns an array with a single label; map it to a readable string
    prediction = clf.predict(pca_vector)
    if prediction[0] == 1:
        prediction = "BIOFILM"
    else:
        prediction = "OTHER"

    return prediction


if __name__ == '__main__':

    # connect to the JVM
    gateway = JavaGateway(gateway_parameters=GatewayParameters(port=25335))

    # get the patient path.
    patient = gateway.entry_point
    load_path = patient.getPathCampi()

    # get the results path
    biofilm_path = patient.getPathBiofilmsi()
    other_path = patient.getPathBiofilmno()

    # load the scaler, the PCA and the classifier
    scaler = joblib.load('scaler.pkl')
    clf = joblib.load('clf.pkl')
    pca = joblib.load('pca.pkl')

    for file in os.listdir(load_path):
Example #2
    def _launch_gateway(self, class_path, popen_kwargs=None):
        """
        launch jvm gateway
        :param popen_kwargs: Dictionary of kwargs to pass to Popen when spawning
            the py4j JVM. This is a developer feature intended for use in
            customizing how pyspark interacts with the py4j JVM (e.g., capturing
            stdout/stderr).
        """

        command = ["java"]
        command.append("-cp")
        command.append(class_path)
        command.append("org.apache.spark.deploy.raydp.AppMasterEntryPoint")

        # Create a temporary directory where the gateway server should write the connection
        # information.
        conn_info_dir = tempfile.mkdtemp()
        try:
            fd, conn_info_file = tempfile.mkstemp(dir=conn_info_dir)
            os.close(fd)
            os.unlink(conn_info_file)

            env = dict(os.environ)
            env["_RAYDP_APPMASTER_CONN_INFO_PATH"] = conn_info_file

            # Launch the Java gateway.
            popen_kwargs = {} if popen_kwargs is None else popen_kwargs
            # We open a pipe to stdin so that the Java gateway can die when the pipe is broken
            popen_kwargs['stdin'] = PIPE
            # We always set the necessary environment variables.
            popen_kwargs['env'] = env

            # Don't send ctrl-c / SIGINT to the Java gateway:
            def preexec_func():
                signal.signal(signal.SIGINT, signal.SIG_IGN)

            popen_kwargs['preexec_fn'] = preexec_func
            proc = Popen(command, **popen_kwargs)

            # Wait for the file to appear, or for the process to exit, whichever happens first.
            while not proc.poll() and not os.path.isfile(conn_info_file):
                time.sleep(0.1)

            if not os.path.isfile(conn_info_file):
                raise Exception(
                    "Java gateway process exited before sending its port number"
                )

            with open(conn_info_file, "rb") as info:
                length = info.read(4)
                if not length:
                    raise EOFError
                gateway_port = struct.unpack("!i", length)[0]

        finally:
            shutil.rmtree(conn_info_dir)

        gateway = JavaGateway(gateway_parameters=GatewayParameters(
            port=gateway_port, auto_convert=True))

        # Store a reference to the Popen object for use by the caller (e.g., in reading stdout/stderr)
        gateway.proc = proc

        return gateway
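    # A hedged usage sketch: the enclosing class is not shown in this snippet, so the
    # name "AppMasterLauncher" below is a hypothetical placeholder for it.
    #   launcher = AppMasterLauncher()
    #   gw = launcher._launch_gateway(class_path="/path/to/raydp/jars/*")
    #   app_master = gw.entry_point   # py4j entry point exposed by the JVM side
    #   gw.proc.poll()                # the Popen handle stored on the returned gateway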
Example #3
def launch_gateway(conf=None):
    """
    launch jvm gateway
    :param conf: spark configuration passed to spark-submit
    :return:
    """
    if "PYSPARK_GATEWAY_PORT" in os.environ:
        gateway_port = int(os.environ["PYSPARK_GATEWAY_PORT"])
        gateway_secret = os.environ["PYSPARK_GATEWAY_SECRET"]
    else:
        SPARK_HOME = _find_spark_home()
        # Launch the Py4j gateway using Spark's run command so that we pick up the
        # proper classpath and settings from spark-env.sh
        on_windows = platform.system() == "Windows"
        script = "./bin/spark-submit.cmd" if on_windows else "./bin/spark-submit"
        command = [os.path.join(SPARK_HOME, script)]
        if conf:
            for k, v in conf.getAll():
                command += ['--conf', '%s=%s' % (k, v)]
        submit_args = os.environ.get("PYSPARK_SUBMIT_ARGS", "pyspark-shell")
        if os.environ.get("SPARK_TESTING"):
            submit_args = ' '.join(
                ["--conf spark.ui.enabled=false", submit_args])
        command = command + shlex.split(submit_args)

        # Create a temporary directory where the gateway server should write the connection
        # information.
        conn_info_dir = tempfile.mkdtemp()
        try:
            fd, conn_info_file = tempfile.mkstemp(dir=conn_info_dir)
            os.close(fd)
            os.unlink(conn_info_file)

            env = dict(os.environ)
            env["_PYSPARK_DRIVER_CONN_INFO_PATH"] = conn_info_file

            # Launch the Java gateway.
            # We open a pipe to stdin so that the Java gateway can die when the pipe is broken
            if not on_windows:
                # Don't send ctrl-c / SIGINT to the Java gateway:
                def preexec_func():
                    signal.signal(signal.SIGINT, signal.SIG_IGN)

                proc = Popen(command,
                             stdin=PIPE,
                             preexec_fn=preexec_func,
                             env=env)
            else:
                # preexec_fn not supported on Windows
                proc = Popen(command, stdin=PIPE, env=env)

            # Wait for the file to appear, or for the process to exit, whichever happens first.
            while not proc.poll() and not os.path.isfile(conn_info_file):
                time.sleep(0.1)

            if not os.path.isfile(conn_info_file):
                raise Exception(
                    "Java gateway process exited before sending its port number"
                )

            with open(conn_info_file, "rb") as info:
                gateway_port = read_int(info)
                gateway_secret = UTF8Deserializer().loads(info)
        finally:
            shutil.rmtree(conn_info_dir)

        # In Windows, ensure the Java child processes do not linger after Python has exited.
        # In UNIX-based systems, the child process can kill itself on broken pipe (i.e. when
        # the parent process' stdin sends an EOF). In Windows, however, this is not possible
        # because java.lang.Process reads directly from the parent process' stdin, contending
        # with any opportunity to read an EOF from the parent. Note that this is only best
        # effort and will not take effect if the python process is violently terminated.
        if on_windows:
            # In Windows, the child process here is "spark-submit.cmd", not the JVM itself
            # (because the UNIX "exec" command is not available). This means we cannot simply
            # call proc.kill(), which kills only the "spark-submit.cmd" process but not the
            # JVMs. Instead, we use "taskkill" with the tree-kill option "/t" to terminate all
            # child processes in the tree (http://technet.microsoft.com/en-us/library/bb491009.aspx)
            def killChild():
                Popen([
                    "cmd", "/c", "taskkill", "/f", "/t", "/pid",
                    str(proc.pid)
                ])

            atexit.register(killChild)

    # Connect to the gateway
    gateway = JavaGateway(gateway_parameters=GatewayParameters(
        port=gateway_port, auth_token=gateway_secret, auto_convert=True))

    # Import the classes used by PySpark
    java_import(gateway.jvm, "org.apache.spark.SparkConf")
    java_import(gateway.jvm, "org.apache.spark.api.java.*")
    java_import(gateway.jvm, "org.apache.spark.api.python.*")
    java_import(gateway.jvm, "org.apache.spark.ml.python.*")
    java_import(gateway.jvm, "org.apache.spark.mllib.api.python.*")
    # TODO(davies): move into sql
    java_import(gateway.jvm, "org.apache.spark.sql.*")
    java_import(gateway.jvm, "org.apache.spark.sql.api.python.*")
    java_import(gateway.jvm, "org.apache.spark.sql.hive.*")
    java_import(gateway.jvm, "scala.Tuple2")

    return gateway
Example #4
 def __init__(self, port=25333):
     self.gateway = JavaGateway(gateway_parameters=GatewayParameters(
         port=port))
Example #5
def launch_gateway():
    SPARK_HOME = os.environ["SPARK_HOME"]

    gateway_port = -1
    if "PYSPARK_GATEWAY_PORT" in os.environ:
        gateway_port = int(os.environ["PYSPARK_GATEWAY_PORT"])
    else:
        # Launch the Py4j gateway using Spark's run command so that we pick up the
        # proper classpath and settings from spark-env.sh
        on_windows = platform.system() == "Windows"
        script = "./bin/spark-submit.cmd" if on_windows else "./bin/spark-submit"
        submit_args = os.environ.get("PYSPARK_SUBMIT_ARGS")
        submit_args = submit_args if submit_args is not None else ""
        submit_args = shlex.split(submit_args)
        command = [os.path.join(SPARK_HOME, script), "pyspark-shell"
                   ] + submit_args
        if not on_windows:
            # Don't send ctrl-c / SIGINT to the Java gateway:
            def preexec_func():
                signal.signal(signal.SIGINT, signal.SIG_IGN)

            proc = Popen(command,
                         stdout=PIPE,
                         stdin=PIPE,
                         preexec_fn=preexec_func)
        else:
            # preexec_fn not supported on Windows
            proc = Popen(command, stdout=PIPE, stdin=PIPE)

        try:
            # Determine which ephemeral port the server started on:
            gateway_port = proc.stdout.readline()
            gateway_port = int(gateway_port)
        except ValueError:
            (stdout, _) = proc.communicate()
            exit_code = proc.poll()
            error_msg = "Launching GatewayServer failed"
            error_msg += " with exit code %d! " % exit_code if exit_code else "! "
            error_msg += "(Warning: unexpected output detected.)\n\n"
            error_msg += gateway_port + stdout
            raise Exception(error_msg)

        # Create a thread to echo output from the GatewayServer, which is required
        # for Java log output to show up:
        class EchoOutputThread(Thread):
            def __init__(self, stream):
                Thread.__init__(self)
                self.daemon = True
                self.stream = stream

            def run(self):
                while True:
                    line = self.stream.readline()
                    sys.stderr.write(line)

        EchoOutputThread(proc.stdout).start()

    # Connect to the gateway
    gateway = JavaGateway(GatewayClient(port=gateway_port), auto_convert=False)

    # Import the classes used by PySpark
    java_import(gateway.jvm, "org.apache.spark.SparkConf")
    java_import(gateway.jvm, "org.apache.spark.api.java.*")
    java_import(gateway.jvm, "org.apache.spark.api.python.*")
    java_import(gateway.jvm, "org.apache.spark.mllib.api.python.*")
    java_import(gateway.jvm, "org.apache.spark.sql.SQLContext")
    java_import(gateway.jvm, "org.apache.spark.sql.hive.HiveContext")
    java_import(gateway.jvm, "org.apache.spark.sql.hive.LocalHiveContext")
    java_import(gateway.jvm, "org.apache.spark.sql.hive.TestHiveContext")
    java_import(gateway.jvm, "scala.Tuple2")

    return gateway
Example #6
    def __init__(self, case_files, dyn_sim_config_file, rl_config_file, server_port_num=25333, cnts=[2, 2, 2]):

        # changed from a global variable to a class-level attribute to support parallel processes

        #global gateway
        self.a_gateway = JavaGateway(
            gateway_parameters=GatewayParameters(port=server_port_num, auto_convert=True))
        #global ipss_app

        self.ipss_app = self.a_gateway.entry_point

        from gym import spaces

        _case_files = transfer2JavaStringAry(self.a_gateway,case_files)
      

        # initialize the power system simulation service

        # returns {observation_history_length, observation_space_dim, action_location_num, action_level_num}
        dim_ary = self.ipss_app.initStudyCase(_case_files, dyn_sim_config_file, rl_config_file)

        print('observation_history_length, observation_space_dim, action_location_num, action_level_num = ')
        print(dim_ary[0], dim_ary[1], dim_ary[2], dim_ary[3])

        observation_history_length = dim_ary[0]
        observation_space_dim =  dim_ary[1]

        action_location_num =  dim_ary[2]
        action_level_num = dim_ary[3]
        num = action_level_num ** action_location_num
        self.action_space = spaces.Discrete(num)
        self.cnts = cnts




        #define action and observation spaces
        """
        if(action_location_num == 1):
            self.action_space      = spaces.Discrete(action_level_num) # Discrete, 1-D dimension
        else:
            #print('N-D dimension Discrete Action space is not supported yet...TODO')
            # the following is based on the latest  gym dev version
            # action_def_vector   = np.ones(action_location_num, dtype=np.int32)*action_level_num

            # for gym version 0.10.4, it is parametrized by passing an array of arrays containing [min, max] for each discrete action space
            # for example,  MultiDiscrete([ [0,4], [0,1], [0,1] ])

            action_def_vector = np.ones((action_location_num,2),dtype=np.int32)
            action_def_vector[:,1] = action_level_num -1
            aa = np.asarray(action_def_vector, dtype=np.int32)

            self.action_space   = spaces.MultiDiscrete(action_def_vector) # Discrete, N-D dimension
        """


        self.observation_space = spaces.Box(-999,999,shape=(observation_history_length * observation_space_dim,)) # Continuous

        self.seed()

        # TODO: get the initial states
        self.state = None

        self.steps_beyond_done = None
        self.restart_simulation = True
Example #7
        else:
            result = json.dumps(
                list(
                    filter(lambda x: not re.match("^__.*", x),
                           list(completionList))))
            self.interpreter.setStatementsFinished(result, False)


host = sys.argv[1]
port = int(sys.argv[2])

if "PY4J_GATEWAY_SECRET" in os.environ:
    from py4j.java_gateway import GatewayParameters

    gateway_secret = os.environ["PY4J_GATEWAY_SECRET"]
    gateway = JavaGateway(gateway_parameters=GatewayParameters(
        address=host, port=port, auth_token=gateway_secret, auto_convert=True))
else:
    gateway = JavaGateway(GatewayClient(address=host, port=port),
                          auto_convert=True)

intp = gateway.entry_point
_zcUserQueryNameSpace = {}

completion = PythonCompletion(intp, _zcUserQueryNameSpace)
_zcUserQueryNameSpace["__zeppelin_completion__"] = completion
_zcUserQueryNameSpace["gateway"] = gateway

from zeppelin_context import PyZeppelinContext

if intp.getZeppelinContext():
    z = __zeppelin__ = PyZeppelinContext(intp.getZeppelinContext(), gateway)
Example #8
# Test script. Tests brightness and contrast commands.
# Created by Toni Sagrista

from py4j.java_gateway import JavaGateway, GatewayParameters

gateway = JavaGateway(gateway_parameters=GatewayParameters(auto_convert=True))
gs = gateway.entry_point

gs.disableInput()
gs.cameraStop()

gs.print("Testing brightness")

gs.setBrightnessLevel(-1.0)
gs.sleep(1)
gs.setBrightnessLevel(-0.5)
gs.sleep(1)
gs.setBrightnessLevel(-0.0)
gs.sleep(1)
gs.setBrightnessLevel(0.5)
gs.sleep(1)
gs.setBrightnessLevel(1.0)
gs.sleep(1)
gs.setBrightnessLevel(0.0)
gs.sleep(1)

gs.print("Testing contrast")

gs.setContrastLevel(0.0)
gs.sleep(1)
gs.setContrastLevel(0.5)
Example #9
    def run(self, parent_data_objs):

        # Run the java py4j entry point
        comp_dir = self._dag_node.comp_root_path()
        self._logger.info("comp_dir: {}".format(comp_dir))

        jar_files = glob.glob(os.path.join(comp_dir, "*.jar"))
        self._logger.info("Java classpath files: {}".format(jar_files))
        component_class = self._dag_node.comp_class()

        java_jars = [self._mlcomp_jar] + jar_files
        class_path = ":".join(java_jars)
        java_gateway = None
        all_ok = False
        monitor_proc = None

        try:
            total_phys_mem_size_mb = ByteConv.from_bytes(
                psutil.virtual_memory().total).mbytes
            jvm_heap_size_option = "-Xmx{}m".format(
                int(math.ceil(total_phys_mem_size_mb)))

            java_opts = [jvm_heap_size_option]
            self._logger.info("JVM options: {}".format(java_opts))

            # Note: the jarpath is set to be the path to the mlcomp jar since the launch_gateway code is checking
            #       for the existence of the jar. The py4j jar is packed inside the mlcomp jar.
            java_port = launch_gateway(port=0,
                                       javaopts=java_opts,
                                       die_on_exit=True,
                                       jarpath=self._mlcomp_jar,
                                       classpath=class_path,
                                       redirect_stdout=sys.stdout,
                                       redirect_stderr=sys.stderr)

            java_gateway = JavaGateway(
                gateway_parameters=GatewayParameters(port=java_port),
                callback_server_parameters=CallbackServerParameters(port=0),
                python_server_entry_point=MLOpsPY4JWrapper())
            python_port = java_gateway.get_callback_server(
            ).get_listening_port()
            self._logger.debug("Python port: {}".format(python_port))

            java_gateway.java_gateway_server.resetCallbackClient(
                java_gateway.java_gateway_server.getCallbackClient().
                getAddress(), python_port)

            mlops_wrapper = MLOpsPY4JWrapper()
            entry_point = java_gateway.jvm.com.parallelm.mlcomp.ComponentEntryPoint(
                component_class)
            component_via_py4j = entry_point.getComponent()
            component_via_py4j.setMLOps(mlops_wrapper)

            # Configure
            m = java_gateway.jvm.java.util.HashMap()
            for key in self._params.keys():
                m[key] = self._params[key]

            component_via_py4j.configure(m)

            # Materialize the component
            l = java_gateway.jvm.java.util.ArrayList()
            for obj in parent_data_objs:
                l.append(obj)
                self._logger.info("Parent obj: {} type {}".format(
                    obj, type(obj)))
            self._logger.info("Parent objs: {}".format(l))

            if mlops_loaded:
                monitor_proc = ProcessMonitor(mlops, self._ml_engine)
                monitor_proc.start()

            py4j_out_objs = component_via_py4j.materialize(l)

            self._logger.debug(type(py4j_out_objs))
            self._logger.debug(len(py4j_out_objs))

            python_out_objs = []
            for obj in py4j_out_objs:
                self._logger.debug("Obj:")
                self._logger.debug(obj)
                python_out_objs.append(obj)
            self._logger.info(
                "Done running of materialize and getting output objects")
            all_ok = True
        except Py4JJavaError as e:
            self._logger.error("Error in java code: {}".format(e))
            raise MLCompException(str(e))
        except Exception as e:
            self._logger.error("General error: {}".format(e))
            raise MLCompException(str(e))
        finally:
            self._logger.info("In finally block: all_ok {}".format(all_ok))
            if java_gateway:
                java_gateway.close_callback_server()
                java_gateway.shutdown()

            if mlops_loaded and monitor_proc:
                monitor_proc.stop_gracefully()

        return python_out_objs
Example #10
            doors.write("\n")
    doors.close()


if __name__ == "__main__":
    try:
        numero_pruebas = int(sys.argv[1])
        plan_elegido = PLANARRAY[int(sys.argv[2])]
        print u"Total de evaluaciones a realizar por plan: ", sys.argv[1]
        print u'El plan elegido es: ', plan_elegido
    except:
        numero_pruebas = 10
        plan_elegido = PLANARRAY[0]
        print u"El número de pruebas a realizar será el por defecto (10 rondas)"
        print u'El plan elegido es: ', plan_elegido
    gw = JavaGateway()  # New gateway connection
    bridge = gw.entry_point  # The actual NetLogoBridge object
    bridge.openModel(NETLOGOMODEL)

    createPlanFile(plan_elegido)
    posiciones = obtenerPosiciones(PLANFILE)
    # genes stores all the possible doors
    genes = obtenerGenes(posiciones)
    # generarArchivo(genes)

    individuos = generarPoblacion(genes, 100)

    # here we specify the number of generations
    for x in range(0, numero_pruebas):
        print "generacion: {0}".format(x)
        for individuo in individuos:
Example #11
def index():
    movie = request.form['movie']
    print movie
    imdbTitle =  movie.replace(" ","%20")
    link = 'http://www.omdbapi.com/?i=&t=' + imdbTitle +'&tomatoes=true'      
    linkRequest = Request(link)
    try:
        
        linkResponse = urlopen(linkRequest)
        j = json.load(linkResponse)    
        print j['Title']    
     
    except :
        pass
        return render_template('index.html',errMsg="Pls Enter Correct Movie")
    try:
        rd = j['Released']
        date_time = datetime.datetime.strptime(rd,"%d %b %Y")
        t1= datetime.datetime.strftime(date_time, "%Y-%m-%d")
        Dates = t1.split()  
        Year= Dates[0][0:4]
        Month = Dates[0][5:7]
        t = Year+Month 
        Dates = t1.split()  
        Day= Dates[0][8:10]
        Month = Dates[0][5:7]
        if Month == "09":
           Month = 9
        elif Month == "08":
          Month = 8
        elif Day == "09":
          Day = 9
        elif Day == "08":
         Day = 8
        Month = int(Month)
        Day = int(Day)
        d = (Month,Day)
        print d
        d1 = (1, 9)
        d2 = (1, 19)
        d3 = (2, 6)
        d4 = (2, 16)
        d5 = (5, 15)
        d6 = (5, 25)
        d7 = (6, 24)
        d8 = (7, 4)
        d9 = (8, 28)
        d10 = (9, 7)
        d11 = (10, 2)
        d12 = (10, 12)
        d13 = (10, 21)
        d14 = (10, 31)
        d15 = (11, 1)
        d16 = (11, 11)
        d17 = (11, 16)
        d18 = (11, 26)
        d19 = (12, 15)
        d20 = (12, 31)
        d21 = (1, 1)
     
        if (d1 <= d <= d2) or (d3 <= d <= d4) or (d5 <= d <= d6) or (d7 <= d <= d8) or (d9 <= d <= d10) or (d11 <= d <= d12) or (d13 <= d <= d14) or (d15 <= d <= d16) or (d17 <= d <= d18) or (d19 <= d <= d20):
            status = "1"
        elif d == d21:
            status = "1"
        else:
            status = "0"
        try:
            wiki_url = 'http://stats.grok.se/json/en/'+t+'/'+movie
            print wiki_url
            wiki_read = json.loads(urlopen(wiki_url).read())
            total_views = sum([count for count in wiki_read['daily_views'].values()])
            total_str =str(total_views)
            print total_str
        except :
            pass
                    
        imdb_genre = j['Genre'].replace(",","-")
        j['Title'] = j['Title'].replace(",","-")
        imdb_Actors = j['Actors'].replace(",","-") 
        imdb_Director = j['Director'].replace(",","-") 
        imdb_Awards = j['Awards'].replace(",","-") 
        imdb_Production = j['Production'].replace(",","-")   
        imdb_votes = j['imdbVotes'].replace(",","") 
        tomatoUserReviews = j['tomatoUserReviews'].replace(",","") 
        tomatoUserRating = j['tomatoUserRating'].replace(",","") 
        tomatoUserMeter = j['tomatoUserMeter'].replace(",","") 
        imdb_rating = j['imdbRating'].replace(",","")
        Rated = j['Rated'].replace(",","")
        Metascore = j['Metascore'].replace(",","")
        print "*********"
        if 'USA' in j['Country']:
            print "comin"
            resultant = j['Title'] +','+ imdb_genre +','+ imdb_Actors +','+imdb_Director +','+imdb_Awards+',' +imdb_Production+','+imdb_rating +',' + imdb_votes + ','+j['Released']+','+ tomatoUserReviews +','+tomatoUserRating+','+tomatoUserMeter                 
            print resultant   
        else:
            return render_template('index.html',errMsg="Pls Enter USA Movie")
    except:
            return render_template('index.html',errMsg="Pls Enter Correct Movie")
    try:
        genre=imdb_genre.strip()             
        query = {"genre":genre}
        colums = {"_id":1}
        doc = genreColl.find_one(query,colums)
        if doc is None:
            genreId=0 
        else:
            genreId = doc['_id']
        print "genre Id " + `genreId`
        
        topActorCount=0
        actors=imdb_Actors
        actorList = actors.split("-")
        for actors in actorList:
            actor = actors.strip()           
            query = {"actor":actor}
            colums = {"_id":1}
            doc = actorsColl.find_one(query,colums)
            if doc is None:
                print actor
            else:
                topActorCount = topActorCount+1
        print "topActorCount "+`topActorCount`
        
        topDirectorCount=0
        imdb_Director = j['Director'].replace(",","-") 
        dires=imdb_Director
        direList = dires.split("-")
        for directors in direList:
            director = directors.strip()           
            query = {"director":director}
            colums = {"_id":1}
            doc = direColl.find_one(query,colums)
            if doc is None:
                print director
            else:
                topDirectorCount = topDirectorCount+1
                
        print("topDirectorCount "+`topDirectorCount`)
        
        
         
        prod=imdb_Production
        prodList = prod.split("/")
        cnt = 0
           
        for prodHouse in prodList:
                cnt = cnt+ 1
                if (cnt == 1):
                    productioHouse = prodHouse.strip()
                    #print productioHouse
                    if "Screen Media" in productioHouse:
                        productioHouse="SMedia"
                    elif "Screen Gems" in productioHouse:
                        productioHouse="SGems"
                    elif "Open Road Films" in productioHouse:
                        productioHouse = "OpRd"
                    elif "Metro-Goldwyn-Mayer" in productioHouse:
                        productioHouse ="MGM"
                    elif  "FilmBuff" in productioHouse:
                        productioHouse = "Buff"
                    elif "Tribeca" in productioHouse:
                        productioHouse = "Tribecca"
                    elif "FilmDistrict" in productioHouse:
                        productioHouse="District"
                    elif "New Films" in productioHouse:
                        productioHouse = "NeFm"
                    elif "LD Entertainment"  in productioHouse:
                        productioHouse = "LDEnte"
                    elif "Vitagraph" in productioHouse:
                        productioHouse ="ViGrp"
                    elif "Area 23a"   in productioHouse:
                        productioHouse = "Area23A"
                    elif "Arc Entertainment"   in productioHouse:
                        productioHouse = "ArcEn"
                    elif "FilmDisctrict" in productioHouse:
                        productioHouse = "District"
                    elif "High Top" in productioHouse:
                        productioHouse="Hightop"
                    elif "Millennium"  in productioHouse:
                        productioHouse="Milennium"
                    elif "Radius-TWC" in productioHouse:
                        productioHouse="Radius"
                    elif "World Wide"in productioHouse:
                        productioHouse="WrlWid"
                    elif "First Independent Pictures" in productioHouse:
                        productioHouse = "FirInde"
                    elif "New Line Cinema" in productioHouse:
                        productioHouse = "Warner"
                    elif "TriStar" in productioHouse:
                        productioHouse = "Sony"
                    elif "Stage 6" in productioHouse:
                        productioHouse = "Sony"
                    elif "Touchstone Pictures" in productioHouse:
                        productioHouse = "Walt"
                    elif "Disneynature"in productioHouse:
                        productioHouse = "Walt"
                    elif "Focus Features"in productioHouse:
                        productioHouse = "Universal"
                    elif "Nickelodeon" in productioHouse:
                        productioHouse = "Paramount"
                    elif "Roadside" in productioHouse:
                        productioHouse = "Lionsgate"
                    elif "Dimension" in productioHouse:
                        productioHouse = "Weinstein"
                        
                    
                    doc = db.command("text", "topProdHouse",search=productioHouse, project={"_id": 1})
                    rs = doc['results']
                    #print rs
                    if rs == []:
                        
                        prodHouseId=0
                    else:
                        print productioHouse
                        prodHouseId=1
            
        print "prodHouseId "+`prodHouseId` 

    except:
            return render_template('index.html',errMsg="Error while transforming the data")
        
    gateway = JavaGateway() 
    predictionModel = gateway.entry_point

    #Genre Production Released Actor Director imdbRating Metascore Wikistats BoxOffice */
    output=predictionModel.boxOfficePre(str(genreId),str(prodHouseId),str(status),str(topActorCount),str(topDirectorCount),imdb_rating,Metascore,total_str)
    print "Hi "+`output`
    boxOfficeValue=""
    if output==0.0:
        boxOfficeValue="181--200000"
    elif output==1.0:
        boxOfficeValue="200000--5000000"
    elif output==2.0:
        boxOfficeValue="5000000--20000000"
    elif output==3.0:
        boxOfficeValue="20000000--40000000"
    elif output==4.0:
        boxOfficeValue="40000000--80000000"
    elif output==5.0:
        boxOfficeValue="80000000--760500000"
    parts = boxOfficeValue.split("--")	
    value1 = parts[0]
    value2 = parts[1]
    mid = (int(value1) + int(value2))/2
    if mid > 100000000:
       mid = 100000000
    return render_template('predict.html',boxOffice=boxOfficeValue,high_chart = mid, title = movie,poster = j['Poster'],plot = j['Plot'])
Example #12
    def __init__(self):
        """Starts a new instance of SystemDSContext, in which the connection to a JVM systemds instance is handled
        Any new instance of this SystemDS Context, would start a separate new JVM.

        Standard out and standard error form the JVM is also handled in this class, filling up Queues,
        that can be read from to get the printed statements from the JVM.
        """

        root = os.environ.get("SYSTEMDS_ROOT")
        if root is None:
            # If there is no SystemDS install, default to the pip-packaged Java files.
            root = os.path.join(get_module_dir(), "systemds-java")

        # os.name == "nt" means Windows
        cp_separator = ";" if os.name == "nt" else ":"
        lib_cp = os.path.join(root, "target", "lib", "*")
        systemds_cp = os.path.join(root, "target", "SystemDS.jar")
        classpath = cp_separator.join([lib_cp, systemds_cp])

        command = ["java", "-cp", classpath]

        if os.environ.get("SYSTEMDS_ROOT") != None:
            files = glob(os.path.join(root, "conf", "log4j*.properties"))
            if len(files) > 1:
                print("WARNING: Multiple logging files found selecting: " + files[0])
            if len(files) == 0:
                print("WARNING: No log4j file found at: "
                      + os.path.join(root, "conf")
                      + " therefore using default settings")
            else:
                command.append("-Dlog4j.configuration=file:" + files[0])

        command.append("org.apache.sysds.api.PythonDMLScript")

        # TODO add an argument parser here
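        # A hedged sketch of what such a parser might look like (hypothetical flags,
        # not part of the original code):
        #   parser = argparse.ArgumentParser(description="SystemDS Py4J startup options")
        #   parser.add_argument("--port", type=int, default=0,
        #                       help="fixed Py4J port; 0 means pick a random open port")
        #   args, _ = parser.parse_known_args()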

        # Find a random port, and hope that no other process
        # steals it while we wait for the JVM to startup
        port = self.__get_open_port()
        command.append(str(port))

        process = Popen(command, stdout=PIPE, stdin=PIPE, stderr=PIPE)
        first_stdout = process.stdout.readline()
        
        if(b"GatewayServer Started" in first_stdout):
            print("Startup success")
        else:
            stderr = process.stderr.readline().decode("utf-8")
            if(len(stderr) > 1):
                raise Exception("Exception in startup of GatewayServer: " + stderr)
            outputs = []
            outputs.append(first_stdout.decode("utf-8"))
            max_tries = 10
            for i in range(max_tries):
                next_line = process.stdout.readline()
                if(b"GatewayServer Started" in next_line):
                    print("WARNING: Stdout corrupted by prints: " + str(outputs))
                    print("Startup success")
                    break
                else:
                    outputs.append(next_line)

                if (i == max_tries-1):
                    raise Exception("Error in startup of systemDS gateway process: \n gateway StdOut: " + str(outputs) + " \n gateway StdErr" + process.stderr.readline().decode("utf-8") )

        # Handle Std out from the subprocess.
        self.__stdout = Queue()
        self.__stderr = Queue()

        Thread(target=self.__enqueue_output, args=(
            process.stdout, self.__stdout), daemon=True).start()
        Thread(target=self.__enqueue_output, args=(
            process.stderr, self.__stderr), daemon=True).start()

        # Py4j connect to the started process.
        gateway_parameters = GatewayParameters(
            port=port, eager_load=True, read_timeout=5)
        self.java_gateway = JavaGateway(
            gateway_parameters=gateway_parameters, java_process=process)
Example #13
import numpy
from collections import Counter as mset
"""""" """""" """""" """
similarity score parameters a,b,c,d
""" """""" """""" """"""
a = float(1 / 11)  # common noun
b = float(5 / 11)  # proper noun
c = float(1 / 11)  # verb
d = float(4 / 11)  # hashtag threshold
threshold = 0.3
threshold2 = 0.4
threshold3 = .4
"""""" """""" """
Initializing NER model and files input
""" ""
gateway = JavaGateway()  # connect to the JVM
inputFile = "../Data/mytweet2"
Alltweets = pd.read_csv(inputFile, ",")
# print(len(Alltweets))
Outfilename = "trainingOutputs/clustersIds03_04_04.csv"
output = open(Outfilename, mode='wt', encoding='utf-8')
fieldnames = ['clusterno', 'tweetd']
writer = csv.DictWriter(output,
                        fieldnames=fieldnames,
                        quoting=csv.QUOTE_MINIMAL)
writer.writeheader()
# File path which consists of Abbreviations.
fileName = "../utils/slang.txt"  #
#  File Access mode [Read Mode]
accessMode = "r"
abbrRemov = {}
Example #14
# -*- coding: utf-8 -*-
# @Time    : 2020/3/7 13:30
# @Author  : Deng Wenxing
# @Email   : [email protected]
# @File    : qq_msg.py
# @Software: PyCharm
# @content : QQ-related information

import argparse
import subprocess
from py4j.java_gateway import JavaGateway

if __name__ == '__main__':
    gateway = JavaGateway()
    random = gateway.jvm.java.util.Random()
    print(random.nextInt(10))
Example #15
 def __init__(self, bigdl_type, port=25333):
     self.value = JavaGateway(GatewayClient(port=port), auto_convert=True)
Example #16
    def testPythonToJavaToPythonClose(self):
        def play_with_ping(gateway):
            ping = InstrumentedPythonPing()
            pingpong = gateway.jvm.py4j.examples.PingPong()
            total = pingpong.start(ping)
            return total

        def internal_work(assert_memory):
            gateway2 = InstrJavaGateway(
                gateway_parameters=GatewayParameters(port=DEFAULT_PORT + 5),
                callback_server_parameters=CallbackServerParameters(
                    port=DEFAULT_PYTHON_PROXY_PORT + 5))
            sleep()
            play_with_ping(gateway2)
            python_gc()
            sleep()
            gateway2.close(close_callback_server_connections=True,
                           keep_callback_server=True)
            sleep()
            assert_memory()
            gateway2.shutdown()
            sleep()

        with gateway_server_example_app_process():
            gateway = JavaGateway()
            gateway.entry_point.startServer2()

            def perform_memory_tests():
                python_gc()
                gateway.jvm.py4j.instrumented.MetricRegistry.\
                    forceFinalization()
                sleep()
                createdSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
                    getCreatedObjectsKeySet()
                finalizedSet = gateway.jvm.py4j.instrumented.MetricRegistry.\
                    getFinalizedObjectsKeySet()

                # 10 objects: GatewayServer, 4 GatewayConnection,
                # CallbackClient, 4 CallbackConnection
                self.assertEqual(10, len(createdSet))
                # 13 objects: JavaGateway, CallbackServer, GatewayClient,
                # GatewayProperty, PythonPing, 4 GatewayConnection,
                # 4 CallbackConnection. Notice the symmetry between callback
                # and gateway connections.
                self.assertEqual(13, len(CREATED))
                # 4 gateway connections, 3 callback connections.
                # There is still one callback connection staying around
                # following Java finalization that called back Python.
                self.assertEqual(7, len(finalizedSet))
                # Same amount of connections for the Python side
                self.assertEqual(7, len(FINALIZED))

            internal_work(perform_memory_tests)
            python_gc()
            gateway.jvm.py4j.instrumented.MetricRegistry.forceFinalization()
            sleep()
            gateway.shutdown()
            # 14 objects: JavaGateway, CallbackServer, GatewayClient,
            # GatewayProperty, PythonPing, 5 GatewayConnection,
            # 4 CallbackConnection. Notice the symmetry
            # One more gateway connection created because we called shutdown
            # after close (which requires a connection to send a shutdown
            # command).
            assert_python_memory(self, 14)
Example #17
# limitations under the License.
#


from py4j.java_gateway import java_import, JavaGateway, GatewayClient
from pyspark.conf import SparkConf
from pyspark.context import SparkContext

# for back compatibility
from pyspark.sql import SQLContext

# start JVM gateway
if "PY4J_GATEWAY_SECRET" in os.environ:
    from py4j.java_gateway import GatewayParameters
    gateway_secret = os.environ["PY4J_GATEWAY_SECRET"]
    gateway = JavaGateway(gateway_parameters=GatewayParameters(address="${JVM_GATEWAY_ADDRESS}",
        port=${JVM_GATEWAY_PORT}, auth_token=gateway_secret, auto_convert=True))
else:
    gateway = JavaGateway(GatewayClient(address="${JVM_GATEWAY_ADDRESS}", port=${JVM_GATEWAY_PORT}), auto_convert=True)

java_import(gateway.jvm, "org.apache.spark.SparkEnv")
java_import(gateway.jvm, "org.apache.spark.SparkConf")
java_import(gateway.jvm, "org.apache.spark.api.java.*")
java_import(gateway.jvm, "org.apache.spark.api.python.*")
java_import(gateway.jvm, "org.apache.spark.mllib.api.python.*")

intp = gateway.entry_point
jsc = intp.getJavaSparkContext()

java_import(gateway.jvm, "org.apache.spark.sql.*")
java_import(gateway.jvm, "org.apache.spark.sql.hive.*")
java_import(gateway.jvm, "scala.Tuple2")
Example #18
 def setUp(self):
     self.p = start_example_app_process()
     self.gateway = JavaGateway()
Example #19
        obs, rew, done, _ = env.step(action)
        episode_rew += rew
        cnt += 1
    print("Episode reward", episode_rew)
    print('total cnt', cnt)

    return np.array(actions), np.array(observations)


folder_dir = r'C:\Users\huan289\git\RLGC'
#os.chdir(folder_dir)

print(os.getcwd())

java_port = 25333
gateway = JavaGateway(
    gateway_parameters=GatewayParameters(port=java_port, auto_convert=True))

ipss_app = gateway.entry_point

# case_files = ['testData\\Kundur-2area\\kunder_2area_ver30.raw','testData\\Kundur-2area\\kunder_2area.dyr']
# Need to use the following way to define a String array in Python for Py4J

case_files_array = gateway.new_array(gateway.jvm.String, 2)
case_files_array[
    0] = folder_dir + '\\' + 'testData\\IEEE39\\IEEE39bus_multiloads_xfmr4_smallX_v30.raw'
case_files_array[1] = folder_dir + '\\' + 'testData\\IEEE39\\IEEE39bus_3AC.dyr'

dyn_config_file = folder_dir + '\\' + 'testData\\IEEE39\\json\\IEEE39_dyn_config_v0.65.json'
rl_config_file = folder_dir + '\\' + 'testData\\IEEE39\\json\\IEEE39_RL_loadShedding_3motor_3levels_v0.65.json'

sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
Example #20
def Cluster(dir, coef=0.2):
    gateway = JavaGateway()
    Entry = gateway.entry_point
    SimilarityMatrix = GenerateMatrix(dir, coef)
    print(QTCluster(SimilarityMatrix, Entry))
Example #21
            # Form the training target for the network
            q_values[0][action] = target
            # Train the network
            self.model.fit(state, q_values, verbose=0)
        if self.freeroll_rate > self.freeroll_min: 
            self.freeroll_rate *= self.freeroll_decay
            
if __name__ == "__main__":
    def get_state(java_state):
        done = java_state.isDone()
        reward = java_state.getReward()
        state = np.array([[java_state.getX(), java_state.getXDot(), java_state.getTheta(), java_state.getThetaDot()]], dtype = np.float64)
        return state, done, reward
    
    # DQN: a deep Q-network
    java_gateway = JavaGateway()
    env = java_gateway.jvm.ru.dutov.cartpole.env.CartPoleEnv()
    
    observ_space = env.getObservationSpace()
    action_space = env.getActionSpace()
    
    agent = Agent(observ_space, action_space)  # Create the agent
    
    episodes = 500  # Number of game episodes
    
    scores = deque(maxlen = 100)
    # scores holds the durations of the last 100 games

    for e in range(episodes + 1):
        # Get the initial state of the system before the start of each episode
        state, done, reward = get_state(env.reset())        
Example #22
# Python side of Java-Python bridge
# see info.kwarc.mmt.python.Py4JGateway (an MMT extension that must be added to MMT for the bridge to work) for the counterpart

from py4j.java_gateway import JavaGateway, JavaObject, GatewayParameters, CallbackServerParameters
import py4j

# create the gateway that communicates with the JVM
gwp = GatewayParameters(auto_field=True, auto_convert=True)
cbp = CallbackServerParameters()
gateway = JavaGateway(gateway_parameters=gwp, callback_server_parameters=cbp)

# MMT sets the entry point to be the MMT controller
controller = gateway.entry_point

# jvm yields access to Java namespace
mmt = gateway.jvm.info.kwarc.mmt
api = mmt.api

# everything below here consists of optional improvements to smooth the Scala-Python integration
# they also provide examples of how to use the bridge

# General remarks on handling, from Python, Scala features that are no longer present in the JVM:
#  companion object: fields are static methods, call as usual
#  nullary functions: () is mandatory
#  sequence arguments: needs a Seq, use Seq(pythonlist) conversion
#  default arguments: all arguments must be provided
#  implicit conversions: apply explicitly
#  magic functions: some correspondence established below
#  symbolic method names as operators: use .M below, some infix operators can be mapped to Python magic functions (see below)
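# A minimal, hypothetical sketch of the conventions listed above (the method names are
# illustrative placeholders, not confirmed MMT API; Seq and .M refer to the helpers
# mentioned above and defined later in this file):
#   v = controller.getVersion()     # nullary function: the () is mandatory
#   s = Seq([1, 2, 3])              # sequence argument: wrap a Python list with the Seq conversion
#   x = obj.M("::=")(arg)           # symbolic method name applied via the .M helper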

Example #23
def launch_gateway(conf=None):
    """
    launch jvm gateway
    :param conf: spark configuration passed to spark-submit
    :return:
    """
    if "PYSPARK_GATEWAY_PORT" in os.environ:
        gateway_port = int(os.environ["PYSPARK_GATEWAY_PORT"])
    else:
        SPARK_HOME = _find_spark_home()
        # Launch the Py4j gateway using Spark's run command so that we pick up the
        # proper classpath and settings from spark-env.sh
        on_windows = platform.system() == "Windows"
        script = "./bin/spark-submit.cmd" if on_windows else "./bin/spark-submit"
        command = [os.path.join(SPARK_HOME, script)]
        if conf:
            for k, v in conf.getAll():
                command += ['--conf', '%s=%s' % (k, v)]
        submit_args = os.environ.get("PYSPARK_SUBMIT_ARGS", "pyspark-shell")
        if os.environ.get("SPARK_TESTING"):
            submit_args = ' '.join(
                ["--conf spark.ui.enabled=false", submit_args])
        command = command + shlex.split(submit_args)

        # Start a socket that will be used by PythonGatewayServer to communicate its port to us
        callback_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        callback_socket.bind(('127.0.0.1', 0))
        callback_socket.listen(1)
        callback_host, callback_port = callback_socket.getsockname()
        env = dict(os.environ)
        env['_PYSPARK_DRIVER_CALLBACK_HOST'] = callback_host
        env['_PYSPARK_DRIVER_CALLBACK_PORT'] = str(callback_port)

        # Launch the Java gateway.
        # We open a pipe to stdin so that the Java gateway can die when the pipe is broken
        if not on_windows:
            # Don't send ctrl-c / SIGINT to the Java gateway:
            def preexec_func():
                signal.signal(signal.SIGINT, signal.SIG_IGN)

            proc = Popen(command, stdin=PIPE, preexec_fn=preexec_func, env=env)
        else:
            # preexec_fn not supported on Windows
            proc = Popen(command, stdin=PIPE, env=env)

        gateway_port = None
        # We use select() here in order to avoid blocking indefinitely if the subprocess dies
        # before connecting
        while gateway_port is None and proc.poll() is None:
            timeout = 1  # (seconds)
            readable, _, _ = select.select([callback_socket], [], [], timeout)
            if callback_socket in readable:
                gateway_connection = callback_socket.accept()[0]
                # Determine which ephemeral port the server started on:
                gateway_port = read_int(gateway_connection.makefile(mode="rb"))
                gateway_connection.close()
                callback_socket.close()
        if gateway_port is None:
            raise Exception(
                "Java gateway process exited before sending the driver its port number"
            )

        # In Windows, ensure the Java child processes do not linger after Python has exited.
        # In UNIX-based systems, the child process can kill itself on broken pipe (i.e. when
        # the parent process' stdin sends an EOF). In Windows, however, this is not possible
        # because java.lang.Process reads directly from the parent process' stdin, contending
        # with any opportunity to read an EOF from the parent. Note that this is only best
        # effort and will not take effect if the python process is violently terminated.
        if on_windows:
            # In Windows, the child process here is "spark-submit.cmd", not the JVM itself
            # (because the UNIX "exec" command is not available). This means we cannot simply
            # call proc.kill(), which kills only the "spark-submit.cmd" process but not the
            # JVMs. Instead, we use "taskkill" with the tree-kill option "/t" to terminate all
            # child processes in the tree (http://technet.microsoft.com/en-us/library/bb491009.aspx)
            def killChild():
                Popen([
                    "cmd", "/c", "taskkill", "/f", "/t", "/pid",
                    str(proc.pid)
                ])

            atexit.register(killChild)

    # Connect to the gateway
    gateway = JavaGateway(GatewayClient(port=gateway_port), auto_convert=True)

    # Import the classes used by PySpark
    java_import(gateway.jvm, "org.apache.spark.SparkConf")
    java_import(gateway.jvm, "org.apache.spark.api.java.*")
    java_import(gateway.jvm, "org.apache.spark.api.python.*")
    java_import(gateway.jvm, "org.apache.spark.ml.python.*")
    java_import(gateway.jvm, "org.apache.spark.mllib.api.python.*")
    # TODO(davies): move into sql
    java_import(gateway.jvm, "org.apache.spark.sql.*")
    java_import(gateway.jvm, "org.apache.spark.sql.hive.*")
    java_import(gateway.jvm, "scala.Tuple2")

    return gateway
Example #24
from py4j.java_gateway import JavaGateway, GatewayParameters
from py4j.protocol import Py4JNetworkError
from django.conf import settings
import re
from decimal import Decimal

gateway = JavaGateway(gateway_parameters=GatewayParameters(
    address=settings.BPAY_GATEWAY, port=25333, read_timeout=10))


def test_connection():
    try:
        gateway.jvm.System.currentTimeMillis()
    except Py4JNetworkError:
        raise Exception('Cannot connect to gateway server')
    except Exception as e:
        raise e
    finally:
        gateway.close()


def getCRN(number):
    if settings.BPAY_GATEWAY:
        test_connection()
        crn = gateway.entry_point.getCRN()
        return crn.generateBPAYCrnWithMod10V05(str(number))
    return generate_crn(number)


def getICRN(number, amount, option='ICRNAMT'):
    if settings.BPAY_GATEWAY:
Example #25
    else:
        raise ValueError("fmt must be 'png' or 'svg'")

    html = "<div style='width:{width};height:{height}'>{img}<div>"
    #print(html.format(width=width, height=height, img=img_str))
    intp.showHTML(html.format(width=width, height=height, img=img_str))
    img.close()


signal.signal(signal.SIGINT, handler_stop_signals)

_pUserQueryNameSpace = {}
client = GatewayClient(port=int(sys.argv[1]))

#gateway = JavaGateway(client, auto_convert = True)
gateway = JavaGateway(client)

output = Logger()
sys.stdout = output
sys.stderr = output
intp = gateway.entry_point

show = __show__ = PythonContext(intp)
__show__._setup_matplotlib()

intp.onPythonScriptInitialized(os.getpid())

while True:
    req = intp.getStatements()
    if req is None:
        break
Example #26
 def __init__(self):
     self.gateway = JavaGateway()
Example #27
from pyspark.accumulators import Accumulator, AccumulatorParam
from pyspark.broadcast import Broadcast
from pyspark.serializers import MarshalSerializer, PickleSerializer

from time import sleep

# for back compatibility
from pyspark.sql import SparkSession, DataFrame, Row

client = GatewayClient(port=int(sys.argv[1]))
sparkVersion = sys.argv[2]

print("PYTHON:: Starting gateway")
if re.match("^1\.[456]\..*$", sparkVersion) or re.match(
        "^2\..*$", sparkVersion):
    gateway = JavaGateway(client, auto_convert=True)
else:
    gateway = JavaGateway(client)
print("PYTHON:: Gateway started")

java_import(gateway.jvm, "org.apache.spark.SparkEnv")
java_import(gateway.jvm, "org.apache.spark.SparkConf")
java_import(gateway.jvm, "org.apache.spark.api.java.*")
java_import(gateway.jvm, "org.apache.spark.api.python.*")
java_import(gateway.jvm, "org.apache.spark.mllib.api.python.*")

bridge = gateway.entry_point
state = bridge.state()
state.markReady()

if sparkVersion.startswith("1.2"):
Example #28
from py4j.java_gateway import JavaGateway, CallbackServerParameters


class PythonListener(object):
    def __init__(self, gateway):
        self.gateway = gateway

    def notify(self, obj):
        print("Notified by Java")
        print(obj)
        self.gateway.jvm.System.out.println("Hello from python!")

        return "A Return Value"

    class Java:
        implements = ["py4j.examples._3_3_callback.ExampleListener"]


if __name__ == "__main__":
    gateway = JavaGateway(
        callback_server_parameters=CallbackServerParameters())
    listener = PythonListener(gateway)
    gateway.entry_point.registerListener(listener)
    gateway.entry_point.notifyAllListeners()
    gateway.shutdown()

# In the cmd window, the Python output is as follows:
# Notified by Java
# <ListenerApplication> instance
Example #29
from py4j.java_gateway import JavaGateway
from py4jfml.knowledgebasevariable import AggregatedFuzzyVariableType as afvt
from py4jfml.knowledgebasevariable import FuzzyVariableType as fvt
from py4jfml.term import TskTermType as tsktt

gateway = JavaGateway()


class TskVariableType:
    '''
    Python class for tskVariableType complex type
    '''
    def __init__(self, name=None):
        '''
        :param name: optional name of the TSK variable
        '''
        if name is None:
            self.java_kbv = gateway.entry_point.getJFMLKnowledgebaseVariable_Factory(
            ).createTskVariableType()
        else:
            assert type(name) == str
            self.java_kbv = gateway.entry_point.getJFMLKnowledgebaseVariable_Factory(
            ).createTskVariableType(name)

    # Method of class KnowledgeBaseVariable
    def isInput(self):
        '''
        Tests if the variable is input type
        :return: true if the variable is input type; false otherwise
        '''
        return self.java_kbv.isInput()
Example #30
def get_jvm() -> JVMView:
    import geopyspark
    pysc = geopyspark.get_spark_context()
    gateway = JavaGateway(eager_load=True, gateway_parameters=pysc._gateway.gateway_parameters)
    jvm = gateway.jvm
    return jvm
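
# A possible usage sketch, assuming a running geopyspark SparkContext is available:
#   jvm = get_jvm()
#   print(jvm.java.lang.System.getProperty("java.version"))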