Example #1
	def install(self, env):

		Logger.info("install Pg")
		excludePackage = ['pgxzm-center']
		self.install_packages(env,excludePackage)

		# create dba user for pg
		#	configinit().create_pg_dba(env)

		# get the best available dir for pg data
		#	configinit().get_avilable_dir(env)


		import params
		Logger.info("create  user and passwd")
		Logger.info(params.pgxx_passwd)

		Logger.info("create  install dir")

		Logger.info(params.pgxx_install_path)
		utils().exe(params.create_install_dir)
		utils().exe(params.chown_install_dir)
		Logger.info("create log dir")
		Logger.info(params.pgxx_log_path)
		utils().exe(params.create_log_dir)
		utils().exe(params.chown_log_dir)
		Logger.info("process initdb")
		utils().exe(params.pg_init_db)
		Logger.info("update configure parameters")
		self.configure(env)
		self.start(env)
		self.createdbsuperuser(env)
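This install() method, like most of the snippets below, reads like part of an Apache Ambari custom-service script: env, params, Logger and self.install_packages are the usual resource_management idioms. For orientation only, a minimal sketch of the kind of Script subclass such methods sit in might look as follows; the class name and method bodies are assumptions, not the project's actual code.

from resource_management.libraries.script.script import Script


class PgService(Script):
    def install(self, env):
        import params
        env.set_params(params)
        self.install_packages(env)          # install the service's packages

    def configure(self, env):
        import params
        env.set_params(params)
        # render the service's configuration files here

    def start(self, env):
        import params
        env.set_params(params)
        self.configure(env)
        # launch the service process here

    def stop(self, env):
        import params
        env.set_params(params)
        # stop the service process here

    def status(self, env):
        # check the pid file / process and raise if the component is down
        pass


if __name__ == "__main__":
    PgService().execute()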
Example #2
  def stop(self, env):
    import params
    env.set_params(params)

    Logger.info("stop Agent")
    utils().exe(params.agent_stop)
    time.sleep(5)
Example #3
	def start(self, env):
		Logger.info("start the pg")
		import params
		env.set_params(params)

		self.configure(env)
		utils().exe(params.pg_start)
Example #4
  def install(self, env):
    Logger.info("install ResourceMonitor")
    excludePackage = ['haproxy-ng', 'hadoop-ng', 'portal_api_server', 'docker-ng']
    self.install_packages(env,excludePackage)

    Logger.info("grant node privilege")
    utils().grant_local_privilege(env)
Example #5
    def stop(self, env):
        import params

        env.set_params(params)

        Logger.info("stop Center")
        utils().exe(params.center_stop)
Example #6
  def start(self, env):
        import os
        import params
        env.set_params(params)

        self.configure(env) 
        Logger.info("start hwi")
        utils().exe(params.hwi_start_command)
Example #7
 def stop(self, env, rolling_restart=False):
   import params
   import status_params
   env.set_params(params)
   cmd = "su {0} -c '{1}/stop-nimbus.sh'".format(params.storm_user, params.bin_dir)
   Logger.info("stop nimbus")
   utils().exe(cmd)
   utils().check_stop(status_params.proc_nimbus_name) 
Example #8
  def start(self, env):
    import params
    env.set_params(params)

    self.configure(env)

    Logger.info("start Cgi")
    utils().exe(params.cgi_start)
Example #9
    def start(self, env):
        import params
        env.set_params(params)

        self.configure(env)

        Logger.info("start JobHistory")
        utils().exe(params.jobhistory_start)
Example #10
    def install(self, env):
        import params
        Logger.info("check local host")
        utils().check_local_environment(env,params.master)

        Logger.info("install hadoop")        
        excludePackage = ['docker-ng','haproxy-ng','resource_monitor','portal_api_server']
        self.install_packages(env,excludePackage)
Example #11
  def start(self, env):
    import params
    env.set_params(params)

    self.configure(env)

    Logger.info("start Docker")
    utils().exe(params.docker_start)
Example #12
    def stop(self, env):
        import os
        import params

        Logger.info("stop mysql")
        cmd = "sudo service mysqld stop"

        utils().exe(cmd)
Example #13
  def start(self, env):
    import params
    env.set_params(params)

    self.configure(env)

    Logger.info("start Haproxy on ResourceManager")
    utils().exe(params.rmhaproxy_start_command)
Example #14
    def start(self, env):
        import params
        env.set_params(params)

        self.configure(env)

        Logger.info("start ResourceManager")
        utils().exe(params.rm_start)
Example #15
  def start(self, env):
    import params
    env.set_params(params)

    self.configure(env)

    Logger.info("start Haproxy on NodeManager")
    utils().exe(params.nmhaproxy_start)
Example #16
  def start(self, env):
    import params
    Logger.info("create datanode config")
    self.configure(env)

    Logger.info("start datanode")
    #su pgxz -c '/usr/local/pgxz/bin/pg_ctl start -D /usr/local/pgxz/nodes/datanode -Z datanode'
    utils().exe(params.datanode_start)
    utils().check_start(params.datanode_pid)
Example #17
    def start(self, env):
        import params
        env.set_params(params)

        cmd = params.etcd_start_cmd
        cmd = cmd + " -initial-cluster-state existing >>/gaia/etcd/backup/etcd.log  2>&1 &"
        cmd = "su gaia -c '{}'".format(cmd)
        Logger.info("start etcd")
        utils().exe(cmd)
Example #18
  def start(self, env):
    import params
    Logger.info("create gtm config")
    self.configure(env)

    Logger.info("start gtm")
    #su pgxz -c '/usr/local/pgxz/bin/gtm_ctl -Z gtm -D /usr/local/pgxz/nodes/gtm start'
    utils().exe(params.gtm_start)
    utils().check_start(params.gtm_pid)
Example #19
    def createdbsuperuser(self):
        Logger.info("create super user for postgresql --")
        create_superuser_command = format("{create_superuser_command}")
        utils().exe(create_superuser_command)

        Logger.info("change password for superuser")
        change_passwd_command = format(
            "psql -h {pg_host_name} -p {pgxx_postgre_port} -U {pgxx_db_user}  "
            "-c \"ALTER USER postgres WITH PASSWORD '{pgxx_db_passwd}';\"")
        utils().exe(change_passwd_command)
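The bare format("{create_superuser_command}") call above is only meaningful if format() interpolates placeholders from the caller's scope, as the format() helper shipped with Ambari's resource_management does. A small self-contained stand-in, written here purely to illustrate that behaviour (it is not the project's implementation), could be:

import inspect

def format_from_scope(template):
    # Substitute {name} placeholders using the caller's global and local names,
    # mimicking a scope-interpolating format() helper.
    caller = inspect.currentframe().f_back
    names = dict(caller.f_globals)
    names.update(caller.f_locals)
    return template.format(**names)

pg_host_name = "db01"          # assumed sample values
pgxx_postgre_port = 5432
pgxx_db_user = "postgres"
print(format_from_scope("psql -h {pg_host_name} -p {pgxx_postgre_port} -U {pgxx_db_user}"))
# prints: psql -h db01 -p 5432 -U postgres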
Example #20
    def start(self, env):
        Logger.info("start the pg")
        import params
        env.set_params(params)

        self.configure(env)
        utils().exe(params.pg_start)

        Links(params.new_postgresql_data_path, params.postgresql_data_path)
        Links(params.new_postgresql_log_path, params.postgresql_log_path)
Example #21
  def start(self, env, rolling_restart=False):
    import params
    import status_params
    env.set_params(params)
    self.configure(env)    
    cmd = "su {0} -c '{1}/start-nimbus.sh'".format(params.storm_user, params.bin_dir)
    Logger.info("start nimbus")
    utils().exe(cmd)
    utils().check_process(status_params.proc_nimbus_name)

    Links(params.new_jstorm_data_path, params.jstorm_data_path)
Example #22
    def start(self, env):
        Logger.info("start the pg")
        import params

        env.set_params(params)

        self.configure(env)
        utils().exe(params.pg_start)

        Links(params.new_postgresql_data_path, params.postgresql_data_path)
        Links(params.new_postgresql_log_path, params.postgresql_log_path)
Example #23
    def createdbsuperuser(self):
        Logger.info("create super user for postgresql --")
        create_superuser_command = format("{create_superuser_command}")
        utils().exe(create_superuser_command)

        Logger.info("change password for superuser")
        change_passwd_command = format(
            "psql -h {pg_host_name} -p {pgxx_postgre_port} -U {pgxx_db_user}  "
            "-c \"ALTER USER postgres WITH PASSWORD '{pgxx_db_passwd}';\""
        )
        utils().exe(change_passwd_command)
Example #24
  def install(self, env):
    import params
    self.install_packages(env)
    Logger.info("init datanode")
    #su pgxz -c '/usr/local/pgxz/bin/initdb -D /usr/local/pgxz/nodes/datanode --nodename datanode'
    utils().exe(params.datanode_install)
    utils().check_install(params.datanode_path)

    Links(params.new_pgxz_install_path, params.pgxz_install_path)
    Links(params.new_pgxz_conf_path_datanode, params.pgxz_conf_path_datanode)
    Links(params.new_pgxz_data_path_datanode, params.pgxz_data_path_datanode)
Example #25
    def start(self, env):
        import os
        import params

        env.set_params(params)
        self.configure(env)

        Logger.info("start nginx")
        utils().exe(params.nginx_start_command)

        Links(params.new_nginx_log_path, params.nginx_log_path)
Example #26
  def install(self, env):
    import params
    self.install_packages(env)
    Logger.info("init gtm")
    #su pgxz -c '/usr/local/pgxz/bin/initgtm -Z gtm -D /usr/local/pgxz/nodes/gtm'
    utils().exe(params.gtm_install)
    utils().check_install(params.gtm_path)

    Links(params.new_pgxz_install_path, params.pgxz_install_path)
    Links(params.new_pgxz_conf_path_gtm, params.pgxz_conf_path_gtm)
    Links(params.new_pgxz_data_path_gtm, params.pgxz_data_path_gtm)
Example #27
    def install(self, env):
        import params
        env.set_params(params)

        self.install_packages(env)

        Logger.info("first start etcd")
        cmd = params.etcd_start_cmd
        cmd = cmd + " -initial-cluster-state new >>/gaia/etcd/backup/etcd.log  2>&1 &"
        cmd = "su gaia -c '{}'".format(cmd)
        utils().exe(cmd)
Example #28
  def install(self, env):
    import params
    self.install_packages(env)
    Logger.info("init coordinator")
    #su pgxz -c '/usr/local/pgxz/bin/initdb -D /usr/local/pgxz/nodes/coordinator --nodename coordinator'
    utils().exe(params.coordinator_install)
    utils().check_install(params.coordinator_path)

    Links(params.new_pgxz_install_path, params.pgxz_install_path)
    Links(params.new_pgxz_conf_path_coordinator, params.pgxz_conf_path_coordinator)
    Links(params.new_pgxz_data_path_coordinator, params.pgxz_data_path_coordinator)
Example #29
    def install(self, env):
        import params
        env.set_params(params)

        Logger.info("install mysql")
        excludePackage = ['dse']
        self.install_packages(env,excludePackage)
        utils().generate_db_script(env)
        
        #init db
        cmd = params.mysql_init_command
        utils().exe(cmd)
Example #30
  def install(self, env):
    Logger.info("install Pg")
    excludePackage = ["pgxzm-center", "pgxzm-agent", "pgxzm-cgi", "pgxzm-web"]
    self.install_packages(env,excludePackage)

    Logger.info("init Pg")
    import params
    utils().exe(params.pg_init)

    Links(params.new_pgxzm_install_path, params.pgxzm_install_path)
    Links(params.new_pgxzm_log_path_pgsql, params.pgxzm_log_path_pgsql)
    Links(params.new_pgxzm_data_path_pgsql, params.pgxzm_data_path_pgsql)
    Links(params.new_pgxzm_conf_path_pgsql, params.pgxzm_conf_path_pgsql)
Example #31
 def isBuggyCommit(self, commit):
     # res = re.search(r'\b(bug|fix|issue|error|correct|proper|deprecat|broke|optimize|patch|solve|slow|obsolete|vulnerab|debug|perf|memory|minor|wart|better|complex|break|investigat|compile|defect|inconsist|crash|problem|resol|#)\b', utils().stemming(commit), re.IGNORECASE)
     res = re.search(
         r'\b(bug|fix|issue|correct|broke|patch|solve|vulnerab|resol)\b',
         utils().stemming(commit), re.IGNORECASE)
     if res is not None:
         return True
     return False
Example #32
def trainer(network, number_of_images):

    cross_entropy=tf.nn.softmax_cross_entropy_with_logits_v2(logits=network,labels=labels_ph)
    cost=tf.reduce_mean(cross_entropy)
    tf.summary.scalar("cost", cost)
    optimizer=tf.train.AdamOptimizer().minimize(cost)
    #print(optimizer)
    session.run(tf.global_variables_initializer())
    writer = tf.summary.FileWriter(model_save_name, graph=tf.get_default_graph())
    merged = tf.summary.merge_all()
    saver = tf.train.Saver(max_to_keep=4)
    counter=0
    for epoch in range(epochs):
        tools = utils()
        for batch in range(int(number_of_images / batch_size)):
            counter+=1
            images, labels = tools.batch_dispatch()
            if images is None:
                break
            loss,summary = session.run([cost,merged], feed_dict={images_ph: images, labels_ph: labels})
            print('loss', loss)
            session.run(optimizer, feed_dict={images_ph: images, labels_ph: labels})

            print('Epoch number ', epoch, 'batch', batch, 'complete')
            writer.add_summary(summary,counter)
        saver.save(session, model_save_name)
Example #33
 def __init__(self):
     #Set up MySQL cursor
     self.debug = 1
     self.utils = utils()
     #self.logger = logger()
     self.reddit = self.utils.connect_to_reddit()
     self.cursor = self.utils.get_mysql_cursor()
Example #34
 def receiveMessage(self, request):
     print('Message received: ' + str(request))
     return_values = {}
     return_values['from'] = request.values.get('From', None)
     return_values['to'] = request.values.get('To', None)
     return_values['cost'] = request.values.get('Price', None)
     return_values['currency'] = request.values.get('Price_unit', None)
     return_values['content'] = request.values.get('Body', None)
     return_values['created'] = request.values.get('Date_created', None)
     return_values['sent'] = request.values.get('Date_sent', None)
     return_values['updated'] = request.values.get('Date_updated', None)
     return_values['status'] = request.values.get('Status', None)
     return_values['error_code'] = request.values.get('Error_code', None)
     return_values['error_message'] = request.values.get(
         'Error_message', None)
     return_values['from_city'] = request.values.get('FromCity', None)
     return_values['from_zip'] = request.values.get('FromZip', None)
     myUtils = utils.utils()
     return_values[
         'campaign_identifier'] = myUtils.getCampaignIdentifierFromNumber(
             str(return_values['from']))
     myUtils.setupNLTK()
     phone_numbers = myUtils.extract_phone_numbers(return_values['content'])
     if len(phone_numbers) > 0:
         return_values['voted_for'] = "+" + str(phone_numbers[0])
     else:
         return_values['voted_for'] = 'None'
     return_values['age'] = request.values.get('age', 0)
     print('Return values: ' + str(return_values))
     handler = messaging_handler.messaging_handler()
     handler.receiveMessage(return_values,
                            return_values['campaign_identifier'])
Example #35
    def querySummary(self, sf, url):
        """
        This will be replaced by a database call to the summary-master table
        """
        uu = utils()
        try:
            sumbyUrl = sf.groupby('url')
            ssm = sumbyUrl.get_group(url)['summary'].values[0]
            ssm = ''.join(s for s in ssm)
            ssm = genu.any2utf8(ssm)
            return sf, uu.cleanSummary(ssm)

        except KeyError:
            """
            when a KeyError happens we can do as follows:
            either fetch the data at run time and insert it in the table,
            or
            we can for now ignore those urls and send them to a job which
            will take the urls in a queue and fetch & insert their summary in a nightly job

            """

            print 'url summary not in table; create one and insert it in the table as well'
            newSum = ''.join(w for w in self.getSummary(url))
            newSum = genu.any2utf8(newSum)
            newSum = uu.cleanSummary(newSum)

            #write program to insert into summary data frame
            sf2 = pd.DataFrame([[url,newSum]], columns=['url','summary'])
            sf = sf.append(sf2, ignore_index=True)
            sf.to_pickle(path+'summary.pkl')
            sf = pd.io.pickle.read_pickle(path+'summary.pkl')
            return sf, newSum
Example #36
    def __init__(self, args, sess):
        self.sess = sess
        self.word_embedding_dim = 300
        self.num_epochs = 100
        self.num_steps = args.num_steps
        self.latent_dim = args.latent_dim
        self.sequence_length = args.sequence_length
        self.batch_size = args.batch_size
        self.saving_step = args.saving_step
        self.feed_previous = args.feed_previous
        self.model_dir = args.model_dir
        self.data_dir = args.data_dir
        self.load_model = args.load
        self.lstm_length = [self.sequence_length + 1] * self.batch_size
        self.utils = utils(args)
        self.vocab_size = len(self.utils.word_id_dict)
        self.KL_annealing = args.KL_annealing

        self.EOS = 0
        self.BOS = 1
        self.log_dir = os.path.join(self.model_dir, 'log/')
        self.build_graph()

        self.saver = tf.train.Saver(max_to_keep=2)
        self.model_path = os.path.join(self.model_dir,
                                       'model_{m_type}'.format(m_type='vrnn'))
Example #37
def main():
    util = utils.utils()
    # loading the MITRE ATT&CK enterprise matrix from file.

    mitre_attack_matrix = json.loads(util.read_attack_matrix())['objects']

    parse_mitre_attack_tactics(mitre_attack_matrix)
    parse_mitre_attack_techniques(mitre_attack_matrix)
Example #38
 def __init__(self, _num_threads, _log_filename, _parent, \
         _mongodb_host="localhost", _mongodb_port=27017, _mongodb_db="msens_db"):
     self.num_threads = _num_threads
     self.parent = weakref.ref(_parent)
     self.mdb = mongodb(_mongodb_db, _mongodb_host, _mongodb_port)
     self.utils = utils()
     self.logger = _parent.logger
     self.logger.write_log("info", "Base processing object initialized")
Example #39
 def __init__(self,iface,CmdLineArray):
     QtGui.QDialog.__init__(self)
     self.iface=iface
     self.Utilities=utils.utils(iface)
     self.ui=Ui_RunCommandsDialog()
     self.InptCmdLineArray=CmdLineArray
     self.OutptCmdLineArray=[]
     self.ui.setupUi(self)
     self.initFields()
Example #40
def run():
    util_class = utils.utils()
    epochs = 8
    batch_size = 1
    lstm_batch = LSTM_batchy(util_class.n_letters, util_class.n_categories)
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    gepochs_test_acc = train_predict(
        util_class, lstm_batch, device, util_class.category_lines, batch_size, epochs
    )
Example #41
def get_r2_data():
    res = []
    myutil = utils.utils("myspider", "world_epidemic", "root", "123456")
    data = myutil.get_r2_data()
    for key, value in data.items():
        res.append({"name": key, "value": value})
    # China's total data still needs to be added as well
    china = int(u.get_c1_data()[0])
    res.append({"name": "China", "value": china})
    return jsonify({"data": res})
Example #42
 def __init__(self, data_set):
     self.data_set = data_set
     self.num_of_classes = 84
     self.optimizer = 'Adadelta'
     self.regParam = 0.001
     self.height = 32
     self.width = 128
     self.batch_size = 32
     self.loss = ctc(32, 10)
     self.util = utils(32, 1, CHAR_VECTOR, 32, 128)
     assure_path_exists("%s_pic/" % self.data_set)
     self._build_model()
Example #43
def trainer(network, number_of_images):
    """
    :param network: import initialized network architecture
    :param number_of_images: literally number of images
    :return: train the network using pre-processed image data and save models periodically
    """

    # find error of nodes
    cross_entropy = tf.nn.softmax_cross_entropy_with_logits_v2(
        logits=network, labels=labels_ph)

    # average errors of all nodes
    cost = tf.reduce_mean(cross_entropy)
    tf.summary.scalar("cost", cost)  #for tensorboard visualisation

    # backpropagate using AdamOptimizer.
    optimizer = tf.train.AdamOptimizer().minimize(cost)

    #print(optimizer)
    session.run(tf.global_variables_initializer())

    writer = tf.summary.FileWriter(model_save_name,
                                   graph=tf.get_default_graph())
    merged = tf.summary.merge_all()
    saver = tf.train.Saver(max_to_keep=4)
    counter = 0

    # go through epochs
    for epoch in range(epochs):
        tools = utils()
        print(number_of_images)
        #print(int(number_of_images / batch_size))
        for batch in range(int(number_of_images / batch_size)):
            counter += 1
            images, labels = tools.batch_dispatch()
            if images is None:
                break
            loss, summary = session.run([cost, merged],
                                        feed_dict={
                                            images_ph: images,
                                            labels_ph: labels
                                        })
            print('loss', loss)
            session.run(optimizer,
                        feed_dict={
                            images_ph: images,
                            labels_ph: labels
                        })
            print('Epoch number ', epoch, 'batch', batch, 'complete')
            writer.add_summary(summary, counter)
        #print(os.path.join(model_save_name))
        saver.save(session, os.path.join("./" + model_save_name))
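trainer() relies on module-level names defined elsewhere in that project: images_ph, labels_ph, session, epochs, batch_size and model_save_name. A rough TF 1.x-style setup with assumed shapes, shown only to make those free variables explicit, might be:

import tensorflow as tf  # TF 1.x API, matching the calls used above

# Assumed dimensions and hyperparameters; the original project defines its own.
image_height, image_width, channels, num_classes = 64, 64, 3, 10
epochs = 10
batch_size = 32
model_save_name = "model_checkpoint"

images_ph = tf.placeholder(tf.float32, [None, image_height, image_width, channels], name="images")
labels_ph = tf.placeholder(tf.float32, [None, num_classes], name="labels")
session = tf.Session()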
Example #44
def prepareData(malFile, norFile):
    a = utils()
    data = a.readFile(malFile)
    x = []
    y = []
    for item in data:
        x.append(data[item])
        y.append(0)
    data = a.readFile(norFile)
    for item in data:
        x.append(data[item])
        y.append(1)
    return x, y
Example #45
 def get_user_node_degree(self):
     graph_util = utils.utils()
     degree,G = graph_util.create_graph(self.create_adjacency_matrix())
     user_degree = {}
     user_mapping = self.user_map.values.tolist()
     for i in range(len(user_mapping)):
         logon  = user_mapping[i][1]
         user_name = user_mapping[i][0]
         user_id = self.user_dict[logon]
         if user_id not in degree.keys():
             continue
         user_degree[user_name] = degree[user_id]
     return user_degree
     
Example #46
    def __init__(self, _use_directory_watcher=False, _directory_name=""):
        print 'app_base __init__'
        #super(app_base, self).__init__()
        if _use_directory_watcher == True and _directory_name == "":
            raise ValueError("_directory_name parameter is not set")

        self.utils = utils()

        if _use_directory_watcher == True:
            self.dw = directory_watcher()
            self.dw.set_directory_watcher_option(_directory_name)
        else:
            self.dw = None

        self.exit_directory_watcher_thread = False
Example #47
    def __init__(self, args, sess):

        # tf.Session
        self.sess = sess

        # test file
        self.test_file = args.test_file
        self.test_file_output = args.test_file_output
        self.utils = utils(args)
        self.encoder = encoder(args, self.sess, self.utils)
        self.decoder = decoder(args, self.sess, self.utils)
        self.discrim = discrim(args, self.sess, self.utils, self.decoder)
        self.batch_size = args.batch_size
        self.sequence_length = args.sequence_length
        self.move_step = 400
Example #48
    def __init__(self, args, sess):

        # tf.Session
        self.sess = sess

        # test file
        self.test_file = os.path.join(args.data_dir, 'source_test_chatbot')
        self.test_output = args.out
        self.utils = utils(args)
        self.encoder = encoder(args, self.sess, self.utils)
        self.decoder = decoder(args, self.sess, self.utils)
        self.discrim = discrim(args, self.sess, self.utils, self.decoder)
        self.batch_size = args.batch_size
        self.sequence_length = args.sequence_length
        self.move_step = 50
        self.pos = args.pos
        self.g = args.g
        self.l2 = args.l2
Example #49
 def get_user_node_degree(self):
     graph_util = utils.utils()
     print("starting graph")
     connection_matrix, uniq_users, user_dict = self.create_adjacency_matrix()
     print("done graph")
     degree, G = graph_util.create_graph(connection_matrix)
     print("getting degree")
     user_degree = {}
     for i in range(len(uniq_users)):
         print(i)
         user_name = uniq_users[i]
         user_id = user_dict[user_name]
         if user_id not in degree.keys():
             continue
         user_degree[user_name] = degree[user_id]
     print("Done everything")
     self.repo_obj.repo_remove()
     return user_degree
Example #50
    def __init__(self, **kwargs):
        super(PlungeApp, self).__init__(**kwargs)
        self.isPopup = False
        self.use_kivy_settings = False
        self.settings_cls = overrides.SettingsWithCloseButton
        self.utils = utils.utils(self)
        self.exchanges = [
            'ccedk', 'poloniex', 'bitcoincoid', 'bter', 'bittrex'
        ]
        self.active_exchanges = []
        self.currencies = ['btc', 'ltc', 'eur', 'usd', 'ppc']
        self.active_currencies = []
        self.client_running = False
        self.is_min = False

        if not os.path.isdir('logs'):
            os.makedirs('logs')
        if not os.path.isfile('api_keys.json'):
            api_keys = []
            with open('api_keys.json', 'a+') as api_file:
                api_file.write(json.dumps(api_keys))
            api_file.close()
        if not os.path.isfile('user_data.json'):
            user_data = {exchange: [] for exchange in self.exchanges}
            with open('user_data.json', 'a+') as user_file:
                user_file.write(json.dumps(user_data))
            user_file.close()
            self.first_run = True
        self.logger = logging.getLogger('Plunge')
        self.logger.setLevel(logging.DEBUG)
        fh = logging.FileHandler('logs/%s_%d.log' % ('Plunge', time.time()))
        fh.setLevel(logging.DEBUG)
        ch = logging.StreamHandler()
        ch.setLevel(logging.INFO)
        formatter = logging.Formatter(
            fmt='%(asctime)s %(levelname)s: %(message)s',
            datefmt="%Y/%m/%d-%H:%M:%S")
        fh.setFormatter(formatter)
        ch.setFormatter(formatter)
        self.logger.addHandler(fh)
        self.logger.addHandler(ch)
        return
Example #51
 def get_commits(self):
     _commits = self.get_current_commit_objects()
     commits = []
     for commit in _commits:
         commit_id = commit.id.hex
         commit_message = commit.message
         res = re.search(
             r'\b(bug|fix|issue|correct|broke|patch|solve|vulnerab|resol)\b',
             utils().stemming(commit_message), re.IGNORECASE)
         if res is not None:
             commits_buggy = 1
         else:
             commits_buggy = 0
         if len(commit.parent_ids) == 0:
             commit_parent = None
         else:
             commit_parent = commit.parent_ids[0].hex
         commits.append(
             [commit_id, commit_message, commit_parent, commits_buggy])
     return commits
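The commit objects iterated here expose .id.hex, .message and .parent_ids, which matches pygit2's Commit interface. Assuming that library, get_current_commit_objects() could plausibly be backed by something like this sketch (repo_path and the sort order are assumptions):

import pygit2

def get_current_commit_objects(repo_path="."):
    # Walk the history reachable from HEAD, newest first.
    repo = pygit2.Repository(repo_path)
    return list(repo.walk(repo.head.target, pygit2.GIT_SORT_TIME))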
Example #52
def main():
    u = utils()
    v = vitals(u)
    m = movementController(v, u)
    f = DSLFunctions(m, u)

    missions = missionsList(f).getMissionSet()
    for mission in missions:
        for movement in missions[mission]:
            e = execMovements(movement["moves"])
            c = checkConditions(movement["conditions"])
            behaviors = [e, c]
            Thread(target=scheduler, args=[behaviors]).start()
            Thread(target=runner, args=[behaviors]).start()

            while not behaviors[0].isDone and not behaviors[1].isDone:
                sleep(0.5)

    print("Shutting down...")
    return 0
Example #53
def trainer(network, number_of_images):
    #find error like squared error but better
    cross_entropy = tf.nn.softmax_cross_entropy_with_logits_v2(
        logits=network, labels=labels_ph)

    # now minimize the above error
    #calculate the total mean of all the errors from all the nodes
    cost = tf.reduce_mean(cross_entropy)
    tf.summary.scalar("cost", cost)  #for tensorboard visualisation

    #Now backpropagate to minimise the cost in the network.
    optimizer = tf.train.AdamOptimizer().minimize(cost)
    #print(optimizer)
    session.run(tf.global_variables_initializer())
    writer = tf.summary.FileWriter(model_save_name,
                                   graph=tf.get_default_graph())
    merged = tf.summary.merge_all()
    saver = tf.train.Saver(max_to_keep=4)
    counter = 0
    for epoch in range(epochs):
        tools = utils()
        for batch in range(int(number_of_images / batch_size)):
            counter += 1
            images, labels = tools.batch_dispatch()
            if images is None:
                break
            loss, summary = session.run([cost, merged],
                                        feed_dict={
                                            images_ph: images,
                                            labels_ph: labels
                                        })
            print('loss', loss)
            session.run(optimizer,
                        feed_dict={
                            images_ph: images,
                            labels_ph: labels
                        })

            print('Epoch number ', epoch, 'batch', batch, 'complete')
            writer.add_summary(summary, counter)
        saver.save(session, model_save_name)
Example #54
    def __init__(self, args):
        self.mode = args.mode
        self.model_dir = args.model_dir
        self.model_path = os.path.join(self.model_dir, 'model')
        self.data_dir = args.data_dir
        self.model_type = args.model_type
        self.units = args.units
        self.filter = args.filter
        self.kernel = args.kernel
        self.load = args.load
        self.print_step = args.print_step
        self.save_step = args.save_step
        self.max_step = args.max_step

        self.utils = utils()
        self.xv_size = self.utils.xv_size
        self.dp = args.dp

        self.sess = tf.Session()
        self.build(self.model_type)
        self.saver = tf.train.Saver(max_to_keep=10)
Example #55
def main():
  # initialize info class
  infoObj = utils.utils()
  infoObj.printRepoClassList()
  
  
  # prompt user
  print("\n0 = exit\n1 = info\n2 = create new class\n3 = delete class\n4 = test compile")
  userInput = input("\nEnter your value: ") 

  # prompt loop
  while userInput != 0:
    if userInput == 0: #exit
      break
    elif userInput == 1: #info
      print("\n0 = exit\n1 = info\n2 = create new class\n3 = delete class\n4 = test compile")
      # print info
      infoObj.printRepoClassList()
      userInput = input("\nEnter your value: ")
    elif userInput == 2: # create new class
      className = raw_input("\nClassName you want to create (minus the extension) e.g. FooClass not FooClass.cpp: ")
      copyObj = cFiles.copyFiles(className)
      compObj = compilable.makeCompilable(className)
      # Copy and rename template files to src and test directory 
      copyObj.moveFiles()
      # Update the src header and test file such that it will compile 
      compObj.makeClassCompilable()
      userInput = input("\nEnter your value: ")
    elif userInput == 3: # delete class
      className = raw_input("\nClassName you want to delete (minus the extension) e.g. FooClass not FooClass.cpp: ")
      print("deleting class " + className)
      infoObj.deleteClass(className)
      userInput = input("\nEnter your value: ")
    elif userInput == 4: # test compile
      # Test compile 
      rc = subprocess.call("./runCodeQsTests.sh")
      userInput = input("\nEnter your value: ")
    else:
      print("Invalid input")
      userInput = input("\nEnter your value: ")
Example #56
    def __init__(self,
                 language_name_dict,
                 dataset_type,
                 train_ratio,
                 transform=None):
        self.keys = language_name_dict.keys()
        self.dataset_type = dataset_type
        self.train_ratio = train_ratio
        self.transform = transform
        self.utils = utils.utils()

        self.lines = self.utils.category_lines["st"]
        random.shuffle(self.lines)

        self.split_index = int(len(self.lines) * train_ratio)
        self.test_split_index = int(len(self.lines) * (1 + train_ratio) / 2)

        self.train, self.val, self.test = (
            self.lines[:self.split_index],
            self.lines[self.split_index:self.test_split_index],
            self.lines[self.test_split_index:],
        )
Example #57
    def __init__(self, data_set, trainOrTest):
        self.data_set = data_set

        if self.data_set == 'crnn' or self.data_set == 'small':
            data_size = 100
            input_height, input_width = 32, 128
            train_label_path = 'crnndata/ICDAR_C1_training/gt.csv'
            train_root_dir = 'crnndata/ICDAR_C1_training'
            test_label_path = 'crnndata/ICDAR_C1_testing/gt.csv'
            test_root_dir = 'crnndata/ICDAR_C1_testing'
            utilities = utils(32, data_size, CHAR_VECTOR, input_height,
                              input_width)
            x, y, maxlen = utilities.image2array(test_label_path,
                                                 test_root_dir)

        else:
            print("Unsupported dataset %s. Try 'mnist' or 'cifar10'." %
                  data_set)
            exit()

        self.x = x
        self.y = y
Example #58
    def __init__(self, args, sess):
        self.sess = sess
        self.word_embedding_dim = 300
        self.drop_rate = 0.1
        self.latent_dim = args.latent_dim
        self.sequence_length = args.sequence_length
        self.batch_size = args.batch_size
        self.printing_step = args.printing_step
        self.saving_step = args.saving_step
        self.num_step = args.num_step
        self.model_dir = args.model_dir
        self.load = args.load
        self.lstm_length = [self.sequence_length + 1] * self.batch_size
        self.utils = utils(args)
        self.vocab_size = len(self.utils.word_id_dict)

        self.BOS = self.utils.BOS_id
        self.EOS = self.utils.EOS_id
        self.log_dir = os.path.join(self.model_dir, 'log/')
        self.build_graph()

        self.saver = tf.train.Saver(max_to_keep=10)
        self.model_path = os.path.join(self.model_dir, 'model')
Example #59
import urllib
import json
from utils import utils
from decimal import *

grlcpriceurl = 'https://api.coinmarketcap.com/v1/ticker/garlicoin/'
dashpriceurl = 'https://api.coinmarketcap.com/v1/ticker/dash/'

utils = utils()
cursor = utils.get_mysql_cursor()

sql = "TRUNCATE TABLE rates"
cursor.execute(sql)

response = urllib.urlopen(grlcpriceurl)
data = json.loads(response.read())
grlcprice = round(Decimal(data[0]['price_usd']),8)

response = urllib.urlopen(dashpriceurl)
data = json.loads(response.read())
dashprice = round(Decimal(data[0]['price_usd']),8)

sql = "INSERT INTO rates (pair,rate) VALUES (%s, %s)"
pair = "GRLC/DASH"
rate = grlcprice/dashprice
cursor.execute(sql, (pair,rate,))

pair = "DASH/GRLC"
rate = dashprice/grlcprice
cursor.execute(sql, (pair,rate,))
Example #60
def scale(image, target):
    threshold = -3.9
    bg = image[image < threshold].mean()
    image -= bg

    fg_image = image.mean()
    fg_target = target.mean()

    if fg_image != 0:
        ratio = fg_target / fg_image
        image *= ratio
    return image


if __name__ == "__main__":
    ut = utils.utils()
    ut.init_file_directory()

    # patch_validation = ut.normalize(ut.get_validation_image(ut.patch_height, ut.patch_width, 1))
    # output = load_graph(patch_validation)
    # image = output.reshape([ut.patch_height, ut.patch_width])

    normalized_full_low_dose_dir = ut.OUTPUT_DIR + "\\LowDose\\"
    normalized_full_normal_dose_dir = ut.OUTPUT_DIR + "\\NormalDose\\"
    output_dir = ut.OUTPUT_DIR + "\\Results\\"
    if not os.path.exists(normalized_full_low_dose_dir):
        os.makedirs(normalized_full_low_dose_dir)
    if not os.path.exists(normalized_full_normal_dose_dir):
        os.makedirs(normalized_full_normal_dose_dir)
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)