Code Example #1
File: common.py  Project: fkpp/ambari
def __update_sysctl_file_suse():
  """
  Updates /etc/sysctl.conf file with the HAWQ parameters on SUSE.
  """
  # Backup file
  backup_file_name = hawq_constants.sysctl_backup_file.format(str(int(time.time())))
  try:
    # Generate file with kernel parameters needed by hawq to temp file
    File(hawq_constants.hawq_sysctl_tmp_file, content=__convert_sysctl_dict_to_text(), owner=hawq_constants.hawq_user,
        group=hawq_constants.hawq_group)

    sysctl_file_dict = utils.read_file_to_dict(hawq_constants.sysctl_suse_file)
    sysctl_file_dict_original = sysctl_file_dict.copy()
    hawq_sysctl_dict = utils.read_file_to_dict(hawq_constants.hawq_sysctl_tmp_file)

    # Merge common system file with hawq specific file
    sysctl_file_dict.update(hawq_sysctl_dict)

    if sysctl_file_dict_original != sysctl_file_dict:
      # Backup file
      Execute("cp {0} {1}".format(hawq_constants.sysctl_suse_file, backup_file_name), timeout=hawq_constants.default_exec_timeout)
      # Write merged properties to file
      utils.write_dict_to_file(sysctl_file_dict, hawq_constants.sysctl_suse_file)
      # Reload kernel sysctl parameters from /etc/sysctl.conf
      Execute("sysctl -e -p", timeout=hawq_constants.default_exec_timeout)

  except Exception as e:
    Logger.error("Error occurred while updating sysctl.conf file, reverting the contents" + str(e))
    Execute("cp {0} {1}".format(hawq_constants.sysctl_suse_file, hawq_constants.hawq_sysctl_tmp_file))
    Execute("mv {0} {1}".format(backup_file_name, hawq_constants.sysctl_suse_file), timeout=hawq_constants.default_exec_timeout)
    Logger.error("Please execute `sysctl -e -p` on the command line manually to reload the contents of file {0}".format(
      hawq_constants.hawq_sysctl_tmp_file))
    raise Fail("Failed to update sysctl.conf file ")
Code Example #2
    def load_state(self, filename=None):
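        """
        Restore device state (power flag, mode, colour fields and group name)
        from filename (or self.loadfile) via read_file_to_dict, then re-apply
        the stored mode.
        """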
        assert self.loadfile is not None or filename is not None
        if filename is None:
            filename = self.loadfile

        d = read_file_to_dict(filename)

        self.on = bool(d['on'])
        self.mode = yl.PowerMode(int(d['mode']))
        self.h = int(d['h'])
        self.s = int(d['s'])
        self.brightness = int(d['brightness'])
        self.degrees = int(d['degrees'])
        self.r = int(d['r'])
        self.g = int(d['g'])
        self.b = int(d['b'])
        self.group_name = d['group_name']

        self.set_mode(self.mode)
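
The keys read above imply a simple key/value state file. A purely hypothetical example of such a file (the format is assumed from the read_file_to_dict call, not taken from the project):

on=1
mode=2
h=120
s=80
brightness=60
degrees=2700
r=255
g=180
b=120
group_name=bedroom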
Code Example #3
def main():
    logger.basicConfig(level="INFO",
                       filename="F:/tmp/custom.log",
                       format="%(asctime)s %(message)s")

    try:
        print("ok")
        extract_convert_lda_input("F:/tmp/test.txt")
        #extract_fields_by_r1("F:/tmp/full_en3.v.csv",1)
        extract_hashtag_usage("F:/tmp/full_en3.csv")
        #create_ml_r1_file_read_line()
        users_stance = utils.read_file_to_dict("F:/tmp/merged_users.csv", "~")
        #unique_days = extract_daily_involvement_of_prev_calculated_user_stances("F:/tmp/merged_tweets.csv", users_stance)
        #write_dict_to_file("F:/tmp/unique_days.csv",unique_days)
        #filename = "merged_RB_MLMA_out.csv"
        #filename = "F:/tmp/test.txt"
        #users_stances = pandas_users_stances("F:/tmp/full_en3.csv_out.csv")
        #write_dict_to_file("F:/tmp/full_en3_rule_based_out.csv", users_stances)
        #dict = extract_daily_involvement_of_prev_calculated_user_stances(filename, users_stances)
        #write_text_list_to_file(filename+"_out.csv", dict)
        #create_ml_r1_file_read_line()
        # analyze_group_by_influence("F:/tmp/impact.csv")
        # analyze_duplicate_tweets("F:/tmp/full_features.csv")
        # extract_fields_by_r1(globals.INPUT_FILE_NAME_RB, 2)
        # pandas_extract_tweet_text_by_topic_label_random_n_records("F:/tmp/full_features.csv",5000, 1)
        # pandas_extract_tweet_text_by_topic_label_random_n_records("F:/tmp/full_features.csv",5000, 2)
        # extract_neutrals("F:/tmp/full_features.csvl_out.csv")
        # r1_stats()
        # rule_based_user_stance()
        # create_ml_r1_file_read_line()
        # filename = "C:/mongo/bin/mongo_export_latest_best.csv"
        # filename = "F:tmp/test-1-out.txt"
        #filename_user_id_names = "C:/mongo/bin/user_id_name.csv"
        #filename_stance = "F:tmp/ml_stance.txt"
        ## filename = "F:tmp/bots-378k.csv"
        ## filename = "F:tmp/full_fields.csv"
        #filename = "F:/tmp/user_screen_names.csv"
        #filename_test = "F:tmp/ml_test.txt"
        #filename_ml = "F:tmp/pred_data.csv"
        #filename_write_bot = "F:tmp/bot-378k-wuserid"
        #filename_write = "F:/tmp/user_screen_names_out_2.csv"
        #filename_write_ml = "F:tmp/test-1-out.txt"
        #test = "C:/mongo/bin/tt2.csv"
        # dict = extract_daily_average_retweet_likes(filename)
        # users = group_users_by_posts(filename)
        # dict = extract_post_frequency(users)
        # write_text_list_to_file(filename_write, dict)

        # dict_user_names_ids = get_user_names_ids(filename_user_id_names)
        # dict_user_stances = get_user_id_stances(filename_stance)

        # add_stance_to_last_column_for_bots2(filename, filename_write, dict_user_names_ids, dict_user_stances)
        # create_ml_p1_file(filename, filename_ml, filename_write_ml)
        # dict = extract_tweet_text(filename)
        # count_total_topic_labels(filename)
        # extract_desired_field_distinct_user(filename, 1)
        # texts_1,texts_2 = extract_tweet_text_by_topic_label_random_n_records(filename, 1000)
        # dict = extract_daily_polarized_tweets(test)
        # extract_number_of_tweet_ml_labels_topics(filename)
        # update_mongo_with_ml_tweet_labels(filename_ml)
        #users_total_topic_counts = extract_users_total_topic_counts("F:/tmp/test.txt")
        print("ok")
        # users_stances = extract_users_stances(users_total_topic_counts, False)
        # extract_write_tweet_text_by_topic_label(filename, filename_write, "0")
        # logger.info_pro_remain(users_stances)
        # dict = extract_tweet_text_discover_neutrals("C:/mongo/bin/neutral.csv", "C:/mongo/bin/full_features.csv")
        # dict = extract_daily_involvement_of_prev_calculated_user_stances(filename, users_stances)
        # write_text_list_to_file(filename_write,dict)
        # sorted_hashtags = extract_hashtag_usage(filename)
        # write_list_to_file(filename_write, sorted_hashtags)
        # write_dict_to_file(filename_write, dict)
        # filename_write_1 = filename_write + '1'
        # filename_write_2 = filename_write + '2'
        # write_dict_to_file(filename_write_0, texts_0)
        # write_dict_to_file(filename_write_1, texts_1)
        # write_dict_to_file(filename_write_2, texts_2)

    except Exception as ex:
        logger.info(ex)
        logger.info(traceback.format_exc())
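
Here utils.read_file_to_dict is called with a second argument, "~", which suggests the helper also accepts a field delimiter. Extending the illustrative sketch from Example #1 under that assumption (again, not the project's actual implementation):

def read_file_to_dict(file_name, delimiter="="):
    # Illustrative only: split each non-empty line on the first occurrence
    # of the delimiter, e.g. "some_user~AGAINST" -> {"some_user": "AGAINST"}.
    result = {}
    with open(file_name) as f:
        for line in f:
            line = line.strip()
            if not line:
                continue
            key, _, value = line.partition(delimiter)
            result[key.strip()] = value.strip()
    return result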