    def best_friends(self):
        """
        Conducts best friends analysis.

        Returns
        -------
        tuple of numpy.ndarray
            The unique commenters and their comment counts (also reused by
            the message length analysis), or None for anonymised data.

        """
        if self.username != "you":
            user_data = self._data_user()
            while self.username in user_data:
                user_data.remove(self.username)
            util.table(user_data, sort_by_likes=True)  #add graphs?
            users = np.unique(user_data, return_counts=True)
            #counts = list(np.unique(user_data, return_counts=True)[1])
            #util.graph_histogram_2(counts, "Number of Comments by a User", length=len(counts))

            # Keep only the users that appear in best_friend_list.
            bf = [[], []]
            users = (list(map(str, users[0])), list(users[1]))
            for i in range(len(users[0])):
                if users[0][i] in best_friend_list:
                    bf[0].append(users[0][i])
                    bf[1].append(users[1][i])
            util.table(bf, unique=True)

            return np.unique(user_data, return_counts=True)  #used in message length
        else:
            print("Error: This doesn't work with anonymised data!")
Example #2
    def months(self, year):
        """
        Conducts analysis on months.

        Parameters
        ----------
        year : string
            The data to match - can be YYYY or empty for all time.

        """
        time_data = self._data_time(slice(0, len(year)), year, slice(5, 7))
        if len(year) != 4:
            year = "all time"
        util.graph(time_data,
                   year,
                   "Month",
                   xtick_min=1,
                   xtick_max=13,
                   ylabel=self.ylabel)
        #util.graph_boxplot(time_data,year,"Month")
        util.table(time_data,
                   print_latex=self.print_latex,
                   print_lists=self.print_lists)
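
A hedged illustration of what the slice arguments appear to do: assuming _data_time keeps the timestamps whose slice(0, len(year)) characters equal year and returns the slice(5, 7) characters (the month), the equivalent plain-Python step would be:

# Sketch only: slice-based matching/extraction assumed for _data_time.
timestamps = ["2020-01-03T18:45:00", "2020-02-14T09:12:00", "2021-03-01T10:00:00"]
year = "2020"                     # an empty string would match everything
match, extract = slice(0, len(year)), slice(5, 7)
months = [int(t[extract]) for t in timestamps if t[match] == year]
print(months)  # [1, 2]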
Example #3
    def day_of_week(self, year_month):
        """
        Conducts analysis by day of week.

        Parameters
        ----------
        year_month : string
            The data to match - YYYY or YYYY-MM.

        """
        day_of_week = []
        time_data = self._data_time(slice(0, len(year_month)),
                                    year_month,
                                    slice(0, 10),
                                    return_ints=False)
        for date in time_data:
            day_datetime = datetime.date(int(date[:4]), int(date[5:7]),
                                         int(date[8:10]))
            day_of_week.append(day_datetime.isoweekday())
        util.graph_histogram(day_of_week,
                             year_month,
                             "Day of Week",
                             self.ylabel,
                             xtick_min=1,
                             xtick_max=8)  #,style = "xb")
        util.table(day_of_week,
                   print_latex=self.print_latex,
                   print_lists=self.print_lists)
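
For reference, a small sketch of the isoweekday() convention relied on above (Monday is 1, Sunday is 7):

import datetime

for d in ("2024-01-01", "2024-01-06", "2024-01-07"):  # a Monday, Saturday and Sunday
    day = datetime.date(int(d[:4]), int(d[5:7]), int(d[8:10]))
    print(d, day.isoweekday())  # prints 1, 6 and 7 respectively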
Example #4
    def top_words(self):
        """
        Prints a table of the most frequent words, excluding common stop words.
        """
        words = analysis_object._data()

        # words_to_remove.txt currently holds the top 100 words in the English language
        with open("words_to_remove.txt", "r") as f:
            words_to_remove = f.read().split()

        for word in words_to_remove:
            words = list(filter(word.__ne__, words))

        util.table(words, sort_by_likes=True, max_rows=100, sort="asc")
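
The filter(word.__ne__, words) idiom drops every occurrence of word; a quick standalone sketch with invented data, including the equivalent comprehension:

words = ["the", "cat", "the", "hat", "a", "cat"]
words_to_remove = ["the", "a"]

for word in words_to_remove:
    words = list(filter(word.__ne__, words))   # keep items != word
# equivalent: words = [w for w in words if w not in words_to_remove]
print(words)  # ['cat', 'hat', 'cat']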
Example #5
    def days_range(self, start_date, finish_date):
        """ 
        Conducts analysis on all days between start_date and finish_date inclusive.

        Parameters
        ----------
        start_date : string
            The date to match - YYYY-MM-DD.
        finish_date : string
            The date to match - YYYY-MM-DD.

        """
        time_data = []
        for year in range(int(start_date[:4]), int(finish_date[:4]) + 1):
            if year == int(start_date[:4]):
                start_month = int(start_date[5:7])
            else:
                start_month = 1
            if year == int(finish_date[:4]):
                finish_month = int(finish_date[5:7])
            else:
                finish_month = 12

            for month in range(start_month, finish_month + 1):
                year_month = str(year) + "-" + str(month).zfill(2)

                if year_month == start_date[0:7]:
                    start_day = int(start_date[8:10])
                else:
                    start_day = 1

                if year_month == finish_date[0:7]:
                    finish_day = int(finish_date[8:10])
                else:
                    finish_day = 31  # dates that don't exist simply match no data

                for day in range(start_day, finish_day + 1):
                    date = year_month + "-" + str(day).zfill(2)
                    time_data += self._data_time(slice(0, 10),
                                                 date,
                                                 slice(0, 10),
                                                 return_ints=False)

        util.graph(time_data,
                   start_date + " to " + finish_date,
                   "Day",
                   ylabel=self.ylabel)
        #util.graph_boxplot(time_data,start_date+" to "+finish_date,"Day")
        print("Between", start_date, "and", finish_date, "inclusive:")
        util.table(time_data,
                   print_latex=self.print_latex,
                   print_lists=self.print_lists)
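
A small sketch of the zero-padded date strings built in the loop above; f-string padding produces the same YYYY-MM-DD form, and a date that does not exist (e.g. 2021-02-31) simply never matches any timestamp:

year, month = 2021, 2
year_month = f"{year}-{month:02d}"
for day in range(27, 32):
    print(f"{year_month}-{day:02d}")
    # 2021-02-27 ... 2021-02-31 (the impossible dates match no data)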
Example #6
    def total_message_length(self):
        """
        Prints a table of the total message length per user, plus the subset
        of users in best_friend_list.
        """
        user_list = self._user_data_message_length()
        util.table(user_list, unique=True, sort_by_likes=True)

        bf = [[], []]
        #users = (list(map(str, users[0])), list(users[1]))
        for i in range(len(user_list[0])):
            if user_list[0][i] in best_friend_list:
                bf[0].append(user_list[0][i])
                bf[1].append(user_list[1][i])

        util.table(bf, unique=True)
Example #7
    def generate_report(self, standalone):
        """ Generates a report of the last scan. If standalone is True
        it will generate a report to print in a terminal. If it's False
        it will returns the counters of every problem. """

        # collect data
        corrupted = self.count_chunks(CHUNK_CORRUPTED)
        wrong_located = self.count_chunks(CHUNK_WRONG_LOCATED)
        entities_prob = self.count_chunks(CHUNK_TOO_MANY_ENTITIES)
        shared_prob = self.count_chunks(CHUNK_SHARED_OFFSET)
        total_chunks = self.count_chunks()

        too_small_region = self.count_regions(REGION_TOO_SMALL)
        unreadable_region = self.count_regions(REGION_UNREADABLE)
        total_regions = self.count_regions()

        if standalone:
            text = ""

            # Print all this info in a table format
            # chunks
            chunk_errors = ("Problem", "Corrupted", "Wrong l.", "Entities",
                            "Shared o.", "Total chunks")
            chunk_counters = ("Counts", corrupted, wrong_located,
                              entities_prob, shared_prob, total_chunks)
            table_data = []
            for i, j in zip(chunk_errors, chunk_counters):
                table_data.append([i, j])
            text += "\nChunk problems:"
            if corrupted or wrong_located or entities_prob or shared_prob:
                text += table(table_data)
            else:
                text += "\nNo problems found.\n"

            # regions
            text += "\n\nRegion problems:\n"
            region_errors = ("Problem", "Too small", "Unreadable",
                             "Total regions")
            region_counters = ("Counts", too_small_region, unreadable_region,
                               total_regions)
            table_data = []
            # compose the columns for the table
            for i, j in zip(region_errors, region_counters):
                table_data.append([i, j])
            if too_small_region or unreadable_region:
                text += table(table_data)
            else:
                text += "No problems found."

            return text
        else:
            return corrupted, wrong_located, entities_prob, shared_prob, total_chunks, too_small_region, unreadable_region, total_regions
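
A tiny sketch (invented counts) of how the header/counter pairs above become two-column rows; list(zip(...)) expresses the same pairing as the explicit loop:

chunk_errors = ("Problem", "Corrupted", "Wrong l.", "Entities", "Shared o.", "Total chunks")
chunk_counters = ("Counts", 3, 1, 0, 0, 1024)  # invented example values
table_data = [list(pair) for pair in zip(chunk_errors, chunk_counters)]
print(table_data)  # [['Problem', 'Counts'], ['Corrupted', 3], ['Wrong l.', 1], ...]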
Example #8
    def years(self):
        """
        Conducts analysis on years.
        """
        time_data = self._data_time(slice(10, 11), "T", slice(0, 4))
        util.graph(time_data,
                   "all time",
                   "Year",
                   xtick_min=self.min_year,
                   xtick_max=self.max_year + 1,
                   ylabel=self.ylabel)
        util.table(time_data,
                   print_latex=self.print_latex,
                   print_lists=self.print_lists)
Example #9
    def generate_report(self, standalone):
        """ Generates a report of the last scan. If standalone is True
        it will generate a report to print in a terminal. If it's False
        it will returns the counters of every problem. """

        # collect data
        corrupted = self.count_chunks(CHUNK_CORRUPTED)
        wrong_located = self.count_chunks(CHUNK_WRONG_LOCATED)
        entities_prob = self.count_chunks(CHUNK_TOO_MANY_ENTITIES)
        shared_prob = self.count_chunks(CHUNK_SHARED_OFFSET)
        total_chunks = self.count_chunks()

        too_small_region = self.count_regions(REGION_TOO_SMALL)
        unreadable_region = self.count_regions(REGION_UNREADABLE)
        total_regions = self.count_regions()
        
        if standalone:
            text = ""
        
            # Print all this info in a table format
            # chunks
            chunk_errors = ("Problem", "Corrupted", "Wrong l.", "Entities", "Shared o.", "Total chunks")
            chunk_counters = ("Counts",corrupted, wrong_located, entities_prob, shared_prob, total_chunks)
            table_data = []
            for i, j in zip(chunk_errors, chunk_counters):
                table_data.append([i,j])
            text += "\nChunk problems:"
            if corrupted or wrong_located or entities_prob or shared_prob:
                text += table(table_data)
            else:
                text += "\nNo problems found.\n"

            # regions
            text += "\n\nRegion problems:\n"
            region_errors = ("Problem","Too small","Unreadable","Total regions")
            region_counters = ("Counts", too_small_region,unreadable_region, total_regions)
            table_data = []
            # compose the columns for the table
            for i, j in zip(region_errors, region_counters):
                table_data.append([i,j])
            if too_small_region or unreadable_region:
                text += table(table_data)
            else:
                text += "No problems found."
                
            return text
        else:
            return corrupted, wrong_located, entities_prob, shared_prob, total_chunks, too_small_region, unreadable_region, total_regions
Example #10
    def days(self, year_month):
        """
        Conducts analysis on days.

        Parameters
        ----------
        year_month : string
            The data to match - YYYY-MM.

        """
        time_data = self._data_time(slice(0, 7), year_month, slice(8, 10))
        util.graph(time_data, year_month, "Day", ylabel=self.ylabel)
        util.graph_boxplot(time_data, year_month, "Day")
        util.table(time_data,
                   print_latex=self.print_latex,
                   print_lists=self.print_lists)
Example #11
    def worst_friends(self):
        """
        Conducts worst friends analysis (basically the same as best friends, but sorted low to high).

        Returns
        -------
        None.

        """
        if self.username != "you":
            user_data = self._data_user()
            while self.username in user_data:
                user_data.remove(self.username)
            util.table(user_data, sort_by_likes=True, sort="desc")
        else:
            print("Error: This doesn't work with anonymised data!")
Example #12
    def read_conf(cls):
        try:
            return util.table(cls.CONF_FILE, cls.__name__ + 'ConfEntry')
        except IOError as e:
            if e.errno != 2: raise  # errno 2 == ENOENT: a missing conf file is fine
        except:
            traceback.print_exc()
        return dict()
Example #13
    def breaks(self, date, min_break=datetime.timedelta(days=1)):
        """
        Conducts analysis on breaks I have taken from Instagram.

        Parameters
        ----------
        date : string
            The period the analysis should cover - empty string for all time.
        min_break : datetime.timedelta, optional
            The minimum break that should be included. The default is datetime.timedelta(days=1).

        """
        time_data = self._data_time(slice(0, len(date)),
                                    date,
                                    slice(0, 19),
                                    return_ints=False)
        time_data.sort(reverse=True)
        time_data = [
            datetime.datetime.strptime(datestamp, "%Y-%m-%dT%H:%M:%S")
            for datestamp in time_data
        ]

        max_break = datetime.timedelta(seconds=0)
        break_start = ""
        breaks = ([], [])

        for i in range(len(time_data) - 1):
            break_length = time_data[i] - time_data[i + 1]
            break_length_hours = break_length.days * 24 + break_length.seconds // 3600
            if break_length > min_break:
                breaks[0].append(time_data[i].strftime("%Y-%m-%d"))
                breaks[1].append(break_length_hours)

        breaks[0].reverse()
        breaks[1].reverse()
        util.graph(breaks,
                   util.date_to_time_period(date),
                   "day",
                   ylabel="hours break",
                   unique=True)
        util.table(breaks,
                   sort_by_likes=True,
                   unique=True,
                   print_latex=self.print_latex,
                   print_lists=self.print_lists)
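
A standalone sketch of the gap computation between consecutive newest-first timestamps; timedelta.total_seconds() // 3600 gives the same whole-hours figure as the days/seconds arithmetic above:

import datetime

stamps = ["2021-06-10T08:00:00", "2021-06-07T20:00:00", "2021-06-07T18:30:00"]  # newest first
times = [datetime.datetime.strptime(s, "%Y-%m-%dT%H:%M:%S") for s in stamps]
for newer, older in zip(times, times[1:]):
    gap = newer - older
    hours = gap.days * 24 + gap.seconds // 3600
    print(newer.strftime("%Y-%m-%d"), hours, int(gap.total_seconds() // 3600))
    # 2021-06-10 60 60, then 2021-06-07 1 1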
Example #14
    def hours_minutes(self, year_month_day):
        """
        Produces a table for Hours:Minutes.

        Parameters
        ----------
        year_month_day : string
            The data to match - can be YYYY-MM-DD, YYYY-MM or empty for all time.

        """
        time_data = self._data_time(slice(0, len(year_month_day)),
                                    year_month_day,
                                    slice(11, 16),
                                    return_ints=False)
        #util.graph(time_data,year_month_day,"Hour:Minute")
        util.table(time_data,
                   print_latex=self.print_latex,
                   print_lists=self.print_lists)
Example #15
    def data_sources(self):  #this is done in an awful way!
        def get_all_time_data(self, true_index, data_source, settings):
            settings[true_index] = True
            self.change_settings(settings=settings)
            return [data_source] * len(
                self._data_time(slice(10, 11), "T", slice(0, 4)))

        original_settings = self.read_settings()
        data_sources_array = []
        false_settings = [
            False, False, False, False, False, "single", False, False, False,
            False, False, False, False
        ]

        data_sources_array.extend(
            get_all_time_data(self, 0, "media_likes", false_settings.copy()))
        data_sources_array.extend(
            get_all_time_data(self, 1, "comment_likes", false_settings.copy()))
        data_sources_array.extend(
            get_all_time_data(self, 2, "comments", false_settings.copy()))
        data_sources_array.extend(
            get_all_time_data(self, 3, "stories", false_settings.copy()))
        data_sources_array.extend(
            get_all_time_data(self, 4, "posts - single",
                              false_settings.copy()))
        false_settings[5] = "multiple"
        data_sources_array.extend(
            get_all_time_data(self, 4, "posts - multiple",
                              false_settings.copy()))
        data_sources_array.extend(
            get_all_time_data(self, 6, "direct", false_settings.copy()))
        data_sources_array.extend(
            get_all_time_data(self, 7, "chaining_seen", false_settings.copy()))
        data_sources_array.extend(
            get_all_time_data(self, 8, "messages", false_settings.copy()))
        data_sources_array.extend(
            get_all_time_data(self, 9, "message_likes", false_settings.copy()))
        data_sources_array.extend(
            get_all_time_data(self, 10, "followers", false_settings.copy()))
        data_sources_array.extend(
            get_all_time_data(self, 11, "following", false_settings.copy()))

        self.change_settings(original_settings)
        util.table(data_sources_array, sort_by_likes=True)
Example #16
    def top_days(self, number_of_days=25):
        """
        Prints a table of the dates of the days with the most activity.
        
        Parameters
        ----------
        number_of_days : int, optional
            The number of days to display. The default is 25.

        """
        time_data = self._data_time(slice(10, 11),
                                    "T",
                                    slice(0, 10),
                                    return_ints=False)
        util.table(time_data,
                   max_rows=number_of_days,
                   sort_by_likes=True,
                   print_latex=self.print_latex,
                   print_lists=self.print_lists)
Example #17
    def average_message_length(self):
        """
        Prints a table of the average message length per user (total length
        divided by message count), plus the subset in best_friend_list.
        """
        user_list = self._user_data_message_length()
        #util.table(user_list, unique=True, sort_by_likes=True)

        message_count = self.best_friends()
        message_count = (list(map(str,
                                  message_count[0])), list(message_count[1]))
        for i in range(len(user_list[0])):
            if user_list[0][i] in message_count[0]:
                index = message_count[0].index(user_list[0][i])
                user_list[1][i] /= message_count[1][index]

        util.table(user_list, sort_by_likes=True, unique=True)

        bf = [[], []]
        #users = (list(map(str, users[0])), list(users[1]))
        for i in range(len(user_list[0])):
            if user_list[0][i] in best_friend_list:
                bf[0].append(user_list[0][i])
                bf[1].append(user_list[1][i])

        util.table(bf, unique=True)
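
A sketch of the per-user average computed above: total characters divided by message count, aligned by looking each name up in the counts list (names and numbers invented):

user_list     = [["alice", "bob"], [900.0, 250.0]]   # total message length per user
message_count = [["alice", "bob"], [30, 10]]          # number of messages per user

for i, name in enumerate(user_list[0]):
    if name in message_count[0]:
        index = message_count[0].index(name)
        user_list[1][i] /= message_count[1][index]
print(user_list[1])  # [30.0, 25.0]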
Example #18
    def hours(self, year_month_day):
        """
        Conducts analysis on hours.

        Parameters
        ----------
        year_month_day : string
            The data to match - can be YYYY-MM-DD, YYYY-MM, YYYY or empty for all time.

        """
        time_data = self._data_time(slice(0, len(year_month_day)),
                                    year_month_day, slice(11, 13))
        util.graph_histogram(time_data,
                             util.date_to_time_period(year_month_day), "Hour",
                             self.ylabel)
        util.table(time_data,
                   missing_data_items=(24 - len(np.unique(time_data))),
                   print_latex=self.print_latex,
                   print_lists=self.print_lists)
        util.graph_boxplot(time_data,
                           util.date_to_time_period(year_month_day),
                           "Hour",
                           xtick_max=24)
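
The missing_data_items argument appears to tell util.table how many of the 24 hourly buckets saw no activity at all; a minimal sketch of that count:

import numpy as np

time_data = [9, 9, 13, 22, 22, 22]        # hours that saw any activity
missing = 24 - len(np.unique(time_data))  # 24 - 3 = 21 empty hour buckets
print(missing)  # 21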
Example #19
def human(aip):
    """Returns a string that represent the human readable version of the ."""

    title = "Application Interchange Profile (tag: 0x82)"
    data = [[i] for i in util.unroll(aip)]
    col_header = ["Value"]
    row_header = [
        "RFU", "SDA supported", "DDA supported",
        "Cardholder verification is supported",
        "Terminal risk management is to be performed",
        "Issuer authentication is supported", "RFU", "CDA supported",
        "Reserved for us by the EMV Contactless Specifications"
    ] + ["RFU"] * 7

    return util.table(title, data, col_header, row_header)
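
util.unroll is not shown here; the sketch below assumes it expands the hex-encoded AIP into one bit per row, which is what the sixteen row headers (two AIP bytes) suggest:

aip = "1980"                                   # example AIP value, hex-encoded (assumed format)
bits = [int(b) for byte in bytes.fromhex(aip) for b in format(byte, "08b")]
data = [[bit] for bit in bits]                 # one single-column row per bit
print(len(data), data[:4])                     # 16 [[0], [0], [0], [1]]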
Example #20
def human(aip):
    """Returns a string that represent the human readable version of the ."""

    title = "Application Interchange Profile (tag: 0x82)"
    data =[[i] for i in util.unroll(aip)]
    col_header = ["Value"]
    row_header = [
            "RFU",
            "SDA supported",
            "DDA supported",
            "Cardholder verification is supported",
            "Terminal risk management is to be performed",
            "Issuer authentication is supported",
            "RFU",
            "CDA supported",
            "Reserved for us by the EMV Contactless Specifications"
            ] + ["RFU"] * 7

    return util.table(title, data, col_header, row_header)
Example #21
def human(cvm):
    """Returns a string that represent the human readable version of the
    CVM list."""

    title = "Cardholder Verification Method list (tag:0x8E)"

    # for a valid CVM list, the first 8 bytes are reserved for the amounts.
    amounts = cvm[:16]

    # card verification methods.
    cvms = cvm[16:]

    # each CVM is 4 nibbles long.
    nbr_cvms = len(cvms) // 4  # integer division so the count is an int

    # order of appearance in the CVM list.
    orders = range(1, nbr_cvms + 1)

    # cvm values.
    values = [cvms[i : i + 4] for i in range(0, len(cvms), 4)]

    # list of cvm methods.
    method_list = [method(i) for i in values]

    # list of failure behavior.
    failure_list = [fail(i) for i in values]

    # conditions.
    condition_list = [condition(i) for i in values]

    tmp = [values, method_list, failure_list, condition_list]

    data = map(list, zip(*tmp))

    col_header = ["Value", "Method", "Fail", "condition"]
    row_header = orders

    return (
        util.table(title, data, col_header, row_header)
        + "\n"
        + "Amount X:{0}\nAmount Y:{1}".format(int(amounts[:4], 16), int(amounts[4:], 16))
    )
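
A sketch of the 4-nibble chunking of the CVM rules that follow the amount fields; the value below is invented for illustration:

cvms = "410342031E031F00"                        # CVM rules, 4 hex nibbles per rule (invented)
values = [cvms[i:i + 4] for i in range(0, len(cvms), 4)]
print(values)                                    # ['4103', '4203', '1E03', '1F00']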
Example #22
def human(cvm):
    """Returns a string that represent the human readable version of the
    CVM list."""

    title = "Cardholder Verification Method list (tag:0x8E)"

    # for a valid CVM list, the first 8 bytes are reserved for the amounts.
    amounts = cvm[:16]

    # card verification methods.
    cvms = cvm[16:]

    # each CVM is 4 nibbles long.
    nbr_cvms = len(cvms) // 4  # integer division so the count is an int

    # order of appearance in the CVM list.
    orders = range(1, nbr_cvms + 1)

    # cvm values.
    values = [cvms[i:i + 4] for i in range(0, len(cvms), 4)]

    # list of cvm methods.
    method_list = [method(i) for i in values]

    # list of failure behavior.
    failure_list = [fail(i) for i in values]

    # conditions.
    condition_list = [condition(i) for i in values]

    tmp = [values, method_list, failure_list, condition_list]

    data = map(list, zip(*tmp))

    col_header = ["Value", "Method", "Fail", "condition"]
    row_header = orders

    return (util.table(title, data, col_header, row_header) + "\n" +
            "Amount X:{0}\nAmount Y:{1}".format(int(amounts[:4], 16), int(amounts[4:], 16)))
Example #23
def human(decline, online, default):
    """ Returns the human readable string for the given xIAC. """

    decline_col = util.unroll(decline)
    online_col = util.unroll(online)
    default_col = util.unroll(default)

    bit_per_bytes = 8
    nbr_bytes = len(default_col) // bit_per_bytes  # integer division: a whole number of bytes

    byte_col = bytelist(nbr_bytes)
    bit_col = bitlist(nbr_bytes)

    tmp = [byte_col, bit_col, decline_col, online_col, default_col]
    data = map(list, zip(*tmp))

    size = len(tmp[0])

    colums_header = ["byte", "bit", "dec", "onl", "def"]

    # Issuer Action Code row headers.
    rows_header_iac = [
        "Offline data authentication not performed",
        "SDA Failed",
        "ICC data missing",
        "Card appears on terminal exception file",
        "DDA failed",
        "CDA failed",
        "RFU",
        "RFU",
        "ICC and terminal diff app versions",
        "Expired application",
        "Application not yet effective",
        "Requested srvice not allowed ",
        "New card",
        "RFU",
        "RFU",
        "RFU",
        "Cardholder verification not successful",
        "Unrecognized CVM",
        "PIN Try Limit exceeded",
        "PIN required, pad not present/working",
        "PIN required, pad present, PIN not entered",
        "Online PIN entered",
        "RFU",
        "RFU",
        "Transaction exceeds floor limit",
        "Lower consecutive offline limit exceed",
        "Upper consecutive offline limit exceeded",
        "Transaction selected rand online processing",
        "Merchant forced transaction online",
        "RFU",
        "RFU",
        "RFU",
        "Default TDOL used",
        "Issuer authentication failed",
        "Script failed before final GENERATE AC",
        "Script failed after final GENERATE AC",
        "RFU",
        "RFU",
        "RFU",
        "RFU",
    ]

    # Card Issuer Action Code row headers (MasterCard specific).
    rows_header_ciac = [
        "RFU",
        "Unable to go online",
        "Offline PIN verif Not Performed",
        "Offline PIN Verification Failed",
        "PIN Try Limit exceeded",
        "International Transaction",
        "Domestic Transaction",
        "Terminal Wrongly Considers Off PIN OK",
        "Lower Consecutive Offiline Limit Exceeded",
        "Upper Consecutive Offiline Limit Exceeded",
        "Lower Cumulative Offiline Limit Exceeded",
        "Upper Cumulative Offiline Limit Exceeded",
        "Go Online On Next Transaction Was Set",
        "Issuer Authentication Failed",
        "Script Received",
        "Script Failed",
        "RFU",
        "RFU",
        "RFU",
        "RFU",
        "RFU",
        "RFU",
        "Match Found in Additional Check Table",
        "No Match Found In Additional Check Table",
    ]

    # Select the right title and rows_header list.
    if len(rows_header_iac) == size:
        title = "Issuer Action Code (Tag:9F0E,9F0F,9F0D)."
        rows = rows_header_iac
    elif len(rows_header_ciac) == size:
        title = "CARD Issuer Action Code (Tag:C3/CF,C5/CE,C4/CD)."
        rows = rows_header_ciac
    else:
        title = "Action Code."  # fallback title when neither known header list matches
        rows = range(1, size + 1)

    return util.table(title, data, colums_header, rows)
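
The zip(*tmp) transposition above turns the five parallel columns into one row per bit; a tiny sketch with invented values:

byte_col    = [1, 1, 1]
bit_col     = [8, 7, 6]
decline_col = [0, 1, 0]
online_col  = [1, 1, 0]
default_col = [1, 0, 0]

tmp = [byte_col, bit_col, decline_col, online_col, default_col]
data = list(map(list, zip(*tmp)))
print(data)  # [[1, 8, 0, 1, 1], [1, 7, 1, 1, 0], [1, 6, 0, 0, 0]]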
Example #24
SCORE_POLE_MAX = 2.0
SCORE_MUL_OTHER = 0.7
SCORE_MIN_SELF = 0.1
MIN_EFFECTIVE_LEN = 20
MIN_EFFECTIVE_NICKS = 10
SCORE_THRESHOLD = 3.0

Msg = namedtuple('Msg', ('time', 'id', 'text', 'score'))

# chan_history[chan.lower()] = [Msg(...), Msg(...), ...]
chan_history = defaultdict(list)

if os.path.exists(CONF_CHANS_FILE):
    conf_chans = {
        record.channel.lower(): record
        for record in util.table(CONF_CHANS_FILE, globals={'DEFAULT': None})}
else:
    conf_chans = {}

def reload(prev):
    if hasattr(prev, 'chan_history') \
    and isinstance(prev.chan_history, dict):
        for chan, prev_history in prev.chan_history.iteritems():
            if chan not in conf_chans: continue
            history = chan_history[chan]
            history[:] = prev_history
            chan_history[chan] = history

@link('PRIVMSG')
@link('UNOTICE')
def h_message(bot, id, chan, text):
Example #25
from util import LinkSet, table, fdict, bind, after, dice
from auth import admin
from collections import OrderedDict
from itertools import *
from functools import *
import random
import re

dungeons = table('conf/dungeons.py', 'dungeon_spec')
conf = fdict('conf/general.py')

DANGER  = 1
MYSTERY = 2

def normalise_name(name):
    name = name.strip()
    name = re.sub(r'[\x00-\x1F]', '', name)
    name = re.sub(r'\s+', ' ', name)
    return name

def hero_names(heroes):
    names = map(lambda h: h.name, heroes)
    names = ', '.join(names[:-2] + [' and '.join(names[-2:])])
    return names

class Game(object):
    __slots__ = (
        'install', 'uninstall', 'chan',
        'hidden_dungeons', 'found_dungeons', 'complete',
        'heroes', 'rounds', 'turn', 'attacks')
Example #26
from untwisted.network import Work
from untwisted.event import DATA, BUFFER, FOUND, CLOSE, RECV_ERR
from untwisted.utils import std
from untwisted.utils.common import append, shrug
from untwisted.magic import sign
from untwisted.mode import Mode

import socket
import sys
import util
import debug
import runtime
import bridge
from util import NotInstalled, AlreadyInstalled


RECONNECT_DELAY_SECONDS = 1

conf_servers = util.table('conf/minecraft.py', 'server', socket.__dict__)

mc_work = []
mc_mode = Mode()
mc_mode.domain = 'mc'
mc_link = util.LinkSet()
mc_link.link_module(std)
mc_link.link(DATA, append)
mc_link.link(BUFFER, shrug, '\n')
if '--debug' in sys.argv: mc_link.link_module(debug)

ab_mode = None
ab_link = util.LinkSet()


class MinecraftState(object):
Example #27
MAX_CHAT_LENGTH = 127
STATE_FILE = 'state/terraria.json'

IGNORE_MESSAGES = (
    'The air is getting colder around you...',
    'You feel vibrations from deep below...',
    'This is going to be a terrible night...',
    'A solar eclipse is happening!',
    re.compile(r'.* the Travell?ing Merchant has (arrived|departed)!$'),
    re.compile(r'Looks like .* (is|are) throwing a party$'),
    "Party time's over!",
    'Slime is falling from the sky!',
    'Slime has stopped falling from the sky.',
    re.compile(r'.* was slain\.\.\.'))

servers = util.table('conf/terraria.py', 'server')

te_mode = untwisted.mode.Mode()
te_link = util.LinkSet()
te_link.link_module(terraria_protocol, debug=False)
te_work = dict()

ab_mode = None
ab_link = util.LinkSet()

#==============================================================================#
def get_state():
    try:
        with open(STATE_FILE) as file:
            return util.recursive_encode(json.load(file), 'utf-8')
    except (ValueError, IOError):
Example #28
def human(decline, online, default):
    """ Returns the human readable string for the given xIAC. """

    decline_col = util.unroll(decline)
    online_col = util.unroll(online)
    default_col = util.unroll(default)

    bit_per_bytes = 8
    nbr_bytes = len(default_col) // bit_per_bytes  # integer division: a whole number of bytes

    byte_col = bytelist(nbr_bytes)
    bit_col = bitlist(nbr_bytes)

    tmp = [byte_col, bit_col, decline_col, online_col, default_col]
    data = map(list, zip(*tmp))

    size = len(tmp[0])

    colums_header = ["byte", "bit", "dec", "onl", "def"]

    # Issuer Action Code row headers.
    rows_header_iac = [
        "Offline data authentication not performed", "SDA Failed",
        "ICC data missing", "Card appears on terminal exception file",
        "DDA failed", "CDA failed", "RFU", "RFU",
        "ICC and terminal diff app versions", "Expired application",
        "Application not yet effective", "Requested srvice not allowed ",
        "New card", "RFU", "RFU", "RFU",
        "Cardholder verification not successful", "Unrecognized CVM",
        "PIN Try Limit exceeded", "PIN required, pad not present/working",
        "PIN required, pad present, PIN not entered", "Online PIN entered",
        "RFU", "RFU", "Transaction exceeds floor limit",
        "Lower consecutive offline limit exceed",
        "Upper consecutive offline limit exceeded",
        "Transaction selected rand online processing",
        "Merchant forced transaction online", "RFU", "RFU", "RFU",
        "Default TDOL used", "Issuer authentication failed",
        "Script failed before final GENERATE AC",
        "Script failed after final GENERATE AC", "RFU", "RFU", "RFU", "RFU"
    ]

    # Card Issuer Action Code row headers (MasterCard specific).
    rows_header_ciac = [
        "RFU", "Unable to go online", "Offline PIN verif Not Performed",
        "Offline PIN Verification Failed", "PIN Try Limit exceeded",
        "International Transaction", "Domestic Transaction",
        "Terminal Wrongly Considers Off PIN OK",
        "Lower Consecutive Offiline Limit Exceeded",
        "Upper Consecutive Offiline Limit Exceeded",
        "Lower Cumulative Offiline Limit Exceeded",
        "Upper Cumulative Offiline Limit Exceeded",
        "Go Online On Next Transaction Was Set",
        "Issuer Authentication Failed", "Script Received", "Script Failed",
        "RFU", "RFU", "RFU", "RFU", "RFU", "RFU",
        "Match Found in Additional Check Table",
        "No Match Found In Additional Check Table"
    ]

    # Select the right title and rows_header list.
    if len(rows_header_iac) == size:
        title = "Issuer Action Code (Tag:9F0E,9F0F,9F0D)."
        rows = rows_header_iac
    elif len(rows_header_ciac) == size:
        title = "CARD Issuer Action Code (Tag:C3/CF,C5/CE,C4/CD)."
        rows = rows_header_ciac
    else:
        title = "Action Code."  # fallback title when neither known header list matches
        rows = range(1, size + 1)

    return util.table(title, data, colums_header, rows)
Example #29
    def day_of_week_hours(self, year_month, graph_per_day=False):
        """
        Conducts hours analysis by day of week (optional) and weekday vs weekend.

        Parameters
        ----------
        year_month : string
            The data to match - YYYY or YYYY-MM.
        graph_per_day : boolean, optional
            Should a graph/table be produced for each day of the week. The default is False.

        """
        days_of_week = []

        time_data = self._data_time(slice(0, len(year_month)),
                                    year_month,
                                    slice(0, 13),
                                    return_ints=False)
        for date_time in time_data:
            day_datetime = datetime.date(int(date_time[:4]),
                                         int(date_time[5:7]),
                                         int(date_time[8:10]))
            days_of_week.append([day_datetime.isoweekday(), date_time[-2:]])

        if year_month == "":
            year_month = "all time"

        hours = [[], [], [], [], [], [], []]
        for day_of_week in range(1, 8):
            for day_of_week_hour in days_of_week:
                if day_of_week_hour[0] == day_of_week:
                    hours[day_of_week - 1].append(day_of_week_hour[1])
            hours[day_of_week - 1] = list(map(int, hours[day_of_week - 1]))
            if graph_per_day:
                util.graph_histogram(
                    hours[day_of_week - 1], year_month,
                    ("hour for each Day of Week: " + str(day_of_week)),
                    self.ylabel)
                util.graph_boxplot(
                    hours[day_of_week - 1],
                    year_month,
                    ("hour for each Day of Week: " + str(day_of_week)),
                    xtick_max=24)
                print("Day of week", day_of_week)
                util.table(hours[day_of_week - 1],
                           missing_data_items=(
                               24 - len(np.unique(hours[day_of_week - 1]))),
                           print_latex=self.print_latex,
                           print_lists=self.print_lists)

        workday_hours = hours[0] + hours[1] + hours[2] + hours[3] + hours[4]
        util.graph_histogram(workday_hours, year_month,
                             "hour during the week(Monday-Friday)",
                             self.ylabel)
        util.graph_boxplot(workday_hours,
                           year_month,
                           "hour during the week(Monday-Friday)",
                           xtick_max=24)
        print("Weekdays(Monday-Friday)")
        util.table(workday_hours,
                   missing_data_items=(24 - len(np.unique(workday_hours))),
                   print_latex=self.print_latex,
                   print_lists=self.print_lists)

        weekend_hours = hours[5] + hours[6]
        util.graph_histogram(weekend_hours, year_month,
                             "hour during the weekend", self.ylabel)
        util.graph_boxplot(weekend_hours,
                           year_month,
                           "hour during the weekend",
                           xtick_max=24)
        print("Weekend")
        util.table(weekend_hours,
                   missing_data_items=(24 - len(np.unique(weekend_hours))),
                   print_latex=self.print_latex,
                   print_lists=self.print_lists)
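
A compact sketch of the weekday/weekend split performed above, again using isoweekday (Monday=1 ... Sunday=7) and the trailing two characters as the hour:

import datetime

stamps = ["2024-01-05T23", "2024-01-06T01", "2024-01-07T14"]  # a Friday, Saturday and Sunday
weekday_hours, weekend_hours = [], []
for s in stamps:
    day = datetime.date(int(s[:4]), int(s[5:7]), int(s[8:10]))
    (weekday_hours if day.isoweekday() <= 5 else weekend_hours).append(int(s[-2:]))
print(weekday_hours, weekend_hours)  # [23] [1, 14]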
Example #30
        # Build Model
        # --------------------------------------------------------------------------------

        model = Ngram(data, n=n)

        # --------------------------------------------------------------------------------
        # Train Model
        # --------------------------------------------------------------------------------

        model.fit(x_train, y_train, debug=debug)

        # --------------------------------------------------------------------------------
        # Evaluate Model
        # --------------------------------------------------------------------------------

        model.test(x_test, y_test)  # sets various model properties

        print('test accuracy:', model.test_accuracy)
        print('test relevance:', model.test_relevance)

        print('sample predictions:')
        df = model.test_samples
        util.uprint(util.table(df))

        print('generated text:')
        nsentences = 5
        for i in range(nsentences):
            util.uprint(model.generate())  # weird symbols can crash print

        print()