Example #1
def build_array(file_content):
    #Compress the file contents and render every byte as a hex literal:
    array_content = [
        hex(char)
        for char in gzip_compress(file_content.encode("utf-8"))
    ]

    #Prefix every 9th entry with a newline and a tab so the output wraps
    #into rows of 9 values each:
    for i in range(9, len(array_content), 9):
        array_content[i] = f"\n\t{array_content[i]}"

    #Join with ",\t"; the replace turns ",\t\n" at row breaks into ",\n":
    return ",\t".join(array_content).replace('\t\n', '\n')
Example #2
def encrypt(data: bytes,
            password: Union[str, bytes],
            compress: bool = False) -> bytes:
    """
    encrypt data
    :param data: plain data
    :param password: password
    :param compress: compress the encrypted data.
    :return: encrypted data
    """
    iv = Random.new().read(AES.block_size)
    data = iv + __create_aes(password, iv).encrypt(data)
    if compress:
        return b'moca' + gzip_compress(data)
    else:
        return data
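For symmetry, a hedged sketch of the matching decrypt path; it assumes __create_aes rebuilds the same cipher from the password and IV, and that gzip_decompress is gzip.decompress:

def decrypt(data: bytes, password: Union[str, bytes]) -> bytes:
    # Hypothetical counterpart to encrypt() above.
    if data.startswith(b'moca'):  # compressed marker added by encrypt()
        data = gzip_decompress(data[4:])
    iv, body = data[:AES.block_size], data[AES.block_size:]
    return __create_aes(password, iv).decrypt(body)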
Example #3
    def load_html_page(self, filename):

        #We can trim whitespace from the HTML file, so let's do that:
        cleaned_html = list()
        with open(getcwd() + sep + "html" + sep + filename,
                  mode="r",
                  encoding="UTF-8") as f:
            for x in f:
                cleaned_html.append(x.strip())

        #Join the strings together, and then gzip the payload:
        html_str = "".join(cleaned_html).encode()
        html_gzip = gzip_compress(html_str)

        #Both of these will be stored in a dictionary for quick lookup on the web server.
        return html_str, html_gzip
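A hedged sketch of how the cached pair might later be served; page_cache, supports_gzip, and serve_data are assumed names (the latter two mirror the handler methods used in the examples below):

    def serve_html_page(self, filename):

        #Hypothetical lookup of the (plain, gzipped) pair cached by load_html_page:
        html_str, html_gzip = self.page_cache[filename]

        #Serve the gzipped copy only when the client advertises gzip support:
        self.serve_data(html_gzip if self.supports_gzip else html_str)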
Example #4
    def __call__(self, environ, start_response):
        if 'gzip' not in environ.get('HTTP_ACCEPT_ENCODING', ''):
            return self.middleware.application(environ, start_response)

        self.start_response = start_response
        app_iter = self.middleware.application(environ, self.gzip_start_response)
        if app_iter is not None and self.compressible:
            binary = gzip_compress(b''.join(app_iter), self.middleware.compress_level)
            if hasattr(app_iter, 'close'):
                app_iter.close()

            self.remove_header('content-length')
            self.headers.append(('content-encoding', 'gzip'))
            #WSGI requires header values to be strings:
            self.set_header('content-length', str(len(binary)))

            start_response(self.status, self.headers, self.exc_info)
            return [binary]

        return app_iter
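The example relies on a gzip_start_response hook that is not shown; a plausible reconstruction, hedged, in which the wrapper decides compressibility from the content type and either passes the response through or holds it back for __call__ to rewrite:

    def gzip_start_response(self, status, headers, exc_info=None):
        # Hypothetical: record the response so __call__ can rewrite the
        # headers after the body has been compressed.
        self.status, self.headers, self.exc_info = status, list(headers), exc_info
        ctype = dict((k.lower(), v) for k, v in headers).get('content-type', '')
        # Only compress textual payloads; binary formats rarely benefit.
        self.compressible = ctype.startswith(('text/', 'application/json'))
        if not self.compressible:
            # Pass through untouched using the original start_response.
            return self.start_response(status, headers, exc_info)
        # Otherwise defer the real start_response to __call__ above.
        return lambda data: None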
Example #5
    def serve_vdf_data(self):

        #The parameters for this GET request are encoded into the URL:
        (steam64, mission_index) = self.load_get_parameters()

        #If the mission index is unspecified, generate the global tour keyvalue string:
        if mission_index is None:
            data = self.build_full_tour_kv(steam64)

        #Otherwise, only return the bitflags for just that mission:
        else:
            data = self.build_mission_kv(steam64, mission_index)

        #Based on whether the client accepts gzip encoding or not,
        #determine whether we should compress this data or not:
        raw_data = data.encode()
        payload = gzip_compress(raw_data) if self.supports_gzip else raw_data

        #Then serve it to the client:
        self.serve_data(payload)
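supports_gzip is not defined in these excerpts; a minimal guess at how it could be derived, assuming a BaseHTTPRequestHandler-style self.headers mapping:

    def check_gzip_support(self):

        #Hypothetical: flag gzip support from the request's Accept-Encoding header.
        self.supports_gzip = 'gzip' in self.headers.get('Accept-Encoding', '')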
Example #6
    def generate_server_csv(self):

        #The first row is the current unix time:
        t = int(time())
        csv_list = [str(t)]

        #Add subsequent servers to it:
        server_dict = master.server_info_dict
        for x in server_dict:

            #If the server hasn't reported an update for over a minute now, assume it went offline.
            #Only include servers that have reported data sometime within the past minute.
            data = server_dict[x]
            if (t - data[-1]) < 60:
                csv_list.append(create_csv_row(data))

        #Join all the rows by newlines and make a binary string:
        csv_raw = "\n".join(csv_list).encode()

        #Based on whether the client accepts gzip encoding or not,
        #determine whether we should compress this data or not:
        return gzip_compress(csv_raw) if self.supports_gzip else csv_raw
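create_csv_row is used throughout but not shown; a minimal sketch consistent with how it is called (tuples and lists of mixed values in, one comma-separated line out):

def create_csv_row(values):
    #Hypothetical helper: stringify each field and join with commas.
    return ",".join(str(v) for v in values)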
Example #7
    def generate_player_csv(self, steam64):

        #The first row contains the server timestamp, total wave credits (39), and total maximum waves (7).
        p = master  #One global lookup
        max_waves = p.max_waves
        first_row = create_csv_row((int(time()), p.total_credits, max_waves))

        #Put this in a list:
        csv_list = [first_row]

        #Grab this player's tour progress:
        player_tour_progress = p.tour_progress_dict.get(steam64)
        if player_tour_progress is None:  #steam64 bug compensation as reported by Benoist3012
            player_tour_progress = p.tour_progress_dict.get(
                steam64 + 2**32, ())

        #Per mission tuple in this player's tour tuple:
        for x in player_tour_progress:

            #This is the row that becomes the CSV file string.
            #None's become empty strings, timestamps stay the same:
            new_row = ["" if y is None else y for y in x]

            #Pad it with -1's for missions that have less than the maximum number of waves:
            padding = max_waves - len(x)
            new_row.extend((-1, ) * padding)

            #Compile it into a CSV row string and put it into the csv list:
            csv_list.append(create_csv_row(new_row))

        #Join all the rows by newlines and make a binary string:
        csv_raw = "\n".join(csv_list).encode()

        #Based on whether the client accepts gzip encoding or not,
        #determine whether we should compress this data or not:
        return gzip_compress(csv_raw) if self.supports_gzip else csv_raw
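A small worked illustration of the padding step above, assuming max_waves is 7 and a five-wave mission tuple (hypothetical values):

x = (1596240000, None, 1596240500, None, 1596241000)  #five recorded waves
new_row = ["" if y is None else y for y in x]
new_row.extend((-1, ) * (7 - len(x)))                 #pad to max_waves
#new_row is now [1596240000, '', 1596240500, '', 1596241000, -1, -1]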
Example #8
def compress_string(s):
    return gzip_compress(s, compresslevel=6, mtime=0)
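Fixing mtime=0 zeroes the timestamp field in the gzip header, so identical inputs always yield byte-identical output; a quick check, assuming gzip_compress is gzip.compress:

from gzip import compress as gzip_compress

a = gzip_compress(b"same input", compresslevel=6, mtime=0)
b = gzip_compress(b"same input", compresslevel=6, mtime=0)
assert a == b  # byte-identical output enables stable ETags/cache keys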
Example #9
    def build_global_chart_csv(self):

        #Grab the null tuple and turn it into a bunch of lists. Replace the None with 0's as well.
        #We will use this to count how many wave credits have been earned per mission across all players.
        #This is used to generate the line graph in quadrant 2 (wave credits vs wave number for each map).
        mission_counter = [[0] * len(x) for x in self.null_tuple]

        #Use this list to count the number of players that have earned a set number of wave credits.
        #This is used to generate the line graph in quadrant 1 (players vs wave credits count).
        credits_counter = [0] * self.total_credits

        #Use these two lists to count how many players have participated and completed each mission in the tour:
        total_missions = len(self.null_tuple)
        participants_counter = [0] * total_missions
        completionists_counter = [0] * total_missions

        #Keep track of how many players received a medal:
        medal_recipients = 0

        #In this dictionary, pair each date (Month & Day) with the number of new players who participated in the tour that day:
        timestamp_participated_dict = dict()

        #This dictionary is similar to the previous one, but it stores the number of people who *finished* the tour on each day:
        timestamp_completed_dict = dict()

        #Count the number of new players who have played at least 1 wave on each map, for each day.
        #This is mostly for the map makers to see how many new players have played on their maps.
        #
        #The map index is used to index into this list. Init it with all empty dictionaries:
        map_index_date_participant_counter = [
            dict() for x in range(total_missions)
        ]

        #Similarly, count the number of players who have beaten each map, on each day:
        map_index_date_completist_counter = [
            dict() for x in range(total_missions)
        ]

        #Count the number of UNIQUE wave credits awarded on each day.
        unique_wave_credits_awarded_dict = dict()

        #Per tour tuple: (Per player's tour progress)
        for t in self.tour_progress_dict.values():

            #Keep a count of how many wave credits this player has earned:
            wave_credits_earned = 0

            #In this list, store the timestamps of ALL the wave credits they have earned:
            timestamp_all_list = list()

            #Keep track of whether the player has completed the tour or not.
            #Assume they haven't unless otherwise proven.
            tour_completion_bool_list = [False] * total_missions

            #Per mission in the tour tuple:
            for (x, y) in enumerate(t):

                #Did this player complete the mission in full?
                #Assume yes unless otherwise told. (There needs to be a single None in the tuple for this to become false.)
                completed_mission = True

                #Did this player participate in this mission by completing at least one wave in full?
                #Assume no unless otherwise told. (There needs to be at least 1 entry that's not None for this to become true.)
                participated_mission = False

                #In this list, store the timestamps of all the wave credits they earned for this mission.
                #We will use this to count the number of unique players who have played on each map, over time.
                timestamp_mission_list = list()

                #Per wave in the mission:
                for (i, j) in enumerate(y):

                    #If this value is set to None, that means they didn't earn a wave credit for this wave.
                    #This also means they didn't complete the mission, so set that boolean to false:
                    if j is None:
                        completed_mission = False
                        continue

                    #Raise the mission counter: (the total wave credits earned per wave, per map)
                    mission_counter[x][i] += 1

                    #Raise the number of wave credits this player has earned:
                    wave_credits_earned += 1

                    #Put this timestamp in both timestamp lists:
                    timestamp_all_list.append(j)
                    timestamp_mission_list.append(j)

                    #This player has completed at least one wave in this mission, so set the participation boolean to true:
                    participated_mission = True

                    #Form a timestamp and increment the awarded wave credits dictionary.
                    stamp = localtime(j)
                    key = (stamp.tm_mon, stamp.tm_mday)
                    unique_wave_credits_awarded_dict[
                        key] = unique_wave_credits_awarded_dict.get(key, 0) + 1

                #If the player participated in this mission, raise the participation counter for this mission:
                if participated_mission:
                    participants_counter[x] += 1

                #Do the same for completionists:
                if completed_mission:
                    completionists_counter[x] += 1

                #Put the mission completion boolean into the tour completion boolean list:
                tour_completion_bool_list[x] = completed_mission

                #If the mission timestamp list is empty, skip to the next iteration:
                if len(timestamp_mission_list) == 0:
                    continue

                #Get the smallest timestamp (the first time this player completed a wave on this map), and form a date tuple:
                stamp = localtime(min(timestamp_mission_list))
                key = (stamp.tm_mon, stamp.tm_mday)

                #And increment the counter for the appropriate date, for the appropriate map:
                date_counter_dict = map_index_date_participant_counter[x]
                date_counter_dict[key] = date_counter_dict.get(key, 0) + 1

                #If they beat the mission, then do the same for the biggest timestamp, but store it in the completist list dictionary:
                if completed_mission:
                    stamp = localtime(max(timestamp_mission_list))
                    key = (stamp.tm_mon, stamp.tm_mday)
                    date_counter_dict = map_index_date_completist_counter[x]
                    date_counter_dict[key] = date_counter_dict.get(key, 0) + 1

            #Increment the global credits counter based on the number of wave credits this player has:
            if wave_credits_earned != 0:
                credits_counter[wave_credits_earned - 1] += 1

            #If this player has completed all the missions, raise the medal recipients counter:
            if False not in tour_completion_bool_list:
                medal_recipients += 1

            #Find the smallest timestamp. That's the time they participated in the tour.
            #Increment that date by 1, since that's when this player participated in the tour:
            stamp = localtime(min(timestamp_all_list))
            key = (stamp.tm_mon, stamp.tm_mday)
            timestamp_participated_dict[key] = timestamp_participated_dict.get(
                key, 0) + 1

            #If the player beat the tour, do the same thing as above, but use the largest
            #timestamp and increment the completed players dictionary instead.
            if len(timestamp_all_list) >= self.total_credits:
                stamp = localtime(max(timestamp_all_list))
                key = (stamp.tm_mon, stamp.tm_mday)
                timestamp_completed_dict[key] = timestamp_completed_dict.get(
                    key, 0) + 1

        #The last thing to compile data for is the global statistics table.

        #We can get total tour participants from the length of the dictionary:
        tour_participants = len(self.tour_progress_dict)

        #Total medals is given by the medal_recipients variable.

        #Total wave credits awarded is given by the row ID.

        #Total unique wave credits can be obtained by summing the awarded wave credits by date dictionary:
        global_credits_acquired = sum(
            unique_wave_credits_awarded_dict.values())

        #Total missions participated and completed can be found by summing the participants and completionists arrays accordingly:
        total_participated_missions = sum(participants_counter)
        total_completed_missions = sum(completionists_counter)

        ##########################################################

        #Now we build the CSV data to transmit to clients.
        csv_data = list()

        #The mission counter list needs to be transposed: Javascript expects the columns as maps and the wave numbers as rows,
        #but the counter list has it backwards. We need to modify the counter list first.
        #
        #To transpose it, all the lists need to be made the same length.

        #Find the length of the longest list in there:
        longest_list_len = len(max(mission_counter, key=len))

        #Then per list in the mission counter:
        for x in mission_counter:

            #Append enough -1's to the end of the list until it reaches the length of the longest list.
            #This will make all the lists the same length in the counter list.
            remaining_entries = longest_list_len - len(x)
            x.extend((-1, ) * remaining_entries)

        #Put the mission counter first into the csv data list:
        for x in zip(*mission_counter):
            csv_data.append(create_csv_row(x))

        #Add an = as a delimiter:
        csv_data.append("=")

        #Then put the player wave credits counter in next.
        #
        #Delete the last value from the list because that's the total medal recipients, which we don't want
        #to include, or else it will skew the chart horribly.
        del credits_counter[-1]
        csv_data.append(create_csv_row(credits_counter))

        #Add an = as a delimiter:
        csv_data.append("=")

        #Participants and completionists counters go next:
        csv_data.append(create_csv_row(participants_counter))
        csv_data.append(create_csv_row(completionists_counter))

        #Add an = as a delimiter:
        csv_data.append("=")

        #This section is the biggest grindfest in the global statistics computation.
        #For each date, compute the local and cumulative sum of the relevant data sections.

        #Because the keys to this dictionary are all dates, sort the wave credits awarded dictionary by date.
        #Because everything is ultimately measured by wave credits and/or their timestamps, it is safe to use this dictionary's keys.
        date_keys = tuple(sorted(unique_wave_credits_awarded_dict))

        #Using the participants counter dictionary for each map and each date, compute the cumulative sum of participants on each map on each date:
        cumulative_map_date_participants_counts = [
            self.compute_successive_sum_dict(x, date_keys)
            for x in map_index_date_participant_counter
        ]

        #Do the same thing with the completists counter dictionary as well:
        cumulative_map_date_completists_counts = [
            self.compute_successive_sum_dict(x, date_keys)
            for x in map_index_date_completist_counter
        ]

        #Now, compute the *grand total* number of missions each player has completed and participated in on each day.
        #These are NOT stacked dictionaries; they are the sums of all missions participated/completed across all maps.
        new_mission_completists = dict()  #Completed
        new_mission_participants = dict()  #Participated

        #Loop across every date tuple:
        for x in date_keys:

            #Total missions played by all participants:
            for y in map_index_date_participant_counter:
                new_mission_participants[x] = new_mission_participants.get(
                    x, 0) + y.get(x, 0)

            #Total missions completed by all participants (completists):
            for y in map_index_date_completist_counter:
                new_mission_completists[x] = new_mission_completists.get(
                    x, 0) + y.get(x, 0)

        #Now we need to generate the CSV data with all this data in it.
        #
        #Put all the dictionaries we have created in a single list:
        dictionary_list = cumulative_map_date_participants_counts + cumulative_map_date_completists_counts + [
            timestamp_participated_dict, timestamp_completed_dict,
            new_mission_participants, new_mission_completists,
            unique_wave_credits_awarded_dict, self.wave_credits_earned_per_day
        ]

        #Using that dictionary list, generate the CSV rows for each dictionary in it.
        #Loop across each date key:
        for x in date_keys:

            #Put the key in the row first. Flatten it out into a list, since this is a CSV file.
            row_data = list(x)

            #Then for each dictionary, grab the value for this date from it, and put the value into the row list. Default to 0 if not found.
            for y in dictionary_list:
                row_data.append(y.get(x, 0))

            #Then build a CSV string and put it into the big CSV data list:
            csv_data.append(create_csv_row(row_data))

        #Add an = as a delimiter:
        csv_data.append("=")

        #The statistics table information goes last:
        global_str = create_csv_row(
            (tour_participants, medal_recipients, self.row_id,
             global_credits_acquired, total_participated_missions,
             total_completed_missions))
        csv_data.append(global_str)

        #Then build the full CSV file and cache it:
        #That way, we don't have to go through this whole grind every time someone requests global tour information.
        csv_raw = "\n".join(csv_data).encode()
        csv_gzip = gzip_compress(csv_raw)
        self.global_data_csv = (csv_raw, csv_gzip)
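compute_successive_sum_dict is not shown; a hedged reconstruction inferred from its use above (walk the sorted date keys and accumulate a running total per date):

    def compute_successive_sum_dict(self, counter_dict, date_keys):

        #Hypothetical helper: cumulative sum of a per-date counter dictionary.
        running, result = 0, dict()
        for key in date_keys:
            running += counter_dict.get(key, 0)
            result[key] = running
        return result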
Example #10
def compress_test(data: bytes,
                  output: bool = True) -> Dict[str, Tuple[int, int, int]]:
    """
    Compare compress modules.
    :param data: the data to compress.
    :param output: if this value is True, print the results to console.
    :return: {'module name': (<size of the compressed data>, <time to compress>, <time to decompress>)}
    """
    res: Dict[str, Tuple[int, int, int]] = {}
    try_print('+++++++++++++++++++++++++++++++++++++++++++++++++++++',
              flag=output)
    size = len(data)
    try_print(f'Original size: {round(size/1024/1024, 4)} MB', flag=output)
    # gzip
    for i in range(10):
        tmp = gzip_compress(data, compresslevel=i)
        key = f'gzip(compress level {i})'
        res[key] = (len(tmp),
                    check_function_speed(gzip_compress, data, compresslevel=i),
                    check_function_speed(gzip_decompress, tmp))
        __print(res, size, key, output)
    # bz2
    for i in range(1, 10):
        tmp = bz2_compress(data, compresslevel=i)
        key = f'bz2(compress level {i})'
        res[key] = (len(tmp),
                    check_function_speed(bz2_compress, data, compresslevel=i),
                    check_function_speed(bz2_decompress, tmp))
        __print(res, size, key, output)
    # zlib
    for i in range(10):
        tmp = zlib_compress(data, level=i)
        key = f'zlib(compress level {i})'
        res[key] = (len(tmp), check_function_speed(zlib_compress,
                                                   data,
                                                   level=i),
                    check_function_speed(zlib_decompress, tmp))
        __print(res, size, key, output)
    # lzma
    tmp = lzma_compress(data, FORMAT_XZ, CHECK_CRC64)
    res['lzma(XZ - CRC64)'] = (len(tmp),
                               check_function_speed(lzma_compress, data,
                                                    FORMAT_XZ, CHECK_CRC64),
                               check_function_speed(lzma_decompress,
                                                    tmp,
                                                    format=FORMAT_XZ))
    __print(res, size, 'lzma(XZ - CRC64)', output)
    tmp = lzma_compress(data, FORMAT_XZ, CHECK_CRC32)
    res['lzma(XZ - CRC32)'] = (len(tmp),
                               check_function_speed(lzma_compress, data,
                                                    FORMAT_XZ, CHECK_CRC32),
                               check_function_speed(lzma_decompress,
                                                    tmp,
                                                    format=FORMAT_XZ))
    __print(res, size, 'lzma(XZ - CRC32)', output)
    tmp = lzma_compress(data, FORMAT_XZ, CHECK_NONE)
    res['lzma(XZ - NONE)'] = (len(tmp),
                              check_function_speed(lzma_compress, data,
                                                   FORMAT_XZ, CHECK_NONE),
                              check_function_speed(lzma_decompress,
                                                   tmp,
                                                   format=FORMAT_XZ))
    __print(res, size, 'lzma(XZ - NONE)', output)
    tmp = lzma_compress(data, FORMAT_ALONE, CHECK_NONE)
    res['lzma(ALONE - NONE)'] = (len(tmp),
                                 check_function_speed(lzma_compress, data,
                                                      FORMAT_ALONE,
                                                      CHECK_NONE),
                                 check_function_speed(lzma_decompress,
                                                      tmp,
                                                      format=FORMAT_ALONE))
    __print(res, size, 'lzma(ALONE - NONE)', output)
    # brotli
    tmp = brotli_compress(data)
    key = 'brotli'
    res[key] = (len(tmp), check_function_speed(brotli_compress, data),
                check_function_speed(brotli_decompress, tmp))
    __print(res, size, key, output)
    try_print('+++++++++++++++++++++++++++++++++++++++++++++++++++++',
              flag=output)
    return res
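check_function_speed, try_print, and __print are helpers from the same module and are not shown; minimal sketches consistent with how they are called, hedged:

from time import perf_counter

def check_function_speed(func, *args, **kwargs) -> int:
    # Hypothetical timing helper; the example's type hints suggest an integer
    # result, so report elapsed wall-clock time in milliseconds.
    start = perf_counter()
    func(*args, **kwargs)
    return int((perf_counter() - start) * 1000)

def try_print(*args, flag: bool = True, **kwargs):
    # Hypothetical wrapper: print only when the flag is set.
    if flag:
        print(*args, **kwargs)

def __print(res, size, key, flag):
    # Hypothetical result printer: compressed size, ratio, and timings.
    compressed, t_comp, t_dec = res[key]
    try_print(f'{key}: {compressed} bytes '
              f'({round(compressed / size * 100, 2)}%), '
              f'compress {t_comp} ms, decompress {t_dec} ms', flag=flag)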