'cracked_pws'               #   Dict:  {'password/hash': [guess-no. which cracked the pw]}
                                  -->  NOTE that for hashes, the guess-no. is always 0, as no guess-no. can be determined from the JtR '.pot' file
'output_file'               # String:  Path of the file to write the analysis results into
"""

import sys
import os

sys.path.insert(1, os.path.abspath("./"))
import operator
import math
from math import fmod
from pgf.initiation.confighelper import ConfigHelper

try:
    # Initiate ConfigHelper instance
    ch = ConfigHelper("./configfiles/run.ini")

    alpha = ch.get_option("DEFAULT", "alpha")  # value will already be returned as a float

    probabilities = dict()  # create dict to store the probabilities of the passwords
    alpha_guesswork = 0.0  # return value to be calculated
    u_alpha = 0.0  # helper
    lambda_u_alpha = 0.0  # helper

    # calc probabilities for all pws in the leak
    #     print "Calculating probabilities..."
    for pw, occ_lu in pws_multi.iteritems():
        probabilities[pw] = float(occ_lu["occ"]) / float(pw_counter)

    #     print "Sorting the passwords by their probabilities..."
    # sort the probabilities-dict by value (ascending) and return list-type object
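    # --- The calculation is truncated at this point; the following is a
    # --- minimal sketch of how it could continue, assuming Bonneau's
    # --- alpha-guesswork metric ("The science of guessing", IEEE S&P 2012).
    # --- Everything below (incl. the descending sort order) is an assumption,
    # --- not the original plugin code.
    sorted_probs = sorted(probabilities.iteritems(),
                          key=operator.itemgetter(1),
                          reverse=True)  # most probable password first

    # u_alpha: least number of guesses needed to reach a success rate >= alpha
    # lambda_u_alpha: success rate actually achieved after u_alpha guesses
    cumulated = 0.0
    for index, (pw, prob) in enumerate(sorted_probs, start=1):
        cumulated += prob
        u_alpha = index
        lambda_u_alpha = cumulated
        if cumulated >= alpha:
            break

    # G_alpha = (1 - lambda_u_alpha) * u_alpha + sum_{i=1..u_alpha}(p_i * i)
    alpha_guesswork = (1.0 - lambda_u_alpha) * u_alpha
    for index, (pw, prob) in enumerate(sorted_probs[:u_alpha], start=1):
        alpha_guesswork += prob * index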
Also, no method declarations etc. are needed.
Name your script with a leading '_' to make the analysis module skip it during execution of the plugins. E.g., 'myfile.py' will be executed, '_myfile.py' won't.
'''

import sys
import os
sys.path.insert(1, os.path.abspath('./'))
# import the ConfigHelper to be able to read values from the run.ini
from pgf.initiation.confighelper import ConfigHelper

#   [Do the necessary imports here...]

try:

    # Initiate ConfigHelper instance
    ch = ConfigHelper('./configfiles/run.ini')
    '''
    Samples:
    - Get a value (TYPE CASTED!) from the 'run.ini':    my_value = ch.get_option('DEFAULT', 'my_value')
    - Get all (unique) passwords from the leak:         for password in pws_multi.iterkeys(): [...]
    - Get the occurrence counter for a password:        occurrences = pws_multi[password]['occ']
    - Get all cracked passwords:                        for password in cracked_pws.iterkeys(): [...]
    '''

    #   [Do your calculations here...]
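
    # e.g., a minimal sample calculation (a sketch only, not part of the
    # original template): share of unique leak passwords that were cracked
    cracked_pct = 100.0 * len(cracked_pws) / max(len(pws_multi), 1)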

    # Append lines to the output-list
    output = list()
    output.append("\n\n")
    output.append(
        "**** **** **** **** [TITLE OF THIS CALCULATION] **** **** **** ****\n")
def main():
    ''' Starts the Password Guessing Framework.

    :requires: a configuration file named "run.ini" in the directory "[...]/Password Guessing Framework/configfiles/"
    '''

    runtimes = OrderedDict()                        # dict to store the runtimes of each job
    start = timeit.default_timer()

    # Initiate logger
    logger = Logger()
    logger.basicConfig('DEBUG')                     # set logger level to DEBUG

    # Initiate ConfigHelper instance
    ch = ConfigHelper('./configfiles/run.ini', logger=logger)
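
    # The get_option() calls below imply a run.ini of roughly this shape
    # (hypothetical minimal illustration; the real file also defines the
    # per-job sections consumed by parse_jobs()):
    #   [DEFAULT]
    #   final_processing = <path to the progress-file processing script>
    #   backup_dir       = <directory to back up all created files into>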

    # parse jobs from 'run.ini'
    job_queue = ch.parse_jobs()
    job_counter = 0                                 # helper to get the correct job-object from the json file at the end of each job run

    # Clear 'PGF.log' file in the JtR directory to reset it
    # The path of the logfile will be generated and stored while parsing the jobs,
    # thus get it from the first job in the list
    job_queue[0].clear_jtr_log()


    # iterate through the job queue
    while len(job_queue) > 0:
        logger.debug("Remaining jobs: %2d\n\n" % len(job_queue))
        job = job_queue.pop(0)      # get first job in queue

        job_start = timeit.default_timer()
        logger.debug("Starting Job <%s>" % job.label)

        # Clear 'PGF.pot' and 'PGF.rec' files in the JtR directory to reset the hashing state for each job
        job.clear_jtr_pot_rec()
        
        # Preparation is called from the config helper

        # Executor instance
        executor = Executor(job)
        executor.execute()

        # Analysis is called from the executor as it is run as a subprocess!

        # calc runtime of the current job
        job_end = timeit.default_timer()
        job_runtime = job_end - job_start
        job_human_runtime = ("%2dd:%2dh:%2dm:%2ds" % ((job_runtime/86400),
                                                      (fmod(job_runtime,86400)/3600),
                                                      (fmod(job_runtime,3600)/60),
                                                      (fmod(job_runtime,60))
                                                      ))
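        # e.g., job_runtime = 93784.0 s yields " 1d: 2h: 3m: 4s":
        # 93784/86400 -> 1 day, fmod(93784,86400)/3600 -> 2 h,
        # fmod(93784,3600)/60 -> 3 min, fmod(93784,60) -> 4 s
        # (%d truncates the float operands)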
        with open(job.output_file, 'a') as output_file:                     # write runtime of the current job to the according output file
            output_file.write("\nRuntime: '%s':  %s" % (job.label, job_human_runtime))

        # job finished!
        logger.debug("---------------- JOB <%s> DONE! --------------------------\n" % job.label)
        logger.debug("Runtime: %28s: %s\n" % (job.label, job_human_runtime))

        # add runtime to list of job-runtimes
        runtimes[job.label] = job_human_runtime

        # Write runtime of the current job into jobs.json
        with open('./results/jobs.json', 'r') as f:
            json_obj = json.load(f)
            json_obj['jobs'][job_counter]['runtime'] = job_human_runtime
        with open('./results/jobs.json', 'w') as f:
            f.write(json.dumps(json_obj, sort_keys=True, indent=4))
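        # the read-modify-write above assumes a jobs.json of roughly this
        # shape (hypothetical illustration, inferred from the key accesses):
        #   {"jobs": [{"label": ..., "runtime": ...}, ...]}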
        job_counter += 1


    # **** **** WHILE LOOP ENDS HERE! **** ****


    # calc runtime of PGF
    end = timeit.default_timer()
    runtime = end - start
    human_runtime = ("%3dd:%2dh:%2dm:%2ds" % ((runtime/86400),
                                              (fmod(runtime,86400)/3600),
                                              (fmod(runtime,3600)/60),
                                              (fmod(runtime,60))
                                              ))

    logger.debug("---------------- ALL JOBS PROCESSED! --------------------------\n")

    # Run the shell script processing the progress files
    final_processing(logger, ch.get_option('DEFAULT', 'final_processing'))

    # print summary of job runtimes
    logger.debug("Job-Runtimes:")
    for label, rt in runtimes.iteritems():
        logger.debug("%37s: %s" % (label, rt))
    logger.debug("Overall Runtime:%s%s\n" % (22*' ', human_runtime))
    logger.debug("PGF closed.")

    # backup all created files AT LAST STEP!
    result_backup(ch.get_option('DEFAULT', 'backup_dir'), ch.get_timestamp_uuid())