Example #1
    def __init__(self):
        self.utils = CommonUtils()
        self.payload = {}
        self.action = None

        self.logger = self.set_logger()
        self.logger.debug(
            " ########### Entering ST2 XMC Integration Main Module ##########")
Example #2
    def __init__(self, ipaddress=None, username=None, password=None):
        self.ipaddress = ipaddress
        self.username = username
        self.password = password

        if self.ipaddress is not None:
            self.url = "https://" + self.ipaddress

        self.utils = CommonUtils()
Example #3
    def is_leaf(self, rect, bound_rects):

        for r1 in bound_rects:

            if CommonUtils.is_rectangle_inside(CommonUtils.get_coords(rect),
                                               CommonUtils.get_coords(r1)):
                return False

        print("^^^^^^ Reached here so its not a leaf..... ^^^^^^^^")
        return True
Example #4
    def run(self, deviceip, xmckey):
        self.deviceip = deviceip
        self.xmcpassword = xmckey

        self.utils = CommonUtils()
        self.xmcinfo = self.utils.load_config_schema()

        payload = self.query_device_info()

        return (True, payload)
Example #5
    def __init__(self):
        self.CLIENT_ID = CommonUtils.get_environ_variable("STAT_CLIENT_ID")
        self.STAT_CLIENT_SECRET = CommonUtils.get_environ_variable("STAT_CLIENT_SECRET")
        self.STAT_ACCESS_TOKEN = CommonUtils.get_environ_variable("STAT_ACCESS_TOKEN")
        self.logger = logging.getLogger(__name__)
        self.headers = {
            "Authorization": str(self.STAT_ACCESS_TOKEN),
            "Accept": "application/vnd.stattleship.com; version=1",
            "Content-Type": "application/json",
        }
Example #6
    def __init__(self):
        self.date = ""
        self.name = ""
        self.questions = []
        self.question_list = []
        self.students = {}
        self.answerkey = []
        self.marked = []
        self.selected_options = []
        self.utils = CommonUtils()
Example #7
def get_aws_mongo_db_events():
    """
    Connects to the AWS-hosted MongoDB events database.
    :return: handle for the eventsDB database
    """
    uri = (
        "mongodb://"
        + CommonUtils.get_environ_variable("AWS_MONGO_USER")
        + ":"
        + CommonUtils.get_environ_variable("AWS_MONGO_PASS")
        + "@"
        + CommonUtils.get_environ_variable("AWS_ADDRESS")
    )
    client = MongoClient(uri)
    return client.eventsDB
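A minimal usage sketch for the helper above, assuming the AWS_MONGO_* environment variables are set and the function is in scope; the "events" collection name and the query are hypothetical and only illustrate reading from the returned database handle.

db = get_aws_mongo_db_events()
# "events" is a hypothetical collection name, used purely for illustration
for doc in db.events.find().limit(5):
    print(doc)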
Example #8
    def __init__(self, ref):
        self.utils = CommonUtils()
        self.logger = logging.getLogger("ST2WebhookUtils")

        self.st2info = self.utils.get_st2info()
        if bool(self.st2info):
            self.url = "https://" + self.st2info[
                'st2ip'] + "/api/v1/webhooks/" + ref
            self.headers = {
                'content-type': 'application/json',
                'St2-Api-Key': self.st2info['st2api_key']
            }
        self.ref = ref

        self.logger.debug("Webhook URL: " + self.url)
Example #9
class ST2WebhookUtils:
    """
        REST API Client for ST2 Webhook.
    """
    def __init__(self, ref):
        self.utils = CommonUtils()
        self.logger = logging.getLogger("ST2WebhookUtils")

        self.st2info = self.utils.get_st2info()
        if bool(self.st2info):
            self.url = "https://" + self.st2info[
                'st2ip'] + "/api/v1/webhooks/" + ref
            self.headers = {
                'content-type': 'application/json',
                'St2-Api-Key': self.st2info['st2api_key']
            }
        self.ref = ref

        self.logger.debug("Webhook URL: " + self.url)

    def post(self, payload):
        response = requests.post(self.url,
                                 data=json.dumps(payload),
                                 headers=self.headers,
                                 verify=False)

        return response

    def get_xmc_ip(self):
        # Prefer a non-loopback address reported for the local hostname.
        ips = [
            ip for ip in socket.gethostbyname_ex(socket.gethostname())[2]
            if not ip.startswith("127.")
        ]
        if ips:
            return ips[0]
        # Otherwise use the source address of an outbound UDP socket.
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        s.connect(("8.8.8.8", 53))
        ip = s.getsockname()[0]
        s.close()
        return ip
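A short usage sketch for the webhook client above, assuming the StackStorm connection info that CommonUtils provides is configured; the webhook reference and payload fields are placeholders, not values taken from the source.

# Hypothetical webhook reference and payload, for illustration only.
webhook = ST2WebhookUtils("sample_webhook")
payload = {"deviceip": "10.0.0.1", "message": "link down", "severity": "critical"}
response = webhook.post(payload)
print(response.status_code)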
Example #10
    def process(self, inp, training=False):
        maxWeight = -1
        function = None
        for funcname, weight in self.rule_lexicon.iteritems():
            f = getattr(self,funcname)
            if weight>maxWeight:
                maxWeight = weight
                function = getattr(self,funcname)
            if training:
                print '######Please give rating(1-10)####'
                print f(inp)
                self.rule_lexicon[funcname] = self.rule_lexicon[funcname]+int(raw_input())

        if training:
            CommonUtils.dumpToRuleBook(self.rulebook, self.rule_lexicon)
        return maxWeight, function(inp)
Example #11
    def test_is_rectangle_inside(self):

        smallRect = {'x1': 850, 'y1': 185, 'x2': 1648, 'y2': 349}
        bigRect = {'x1': 47, 'y1': 114, 'x2': 1652, 'y2': 386}
        br1 = SimpleNamespace(**bigRect)
        sm1 = SimpleNamespace(**smallRect)

        assert CommonUtils.is_rectangle_inside(br1, sm1)
Example #12
    def __init__(self):
        self.rulebook = "general.rule"
        self.rule_lexicon = Extractor.rule_lexicon
        #print os.path.isfile(rulebook)
        if os.path.isfile(self.rulebook):
            #load pickle
            rulef = open(self.rulebook,"rb")
            try:
                self.rule_lexicon = pickle.load(rulef)
            except EOFError:
                print 'pickle loads empty file'
            rulef.close()

        #load the rules of this class in rule_lexicon, dump them in rule book
        for name,method in GeneralExtractor.__dict__.iteritems():
            if name.startswith('rule'):
                if not name in self.rule_lexicon:
                    self.rule_lexicon[name] = 0.0

        CommonUtils.dumpToRuleBook(self.rulebook, self.rule_lexicon)
Example #13
class Poll:
    def __init__(self):
        self.date = ""
        self.name = ""
        self.questions = []
        self.question_list = []
        self.students = {}
        self.answerkey = []
        self.marked = []
        self.selected_options = []
        self.utils = CommonUtils()

    def set_name(self, name):
        self.name = name

    def set_date(self, date):
        self.date = date

    def insert_question(self, question):
        self.questions.append(question)

    def insert_student(self, name):
        self.students[self.utils.strip_accents(name)] = self.questions
        self.question_list = self.questions
        self.questions = []

    def if_student_exists(self, name):
        check = False
        students = list(self.students.keys())
        for i in range(len(students)):
            if self.utils.strip_accents(
                    students[i].lower()) in self.utils.strip_accents(
                        name.lower()):
                check = True
                break
        return check
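A brief sketch of how the Poll container above might be populated, assuming CommonUtils is importable as in the other examples; the names and question are made up, and a plain tuple stands in for the project's Question objects since insert_question stores whatever it is given.

poll = Poll()
poll.set_name("Week 3 Quiz")
poll.set_date("2021-03-15")
poll.insert_question(("What is 2 + 2?", "4"))  # placeholder for a Question object
poll.insert_student("Jane Doe")                # files the collected questions under the student
print(poll.if_student_exists("Jane Doe"))      # True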
Example #14
    def process(self, event):

        print("------------------------------ PHASE-1-1 STARTED for file " +
              event.src_path + "------------------------")

        pipeline_file = PipelineFileName(
            task_file_name=os.path.basename(event.src_path))

        if pipeline_file.file_cat != "M":
            return

        tru_img_pipeline_file = PipelineFileName(
            task_file_name=os.path.basename(event.src_path))
        tru_img_pipeline_file.file_cat = ""
        tru_img_path = os.path.join(
            os.path.dirname(event.src_path),
            tru_img_pipeline_file.task_output_file_name)

        # try:

        # the output of phase 0 is the input to phase 1-1
        input_path = os.path.join(
            os.path.dirname(os.path.realpath("__file__")), "phase0-output")
        # output of phase 1-1
        out_folder_name = pipeline_file.task_output_folder_name
        temp_output_path = os.path.join(
            os.path.dirname(os.path.realpath("__file__")), "phase-1-1-output",
            "temp", out_folder_name)
        output_path = os.path.join(
            os.path.dirname(os.path.realpath("__file__")), "phase-1-1-output")
        old_output_path = os.path.join(output_path, out_folder_name)
        # after phase 1 processing, archive the images to the archive folder
        archive_to = os.path.join(os.path.basename(event.src_path),
                                  "processed")

        img = cv2.imread(event.src_path, 0)
        thresh_img = cv2.adaptiveThreshold(img, 255,
                                           cv2.ADAPTIVE_THRESH_GAUSSIAN_C,
                                           cv2.THRESH_BINARY, 11, 2)

        # (thresh, img_bin) = cv2.threshold(img, 128, 255,cv2.THRESH_BINARY|cv2.THRESH_OTSU)

        # #invert image to binary
        # img_bin = 255-img_bin

        # #define the kernels
        # # Defining a kernel length
        # kernel_length = np.array(img).shape[1]//80
        # kernel_length = 2
        # # A vertical kernel of (1 X kernel_length), which will detect all the vertical lines in the image.
        # verticle_kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (1, kernel_length))
        # kernel_length = 5
        # # A horizontal kernel of (kernel_length X 1), which will detect all the horizontal lines in the image.
        # hori_kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (kernel_length, 1))
        # print(hori_kernel)
        # print(verticle_kernel)
        # # A kernel of (3 X 3) ones.
        # kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (3, 3))

        # #
        # # Morphological operation to detect vertical lines from an image
        # img_temp1 = cv2.erode(img_bin, verticle_kernel, iterations=5)
        # verticle_lines_img = cv2.dilate(img_temp1, verticle_kernel, iterations=5)
        # #cv2.imwrite("verticle_lines.jpg",verticle_lines_img)
        # # Morphological operation to detect horizontal lines from an image
        # img_temp2 = cv2.erode(img_bin, hori_kernel, iterations=5)
        # horizontal_lines_img = cv2.dilate(img_temp2, hori_kernel, iterations=15)
        # #cv2.imwrite("horizontal_lines.jpg",horizontal_lines_img)

        # #
        # #
        # # Weighting parameters: these decide how much of each image contributes to the combined image.
        # alpha = 0.5
        # beta = 1.0 - alpha
        # # addWeighted blends the two images with the given weights into a third image.
        # img_final_bin = cv2.addWeighted(verticle_lines_img, alpha, horizontal_lines_img, beta, 0.0)
        # img_final_bin = cv2.erode(~img_final_bin, kernel, iterations=1)
        # (thresh, img_final_bin) = cv2.threshold(img_final_bin, 128,255, cv2.THRESH_BINARY | cv2.THRESH_OTSU)

        # Find contours for image, which will detect all the boxes
        contours, hierarchy = cv2.findContours(thresh_img, cv2.RETR_TREE,
                                               cv2.CHAIN_APPROX_SIMPLE)
        # Sort all the contours by top to bottom.
        (contours, boundingBoxes) = self.sort_contours(contours,
                                                       method="top-to-bottom")

        idx = 0
        img_tru = cv2.imread(tru_img_path, 0)
        print("about to remove all files from if already exists" +
              temp_output_path)
        try:
            shutil.rmtree(temp_output_path)
            shutil.rmtree(old_output_path)
        except Exception as e:
            print(e)
        os.mkdir(temp_output_path)

        MARGIN = 0
        bounding_rects = []
        for c in contours:
            x, y, w, h = cv2.boundingRect(c)
            if ((w > 20 and h > 10) or (w > 10 and h > 20 and h < 1200)):
                r = {
                    'x': x - MARGIN,
                    'y': y - MARGIN,
                    'w': w + MARGIN,
                    'h': h + MARGIN
                }
                n = SimpleNamespace(**r)
                bounding_rects.append(n)

        # print(bounding_rects)
        # dedupe_rects = self.remove_duplicate_blocks(bounding_rects)

        #print(str(c.size))
        #print(boundingBoxes)
        process_pipeline_file = PipelineFileName(
            task_file_name=os.path.basename(
                tru_img_pipeline_file.task_output_file_name))
        res_file = os.path.join(
            temp_output_path,
            '{}.csv'.format(os.path.basename(tru_img_path).split('.')[0]))

        with open(res_file, 'w') as f:
            f.write('{},{},{},{},{},{}\r\n'.format("file", "x1", "y1", "x2",
                                                   "y2", "isleaf"))
            for r in bounding_rects:
                # Returns the location and width,height for every contour
                idx += 1
                new_img = img_tru[r.y:r.y + r.h, r.x:r.x + r.w]
                process_pipeline_file.segment = "0"
                process_pipeline_file.segment = str(idx)
                process_pipeline_file.file_cat = ""
                new_file = os.path.join(
                    temp_output_path,
                    process_pipeline_file.task_output_file_name)
                rc = CommonUtils.get_coords(r)
                isl = self.is_leaf(r, bounding_rects)
                print("******************************************* isl = " +
                      str(isl))
                f.write('{},{},{},{},{},{}\r\n'.format(
                    process_pipeline_file.task_output_file_name, rc.x1, rc.y1,
                    rc.x2, rc.y2, str(isl)))
                print("writing out ..." + new_file)
                cv2.imwrite(new_file, new_img)

        # move the processed file
        shutil.move(temp_output_path, output_path)
        move_to = os.path.join(os.path.dirname(event.src_path), "processed",
                               os.path.basename(event.src_path))
        shutil.move(event.src_path, move_to)
        move_to = os.path.join(os.path.dirname(tru_img_path), "processed",
                               os.path.basename(tru_img_path))
        shutil.move(tru_img_path, move_to)
        print("moved file " + event.src_path + " to.. " + move_to)
        print(
            "------------------------------------------------- COMPLETE PHASE-1-1---------------------------------------------------------"
        )
Example #15
    def __init__(self):
        self.BS = 16
        self.input = CommonUtils.get_environ_variable('NODE_API_KEY')
        self.iv = CommonUtils.get_environ_variable('NODE_API_IV')
Example #16
class GetDeviceInfo(Action):
    """
        XMC NBI client to get device SSH credentials and family.
    """
    def run(self, deviceip, xmckey):
        self.deviceip = deviceip
        self.xmcpassword = xmckey

        self.utils = CommonUtils()
        self.xmcinfo = self.utils.load_config_schema()

        payload = self.query_device_info()

        return (True, payload)

    def query_device_info(self):
        self.query_device_auth()
        device_family = self.query_device_family()
        payload = dict()
        payload['user'] = self.user
        payload['password'] = self.password
        payload['devicefamily'] = device_family

        return payload

    def query_device_auth(self):
        return_value = False

        query = '{administration {deviceProfile(user: ' + '"' + self.xmcinfo['xmcuser'] + '"' + ', ip: ' + '"' + self.deviceip + '"' \
            ') {authCred {userName,loginPassword}}}}'

        resp = self.nbi_call(query)
        if resp['data']["administration"]["deviceProfile"] is not None:
            self.user = resp['data']["administration"]["deviceProfile"][
                "authCred"]["userName"]
            self.password = resp['data']["administration"]["deviceProfile"][
                "authCred"]["loginPassword"]
            return_value = True
        else:
            self.user = None
            self.password = None
            self.logger.warning("Failed to query " + self.deviceip +
                                " credential")

        return return_value

    def query_device_family(self):
        query = "{network {device(ip: " + "\"" + self.deviceip + "\"" + ") {deviceDisplayFamily}}}"
        device_family = None

        resp = self.nbi_call(query)
        if resp['data']["network"]["device"] is not None:
            self.logger.debug(resp)
            device_family = resp['data']["network"]["device"][
                "deviceDisplayFamily"]
        else:
            self.logger.warning("Failed to query " + self.deviceip +
                                " device_family")

        return device_family

    def nbi_call(self, query):
        ExtremeApi = "query ExtremeApi "
        query = ExtremeApi + query

        xmc_api = ExtremeNBI(self.xmcinfo['xmcip'], self.xmcinfo['xmcuser'],
                             self.xmcpassword)
        response = xmc_api.send(query, None, 60)
        json_str = json.dumps(response)
        resp = json.loads(json_str)

        return resp
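A hedged sketch of calling the action above outside of StackStorm; in a real deployment the runner supplies the parameters, and the device IP and XMC API key below are placeholders.

# Placeholder device IP and XMC API key, for illustration only.
action = GetDeviceInfo()
ok, payload = action.run(deviceip="192.0.2.15", xmckey="example-xmc-key")
if ok:
    print(payload["user"], payload["devicefamily"])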
Example #17
import CommonUtils.CommonUtils as lib
lib.getSystemDateTime()
Example #18
    def __init__(self, answer_dir, students_dir, polls_dir, out_dir):
        self.answer_dir = answer_dir
        self.students_dir = students_dir
        self.polls_dir = polls_dir
        self.out_dir = out_dir
        self.utils = CommonUtils()
Example #19
class ST2ApiUtils:
    """
        REST API Client for StackStorm CLI and key management.
    """
    def __init__(self, ipaddress=None, username=None, password=None):
        self.ipaddress = ipaddress
        self.username = username
        self.password = password

        if self.ipaddress is not None:
            self.url = "https://" + self.ipaddress

        self.utils = CommonUtils()

    def get_auth_token(self, timeout=60):
        #requests.packages.urllib3.disable_warnings()
        headers = {'Content-Type': 'application/json'}
        return_response = requests.post(self.url + '/auth/v1/tokens',
                                        headers=headers,
                                        auth=(self.username, self.password),
                                        data=None,
                                        timeout=timeout,
                                        verify=False)

        if return_response.status_code == requests.codes.created:
            json_str = json.dumps(json.loads(return_response.text))
            resp = json.loads(json_str)
            self.token = resp['token']

            return self.token

        # raise http exception
        return_response.raise_for_status()

    def generate_api_key(self):
        self.get_auth_token()

        #requests.packages.urllib3.disable_warnings()
        headers = self.get_rest_headers()
        headers['X-Auth-Token'] = self.token
        metadata = '{"metadata": {"used_by": "xmc"}}'

        return_response = requests.post(self.url + '/api/v1/apikeys',
                                        headers=headers,
                                        auth=(self.username, self.password),
                                        data=metadata,
                                        timeout=60,
                                        verify=False)

        if return_response.status_code == requests.codes.created:
            json_str = json.dumps(json.loads(return_response.text))

            resp = json.loads(json_str)
            self.apikey = resp['key']

            return self.apikey

        # raise http exception
        return_response.raise_for_status()

    def get_st2_info(self, pack, query):
        self.st2info = self.utils.get_st2info()
        if bool(self.st2info):
            headers = self.get_rest_headers()
            headers['ST2-Api-Key'] = self.st2info['st2api_key']

            #requests.packages.urllib3.disable_warnings()
            self.url = "https://" + self.st2info[
                'st2ip'] + "/api/v1/" + query + '/?pack=' + pack

            return_response = requests.get(self.url,
                                           headers=headers,
                                           timeout=60,
                                           verify=False)
            return_value = return_response.text

            file_name = self.utils.get_webhook_infofile() + "_" + query + ".json"
            with open(file_name, 'w') as outfile:
                outfile.write(return_value)

            return return_value
        else:
            print "Failed to get StackStorm information"

    def create_st2_action_list_v1(self, pack):
        response_return = self.get_st2_info(pack, "rules")
        return_value = json.loads(response_return)
        with open(self.utils.get_webhook_action_listfile(), 'w') as outfile:
            for query in return_value:
                action = query["trigger"]["parameters"]["url"]
                outfile.write("[" + action + "] \n")
                # The StackStorm pack name, webhook action name, deviceip, message, severity,
                # alarmName and deviceFirmware parameters are mandatory and must appear in this order.
                # user, password and devicefamily will be queried at runtime.

                outfile.write("   " + "pack" + "\n")
                outfile.write("   " + "action" + "\n")
                outfile.write("   " + "deviceip" + "\n")
                outfile.write("   " + "message" + "\n")
                outfile.write("   " + "severity" + "\n")
                outfile.write("   " + "alarmName" + "\n")
                outfile.write("   " + "deviceFirmware" + "\n")

                for param in query["action"]["parameters"]:
                    print action + " " + param
                    outfile.write("   " + param + "\n")

    def create_st2_action_list_v2(self, pack):
        response_return = self.get_st2_info(pack, "rules")
        retval = json.loads(response_return)

        with open(self.utils.get_webhook_action_listfile(), 'w') as outfile:
            for query in retval:
                action = query["trigger"]["parameters"]["url"]
                outfile.write("[" + action + "] \n")

                print action
                for param in query["action"]["parameters"]:
                    print " " + param
                    outfile.write("   " + param + "\n")

    def get_st2_webhook_action_params(self, pack, action):
        self.st2info = self.utils.get_st2info()
        if bool(self.st2info):
            headers = self.get_rest_headers()
            headers['ST2-Api-Key'] = self.st2info['st2api_key']

            #requests.packages.urllib3.disable_warnings()
            self.url = "https://" + self.st2info[
                'st2ip'] + "/api/v1/" + "rules/" + pack + "." + action

            response_return = requests.get(self.url,
                                           headers=headers,
                                           timeout=60,
                                           verify=False)

            if response_return.status_code == requests.codes.ok:
                retval = json.loads(response_return.text)

                param_list = []
                index = 0
                if retval is not None:
                    for param in retval["action"]["parameters"]:
                        param_list.insert(index, param)
                        index = index + 1
                        print param
                    return param_list

                response_return.raise_for_status()
        else:
            print "Fail to get StackStorm information"

    def get_rest_headers(self):
        headers = {
            'Content-Type': 'application/json',
            'Connection': 'keep-alive',
            'Accept-Encoding': 'gzip, deflate',
            'Accept': '*/*',
            'User-Agent': 'python-requests/2.14.2',
        }

        return headers
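A minimal sketch of using the client above to generate a StackStorm API key; the host address and credentials are placeholders, not values from the source.

# Placeholder host and credentials, for illustration only.
api = ST2ApiUtils(ipaddress="192.0.2.10", username="st2admin", password="secret")
apikey = api.generate_api_key()
print("new ST2 API key: " + apikey)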
Example #20
class ST2Integration():
    """
        Master ST2 XMC Integration module.
    """
    def __init__(self):
        self.utils = CommonUtils()
        self.payload = {}
        self.action = None

        self.logger = self.set_logger()
        self.logger.debug(
            " ########### Entering ST2 XMC Integration Main Module ##########")

    def set_logger(self):
        name = "ST2Integration"
        log_format = '%(asctime)s  %(name)8s  %(levelname)5s  %(message)s'
        logging.basicConfig(level=logging.DEBUG,
                            format=log_format,
                            filename=self.utils.get_logfile(),
                            filemode='a')
        console = logging.StreamHandler()
        console.setLevel(logging.DEBUG)
        console.setFormatter(logging.Formatter(log_format))
        logging.getLogger(name).addHandler(console)

        return logging.getLogger(name)

    def call_st2webhook(self):
        api = ST2WebhookUtils(self.action)
        response = api.post(self.payload)

        self.logger.debug(response)

    def build_payload(self, params):
        input_len = len(params)

        if input_len >= 2:
            for i in range(0, input_len):
                self.logger.debug("Input param " + params[i])
                param = params[i].split("=")

                if param[0] == "action":
                    self.action = param[1]
                elif param[0] == "message":
                    param0_len = len(param[0])
                    msg_str = params[i][param0_len + 1:]
                    msg_len = len(msg_str)
                    if msg_str[0] == "\"" and msg_str[msg_len - 1] == "\"":
                        msg_str = msg_str[1:msg_len - 1]
                        self.payload["message"] = msg_str
                    # this part should be removed. only for demo
                    else:
                        self.payload[param[0]] = msg_str
                else:
                    # this part should be removed. only for demo
                    self.payload[param[0]] = param[1]
                i = i + 1

            self.logger.debug("action=" + self.action)
            for k, v in self.payload.iteritems():
                self.logger.debug("Payload info " + k + ": " + v)
        else:
            self.logger.debug("At least two params are required")
Example #21
from validate_email import validate_email

# This is the main script. It takes a list of TV series and an email address from the user,
# stores that information in the MySQL database, calls the web scraper to fetch the upcoming
# episode air date for each series, and sends the user a single mail with all of the
# upcoming-episode information.

email_address = str(raw_input("enter email address :  "))
if not validate_email(email_address):
    raise ValueError('Please provide a valid email address')
tv_series = [
    l.strip(" ").strip("\n") for l in raw_input(
        "please provide comma separated values for the tv series :  ").split(
            ",")
]
CommonUtils.insert_data_in_mysql(email_address, tv_series)
imdb_scrapper = ImdbScrapper(config.IMDB_URL)
mail_body_arr = []
for series in tv_series:
    mail_body_arr.append("<br />")
    tv_series_str = "TV series name: {}".format(series)
    mail_body_arr.append(tv_series_str)
    mail_body_arr.append("<br />")
    try:
        info = imdb_scrapper.fetch_upcoming_episode_details_using_imdb_scraping(
            series)
        upcoming_episode_info = "Status: {}".format(info)
        print("tv_series - {} and output is - {}".format(series, info))
    except Exception:
        upcoming_episode_info = "Unable to fetch upcoming episode details due to an unexpected exception. Please contact the admin to debug."
    mail_body_arr.append(upcoming_episode_info)
Example #22
class ZoomPollAnalyzer:
    def __init__(self, answer_dir, students_dir, polls_dir, out_dir):
        self.answer_dir = answer_dir
        self.students_dir = students_dir
        self.polls_dir = polls_dir
        self.out_dir = out_dir
        self.utils = CommonUtils()

    def populate_answer_keys(self, directory):
        count = 1
        answer_keys = []
        for file_name in glob.iglob('{}/*.csv'.format(directory),
                                    recursive=True):
            cols = []
            df = pandas.read_csv(file_name)
            for col in df.columns:
                cols.append(col)
            answer_key = Answerkey(cols[0])
            for i, j in df.iterrows():
                answer_key.insert_question("".join(j[0].split()),
                                           "".join(j[1].split()))
            answer_keys.append(answer_key)
            count += 1
        return answer_keys

    def populate_students_list(self, directory):
        students = []
        for file_name in glob.iglob('{}/*.xls'.format(directory),
                                    recursive=True):
            start = False
            df = pandas.read_excel(file_name)
            for i in df.itertuples():
                arr = np.asarray(i)
                cleaned_row = np.asarray([x for x in arr if str(x) != 'nan'])
                if cleaned_row.size < 2:
                    start = False
                if start:
                    if cleaned_row.size < 6:
                        exp = " "
                    else:
                        exp = cleaned_row[5]
                    st = Student(cleaned_row[2],
                                 self.utils.strip_accents(cleaned_row[3]),
                                 self.utils.strip_accents(cleaned_row[4]), exp)
                    students.append(st)
                if np.isin("Öğrenci No", cleaned_row):
                    start = True
        return students

    def populate_polls(self, directory):
        count = 1
        polls = []
        for file_name in glob.iglob('{}/*.csv'.format(directory),
                                    recursive=True):
            df = pandas.read_csv(file_name)
            poll = Poll()
            for i in df.itertuples():
                if len(i[0]) > 3:
                    data = [np.asarray(i[0])]
                    for j in range(1, len(i)):
                        if str(i[j]) != "nan":
                            data.append(i[j])
                    tup = []
                    for obj in data[0]:
                        tup.append(obj)
                else:
                    tup = [i[0][0], i[0][1]]
                for index in range(1, len(i)):
                    tup.append(i[index])
                tup = np.asarray([x for x in tup if str(x) != 'nan'])
                if poll.if_student_exists(tup[1]):
                    polls.append(poll)
                    poll = Poll()
                for q in range(4, len(tup), 2):
                    question = Question("".join(tup[q].split()),
                                        "".join(tup[q + 1].split()))
                    poll.insert_question(question)
                poll.insert_student(self.utils.strip_accents(tup[1]))
            polls.append(poll)
            count += 1
        return polls

    def identify_poll(self, polls, answer_keys):
        return_polls = []
        for pl in polls:
            for ak in answer_keys:
                if ak.is_question_present(pl.question_list):
                    pl.name = ak.name
                    pl.answerkey = ak
                    return_polls.append(pl)
                    break
        return return_polls

    def mark_attendance(self, students, polls):
        for st in students:
            for pl in polls:
                if pl.if_student_exists(st.fname + " " + st.lname):
                    st.attended_polls += 1
        return students

    def mark_quiz(self, poll):
        students = poll.students
        answer_key = poll.answerkey
        marked_students = []
        for st in students:
            marks = []
            question_list = students[st]
            for q in question_list:
                if answer_key.get_answer(str(q.question)) == q.get_answer():
                    marks.append(1)
                else:
                    marks.append(0)
            marked_students.append(marks)
        q_iter = 0
        chosen_answers = []
        for ak in answer_key.question_list:
            qa = {}
            for st in students:
                if not q_iter >= len(students[st]):
                    st_answer = students[st][q_iter].get_answer()
                    if st_answer in qa:
                        qa[st_answer] += 1
                    else:
                        qa[st_answer] = 1
            qa["correct"] = ak.get_answer()
            chosen_answers.append(qa)
            q_iter += 1
        return marked_students, chosen_answers

    def start(self):
        results = []
        self.utils.clean_output_folder(self.out_dir)
        answer_keys = self.populate_answer_keys(self.answer_dir)
        student_list = self.populate_students_list(self.students_dir)
        polls = self.populate_polls(self.polls_dir)
        new_polls = self.identify_poll(polls, answer_keys)
        marked_students = self.mark_attendance(student_list, new_polls)
        ids = []
        fnames = []
        lnames = []
        exps = []
        att_polls = []
        att_rate = []
        att_per = []
        for st in marked_students:
            ids.append(st.id)
            fnames.append(st.fname)
            lnames.append(st.lname)
            exps.append(st.exp)
            att_polls.append(len(new_polls))
            att_rate.append("Attended {} of {}".format(st.attended_polls,
                                                       len(new_polls)))
            att_per.append("Attended Percentage = {}".format(
                (st.attended_polls / len(new_polls)) * 100))
        out_dict = {
            'Öğrenci No': ids,
            'Adı': fnames,
            'Soyadı': lnames,
            'Açıklama': exps,
            'Number of Attendance Polls': att_polls,
            'Attendance Rate': att_rate,
            'Attendance Percentage': att_per
        }
        er = ExcelWriter(out_dict, self.out_dir + '/attendance.xlsx')
        er.write_excel()
        marked_polls = []
        for poll in new_polls:
            marks, ans = self.mark_quiz(poll)
            poll.marked = marks
            marked_polls.append(poll)
            poll.selected_options = ans
        poll_number = 0
        for pl in marked_polls:
            questions_dict = {}
            false_indices = []
            bad_index = 0
            q_ids = []
            q_fnames = []
            q_lnames = []
            q_exps = []
            number_of_q = []
            success_rate = []
            success_per = []
            count = 0
            for st in pl.students:
                check = False
                for i in range(len(pl.students)):
                    n = fnames[i].lower() + " " + lnames[i].lower()
                    if self.utils.strip_accents(
                            st.lower()) in self.utils.strip_accents(n):
                        q_ids.append(ids[i])
                        q_fnames.append(fnames[i])
                        q_lnames.append(lnames[i])
                        q_exps.append(exps[i])
                        check = True
                        break
                if not check:
                    false_indices.append(bad_index)
                    q_ids.append("-")
                    q_fnames.append(st)
                    q_lnames.append("-")
                    q_exps.append("-")
                bad_index += 1
                correct = [x for x in pl.marked[count] if x == 1]
                success_rate.append("{} of {}".format(len(correct),
                                                      len(pl.question_list)))
                success_per.append("Success Percentage= {} ".format(
                    (len(correct) / len(pl.question_list)) * 100))
                count += 1
            questions_dict['Öğrenci No'] = q_ids
            questions_dict['Adı'] = q_fnames
            questions_dict['Soyadı'] = q_lnames
            questions_dict['Açıklama'] = q_exps
            poll_name = pl.name
            bar_counter = 0
            os.mkdir(self.out_dir + "/Histograms " + poll_name +
                     str(poll_number))
            for qa in pl.selected_options:
                ans_keys = []
                que_keys = []
                colors = []
                correct_ans = qa.pop("correct", None)
                for value in qa.keys():
                    que_keys.append(value)
                    ans_keys.append(qa[value])
                    if value == correct_ans:
                        colors.append('g')
                    else:
                        colors.append('b')
                qu_dict = {"answers": que_keys, "count": ans_keys}
                er.set_dict(qu_dict)
                er.set_path(self.out_dir + "/Histograms " + poll_name +
                            str(poll_number) + "/ Q" + str(bar_counter + 1) +
                            ".xlsx")
                er.write_excel()
                self.utils.plot_histograms(
                    qa, colors, poll_name,
                    self.out_dir + "/Histograms " + poll_name +
                    str(poll_number) + "/ Q" + str(bar_counter + 1))
                bar_counter += 1
            for i in range(len(pl.question_list)):
                col = []
                question_number = "Q{}".format(i + 1)
                q = 0
                for m in pl.marked:
                    if i >= len(m) or q in false_indices:
                        col.append("-")
                    else:
                        col.append(m[i])
                    q += 1
                questions_dict[question_number] = col
            for index in range(len(pl.students)):
                if index in false_indices:
                    success_rate[index] = "-"
                    success_per[index] = "-"
                    number_of_q.append("-")
                else:
                    number_of_q.append(len(pl.question_list))
            questions_dict['Number of Questions'] = number_of_q
            questions_dict['Success rate'] = success_rate
            questions_dict['Success Percentage'] = success_per
            results.append(questions_dict)
            er.set_dict(questions_dict)
            er.set_path(self.out_dir + "/" + poll_name + str(poll_number + 1) +
                        ".xlsx")
            er.write_excel()
            poll_number += 1