def get_large_pauses_patterns_derived(self, python_files):
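     # Derive "large pause" patterns: run each time-assignment pattern over the
     # project files and, for every matched assignment target, emit a
     # "_VAR_MULTI_.sleep(<name>)" pattern string.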
     pauses_large_strings = []
     for pauses_large in self.time_large_files:
         file = open(pauses_large, 'r', encoding='utf-8')
         pauses_large_string = file.read()
         try:
             fsa = Fsa(
                 parser.parse(pauses_large_string,
                              first_iter=True,
                              with_ids=True))
             for f in python_files:
                 program_file = open(f, 'r', encoding='utf-8')
                 program_string = program_file.read()
                 try:
                     program_tree = parser.parse(program_string,
                                                 with_ids=False,
                                                 first_iter=True)
                     result = fsa.run(program_tree)
                     if result:
                         for res in result:
                             if isinstance(res.targets[0], Name):
                                 name = res.targets[0].id
                             elif isinstance(res.targets[0], Attribute):
                                 name = res.targets[0].attr
                             else:
                                 continue
                             new_pattern = "_VAR_MULTI_.sleep(" + name + ")"
                             if new_pattern not in pauses_large_strings:
                                 pauses_large_strings.append(new_pattern)
                 except SyntaxError:
                     logging.error("ERROR PARSING PROGRAM: " + str(f.name))
         except SyntaxError:
             logging.error("ERROR PARSING PATTERN: " + pauses_large_string)
     return pauses_large_strings
 def get_get_users_patterns_derived(self, python_files):
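     # Derive user-retrieval patterns: wrap each followers/friends/by-name/by-query
     # pattern in a stub function, run it over the project files and emit
     # "_VAR_USERS_ = _VAR_MULTI_.<name>(_ARGS_)" for every matched function name.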
     get_users_strings = []
     for get_followers in (self.followers_patterns_files + self.friends_patterns_files +
                           self.users_by_name_files + self.users_by_query_files):
         file = open(get_followers, 'r', encoding='utf-8')
         get_followers_string = file.read()
         new_pat = "def _FUN_():\n\t_STAT_MULTI_\n\t" + get_followers_string
         try:
             fsa = Fsa(parser.parse(new_pat, first_iter=True, with_ids=True))
             for f in python_files:
                 program_file = open(f, 'r', encoding='utf-8')
                 program_string = program_file.read()
                 try:
                     program_tree = parser.parse(program_string, with_ids=False, first_iter=True)
                     result = fsa.run(program_tree)
                     if result:
                         for res in result:
                             pattern_assign = "_VAR_USERS_ = " + "_VAR_MULTI_." + res.name + "(_ARGS_)"
                             if pattern_assign not in get_users_strings:
                                 get_users_strings.append(pattern_assign)
                 except SyntaxError:
                     logging.error("ERROR PARSING PROGRAM: " + str(f.name))
         except SyntaxError:
             logging.error("ERROR PARSING PATTERN: " + new_pat)
     return get_users_strings
 def get_store_patterns_derived(self, python_files):
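     # Derive store patterns: wrap each store pattern in a stub function, run it
     # over the project files and emit an assignment pattern for every matched
     # function name.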
     store_strings = []
     for store_file in self.store_patterns_files:
         file = open(store_file, 'r', encoding='utf-8')
         store_string = file.read()
         try:
             fsa = Fsa(parser.parse("def _FUN_():\n\t" + store_string, first_iter=True, with_ids=True))
             for f in python_files:
                 program_file = open(f, 'r', encoding='utf-8')
                 program_string = program_file.read()
                 try:
                     program_tree = parser.parse(program_string, with_ids=False, first_iter=True)
                     result = fsa.run(program_tree)
                     if result:
                         for res in result:
                             pattern_assign = "_VAR_USERS_ = " + "_VAR_MULTI_." + res.name + "(_ARGS_)"
                             if pattern_assign not in store_strings:
                                 store_strings.append(pattern_assign)
                 except SyntaxError:
                     logging.error("ERROR PARSING PROGRAM: " + str(f.name))
         except SyntaxError:
             logging.error("ERROR PARSING PATTERN: " + "def _FUN_():\n\t" + store_string)
     return store_strings
 def get_constraint_fsas_derived(self):
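     # Build constraint FSAs: guard each generic pause pattern with the four
     # comparison conditions (<, <=, >=, >), add the derived "little pause"
     # patterns, and compile every string into an Fsa.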
     constraint_strings = []
     for pause in self.get_generic_pause_patterns():
         constraint_strings.append(
             "if _VAR_1 < _VAR_2 and EVERY:\n\t_STAT_MULTI_\n\t" + pause)
         constraint_strings.append(
             "if _VAR_1 <= _VAR_2 and EVERY:\n\t_STAT_MULTI_\n\t" + pause)
         constraint_strings.append(
             "if _VAR_1 >= _VAR_2 and EVERY:\n\t_STAT_MULTI_\n\t" + pause)
         constraint_strings.append(
             "if _VAR_1 > _VAR_2 and EVERY:\n\t_STAT_MULTI_\n\t" + pause)
     little_strings = self.pauses_little_derived
     for little in little_strings:
         constraint_strings.append(little)
     constraint_fsas = []
     for constraint_string in constraint_strings:
         try:
             constraint_fsas.append((Fsa(
                 parser.parse(constraint_string,
                              first_iter=True,
                              with_ids=True)), constraint_string))
         except SyntaxError as e:
             logging.error("ERROR PARSING PROGRAM: " +
                           str(constraint_string))
     return constraint_fsas
 def get_store_fsas_basic(self):
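     # Build the basic store FSAs: each get-tweets / get-users pattern is chained
     # with a for-loop over the results and a store pattern, once with the store
     # statement inside the loop body and once after it.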
     store_strings_patterns = []
     get_tweets_strings = self.get_get_all_tweets_patterns_basic()
     store_strings = self.get_store_patterns_basic()
     get_users_strings = self.get_get_users_patterns_basic()
     for tweet in get_tweets_strings:
         for s in store_strings:
             store_strings_patterns.append(
                 tweet + "\n_STAT_MULTI_\n" + "for _VAR_TWEET_ in _VAR_TWEETS_:\n\t_STAT_MULTI_\n\t" + s)
             store_strings_patterns.append(
                 tweet + "\n_STAT_MULTI_\n" + "for _VAR_TWEET_ in _VAR_TWEETS_:\n\t_STAT_MULTI_\n" + s)
     for user in get_users_strings:
         for s in store_strings:
             store_strings_patterns.append(
                 user + "\n_STAT_MULTI_\n" + "for _VAR_TWEET_ in _VAR_USERS_:\n\t_STAT_MULTI_\n\t" + s)
             store_strings_patterns.append(
                 user + "\n_STAT_MULTI_\n" + "for _VAR_TWEET_ in _VAR_USERS_:\n\t_STAT_MULTI_\n" + s)
     fsas = []
     for store_strings_pattern in store_strings_patterns:
         try:
             fsas.append(
                 (Fsa(parser.parse(store_strings_pattern, first_iter=True, with_ids=True)), store_strings_pattern))
         except SyntaxError as e:
             logging.error("ERROR PARSING PROGRAM: " + str(store_strings_pattern))
     return fsas
def run(index_0, index_1, fold, ret):
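    # Worker entry point (store scan): ret[index_1] stores the folder being scanned
    # and ret[index_0] its outcome (-1 while running or on error, 1 on success).
    # Matches are written to ../recheck/results/<project>.txt.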
    ret[index_1] = fold
    ret[index_0] = -1
    try:
        logging.debug("SCANNING PROJECT " + str(fold.name))
        print("SCANNING PROJECT " + str(fold.name))
        files = list(fold.glob("**/*.py"))

        # Copy the list so the glob results below do not also extend `files`.
        all_files = list(files)
        # glob has no negation syntax; the original "**/^(results_patterns).txt" is read here
        # as "every .txt file except results_patterns.txt".
        all_files += [t for t in fold.glob("**/*.txt") if t.name != "results_patterns.txt"]
        for t in ["**/*.json", "**/*.cfg"]:
            all_files += list(fold.glob(t))

        main.initialize_patterns(files)

        store_fsas_basic = main.get_store_fsas_basic()

        store_fsas = store_fsas_basic + main.get_store_fsas_derived()

        store_results = []

        for f in files:
            with open(f, 'r', encoding='utf-8') as fi:
                feed = fi.read()
            try:
                feed_tree = parser.parse(feed, first_iter=True, with_ids=False)
                for fsa in store_fsas:
                    result = fsa[0].run(feed_tree)
                    for res in result:
                        store_results.append((Module(body=res), fsa[1]))
                # 'w' overwrites any previous results for this project (was 'a' plus truncate(0)).
                with open("../recheck/results/" + fold.name + ".txt", 'w', encoding='utf-8') as result_file:
                    result_file.write("STORE PATTERNS:\n")
                    for res in store_results:
                        write_program_on_file(res[0], result_file)
                        result_file.write("\n")
                        result_file.write(res[1])
                        result_file.write("\n\n")
                    result_file.write("MIMIC PATTERNS:\n")
            except SyntaxError:
                logging.error("ERROR PARSING PROGRAM: " + str(f.name))
    except BaseException as e:
        logging.debug("ERROR ON PROJECT " + str(fold))
        print(traceback.format_exc())
        logging.error(traceback.format_exc())
        try:
            shutil.move(str(fold), "../recheck/error")
        except Exception:
            shutil.rmtree(str(fold))
        ret[index_0] = -1
        return
    ret[index_0] = 1
    try:
        shutil.move(str(fold), "../recheck/done")
    except Exception:
        shutil.rmtree(str(fold))
 def get_generic_fsas(self):
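     # Compile every generic pause pattern into an (Fsa, pattern string) pair.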
     generic_fsas = []
     generic_pauses = self.get_generic_pause_patterns()
     for generic_pause in generic_pauses:
         try:
             generic_fsas.append(
                 (Fsa(parser.parse(generic_pause, first_iter=True, with_ids=True)), generic_pause))
         except SyntaxError as e:
             logging.error("ERROR PARSING PROGRAM: " + str(generic_pause))
     return generic_fsas
 def get_mimic_fsas_derived(self):
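     # Compile the derived "large pause" patterns into (Fsa, pattern string) pairs.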
     mimic_fsas = []
     pause_strings = self.pauses_large_derived
     for pause_string in pause_strings:
         try:
             mimic_fsas.append(
                 (Fsa(parser.parse(pause_string, first_iter=True, with_ids=True)), pause_string))
         except SyntaxError as e:
             logging.error("ERROR PARSING PROGRAM: " + str(pause_string))
     return mimic_fsas
Example #9
    p_unfollow_patterns = Path(path_unfollow_patterns)
    unfollow_patterns_files = list(p_unfollow_patterns.glob("*"))

    # INDISCRIMINATE FOLLOW PATTERNS
    path_basic_patterns = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\parse_follow\\pattern_programs\\indiscrimate follow\\basic"
    p_basic_patterns = Path(path_basic_patterns)
    follow_files = list(p_basic_patterns.glob("*"))

    get_users_strings = []
    for get_followers in followers_patterns_files:
        file = open(get_followers, 'r', encoding='utf-8')
        get_followers_string = file.read()
        get_users_strings.append(get_followers_string)
        new_pat = "def _FUN_():\n\t_STAT_MULTI_\n\t" + get_followers_string
        try:
            fsa = Fsa(parser.parse(new_pat, first_iter=True, with_ids=True))
            for f in python_files:
                program_file = open(f, 'r', encoding='utf-8')
                program_string = program_file.read()
                try:
                    program_tree = parser.parse(program_string, with_ids=False, first_iter=True)
                    result = fsa.run(program_tree)
                    if result:
                        for res in result:
                            pattern_assign = "_VAR_USERS_ = " + "_VAR_MULTI_." + res.name + "(_ARGS_)"
                            if pattern_assign not in get_users_strings:
                                get_users_strings.append(pattern_assign)
                except SyntaxError as e:
                    print("ERROR PARSING: ")
                    print(f)
                    print("error:")
def run_store():
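    # Interactive store scan: derives get-tweets, get-users and store patterns
    # from the pattern directories and the project itself, then prints every
    # store-pattern match found in the project files.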
    # GET PROJECT PATH
    path = 'C:\\Users\\Andrea\\PycharmProjects\\inspect_def\\' + input(
        "Inserisci percorso progetto: ").replace("/", "\\")

    p = Path(path)

    python_files = []
    python_files += list(p.glob("**/*.py"))

    # GET FOLLOWERS
    path_followers_patterns = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\search_users\\followers"
    p_followers_patterns = Path(path_followers_patterns)
    followers_patterns_files = list(p_followers_patterns.glob("*"))

    # GET FRIENDS
    path_friends_patterns = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\search_users\\friends"
    p_friends_patterns = Path(path_friends_patterns)
    friends_patterns_files = list(p_friends_patterns.glob("*"))

    # GET USER BY NAME
    path_user_patterns = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\search_users\\by_name"
    p_user_patterns = Path(path_user_patterns)
    users_by_name_files = list(p_user_patterns.glob("*"))

    # GET USERS BY QUERY
    path_user_patterns = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\search_users\\by_query"
    p_user_patterns = Path(path_user_patterns)
    users_by_query_files = list(p_user_patterns.glob("*"))

    # GET USER TWEETS
    path_usertweets_patterns = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\search_tweets\\user_tweets"
    p_usertweets_patterns = Path(path_usertweets_patterns)
    usertweets_files = list(p_usertweets_patterns.glob("*"))

    # GET TWEETS
    path_tweets_pattern = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\search_tweets\\query"
    p_tweets_patterns = Path(path_tweets_pattern)
    tweets_patterns_files = list(p_tweets_patterns.glob("*"))

    # STORE
    path_store_pattern = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\store_patterns"
    p_store_patterns = Path(path_store_pattern)
    store_patterns_files = list(p_store_patterns.glob("*"))

    get_tweets_strings = []
    for tweets_files in (tweets_patterns_files + usertweets_files):
        file = open(tweets_files, 'r', encoding='utf-8')
        get_tweets_string = file.read()
        # print("GETTING PATTERN STRING [GET TWEET]:")
        # print(get_tweets_string)
        get_tweets_strings.append(get_tweets_string)
        new_pat = "def _FUN_():\n\t_STAT_MULTI_\n\t" + get_tweets_string
        try:
            fsa = Fsa(parser.parse(new_pat, first_iter=True, with_ids=True))
            for f in python_files:
                program_file = open(f, 'r', encoding='utf-8')
                program_string = program_file.read()
                try:
                    program_tree = parser.parse(program_string,
                                                with_ids=False,
                                                first_iter=True)
                    result = fsa.run(program_tree)
                    if result:
                        for res in result:
                            pattern_assign = "_VAR_TWEETS_ = _VAR_MULTI_." + res.name + "(_ARGS_)"
                            # print("NEW PATTERN FOR GETTING TWEETS:")
                            # print(pattern_assign)
                            if pattern_assign not in get_tweets_strings:
                                get_tweets_strings.append(pattern_assign)
                except SyntaxError as e:
                    print("ERROR PARSING ")
                    print(f)
                    print("error:")
                    print(e)
                    print(e.text)
        except SyntaxError as e:
            print("ERROR PARSING ")
            print(new_pat)
            print("error:")
            print(e)
            print(e.text)

    get_users_strings = []
    for get_followers in (followers_patterns_files + friends_patterns_files +
                          users_by_name_files + users_by_query_files):
        file = open(get_followers, 'r', encoding='utf-8')
        get_followers_string = file.read()
        get_users_strings.append(get_followers_string)
        new_pat = "def _FUN_():\n\t_STAT_MULTI_\n\t" + get_followers_string
        try:
            fsa = Fsa(parser.parse(new_pat, first_iter=True, with_ids=True))
            for f in python_files:
                program_file = open(f, 'r', encoding='utf-8')
                program_string = program_file.read()
                try:
                    program_tree = parser.parse(program_string,
                                                with_ids=False,
                                                first_iter=True)
                    result = fsa.run(program_tree)
                    if result:
                        for res in result:
                            pattern_assign = "_VAR_USERS_ = " + "_VAR_MULTI_." + res.name + "(_ARGS_)"
                            if pattern_assign not in get_users_strings:
                                get_users_strings.append(pattern_assign)
                except SyntaxError as e:
                    print("ERROR PARSING: ")
                    print(f)
                    print("error:")
                    print(e)
                    print(e.text)
        except SyntaxError as e:
            print("ERROR PARSING: ")
            print(new_pat)
            print("error:")
            print(e)
            print(e.text)

    store_strings = []
    for store_file in store_patterns_files:
        file = open(store_file, 'r', encoding='utf-8')
        store_string = file.read()
        store_strings.append(store_string)
        try:
            fsa = Fsa(
                parser.parse(store_string, first_iter=True, with_ids=True))
            for f in python_files:
                program_file = open(f, 'r', encoding='utf-8')
                program_string = program_file.read()
                try:
                    program_tree = parser.parse(program_string,
                                                with_ids=False,
                                                first_iter=True)
                    result = fsa.run(program_tree)
                    if result:
                        for res in result:
                            pattern_assign = "_VAR_USERS_ = " + "_VAR_MULTI_." + res.name + "(_ARGS_)"
                            if pattern_assign not in store_strings:
                                store_strings.append(pattern_assign)
                except SyntaxError as e:
                    print("ERROR PARSING: ")
                    print(f)
                    print("error:")
                    print(e)
                    print(e.text)
        except SyntaxError as e:
            print("ERROR PARSING: ")
            print(store_string)
            print("error:")
            print(e)
            print(e.text)

    store_strings_patterns = []
    for tweet in get_tweets_strings:
        for s in store_strings:
            store_strings_patterns.append(
                tweet + "\n_STAT_MULTI_\n" +
                "for _VAR_TWEET_ in _VAR_TWEETS_:\n\t_STAT_MULTI_\n\t" + s)
            store_strings_patterns.append(
                tweet + "\n_STAT_MULTI_\n" +
                "for _VAR_TWEET_ in _VAR_TWEETS_:\n\t_STAT_MULTI_\n" + s)
    for user in get_users_strings:
        for s in store_strings:
            store_strings_patterns.append(
                user + "\n_STAT_MULTI_\n" +
                "for _VAR_TWEET_ in _VAR_USERS_:\n\t_STAT_MULTI_\n\t" + s)
            store_strings_patterns.append(
                user + "\n_STAT_MULTI_\n" +
                "for _VAR_TWEET_ in _VAR_USERS_:\n\t_STAT_MULTI_\n" + s)

    fsas = []
    for store_strings_pattern in store_strings_patterns:
        try:
            fsas.append((Fsa(
                parser.parse(store_strings_pattern,
                             first_iter=True,
                             with_ids=True)), store_strings_pattern))
        except SyntaxError as e:
            print("ERROR PARSING: ")
            print(store_strings_pattern)
            print("error:")
            print(e)
            print(e.text)

    results = []
    for f in python_files:
        fi = open(f, 'r', encoding='utf-8')
        feed = fi.read()
        try:
            feed_tree = parser.parse(feed, first_iter=True, with_ids=False)
            for fsa in fsas:
                result = fsa[0].run(feed_tree)
                for res in result:
                    results.append((Module(body=res), fsa[1]))
        except SyntaxError as e:
            print("ERROR PARSING: ")
            print(f)
            print("error:")
            print(e)
            print(e.text)

    for res in results:
        print_program(res[0])
        print("\n")
        print(res[1])
        pass
def run(index_0, index_1, fold, ret):
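    # Worker entry point (pause scan): looks for mimic, constraint and generic
    # pause patterns in one project folder and appends an "m ", "c " or "g "
    # marker to ../results/res.txt for every match.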
    ret[index_1] = fold
    ret[index_0] = -1
    try:
        logging.debug("SCANNING PROJECT " + str(fold.name))
        print("SCANNING PROJECT " + str(fold.name))
        files = list(fold.glob("**/*.py"))

        # Copy the list so the glob results below do not also extend `files`.
        all_files = list(files)
        # glob has no negation syntax; the original "**/^(results_patterns).txt" is read here
        # as "every .txt file except results_patterns.txt".
        all_files += [t for t in fold.glob("**/*.txt") if t.name != "results_patterns.txt"]
        for t in ["**/*.json", "**/*.cfg"]:
            all_files += list(fold.glob(t))

        main.initialize_patterns(files)

        mimic_fsas_basic = main.get_mimic_fsas_basic()
        constraint_fsas_basic = main.get_constraint_fsas_basic()
        generic_pauses_fsas_basic = main.get_generic_fsas()

        mimic_fsas = mimic_fsas_basic + main.get_mimic_fsas_derived()
        constraint_fsas = constraint_fsas_basic + main.get_constraint_fsas_derived()
        generic_pauses_fsas = generic_pauses_fsas_basic

        mimic_results = []
        constraint_results = []
        generic_pauses_results = []

        for f in files:
            with open(f, 'r', encoding='utf-8') as fi:
                feed = fi.read()
            try:
                feed_tree = parser.parse(feed, first_iter=True, with_ids=False)
                with open("../results/res.txt", 'a',
                          encoding='utf-8') as result_file:
                    for fsa in mimic_fsas:
                        result = fsa[0].run(feed_tree)
                        if result:
                            # Record the match so the generic check below is skipped.
                            mimic_results.append(result)
                            result_file.write("m ")
                    for fsa in constraint_fsas:
                        result = fsa[0].run(feed_tree)
                        if result:
                            constraint_results.append(result)
                            result_file.write("c ")
                    if not mimic_results and not constraint_results:
                        for fsa in generic_pauses_fsas:
                            result = fsa[0].run(feed_tree)
                            if result:
                                generic_pauses_results.append(result)
                                result_file.write("g ")
            except SyntaxError:
                logging.error("ERROR PARSING PROGRAM: " + str(f.name))
    except BaseException as e:
        logging.debug("ERROR ON PROJECT " + str(fold))
        print(traceback.format_exc())
        logging.error(traceback.format_exc())
        try:
            shutil.move(str(fold), "../error")
        except Exception:
            shutil.rmtree(str(fold))
        ret[index_0] = -1
        return
    ret[index_0] = 1
    shutil.rmtree(str(fold))
Example #12
def run_ret():
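    # Interactive retweet scan: derives retweet-related call patterns from the
    # project itself, then reports indiscriminate, blacklist-aware and mass
    # retweet matches.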
    # GET PROJECT PATH
    path = 'C:\\Users\\Andrea\\PycharmProjects\\inspect_def\\' + input(
        "Inserisci percorso progetto: ").replace("/", "\\")
    p = Path(path)

    python_files = []
    python_files += list(p.glob("**/*.py"))

    # GET TWEETS
    path_query_patterns = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\search_tweets\\query"
    p_query_patterns = Path(path_query_patterns)
    query_patterns_files = list(p_query_patterns.glob("*"))

    # GET MENTIONS
    path_mentions_patterns = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\search_tweets\\mentions"
    p_mentions_patterns = Path(path_mentions_patterns)
    mentions_files = list(p_mentions_patterns.glob("*"))

    # GET USER TWEETs
    path_usertweets_patterns = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\search_tweets\\user_tweets"
    p_usertweets_patterns = Path(path_usertweets_patterns)
    usertweets_files = list(p_usertweets_patterns.glob("*"))

    # STREAM
    path_stream_patterns = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\search_tweets\\stream"
    p_stream_patterns = Path(path_stream_patterns)
    stream_files = list(p_stream_patterns.glob("*"))

    # BLACKLIST TWEETS
    path_tweetsblacklisted_patterns = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\blacklist_tweets"
    p_tweetsblacklisted_patterns = Path(path_tweetsblacklisted_patterns)
    tweetsblacklisted_files = list(p_tweetsblacklisted_patterns.glob("*"))

    # RETWEET
    path_retweet_patterns = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\retweet"
    p_retweet_patterns = Path(path_retweet_patterns)
    retweet_files = list(p_retweet_patterns.glob("*"))

    get_tweets_strings = []
    for get_tweets in query_patterns_files:
        file = open(get_tweets, 'r', encoding='utf-8')
        get_tweets_string = file.read()
        get_tweets_strings.append(get_tweets_string)
        new_pat = "def _FUN_():\n\t_STAT_MULTI_\n\t" + get_tweets_string
        try:
            fsa = Fsa(parser.parse(new_pat, first_iter=True, with_ids=True))
            for f in python_files:
                program_file = open(f, 'r', encoding='utf-8')
                program_string = program_file.read()
                try:
                    program_tree = parser.parse(program_string,
                                                with_ids=False,
                                                first_iter=True)
                    result = fsa.run(program_tree)
                    if result:
                        for res in result:
                            pattern_call = "_VAR_MULTI_." + res.name + "(_ARGS_)"
                            if pattern_call not in get_tweets_strings:
                                get_tweets_strings.append(pattern_call)
                            pattern_assign = "_VAR_TWEETS_ = " + "_VAR_MULTI_." + res.name + "(_ARGS_)"
                            if pattern_assign not in get_tweets_strings:
                                get_tweets_strings.append(pattern_assign)
                except SyntaxError as e:
                    print("ERROR PARSING: ")
                    print(f)
                    print("error:")
                    print(e)
                    print(e.text)
        except SyntaxError as e:
            print("ERROR PARSING: ")
            print(new_pat)
            print("error:")
            print(e)
            print(e.text)

    for get_tweets in mentions_files:
        file = open(get_tweets, 'r', encoding='utf-8')
        get_tweets_string = file.read()
        get_tweets_strings.append(get_tweets_string)
        new_pat = "def _FUN_():\n\t_STAT_MULTI_\n\t" + get_tweets_string
        try:
            fsa = Fsa(parser.parse(new_pat, first_iter=True, with_ids=True))
            for f in python_files:
                program_file = open(f, 'r', encoding='utf-8')
                program_string = program_file.read()
                try:
                    program_tree = parser.parse(program_string,
                                                with_ids=False,
                                                first_iter=True)
                    result = fsa.run(program_tree)
                    if result:
                        for res in result:
                            pattern_call = "_VAR_MULTI_." + res.name + "(_ARGS_)"
                            if pattern_call not in get_tweets_strings:
                                get_tweets_strings.append(pattern_call)
                            pattern_assign = "_VAR_TWEETS_ = " + "_VAR_MULTI_." + res.name + "(_ARGS_)"
                            if pattern_assign not in get_tweets_strings:
                                get_tweets_strings.append(pattern_assign)
                except SyntaxError as e:
                    print("ERROR PARSING: ")
                    print(f)
                    print("error:")
                    print(e)
                    print(e.text)
        except SyntaxError as e:
            print("ERROR PARSING: ")
            print(new_pat)
            print("error:")
            print(e)
            print(e.text)

    get_user_tweets_strings = []
    for get_user_tweets in usertweets_files:
        file = open(get_user_tweets, 'r', encoding='utf-8')
        get_tweets_string = file.read()
        get_user_tweets_strings.append(get_tweets_string)
        new_pat = "def _FUN_():\n\t_STAT_MULTI_\n\t" + get_tweets_string
        try:
            fsa = Fsa(parser.parse(new_pat, first_iter=True, with_ids=True))
            for f in python_files:
                program_file = open(f, 'r', encoding='utf-8')
                program_string = program_file.read()
                try:
                    program_tree = parser.parse(program_string,
                                                with_ids=False,
                                                first_iter=True)
                    result = fsa.run(program_tree)
                    if result:
                        for res in result:
                            pattern_call = "_VAR_MULTI_." + res.name + "(_ARGS_)"
                            if pattern_call not in get_user_tweets_strings:
                                get_user_tweets_strings.append(pattern_call)
                            pattern_assign = "_VAR_TWEETS_ = " + "_VAR_MULTI_." + res.name + "(_ARGS_)"
                            if pattern_assign not in get_user_tweets_strings:
                                get_user_tweets_strings.append(pattern_assign)
                except SyntaxError as e:
                    print("ERROR PARSING: ")
                    print(f)
                    print("error:")
                    print(e)
                    print(e.text)
        except SyntaxError as e:
            print("ERROR PARSING: ")
            print(new_pat)
            print("error:")
            print(e)
            print(e.text)

    retweet_strings = []
    for retweet_file in retweet_files:
        file = open(retweet_file, 'r', encoding='utf-8')
        retweet_string = file.read()
        retweet_strings.append(retweet_string)
        new_pat = "def _FUN_():\n\t_STAT_MULTI_\n\t" + retweet_string
        try:
            fsa = Fsa(parser.parse(new_pat, first_iter=True, with_ids=True))
            for f in python_files:
                program_file = open(f, 'r', encoding='utf-8')
                program_string = program_file.read()
                try:
                    program_tree = parser.parse(program_string,
                                                with_ids=False,
                                                first_iter=True)
                    result = fsa.run(program_tree)
                    if result:
                        for res in result:
                            pattern_call = "_VAR_MULTI_." + res.name + "(_ARGS_)"
                            if pattern_call not in retweet_strings:
                                retweet_strings.append(pattern_call)
                            pattern_assign = "_VAR_MULTI_ = " + "_VAR_MULTI_." + res.name + "(_ARGS_)"
                            if pattern_assign not in retweet_strings:
                                retweet_strings.append(pattern_assign)
                except SyntaxError as e:
                    print("ERROR PARSING: ")
                    print(f)
                    print("error:")
                    print(e)
                    print(e.text)
        except SyntaxError as e:
            print("ERROR PARSING: ")
            print(new_pat)
            print("error:")
            print(e)
            print(e.text)

    indiscriminate_retweet_strings = []
    for ret in retweet_strings:
        indiscriminate_retweet_strings.append(ret)

    blacklist_retweet_strings = []
    for ret in retweet_strings:
        blacklist_retweet_strings.append(
            "if _VAR_MULTI_ in _VAR_MULTI_:\n\t_STAT_MULTI\n\t_VAR_CHECK_=False"
            "\n_STAT_MULTI_\nif _VAR_CHECK_:\n\t_STAT_MULTI_\n\t" + ret)
        for get_tweets in get_tweets_strings:
            for blacklistweets in tweetsblacklisted_files:
                tweetsblacklisted_file = open(blacklistweets,
                                              'r',
                                              encoding='utf-8')
                tweetsblacklisted_read = tweetsblacklisted_file.read()
                blacklist_retweet_strings.append(
                    get_tweets + "\n_STAT_MULTI_\n" + tweetsblacklisted_read +
                    "\n_STAT_MULTI_\nfor _VAR_MULTI_ in _VAR_TWEETS_:\n\t_STAT_MULTI_\n\t"
                    + ret)

    mass_retweet_strings = []
    for ret in retweet_strings:
        for get_user_tweets_string in get_user_tweets_strings:
            mass_retweet_strings.append(
                get_user_tweets_string +
                "\n_STAT_MULTI_\nfor _VAR_MULTI_ in _VAR_TWEETS_:\n\t_STAT_MULTI_\n\t"
                + ret)

    for ret in retweet_strings:
        blacklist_retweet_strings.append(
            "if not any(_ARGS_):\n\t_STAT_MULTI_\n\t" + ret)

        blacklist_retweet_strings.append(
            "if any(_ARGS_):\n\t_STAT_MULTI_\n\tcontinue\n_STAT_MULTI_\n" +
            ret)

        blacklist_retweet_strings.append(
            "if _VAR_MULTI_ not in _VAR_MULTI_ and _EVERY_:\n\t_STAT_MULTI_\n\t"
            + ret)

    indiscriminate_retweet_fsas = []
    for s in indiscriminate_retweet_strings:
        try:
            indiscriminate_retweet_fsas.append(
                (Fsa(parser.parse(s, first_iter=True, with_ids=True)), s))
        except SyntaxError as e:
            print("ERROR PARSING: ")
            print(s)
            print("error:")
            print(e)
            print(e.text)

    blacklist_retweet_fsas = []
    for s in blacklist_retweet_strings:
        try:
            blacklist_retweet_fsas.append(
                (Fsa(parser.parse(s, first_iter=True, with_ids=True)), s))
        except SyntaxError as e:
            print("ERROR PARSING: ")
            print(s)
            print("error:")
            print(e)
            print(e.text)

    mass_retweet_fsas = []
    for s in mass_retweet_strings:
        try:
            mass_retweet_fsas.append(
                (Fsa(parser.parse(s, first_iter=True, with_ids=True)), s))
        except SyntaxError as e:
            print("ERROR PARSING: ")
            print(s)
            print("error:")
            print(e)
            print(e.text)

    indiscriminate_results = []
    blacklist_results = []
    mass_results = []

    for f in python_files:
        fi = open(f, 'r', encoding='utf-8')
        feed = fi.read()
        try:
            feed_tree = parser.parse(feed, first_iter=True, with_ids=False)
            for fsa in blacklist_retweet_fsas:
                result = fsa[0].run(feed_tree)
                for res in result:
                    blacklist_results.append((Module(body=res), fsa[1]))
            for fsa in mass_retweet_fsas:
                result = fsa[0].run(feed_tree)
                for res in result:
                    mass_results.append((Module(body=res), fsa[1]))
            if not blacklist_results:
                for fsa in indiscriminate_retweet_fsas:
                    result = fsa[0].run(feed_tree)
                    for res in result:
                        indiscriminate_results.append(
                            (Module(body=res), fsa[1]))
        except SyntaxError as e:
            print("ERROR PARSING: ")
            print(f)
            print("error:")
            print(e)
            print(e.text)

    print("INDISCRIMINATE:")
    for res in indiscriminate_results:
        print_program(res[0])
        print("\n")
        print(res[1])

    print("BLACKLIST:")
    for res in blacklist_results:
        print_program(res[0])
        print("\n")
        print(res[1])

    print("MASS:")
    for res in mass_results:
        print_program(res[0])
        print("\n")
        print(res[1])
def run_pauses():
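    # Interactive pause scan: derives large/little pause patterns from the
    # project, then prints mimic (large pause), constraint (guarded pause) and
    # generic pause matches.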
    # GET PROJECT PATH
    path = 'C:\\Users\\Andrea\\PycharmProjects\\inspect_def\\' + input(
        "Inserisci percorso progetto: ").replace("/", "\\")
    p = Path(path)

    python_files = []
    python_files += list(p.glob("**/*.py"))

    all_files = []
    types = ["**/*.py", "**/*.txt", "**/*.json", "**/*.cfg"]
    for t in types:
        all_files += list(p.glob(t))

    path_pause_patterns = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\pause_pattern\\generic"
    p_pause_patterns = Path(path_pause_patterns)
    pause_files = list(p_pause_patterns.glob("*"))

    path_pause_large = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\pause_pattern\\large"
    p_pause_large = Path(path_pause_large)
    pauses_large_file = list(p_pause_large.glob("*"))

    path_pause_little = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\pause_pattern\\little"
    p_pause_little = Path(path_pause_little)
    pauses_little_file = list(p_pause_little.glob("*"))

    path_time_little = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\pause_pattern\\time_assignment_little"
    p_time_little = Path(path_time_little)
    time_little_file = list(p_time_little.glob("*"))

    path_time_large = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\pause_pattern\\time_assignment_large"
    p_time_large = Path(path_time_large)
    time_large_file = list(p_time_large.glob("*"))

    pauses_large_strings = []
    for pauses_large in time_large_file:
        file = open(pauses_large, 'r', encoding='utf-8')
        pauses_large_string = file.read()
        try:
            fsa = Fsa(
                parser.parse(pauses_large_string,
                             first_iter=True,
                             with_ids=True))
            for f in python_files:
                program_file = open(f, 'r', encoding='utf-8')
                program_string = program_file.read()
                try:
                    program_tree = parser.parse(program_string,
                                                with_ids=False,
                                                first_iter=True)
                    result = fsa.run(program_tree)
                    if result:
                        for res in result:
                            if isinstance(res.targets[0], Name):
                                name = res.targets[0].id
                            elif isinstance(res.targets[0], Attribute):
                                name = res.targets[0].attr
                            else:
                                continue
                            new_pattern = "_VAR_MULTI_.sleep(" + name + ")"
                            # print("NEW PATTERN FOR GETTING TWEETS:")
                            # print(pattern_assign)
                            if new_pattern not in pauses_large_strings:
                                pauses_large_strings.append(new_pattern)
                except SyntaxError as e:
                    print("ERROR PARSING ")
                    print(f)
                    print("error:")
                    print(e)
                    print(e.text)
        except SyntaxError as e:
            print("ERROR PARSING ")
            print(pauses_large_string)
            print("error:")
            print(e)
            print(e.text)
    for p in pauses_large_file:
        file = open(p, 'r', encoding='utf-8')
        pauses_large_strings.append(file.read())

    pauses_little_strings = []
    for pauses_little in time_little_file:
        file = open(pauses_little, 'r', encoding='utf-8')
        pauses_little_string = file.read()
        try:
            fsa = Fsa(
                parser.parse(pauses_little_string,
                             first_iter=True,
                             with_ids=True))
            for f in python_files:
                program_file = open(f, 'r', encoding='utf-8')
                program_string = program_file.read()
                try:
                    program_tree = parser.parse(program_string,
                                                with_ids=False,
                                                first_iter=True)
                    result = fsa.run(program_tree)
                    if result:
                        for res in result:
                            if isinstance(res.targets[0], Name):
                                name = res.targets[0].id
                            elif isinstance(res.targets[0], Attribute):
                                name = res.targets[0].attr
                            else:
                                continue
                            new_pattern = "_VAR_MULTI_.sleep(" + name + ")"
                            if new_pattern not in pauses_little_strings:
                                pauses_little_strings.append(new_pattern)
                except SyntaxError as e:
                    print("ERROR PARSING ")
                    print(f)
                    print("error:")
                    print(e)
                    print(e.text)
        except SyntaxError as e:
            print("ERROR PARSING ")
            print(pauses_little_string)
            print("error:")
            print(e)
            print(e.text)
    for p in pauses_little_file:
        file = open(p, 'r', encoding='utf-8')
        pauses_little_strings.append(file.read())

    generic_pauses = []
    for pause in pause_files:
        file = open(pause, 'r', encoding='utf-8')
        generic_pauses.append(file.read())

    constraint_strings = []
    for pause in generic_pauses:
        constraint_strings.append(
            "if _VAR_1 < _VAR_2 and EVERY:\n\t_STAT_MULTI_\n\t" + pause)
        constraint_strings.append(
            "if _VAR_1 <= _VAR_2 and EVERY:\n\t_STAT_MULTI_\n\t" + pause)
        constraint_strings.append(
            "if _VAR_1 >= _VAR_2 and EVERY:\n\t_STAT_MULTI_\n\t" + pause)
        constraint_strings.append(
            "if _VAR_1 > _VAR_2 and EVERY:\n\t_STAT_MULTI_\n\t" + pause)
    for little in pauses_little_strings:
        constraint_strings.append(little)

    mimic_fsas = []
    for pause_string in pauses_large_strings:
        try:
            mimic_fsas.append(
                (Fsa(parser.parse(pause_string, first_iter=True,
                                  with_ids=True)), pause_string))
        except SyntaxError as e:
            print("ERROR PARSING: ")
            print(pause_string)
            print("error:")
            print(e)
            print(e.text)

    constraint_fsas = []
    for constraint_string in constraint_strings:
        try:
            constraint_fsas.append((Fsa(
                parser.parse(constraint_string, first_iter=True,
                             with_ids=True)), constraint_string))
        except SyntaxError as e:
            print("ERROR PARSING: ")
            print(constraint_string)
            print("error:")
            print(e)
            print(e.text)

    generic_fsas = []
    for generic_pause in generic_pauses:
        try:
            generic_fsas.append((Fsa(
                parser.parse(generic_pause, first_iter=True,
                             with_ids=True)), generic_pause))
        except SyntaxError as e:
            print("ERROR PARSING: ")
            print(generic_pause)
            print("error:")
            print(e)
            print(e.text)

    mimic_results = []
    constraint_results = []
    generic_results = []
    for f in python_files:
        fi = open(f, 'r', encoding='utf-8')
        feed = fi.read()
        try:
            feed_tree = parser.parse(feed, first_iter=True, with_ids=False)
            for fsa in mimic_fsas:
                result = fsa[0].run(feed_tree)
                for res in result:
                    mimic_results.append((Module(body=res), fsa[1]))
            for fsa in constraint_fsas:
                result = fsa[0].run(feed_tree)
                for res in result:
                    constraint_results.append((Module(body=res), fsa[1]))
            if not mimic_results and not constraint_results:
                for fsa in generic_fsas:
                    result = fsa[0].run(feed_tree)
                    for res in result:
                        generic_results.append((Module(body=res), fsa[1]))
        except SyntaxError as e:
            print("ERROR PARSING: ")
            print(f)
            print("error:")
            print(e)
            print(e.text)

    print("MIMIC:")
    for res in mimic_results:
        print_program(res[0])
        print("\n")
        print(res[1])
        pass

    print("CONSTRAINTS:")
    for res in constraint_results:
        print_program(res[0])
        print("\n")
        print(res[1])

    print("GENERIC:")
    for res in generic_results:
        print_program(res[0])
        print("\n")
        print(res[1])
Example #14
def run():
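    # Interactive follow scan: derives follow/unfollow call patterns from the
    # project, substitutes them into the blacklist-based and phantom-follow
    # templates, and prints the basic, blacklist and unfollow matches.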
    index = 1

    # RETRIEVE PYTHON FILES TO SCAN LOOKING FOR PATTERNS
    path = 'C:\\Users\\Andrea\\PycharmProjects\\inspect_def\\' + input(
        "Inserisci percorso progetto: ").replace("/", "\\")
    p = Path(path)
    python_files = []
    python_files += list(p.glob("**/*.py"))

    # GET FOLLOWERS
    path_followers_patterns = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\search_users\\followers"
    p_followers_patterns = Path(path_followers_patterns)
    followers_patterns_files = list(p_followers_patterns.glob("*"))

    # GET USER
    path_user_patterns = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\search_users\\single"
    p_user_patterns = Path(path_user_patterns)
    user_files = list(p_user_patterns.glob("*"))

    # BLACKLIST USERS
    path_blacklist_user_patterns = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\blacklist_users"
    p_blacklist_user_patterns = Path(path_blacklist_user_patterns)
    blacklist_user_files = list(p_blacklist_user_patterns.glob("*"))

    # PHANTOM FOLLOW PATTERNS
    path_phantom_patterns = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\parse_follow\\pattern_programs\\phantom_follow"
    p_phantom_patterns = Path(path_phantom_patterns)
    phantom_patterns_files = list(p_phantom_patterns.glob("*"))

    # BLACKLIST-BASED FOLLOW PATTERNS
    path_blacklist_patterns = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\parse_follow\\pattern_programs\\blacklist-based follow"
    p_blacklist_patterns = Path(path_blacklist_patterns)
    blacklist_patterns_files = list(p_blacklist_patterns.glob("*"))

    # UNFOLLOW PATTERNS
    path_unfollow_patterns = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\parse_follow\\pattern_programs\\unfollow"
    p_unfollow_patterns = Path(path_unfollow_patterns)
    unfollow_patterns_files = list(p_unfollow_patterns.glob("*"))

    # INDISCRIMINATE FOLLOW PATTERNS
    path_basic_patterns = "C:\\Users\\Andrea\\PycharmProjects\\Patterns\\parse_follow\\pattern_programs\\indiscrimate follow\\basic"
    p_basic_patterns = Path(path_basic_patterns)
    basic_patterns_files = list(p_basic_patterns.glob("*"))

    basic_fsas = []

    index_patterns_to_extract = 1

    basic_follow_strings = []
    unfollow_strings = []
    get_followers_strings = []
    get_user_strings = []
    blacklist_users_strings = []

    for k in basic_patterns_files:
        pat = open(k, 'r', encoding='utf-8')
        pat = pat.read()
        basic_follow_strings.append(pat)

    for u in unfollow_patterns_files:
        pat = open(u, 'r', encoding='utf-8')
        pat = pat.read()
        unfollow_strings.append(pat)

    for fol in followers_patterns_files:
        pat = open(fol, 'r', encoding='utf-8')
        pat = pat.read()
        get_followers_strings.append(pat)

    for blus in blacklist_user_files:
        pat = open(blus, 'r', encoding='utf-8')
        pat = pat.read()
        blacklist_users_strings.append(pat)

    basic_fsa_feed = []
    for i in range(0, len(basic_follow_strings)):
        b = basic_follow_strings[i]
        if b not in basic_fsa_feed:
            basic_fsa_feed.append(b)
        new_pat = "def _FUN_():\n\t_STAT_MULTI_\n\t" + b
        new_pat_tree = parser.parse(new_pat, first_iter=True, with_ids=True)
        fsa_to_try = Fsa(new_pat_tree)
        for f in python_files:
            to_scan = open(f, 'r', encoding='utf-8')
            try:
                program_tree = parser.parse(to_scan.read(), first_iter=True)
                result = fsa_to_try.run(copy.deepcopy(program_tree))
                if result:
                    for res in result:
                        pattern_call = "_VAR_MULTI_." + res.name + "(_ARGS_)"
                        if pattern_call not in basic_follow_strings:
                            basic_follow_strings.append(pattern_call)
                            basic_fsa_feed.append(pattern_call)
                        pattern_assign = "_VAR_MULTI_ = " + "_VAR_MULTI_." + res.name + "(_ARGS_)"
                        if pattern_assign not in basic_follow_strings:
                            basic_follow_strings.append(pattern_assign)
                            basic_fsa_feed.append(pattern_assign)
            except SyntaxError as e:
                print("ERROR PARSING FILE: ", end=' ')
                print(f)
                print("error:")
                print(e)
                print(e.text)


    search_followers_blacklist_feed = []
    for i in range(0, len(get_followers_strings)):
        getfol = get_followers_strings[i]
        for j in range(0, len(blacklist_users_strings)):
            new_pat = "def _FUN_():\n\t_STAT_MULTI_\n\t" + getfol + "\n\t" + blacklist_users_strings[
                j]
            try:
                new_pat_tree = parser.parse(new_pat,
                                            first_iter=True,
                                            with_ids=True)
                fsa_to_try = Fsa(new_pat_tree)
                for f in python_files:
                    to_scan = open(f, 'r', encoding='utf-8')
                    try:
                        program_tree = parser.parse(to_scan.read(),
                                                    first_iter=True)
                        result = fsa_to_try.run(copy.deepcopy(program_tree))
                        if result:
                            for res in result:
                                pattern_call = "_VAR_USERS_." + res.name + "(_ARGS_)"
                                if pattern_call not in search_followers_blacklist_feed:
                                    search_followers_blacklist_feed.append(
                                        pattern_call)
                                pattern_assign = "_VAR_USERS_ = " + "_VAR_MULTI_." + res.name + "(_ARGS_)"
                                if pattern_assign not in search_followers_blacklist_feed:
                                    search_followers_blacklist_feed.append(
                                        pattern_assign)
                    except SyntaxError as e:
                        print("ERROR PARSING FILE: ", end=' ')
                        print(f)
                        print("error:")
                        print(e)
                        print(e.text)
            except SyntaxError as e:
                print("ERROR PARSING PATTERN: ", end=' ')
                print(new_pat)
                print("error:")
                print(e)
                print(e.text)

    unfollow_fsa_feed = []
    for i in range(0, len(unfollow_strings)):
        u = unfollow_strings[i]
        new_pat = "def _FUN_():\n\t_STAT_MULTI_\n\t" + u
        try:
            new_pat_tree = parser.parse(new_pat,
                                        first_iter=True,
                                        with_ids=True)
            fsa_to_try = Fsa(new_pat_tree)
            for f in python_files:
                to_scan = open(f, 'r', encoding='utf-8')
                try:
                    program_tree = parser.parse(to_scan.read(),
                                                first_iter=True)
                    result = fsa_to_try.run(copy.deepcopy(program_tree))
                    if result:
                        for res in result:
                            if u in unfollow_fsa_feed:
                                unfollow_fsa_feed.remove(u)
                            pattern_call = "_VAR_MULTI_." + res.name + "(_ARGS_)"
                            if pattern_call not in unfollow_strings:
                                unfollow_strings.append(pattern_call)
                            if pattern_call not in unfollow_fsa_feed:
                                unfollow_fsa_feed.append(pattern_call)
                            pattern_assign = "_VAR_MULTI_ = " + "_VAR_MULTI_." + res.name + "(_ARGS_)"
                            if pattern_assign not in unfollow_strings:
                                unfollow_strings.append(pattern_assign)
                            if pattern_assign not in unfollow_fsa_feed:
                                unfollow_fsa_feed.append(pattern_assign)
                    else:
                        if u not in unfollow_fsa_feed:
                            unfollow_fsa_feed.append(u)
                except SyntaxError as e:
                    print("ERROR PARSING File: ", end=' ')
                    print(f)
                    print("error:")
                    print(e)
                    print(e.text)
        except SyntaxError as e:
            print("ERROR PARSING PATTERN: ", end=' ')
            print(new_pat)
            print("error:")
            print(e)
            print(e.text)

    patterns_placeholder_substituted = []
    for blpat in blacklist_patterns_files:
        pat = open(blpat, 'r', encoding='utf-8')
        pat_str = pat.read()
        pat.close()
        pat = open(blpat, 'r', encoding='utf-8')
        pat_lines = pat.readlines()
        last = pat_lines[-1]
        for st in basic_fsa_feed:
            splitted = st.split("\n")
            to_insert = pat_str.replace("(FOLLOW_PLACEHOLDER)", splitted[0])
            if len(splitted) > 1:
                # Indent continuation lines to match the placeholder line's leading whitespace.
                for line in splitted[1:]:
                    to_insert += "\n" + " " * (len(last) - len(last.lstrip(" "))) + line
            patterns_placeholder_substituted.append(to_insert)
            # print(to_insert)

    for g in search_followers_blacklist_feed:
        for basic in basic_fsa_feed:
            patterns_placeholder_substituted.append(
                g + "\n" + "_STAT_MULTI_" + "\n" +
                "for _VAR_USER_ in _VAR_USERS_:" + "\n\t" + "_STAT_MULTI_" +
                "\n\t" + basic)
    blacklist_fsas = []
    for pattern_placeholder_substituted in patterns_placeholder_substituted:
        try:
            blacklist_fsas.append((Fsa(
                parser.parse(pattern_placeholder_substituted,
                             with_ids=True,
                             first_iter=True)),
                                   pattern_placeholder_substituted))
        except SyntaxError as e:
            print("ERROR PARSING PATTERN: ", end=' ')
            print(pattern_placeholder_substituted)
            print("error:")
            print(e)
            print(e.text)

    patterns_placeholder_substituted = []
    for unpat in phantom_patterns_files:
        pat = open(unpat, 'r', encoding='utf-8')
        pat_str = pat.read()
        pat.close()
        pat = open(unpat, 'r', encoding='utf-8')
        pat_lines = pat.readlines()
        last = pat_lines[-1]
        for st in unfollow_fsa_feed:
            splitted = st.split("\n")
            to_insert = pat_str.replace("(UNFOLLOW_PLACEHOLDER)", splitted[0])
            if len(splitted) > 1:
                # Indent continuation lines to match the placeholder line's leading whitespace.
                for line in splitted[1:]:
                    to_insert += "\n" + " " * (len(last) - len(last.lstrip(" "))) + line
            patterns_placeholder_substituted.append(to_insert)

    unfollow_fsas = []
    for pattern_placeholder_substituted in patterns_placeholder_substituted:
        try:
            unfollow_fsas.append((Fsa(
                parser.parse(pattern_placeholder_substituted,
                             with_ids=True,
                             first_iter=True)),
                                  pattern_placeholder_substituted))
        except SyntaxError as e:
            print("ERROR PARSING PATTERN: ", end=' ')
            print(pattern_placeholder_substituted)
            print("error:")
            print(e)
            print(e.text)

    basic_fsas = []
    for basic in basic_fsa_feed:
        try:
            basic_fsas.append(
                (Fsa(parser.parse(basic, with_ids=True,
                                  first_iter=True)), basic))
        except SyntaxError as e:
            print("ERROR PARSING PATTERN: ", end=' ')
            print(basic)
            print("error:")
            print(e)
            print(e.text)

    results = []
    results_blacklist = []
    results_unfollow = []
    for f in python_files:
        to_scan = open(f, 'r', encoding='utf-8')
        to_scan_read = to_scan.read()
        try:
            fsa_feed = parser.parse(to_scan_read, first_iter=True)
            to_scan.close()
            for fsa in blacklist_fsas:
                result_fsa = fsa[0].run(copy.deepcopy(fsa_feed))
                if result_fsa:
                    for r in result_fsa:
                        results_blacklist.append((Module(body=r), fsa[1]))
            for fsa in unfollow_fsas:
                result_fsa = fsa[0].run(copy.deepcopy(fsa_feed))
                if result_fsa:
                    for r in result_fsa:
                        results_unfollow.append((Module(body=r), fsa[1]))
            if not results_blacklist:
                for fsa in basic_fsas:
                    result = fsa[0].run(copy.deepcopy(fsa_feed))
                    if result:
                        for r in result:
                            results.append((Module(body=r), fsa[1]))
        except SyntaxError as e:
            print("ERROR: ", end='')
            print(f)
            print(e)
            print(e.text)

    # return results
    print("BASIC:")
    for res in results:
        print(res[1])
        print_program(res[0])
        print("\n")
    print("BLACKLIST:\n")
    for res in results_blacklist:
        print(res[1])
        print_program(res[0])
        print("\n")
    print("UNFOLLOW:\n")
    for res in results_unfollow:
        print(res[1])
        print_program(res[0])
    #   for x in basic_fsa_feed:
    #       print(x)
    #       print("\n")
    return results, results_blacklist, results_unfollow