    def test_message_parse(self):
        parse = Parser(
            'Ya ya, http://whatever.com @user1 @user2 This is a test, yay http://google.com #woot #blah')
        print(parse.urls)
        print(parse.hashtags)
        print(parse.recipients)
        print(parse.message)

        print(parse._msg_values)
        print(parse._parsed_message)
        print(parse._value_lookup)

        parse = Parser(
            '@tflemings this twitter clone is awful #TwicSuck #awful')
        print(parse.urls)
        print(parse.hashtags)
        print(parse.recipients)
        print(parse.message)
        parse = Parser(
            'Check out this video !!! https://www.youtube.com/watch?v=9hUUsqhetX4 #ps4 #robotorgy')
        print(parse.urls)
        print(parse.hashtags)
        print(parse.recipients)
        print(parse.message)
        print("finally")
        print(parse.formatted_msg)
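For reference, a minimal sketch of the same checks expressed as assertions instead of prints; it assumes Parser exposes urls, hashtags and recipients as iterable collections and makes no assumption about whether the '#'/'@' prefixes are kept:

def test_message_parse_asserted():
    parse = Parser('Ya ya, http://whatever.com @user1 This is a test #woot')
    assert 'http://whatever.com' in parse.urls
    assert any(tag.lstrip('#') == 'woot' for tag in parse.hashtags)
    assert any(user.lstrip('@') == 'user1' for user in parse.recipients)
    assert parse.message  # the cleaned message text should be non-empty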
Example n. 2
    def handle_recv(self, bp, iccid, dat):
        parser = Parser()
        p_type = bp.header.cmd_type + bp.header.cmd_num
        success = bp.header['success']
        if int(success) == 0:
            if p_type == "D1":
                lp = parser.parse_login(dat)
                self.login_status = lp.data['login_status']
                if int(self.login_status) == 0:
                    self.res['sessionID'] = int(lp.data['sessionID'])
                    options.thread_lock.acquire()
                    options.tracker[
                        self.sn]['sessionID'] = self.res['sessionID']
                    options.thread_lock.release()
                    self.logging.info("%s, login send success!", iccid)
                    self.send_config()
                    if self.first_login:
                        threading.Thread(target=self.send_heartbat).start()

            elif p_type == 'D3':
                lp = parser.parse_config(dat)
                print(lp.data)
        else:
            if p_type != "D1":
                self.logging.info("%s, status is offline!", iccid)
                self.login(iccid, False)
            else:
                self.logging.info("%s, login failed!", iccid)
Example n. 3
    def __init__(self):
        self.parser = Parser()
        self.randomize = Randomize()
        self.report_targets_vull = []
        self.error_list = [
            "mysql_fetch_array()", "You have an error in your SQL syntax",
            "MySQL Query fail.", "PostgreSQL ERROR", "Access Database Engine",
            "Microsoft Access Driver"
        ]
Example n. 4
def process():
    """
    Process the files in the queue directory.
    """
    config = ConfigParser()
    config.read(['../project_config'])

    for filename in os.listdir(config.get("locations", "queue")) :
        parser_obj = Parser(os.path.join(config.get("locations", "queue"), filename))
        parser_obj.run()
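The relative path '../project_config' read by ConfigParser implies an INI-style file with a [locations] section; a minimal sketch of what it might contain for the config.get("locations", "queue") call to succeed (the directory value itself is an assumption):

[locations]
queue = /var/spool/benchmark_queue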
Example n. 5
def main():
    # TODO : implement getopt style arguments

    if len(sys.argv) == 2:
        filename = sys.argv[1]
    else:
        print "Usage: python parser.py <relative-path-to-file>"
        sys.exit(1)

    parser_obj = Parser(filename)
    parser_obj.run()
Example n. 6
def process():
    """
    Process the files in the queue directory.
    """
    config = ConfigParser()
    config.read(['../project_config'])

    for filename in os.listdir(config.get("locations", "queue")):
        parser_obj = Parser(
            os.path.join(config.get("locations", "queue"), filename))
        parser_obj.run()
Example n. 7
def main():
    # TODO : implement getopt style arguments
    
    if len(sys.argv) == 2:
        filename = sys.argv[1]
    else:
        print("Usage: python parser.py <relative-path-to-file>")
        sys.exit(1)
    
    parser_obj = Parser(filename)
    parser_obj.run()
Example n. 8
    def tcpClient(self, iccid):
        try:
            self.login(iccid)
            parser = Parser()
            while True:
                dat = self.socket.recv(1024)
                if dat:
                    bp = parser.parse_base(dat)
                    self.handle_recv(bp, iccid, dat)
        except Exception as e:
            self.logging.error("Connection error (%s), reconnecting.", e)
Example n. 9
    def __init__(self, query, start_page=0, number_of_results=100, timeout=10):

        self.query = query
        self.start_page = start_page
        self.number_of_results = number_of_results
        self.timeout_requests = timeout
        self.failed_request = ('Our systems have detected unusual traffic '
                               'from your computer network')

        self.filter_string = ["google"]
        self.randomize = Randomize()
        self.parser = Parser()
Example n. 10
def show_result(request, filename):
    #TODO: Check for the case when a duplicate file is submitted, with a pre-existing Md5 sum
    """
    """
    # Parse the file
    if not Md5Log.objects.filter(file_name=filename + '.log'):
        file = settings.MEDIA_ROOT + 'benchmarkLogs/' + filename + '.log'
        parser_obj = Parser(file)
        parser_obj.run()
    flush_transaction()
    #transaction.enter_transaction_management()
    #transaction.
    time.sleep(3)
    while not Md5Log.objects.filter(file_name=filename + '.log'):
        flush_transaction()
        print(Md5Log.objects.filter(file_name=filename + '.log'))
        flush_transaction()
        continue
    data_dict = {}
    #Query the database for Benchmark data from benchmark_logs table
    file_obj = Md5Log.objects.filter(file_name=filename + '.log')[0]
    data_dict['BRLCAD_Version'] = file_obj.benchmark.brlcad_version
    data_dict['Running_Time'] = file_obj.benchmark.running_time
    data_dict['Time_of_Execution'] = file_obj.benchmark.time_of_execution
    data_dict['VGR_Rating'] = file_obj.benchmark.approx_vgr
    data_dict['Log_VGR'] = file_obj.benchmark.log_vgr
    data_dict['Parameters'] = file_obj.benchmark.params

    #Query the database for System Information from machine_info table
    data_dict['Clock_Speed'] = file_obj.benchmark.machineinfo.cpu_mhz
    data_dict['NCores'] = file_obj.benchmark.machineinfo.cores
    data_dict['NProcessors'] = file_obj.benchmark.machineinfo.processors
    data_dict['Vendor_ID'] = file_obj.benchmark.machineinfo.vendor_id
    data_dict['OS_Type'] = file_obj.benchmark.machineinfo.ostype
    data_dict[
        'Processor_Model_Name'] = file_obj.benchmark.machineinfo.model_name

    #Query the database for individual Image Performance
    data_dict['Rt_Average'] = file_obj.benchmark.rtaverage_set.all()[0].abs_rps
    data_dict['Rt_Bldg391'] = file_obj.benchmark.rtbldg391_set.all()[0].abs_rps
    data_dict['Rt_M35'] = file_obj.benchmark.rtm35_set.all()[0].abs_rps
    data_dict['Rt_Moss'] = file_obj.benchmark.rtmoss_set.all()[0].abs_rps
    # rtsphlake_set is assumed to mirror the other rt*_set relations
    data_dict['Rt_Sphlake'] = file_obj.benchmark.rtsphlake_set.all()[0].abs_rps
    data_dict['Rt_Star'] = file_obj.benchmark.rtstar_set.all()[0].abs_rps
    data_dict['Rt_World'] = file_obj.benchmark.rtworld_set.all()[0].abs_rps

    data_dict['filename'] = filename
    return render_to_response('result.html',
                              data_dict,
                              context_instance=RequestContext(request))
Example n. 11
def show_result(request, filename):
    #TODO: Check for the case when a duplicate file is submitted, with a pre-existing Md5 sum
    """
    """
    # Parse the file
    if not Md5Log.objects.filter(file_name=filename+'.log'):
        file = settings.MEDIA_ROOT + 'benchmarkLogs/' + filename + '.log'
        parser_obj = Parser(file)
        parser_obj.run()
    flush_transaction()
    #transaction.enter_transaction_management()
    #transaction.
    time.sleep(3)
    while not Md5Log.objects.filter(file_name=filename+'.log'):
        flush_transaction()
        print(Md5Log.objects.filter(file_name=filename+'.log'))
        flush_transaction()
        continue
    data_dict = {}
    #Query the database for Benchmark data from benchmark_logs table
    file_obj = Md5Log.objects.filter(file_name=filename+'.log')[0]
    data_dict['BRLCAD_Version'] = file_obj.benchmark.brlcad_version
    data_dict['Running_Time'] = file_obj.benchmark.running_time
    data_dict['Time_of_Execution'] = file_obj.benchmark.time_of_execution
    data_dict['VGR_Rating'] = file_obj.benchmark.approx_vgr
    data_dict['Log_VGR'] = file_obj.benchmark.log_vgr
    data_dict['Parameters'] = file_obj.benchmark.params

    #Query the database for System Information from machine_info table
    data_dict['Clock_Speed'] = file_obj.benchmark.machineinfo.cpu_mhz
    data_dict['NCores'] = file_obj.benchmark.machineinfo.cores
    data_dict['NProcessors'] = file_obj.benchmark.machineinfo.processors
    data_dict['Vendor_ID'] = file_obj.benchmark.machineinfo.vendor_id
    data_dict['OS_Type'] = file_obj.benchmark.machineinfo.ostype
    data_dict['Processor_Model_Name'] = file_obj.benchmark.machineinfo.model_name

    #Query the database for individual Image Performance
    data_dict['Rt_Average'] = file_obj.benchmark.rtaverage_set.all()[0].abs_rps
    data_dict['Rt_Bldg391'] = file_obj.benchmark.rtbldg391_set.all()[0].abs_rps
    data_dict['Rt_M35'] = file_obj.benchmark.rtm35_set.all()[0].abs_rps
    data_dict['Rt_Moss'] = file_obj.benchmark.rtmoss_set.all()[0].abs_rps
    # rtsphlake_set is assumed to mirror the other rt*_set relations
    data_dict['Rt_Sphlake'] = file_obj.benchmark.rtsphlake_set.all()[0].abs_rps
    data_dict['Rt_Star'] = file_obj.benchmark.rtstar_set.all()[0].abs_rps
    data_dict['Rt_World'] = file_obj.benchmark.rtworld_set.all()[0].abs_rps

    data_dict['filename'] = filename
    return render_to_response('result.html', data_dict, context_instance=RequestContext(request))
Example n. 12
class SqlInjection():
    def __init__(self):
        self.parser = Parser()
        self.randomize = Randomize()
        self.report_targets_vull = []
        self.error_list = [
            "mysql_fetch_array()", "You have an error in your SQL syntax",
            "MySQL Query fail.", "PostgreSQL ERROR", "Access Database Engine",
            "Microsoft Access Driver"
        ]

    def report(self):
        print("*" * 79)
        log_info("Number of vulnerable sites: {0}".format(
            len(self.report_targets_vull)))

        for target in self.report_targets_vull:
            log_info(target)

    def check_vull(self, target):
        targets_with_payloads = self.parser.insert_sqli_payloads(target)

        for target in targets_with_payloads:
            user_agent = self.randomize.get_random_user_agent()
            log_info(target)

            try:
                response = requests.get(url=target, headers=user_agent)

                for error in self.error_list:
                    if error in response.text:
                        log_vulnerable("{0} is vulnerable".format(target))
                        self.report_targets_vull.append(target)
            except Exception:
                log_danger("{0} raised an error during the request".format(target))
Example n. 13
def main():
    args = Parser().get_parser().parse_args()
    layer_no = state_space_parameters.layer_no
    controller = Controller(args, state_space_parameters)
    for arch_no in range(state_space_parameters.total_arch):
        print('arch no.: {}, layer no.: {}'.format(arch_no, layer_no))
        controller = init_train_child_update_controller(
            args, layer_no, controller)
        if (arch_no + 1) % 500 == 0:
            layer_no += state_space_parameters.layer_increment
    replay_dictionary.to_csv('./replayDict1.csv')
Example n. 14
    def __init__(self,
                 user_id,
                 message,
                 posted_time=0,
                 recipients=set(),
                 urls=set(),
                 hashtags=set(),
                 replies=set(),
                 favorited=set(),
                 retweeted=set()):
        self._values = dict()
        # holds sets, dicts, etc
        self._cplx_values = dict()
        self._values[self.MSG_ID_KEY] = None
        self._values[self.USER_ID_KEY] = user_id

        if not urls and not hashtags and not recipients:
            parsr = Parser(message)

            self._cplx_values[self.URL_KEY] = parsr.urls
            self._cplx_values[self.FMT_MSG_KEY] = parsr.formatted_msg
            self._cplx_values[self.HT_KEY] = parsr.hashtags
            self._cplx_values[self.RECIP_KEY] = parsr.recipients
            self._cplx_values[self.MSG_TOKENS_KEY] = parsr._msg_values
            self._cplx_values[self.PARSED_MSG_KEY] = parsr._parsed_message
        else:
            # formtted_url = set()
            # formatted_hts = set()
            # formatted_recip = set()
            #
            # for url in urls:
            #     formtted_url += self.__URL_HTML % url
            # urls = formtted_url
            #
            # for ht in hashtags:
            #     formatted_hts += self.__HT_HTML % ht
            # hashtags = formatted_hts
            #
            # for user in recipients:
            #     formatted_recip += self.__USER_HTML % user
            # recipients = formatted_recip
            self._cplx_values[self.URL_KEY] = urls
            self._cplx_values[self.HT_KEY] = hashtags
            self._cplx_values[self.RECIP_KEY] = recipients

        self._values[self.POST_KEY] = int(math.ceil(time.time()))

        self._values[self.MSG_KEY] = message

        self._cplx_values[self.REPLY_KEY] = replies
        self._cplx_values[self.FAV_KEY] = favorited
        self._cplx_values[self.RT_KEY] = retweeted
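One caveat worth noting: the set() defaults above are created once at definition time and shared between calls, the classic Python mutable-default pitfall. A common refactor (a sketch, not the original author's code) swaps them for None sentinels:

    def __init__(self, user_id, message, posted_time=0, recipients=None,
                 urls=None, hashtags=None, replies=None, favorited=None,
                 retweeted=None):
        # Build fresh, per-call sets instead of sharing default instances
        recipients = recipients if recipients is not None else set()
        urls = urls if urls is not None else set()
        hashtags = hashtags if hashtags is not None else set()
        replies = replies if replies is not None else set()
        favorited = favorited if favorited is not None else set()
        retweeted = retweeted if retweeted is not None else set()
        # ... rest of the constructor unchanged ...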
Example n. 15
class SearchGoogle():
    def __init__(self, query, start_page=0, number_of_results=100, timeout=10):

        self.query = query
        self.start_page = start_page
        self.number_of_results = number_of_results
        self.timeout_requests = timeout
        self.failed_request = ('Our systems have detected unusual traffic '
                               'from your computer network')

        self.filter_string = ["google"]
        self.randomize = Randomize()
        self.parser = Parser()

    def search_results(self):
        parameters = {
            'q': self.query,
            'start': self.start_page,
            'num': self.number_of_results
        }

        user_agent = self.randomize.get_random_user_agent()
        google_url = self.randomize.get_random_google_url()

        log_info("Searching")
        log_info(user_agent)
        log_info("Google URL {0}".format(google_url))

        response = requests.get(url=google_url,
                                params=parameters,
                                timeout=self.timeout_requests,
                                headers=user_agent)

        if self.failed_request in response.text:
            log_danger("Google detected malicious traffic")
            return 1

        bs = BeautifulSoup(response.text, "lxml")

        links = bs.find_all("a")
        links = self.parser.remove_bad_urls(self.filter_string, links)

        log_info("Number of targets: {0}".format(len(links)))
        print("-" * 79)

        return links
Example n. 16
class Qeopps:
    """Create the enviroment for the optimizations based on the config file."""
    def __init__(self, config_file_name):
        self.config_file = open(config_file_name, 'r')
        self.get_config()
        self.parser = Parser(self.settings['target_program'])
        self.population_dir = self.settings["population_programs_dir"]

    def get_config(self):
        """Loads data from the configuration file"""
        self.settings = dict()
        for line in self.config_file.readlines():
            line_match_int_value = re.match(pattern_int_config, line)
            line_match_float_value = re.match(pattern_float_config, line)
            line_match_string_value = re.match(pattern_string_config, line)
            if line_match_int_value:
                setting = line_match_int_value.group(1)
                value = int(line_match_int_value.group(2))
            elif line_match_float_value:
                setting = line_match_float_value.group(1)
                value = float(line_match_float_value.group(2))
            elif line_match_string_value:
                setting = line_match_string_value.group(1)
                value = line_match_string_value.group(2)
            else:
                continue
            self.settings[setting] = value

    def must_stop(self):
        """Returns if the end of the optimization was reached"""
        if "max_generations" in self.settings:
            if self.settings["max_generations"] < self.generation:
                return True

    def add_individual(self, individual):
        """Add an individual to the population"""
        if isinstance(individual, list):
            for i in individual:
                self.population.append(Solution(i))
        else:
            self.population.append(Solution(individual))

    def update_population(self):
        """Update the population, removing the bad solutions and adding new
        individuals after mutation and crossover"""
        population_size = self.settings["population_size"]
        self.population = sorted(self.population,
                                 key=lambda x: x.fitness)[:population_size]
        for i in range(population_size):
            child = None
            # Mutate
            if probability(self.settings["mutation_probability"]):
                solution_index = getLinearDistributedIndex(population_size)
                child = self.population[solution_index].table.get_copy()
                child.mutate()
                self.add_individual(child)
            # Crossover
            if probability(self.settings["crossingover_probability"]):
                # Create a child if there is no child yet
                if child is None:
                    solution_index = getLinearDistributedIndex(population_size)
                    child = self.population[solution_index].table.get_copy()
                other_index = getLinearDistributedIndex(population_size)
                other_table = self.population[other_index].table
                # The cross_over operation returns 2 solutions
                for c in child.cross_over(other_table):
                    self.add_individual(c)

    def run(self):
        """ Run the optimization """
        self.generation = 0
        while True:
            print('Generation %i' % self.generation)
            for i, solution in enumerate(self.population):
                file_name = self.population_dir + "Qeopps_son" + str(i) + ".c"
                code_generator = Generator(self.parser.tree, file_name,
                    solution.table)
                compiler_string = self.settings["compiler_string"]
                compiler_flags = self.settings["compiler_flags"]
                server_host = self.settings["server_host"]
                code_generator.generate_code()
                f = Fitness(file_name, compiler_string, compiler_flags, server_host)
                solution.fitness = f.benchmark()
                print(solution.fitness)
            self.update_population()
            self.generation += 1
            if self.must_stop():
                break

    def start_optimization(self):
        """ Configure the initial scenario and run the optimization """
        self.parser.parse()
        init_pop_size = self.settings["init_population_size"]
        scope_mutations = self.settings["init_population_scope_mutations"]
        type_mutations = self.settings["init_population_type_mutations"]
        table_population = generate_population(self.parser.syncTable,
            init_pop_size, scope_mutations, type_mutations)
        self.population = [Solution(t) for t in table_population]
        self.run()
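A minimal usage sketch for the class above. The exact line format accepted by pattern_int_config, pattern_float_config and pattern_string_config is not shown here, so the "key = value" layout and every concrete value below are assumptions; the setting names are the ones get_config() and the other methods read:

config_text = """\
target_program = programs/target.c
population_programs_dir = population/
init_population_size = 40
init_population_scope_mutations = 2
init_population_type_mutations = 2
population_size = 20
mutation_probability = 0.3
crossingover_probability = 0.6
max_generations = 50
compiler_string = gcc
compiler_flags = -O2
server_host = localhost
"""

# Write the hypothetical config, then run the optimization end to end
with open('qeopps_example.conf', 'w') as conf:
    conf.write(config_text)

Qeopps('qeopps_example.conf').start_optimization()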
Example n. 17
from libs.utils import JsonConf
from libs.utils import File
from libs.parser import Parser
from libs.clientsocket import ClientSocket


CWD = os.getcwd()

# Load configuration data
if len(sys.argv) == 3:
    print ("Loading " + CWD + "/conf/friends.json" + " configuration")
    friends = JsonConf(CWD + "/conf/friends.json")
    friends.load()
    conn_data = friends.get(sys.argv[2])

if len(sys.argv) == 4:
    print ("Loading friends.json configuration")
    params = Parser.parseCommandLineArguments(sys.argv[1:])
    conn_data = {"address": params["address"], "port": params["port"]}

conn = ClientSocket(conn_data["address"], int(conn_data["port"]))
conn.connect()

inputfile = File(sys.argv[1], 'rb')
conn.writeMetadata(str(inputfile.size) + "|" + inputfile.name + "|")
conn.writeFile(inputfile, 4096000)

print ("Sent file: " + inputfile.path)
conn.close()
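Two hypothetical invocations, inferred from the sys.argv handling above (the script name and flag spelling are assumptions, since Parser.parseCommandLineArguments is not shown):

# len(sys.argv) == 3: look the peer up in conf/friends.json
#   python send_file.py report.pdf alice
# len(sys.argv) == 4: pass address/port directly, parsed by parseCommandLineArguments
#   python send_file.py report.pdf --address=10.0.0.5 --port=9000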
Example n. 18
    def __init__(self, config_file_name):
        self.config_file = open(config_file_name, 'r')
        self.get_config()
        self.parser = Parser(self.settings['target_program'])
        self.population_dir = self.settings["population_programs_dir"]