Ejemplo n.º 1
0
def apikey_auth(token, required_scopes):
    """Resolve *token* against TOKEN_DB; raise OAuthProblem when unknown."""
    user_entry = TOKEN_DB.get(token)
    if user_entry:
        log(f'User Logged: {user_entry}')
        return user_entry
    log('Invalid token', 'critical')
    raise OAuthProblem('Invalid token!')
Ejemplo n.º 2
0
 def fields(self):
    """Run the field profiling pass, then the field analysis pass."""
    commons.log(f'Field analysis -- "{self.prefix}"')
    self._execute(['fields', 'profile', None, None],
                  commons.agent_field_options() + self._get_program_options())
    self._execute(['fields', 'analysis', None, None],
                  commons.analyis_options() + self._get_fields_analysis_class())
    commons.log(f'Field analysis -- "{self.prefix}" -- DONE.')
Ejemplo n.º 3
0
 def tuples(self, options):
    """Run the tuple profiling pass, then the tuple analysis pass."""
    commons.log(f'Tuples analysis -- "{self.prefix}"')
    self._execute(['tuples', 'profile', options.use_max_depth, options.max_depth],
                  options.as_list + commons.agent_tuples_options() +
                  self._get_program_options())
    self._execute(['tuples', 'analysis', options.use_max_depth, options.max_depth],
                  options.as_list + commons.analyis_options() +
                  self._get_tuples_analysis_class())
    commons.log(f'Tuples analysis -- "{self.prefix}" -- DONE.')
Ejemplo n.º 4
0
    def __init__(self, tokens, account, on_tweet):

        '''
        Sets up the Twitter API connection and creates a stream listener.

        @param tokens: dictionary containing the app's consumer key + secret,
            and also the associated account's access token + secret
        @param account: username of twitter account to listen to
        @param on_tweet: callback method to be executed when a new tweet is sent
        '''

        # Twitter API authentication
        auth = OAuthHandler(tokens['consumer_key'], tokens['consumer_secret'])
        auth.set_access_token(tokens['access_token'], tokens['access_token_secret'])
        api = tweepy.API(auth)
        user_id = str(api.get_user(account).id)

        global twitter_account
        twitter_account = account + " (" + user_id + ")"

        commons.log(LOG_TAG, "listening for new tweets from " + twitter_account)

        # instantiate tweet listener and start listening
        listener = TweetListener(on_tweet)
        self.stream = Stream(auth=api.auth, listener=listener)
        # bug fix: 'async' became a reserved keyword in Python 3.7 (SyntaxError);
        # tweepy >= 3.7 renamed the Stream.filter parameter to 'is_async'
        self.stream.filter(follow=[user_id], is_async=True)
Ejemplo n.º 5
0
 def ranking(self):
    """Compute the candidate ranking twice: without and with clustering."""
    for use_clustering, label in ((False, '*without*'), (True, '*with*')):
        commons.log(f'Ranking -- "{self.program.prefix}"-- {label} clustering"')
        self._ranking(use_clustering)
Ejemplo n.º 6
0
 def time_jvisualvm(self):
     """Convert a JVisualVM time profile, then filter it to the output file."""
     commons.log(f'Time analysis (JVisualVM) -- "{self.prefix}"')
     base = '%s/%s' % (self.options.folder, config.time_dir())
     profile_file = '%s/%s' % (base, config.time_profile_file())
     statistics_file = '%s/%s' % (base, config.statistics_file())
     output_file = '%s/%s' % (base, config.time_output_file())
     total_file = '%s/%s' % (base, config.time_total_file())
     # step 1: convert the raw profile into statistics + totals
     self._execute(['time', 'convert', None, None],
                   commons.scala_options() + self._get_time_converter_class() +
                   [profile_file, statistics_file, total_file])
     # step 2: filter the statistics into the final output file
     self._execute(['time', 'filter', None, None],
                   commons.scala_options() + self._get_time_filter_class() +
                   [statistics_file, total_file, output_file] +
                   self.time_options.as_list_plain)
     commons.log(f'Time analysis (JVisualVM) -- "{self.prefix}" -- DONE.')
Ejemplo n.º 7
0
 def time_options(self):
     """Return the TimeOptions for time-based filtering.

     Defaults are used when time filtering is enabled; otherwise every
     method is profiled (ALL_METHODS).
     """
     # idiom fix: test truthiness directly instead of '== True'
     if commons.filter_using_time():
         commons.log('Using default time profile options.')
         return options.TimeOptions.default()
     commons.log('Using ALL_METHODS time profile options.')
     return options.TimeOptions.all_methods()
Ejemplo n.º 8
0
def main_task():
    """Train a decision tree on principal features, evaluate it on the test
    split, plot contours for both splits, and log the accuracy.
    """
    # Training
    xi, labels = get_training_prinicipal_features_and_labels()
    labels[labels == NEGATIVE_CLASS] = NEGATIVE_CLASS_MAPPED
    labels[labels == POSITIVE_CLASS] = POSITIVE_CLASS_MAPPED
    x_nd = np.column_stack((xi, labels))
    root_node = build_tree(x_nd)
    stats_dict = {}
    traverse_tree(root_node, stats_dict)
    log(stats_dict)
    # bug fix: np.alen was deprecated in NumPy 1.18 and removed in 1.23;
    # len() is the supported equivalent. Comprehensions replace the
    # index-based fill loops.
    training_target_actual = [row[NUM_FEATURES] for row in x_nd]
    plot_contours(x_nd, training_target_actual, root_node)

    # Testing
    test_xi, test_labels = get_test_prinicipal_features_and_labels()
    test_labels[test_labels == NEGATIVE_CLASS] = NEGATIVE_CLASS_MAPPED
    test_labels[test_labels == POSITIVE_CLASS] = POSITIVE_CLASS_MAPPED
    test_x_nd = np.column_stack((test_xi, test_labels))
    test_target_actual = [row[NUM_FEATURES] for row in test_x_nd]
    test_target_predicted = [
        evaluate_tree(row[:NUM_FEATURES], root_node) for row in test_x_nd
    ]

    plot_contours(test_x_nd, test_target_actual, root_node)
    cm = confusion_matrix(test_target_actual, test_target_predicted)
    log("Accuracy: ", (cm[0][0] + cm[1][1]) / (np.sum(cm)))
Ejemplo n.º 9
0
  def do_GET(self):
    """Serve the JSON data endpoint, the quit control, or static files."""
    global http_running
    global data

    try:
      if self.path.endswith(".json"):
        # live data endpoint: serialize the shared 'data' object
        self.send_response(200)
        self.send_header('Content-type', 'application/x-javascript')
        self.end_headers()
        self.wfile.write(data.getJson())
      elif self.path.endswith("control?quit"):
        # shut the web interface down
        self.changeState(False, 'Web interface stopped', True)
      else:
        # static file fallback, rooted at ./html
        if self.path == '/':
          path = 'wilocate.html'
        else:
          path = self.path
        self.path = os.curdir + os.sep + 'html' + os.sep + path
        return SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self)
    except IOError:
      # bug fix: the original built 'Error ' + 404 + ... which raises
      # TypeError (str + int) instead of logging the 404
      log('Error 404 File not found: %s' % self.path)
Ejemplo n.º 10
0
def main(argv=None):
    """Command-line entry point for MemoizeIt.

    @return: 0 on success, 1 when no program was specified
    """
    parser = argparse.ArgumentParser(description='MemoizeIt - Finding memoization opportunities.')
    parser.add_argument('--path', dest='path', help='specifies the working directory')
    parser.add_argument('--folder', dest='folder', default=get_now(), help='specify where to save the profiled data')
    parser.add_argument('--time', dest='time', action="store_true", help='Run the initial time profiling phase (Use pre-loaded JVisual VM profiles)')
    parser.add_argument('--fields', dest='fields', action="store_true", help='Run the initial field profiling phase')
    parser.add_argument('--memo', dest='memo', action="store_true", help='Run the tuples profiling phase')
    parser.add_argument('--program', dest='program', help='Run the profiling for provided program')
    parser.add_argument('--ranking', dest='ranking', action="store_true", help='Print the ranking of the candidate methods')
    parser.add_argument('--descriptions', dest='descriptions', action="store_true", help='Print the list of programs that can be analyzed and a short description')
    parser.add_argument('--limit', dest='limit', default='pow2', help='Select iterative mode. Use MemoizeIt algorithm to traverse all the depths argument is depth incremental function [exhaustive, inc1, pow2 (default)]')

    args = parser.parse_args()

    # idiom fix: store_true flags default to False (never None), so the
    # original 'not x == None and x == True' reduces to a truthiness test
    if args.descriptions:
        descriptions()
        return 0

    if args.program is None:
        commons.log('Please specify a program to analyze!')
        return 1

    setup(args)

    if args.ranking:
        run_ranking(args.program, args)
        return 0

    # choose the depth-traversal strategy
    if args.limit == 'exhaustive':
        set_exhaustive_traversal()
    else:
        set_iterative_traversal(args.limit)

    run(args.program, args)
Ejemplo n.º 11
0
 def time_options(self):
     """Pick the time-profile options: defaults when time filtering is on,
     otherwise profile all methods.
     """
     # idiom fix: '== True' removed in favor of a plain truthiness test
     if commons.filter_using_time():
         commons.log('Using default time profile options.')
         return options.TimeOptions.default()
     else:
         commons.log('Using ALL_METHODS time profile options.')
         return options.TimeOptions.all_methods()
Ejemplo n.º 12
0
def run_ranking(program, args):
    """Initialize *program* and print its candidate-method ranking."""
    commons.log(f'Ranking="{program}"')
    target = programs[program]
    target.init()
    experiment.IterativeExperiment(target).ranking()
Ejemplo n.º 13
0
def basic_auth(username):
    """Issue a fresh numeric token for *username* and store its salted hash."""
    log(f"Token to user: {username}")
    # NOTE(review): USER_DB.get returns None for unknown users, so the hash
    # below is computed over "<token>:None" — confirm that is intended.
    user_sha = USER_DB.get(username)
    token = generate_numbers()
    digest = hashlib.sha256(f"{token}:{user_sha}".encode()).hexdigest()
    TOKEN_DB[username] = digest
    return token
Ejemplo n.º 14
0
def freight_smart(port, cookies):
    """Create a FreightSmart player for *port*; return None on any failure."""
    log("Creating player for [%s]" % port)
    try:
        return FreightSmart("", port, cookies)
    except BaseException as err:  # deliberately broad: any failure yields None
        # bug fix: 'port' may not be a str (the success log uses %s), so the
        # original '"..." + port' raised TypeError inside the error handler
        log("Failed to create player for " + str(port) + " - " + str(err))
        traceback.print_exc()
        return None
Ejemplo n.º 15
0
def run_ranking(program, args):
    """Look up *program*, initialize it and emit its ranking."""
    commons.log('Ranking' + '=' + '"' + program + '"')
    prog = programs[program]
    prog.init()
    exp = experiment.IterativeExperiment(prog)
    exp.ranking()
Ejemplo n.º 16
0
 def ranking(self):
     """Run the ranking analysis in both clustering modes."""
     # first pass: no clustering
     commons.log('Ranking -- "' + self.program.prefix + '"-- *without* clustering"')
     self._ranking(False)
     # second pass: with clustering
     commons.log('Ranking -- "' + self.program.prefix + '"-- *with* clustering"')
     self._ranking(True)
Ejemplo n.º 17
0
    def on_error(self, status):

        '''
        Handle a non-200 status code reported by the stream listener.

        @param status: status code of the error received
        '''

        # just record the status under this module's log tag
        status_text = str(status)
        commons.log(LOG_TAG, status_text)
Ejemplo n.º 18
0
def query(reverse_ip):
    """Resolve the DNSBL name built from *reverse_ip* and analyse the answer.

    @param reverse_ip: dotted-quad IP already in reversed octet order
    @return: analyse() of the resolved address; analyse('127.0.0.0') when the
        name does not resolve (not listed); None on any other error
    """
    try:
        reverse_dns = f"{TOKEN}.{reverse_ip}.{URI}"
        addr1 = socket.gethostbyname(reverse_dns)
        return analyse(addr1)
    except socket.gaierror:
        # NXDOMAIN means the IP is not on the blocklist — the good case
        # (idiom fix: dropped the placeholder-free f-string and unused 'as e')
        log('IP not found! Sound Good!', 'debug')
        return analyse('127.0.0.0')
    except Exception as e:
        log(f'{e}', 'error')
Ejemplo n.º 19
0
def get_keys(keys):
    """Fetch the given keys from redis, returning a name -> value dict."""
    log("Getting Keys..")
    conn = redis_conn()
    found = {}
    for raw in keys:
        # strip any quoting around the key name
        name = raw.replace('"', '').replace("'", "")
        value = conn.get(name)
        if value:
            found[name] = value.decode('utf-8', 'replace')
    return found
Ejemplo n.º 20
0
def add_keys(keys):
    """Store each {name, value} entry in redis (value JSON-encoded).

    Bug fix: the original returned from inside the loop, so only the FIRST
    key was ever written. Now every key is stored; the request aborts if
    any individual set fails.
    """
    log("Adding Keys..")
    conn = redis_conn()
    for key in keys:
        name = key['name']
        value = json.dumps(key['value'])
        if not conn.set(name, value):
            return abort(404, "[ERROR] On created!")
    return make_response("Successfully created.", 200)
Ejemplo n.º 21
0
 def tuples(self, options):
     """Profile tuples for this program, then run the tuples analysis."""
     banner = 'Tuples analysis' + ' -- ' + '"' + self.prefix + '"'
     commons.log(banner)
     depth_tag = [options.use_max_depth, options.max_depth]
     self._execute(['tuples', 'profile'] + depth_tag,
                   options.as_list + commons.agent_tuples_options() +
                   self._get_program_options())
     self._execute(['tuples', 'analysis'] + depth_tag,
                   options.as_list + commons.analyis_options() +
                   self._get_tuples_analysis_class())
     commons.log(banner + ' -- ' + 'DONE.')
Ejemplo n.º 22
0
 def fields(self):
     """Profile field accesses, then run the field analysis pass."""
     banner = 'Field analysis -- "%s"' % self.prefix
     commons.log(banner)
     self._execute(['fields', 'profile', None, None],
                   commons.agent_field_options() + self._get_program_options())
     self._execute(['fields', 'analysis', None, None],
                   commons.analyis_options() + self._get_fields_analysis_class())
     commons.log(banner + ' -- DONE.')
Ejemplo n.º 23
0
def run(program, args):
    """Profile *program*: collect initial candidates, then optionally refine.

    @param program: key into the global 'programs' registry
    @param args: parsed CLI arguments; args.memo enables tuple refinement
    """
    commons.log('Profiling' + '=' + '"' + program + '"')
    program = programs[program]
    program.init()
    exp = experiment.IterativeExperiment(program)
    exp.initial_candidates()
    # idiom fix: truthiness test instead of '== True'
    if args.memo:
        commons.log('Executing program' + '=' + '"' + program.prefix + '"')
        exp.refine_candidates()
Ejemplo n.º 24
0
def run(program, args):
    """Run the profiling pipeline for *program* (see 'programs' registry).

    Collects the initial candidate set, then — when --memo was given —
    iteratively refines it.
    """
    commons.log('Profiling' + '=' + '"' + program + '"')
    program = programs[program]
    program.init()
    exp = experiment.IterativeExperiment(program)
    exp.initial_candidates()
    # idiom fix: removed the redundant '== True'
    if args.memo:
        commons.log('Executing program' + '=' + '"' + program.prefix + '"')
        exp.refine_candidates()
Ejemplo n.º 25
0
 def time_jvisualvm(self):
    """Post-process a JVisualVM time profile: convert it, then filter it."""
    commons.log('Time analysis (JVisualVM) -- "%s"' % self.prefix)
    time_dir = '%s/%s' % (self.options.folder, config.time_dir())
    profile = time_dir + '/' + config.time_profile_file()
    statistics = time_dir + '/' + config.statistics_file()
    output = time_dir + '/' + config.time_output_file()
    total = time_dir + '/' + config.time_total_file()
    # conversion step: raw profile -> statistics + totals
    self._execute(['time', 'convert', None, None],
                  commons.scala_options() + self._get_time_converter_class() +
                  [profile, statistics, total])
    # filtering step: statistics -> final output
    self._execute(['time', 'filter', None, None],
                  commons.scala_options() + self._get_time_filter_class() +
                  [statistics, total, output] + self.time_options.as_list_plain)
    commons.log('Time analysis (JVisualVM) -- "%s" -- DONE.' % self.prefix)
Ejemplo n.º 26
0
    def _log(self, message, chat):

        '''
        Formatted logging to include chat context.

        @param message: the message to be logged
        @param chat: dictionary containing information about the chat between
            bot and the user/group; see https://core.telegram.org/bots/api#chat
            for more information
        '''

        # prefer the group title, then the username, then the first name
        if 'title' in chat:
            sender = chat['title']
        elif 'username' in chat:
            sender = chat['username']
        else:
            sender = chat['first_name']
        commons.log(LOG_TAG, "[" + sender + ":" + str(chat['id']) + "] " + message)
Ejemplo n.º 27
0
def _remove_subscriber(id):

    '''
    Performs the write operation for removing a subscriber.

    @param id: stringified unique user id as provided by telegram
    '''

    subscribers = commons.get_data("subscribers")
    # resolves the original TODO: don't raise KeyError when the id was
    # never subscribed — log and bail out instead
    if id not in subscribers:
        commons.log(LOG_TAG, "cannot remove unknown subscriber: [" + id + "]")
        return
    commons.log(LOG_TAG, "removing subscriber: " + subscribers[id] + "[" + id + "]")
    del subscribers[id]
    commons.set_data("subscribers", subscribers)
Ejemplo n.º 28
0
    def on_data(self, data):

        '''
        Called when raw data is received from connection (i.e. new tweets are sent, etc).

        @param data: a dictionary containing information about the tweet. See
            https://dev.twitter.com/overview/api/tweets for more information
        '''

        tweet = json.loads(data)
        if tweet["retweeted"]:
            # filter out retweets
            return
        # schedule the callback on the event loop, then log the new tweet
        self.loop.create_task(self.on_tweet(tweet))
        commons.log(LOG_TAG, "new tweet from " + twitter_account + ": " + tweet['text'])
        return True
Ejemplo n.º 29
0
def setup(args):
    """Configure global paths/flags from *args* and register all programs."""
    experiments_path = '%s/experiments/%s' % (args.path, args.folder)
    commons.log('Experiment path' + '=' + experiments_path)
    config.set_options_file('%s/memoizeit/options.json' % args.path)

    # working-directory layout
    commons.set_general_path(args.path)
    commons.set_programs_path('%s/programs' % args.path)
    commons.set_libs_path('%s/memoizeit/libs' % args.path)
    commons.set_jars_path('%s/memoizeit/jars' % args.path)
    commons.set_profiles_path('%s/profiles' % args.path)
    commons.set_callgraphs_path('%s/callgraphs' % args.path)

    # profiling flags
    commons.set_log_depth(True)
    commons.set_filter_using_time(args.time)
    commons.set_filter_using_fields(args.fields)

    commons.log('Setting working path to' + '=' + args.path)
    commons.log('Setting to use log depths' + '=' + str(commons.log_depth()))
    commons.log('Setting to filter methods based on time' + '=' +
                str(commons.filter_using_time()))
    commons.log('Setting to filter methods based on field accesses' + '=' +
                str(commons.filter_using_fields()))

    # registry of analyzable programs, keyed by CLI name
    programs['Soot'] = soot.SootProgram.create(experiments_path, 'toy')
    programs['Checkstyle'] = checkstyle.CheckStyleProgram.create(
        experiments_path, 'original')
    for bench in ('antlr', 'bloat', 'chart', 'fop', 'luindex', 'pmd'):
        programs['DaCapo-' + bench] = dacapo.DacapoProgram.create(
            experiments_path, bench, 'default')
    programs['Apache-POI'] = poi.ApachePoiProgram.create(
        experiments_path, 'excel_extractor')
    programs['Apache-Tika-Jar'] = tika.ApacheTikaProgram.create(
        experiments_path, 'jars')
    programs['Apache-Tika-Excel'] = tika.ApacheTikaProgram.create_pkg(
        experiments_path, 'excel', 'org.apache.poi')
Ejemplo n.º 30
0
def _new_subscriber(id, name):

    '''
    Performs the write operation for adding a subscriber.

    @param id: stringified unique user id as provided by telegram
    @param name: string to identify the user; can be first name, username or
        group chat name
    '''

    roster = commons.get_data("subscribers")
    roster[id] = name
    commons.set_data("subscribers", roster)
    commons.log(LOG_TAG, "new subscriber: " + name + "[" + id + "]")
Ejemplo n.º 31
0
   def run ( self ):
      """Thread body: bind the HTTP server port, retrying every 10s until free.

      NOTE(review): Python 2 code ('except Exception, e' syntax); the
      'forced_quit' check only runs after a failed bind attempt — confirm
      that is intended.
      """

      while True:
	
	try:
	  # try to bind the HTTP server on localhost:<port>
	  self.httpd = ExitableSocketServer(('127.0.0.1', self.port), httpRequestHandler)
	except Exception, e:
	  # port is busy: tell the UI and retry after a 10-second wait
	  log('!', e)
	  self.__changeState(False, 'Port ' + str(self.port) + ' unavailable,\nplease shutdown any other process that keep port open.\nRetry in 10s.',True)
	  time.sleep(10)
	else:
	  break

	if self.forced_quit:
	  break
Ejemplo n.º 32
0
def apikey_auth(credentials, required_scopes):
    """Validate "username:token" credentials against TOKEN_DB.

    @param credentials: string of the form "<username>:<token>"
    @param required_scopes: unused; required by the auth-handler interface
    @return: the user record from USER_DB for valid credentials
    @raise OAuthProblem: when the credentials are malformed or invalid
    """
    def invalied_token(username="******"):
        msg = f"Invalid token User: {username}"
        log(msg, 'critical')
        raise OAuthProblem('Invalid token')

    if ':' not in credentials:
        invalied_token()
    # idiom fix: split once instead of calling split(':') twice
    parts = credentials.split(':')
    username, token = parts[0], parts[1]
    validate = TOKEN_DB.get(username)

    if not validate or validate != token:
        invalied_token(username)
    log(f"User Logged: {username}")
    return USER_DB.get(username)
Ejemplo n.º 33
0
def build_tree_recursive(x_t_all, level=0):
    """Recursively build a decision tree over the rows of *x_t_all*.

    @param x_t_all: 2-D array whose column NUM_FEATURES holds the target label
    @param level: current recursion depth (tracks globals.tree_height)
    @return: a DNode — either a RULE node with left/right children, or a leaf
    """
    # len() replaces np.alen, which was removed in NumPy 1.23
    x_t_len = len(x_t_all)
    # bug fix: the original 'assert np.alen(x_t_all > 0)' asserted the LENGTH
    # of a boolean array (truthy for any non-empty input) — the intent was to
    # assert the input is non-empty
    assert x_t_len > 0
    if globals.tree_height < level:
        globals.tree_height = level

    index_target = NUM_FEATURES
    prevalence_negative, prevalence_positive = get_prevalence(x_t_all[:, index_target])
    prevalence = prevalence_negative * prevalence_positive

    log_debug("Tree max height so far: ", globals.tree_height)
    log("X very pure? : subset len: {}, prevalence: {}", x_t_len, prevalence)
    # stop when the subset is pure enough or too small to split further
    if prevalence < LIMIT_LEAF_NODE_PREVALENCE or x_t_len < LIMIT_LEAF_NODE_SUBSET_SIZE:
        log_debug("X very pure. Bailing out: subset len: {}, prevalence: {}", x_t_len, prevalence)
        return get_leaf_node_by_prevalence(prevalence_negative, prevalence_positive)

    # pick the feature with the largest delta and its split threshold tau
    delta_array, tau_array = get_delta_and_tow_impl(x_t_all)
    delta_max_idx = np.argmax(delta_array)
    tau = tau_array[delta_max_idx]
    log_debug("delta_array: ", delta_array, ", delta_max_idx: ", delta_max_idx, ", tau: ", tau_array)

    # stable sort on the chosen feature, then locate the split row
    x_t_all_sorted_delta_max = x_t_all[x_t_all[:, delta_max_idx].argsort(kind='mergesort')]
    x_delta_max = x_t_all_sorted_delta_max[:, delta_max_idx]
    log_debug("Tau, idx: ", tau, np.where(x_delta_max == tau), ", x_sorted: ", x_delta_max)
    tau_idx = np.where(x_delta_max == tau)[0][0]
    assert (tau_idx >= 0) and (tau_idx <= len(x_t_all_sorted_delta_max))

    x_t_all_left = x_t_all_sorted_delta_max[0:tau_idx, :]
    x_t_all_right = x_t_all_sorted_delta_max[tau_idx:, :]
    if len(x_t_all_left) > 0 and len(x_t_all_right) > 0:
        node = DNode("RULE", feature_idx=delta_max_idx, tau=tau)

        assert (tau_idx > 0)
        node.left = build_tree_recursive(x_t_all_left, level + 1)

        assert (len(x_t_all_sorted_delta_max) - tau_idx > 0)
        node.right = build_tree_recursive(x_t_all_right, level + 1)
    else:
        # degenerate split: fall back to a leaf over the whole sorted subset
        assert len(x_t_all_left) == 0 or len(x_t_all_right) == 0
        node = get_leaf_node(x_t_all_sorted_delta_max[:, 2])

    return node
Ejemplo n.º 34
0
def loadOptions():
    """Load the JSON config from confdir, falling back to defaults when the
    file is missing or unparseable.
    """
    global options

    if not os.path.exists(confdir):
        os.makedirs(confdir)

    if not os.path.exists(confdir + 'wilocate.conf'):
        setDefaultOptions()
        log('No config founded, loaded default options.')
        saveOptions()
    else:
        try:
            # 'with' guarantees the handle is closed (the original leaked it)
            with open(confdir + 'wilocate.conf', 'r') as f:
                options = json.loads(f.read())
        except Exception:
            # bug fix: 'except Exception, e' is Python-2-only syntax and
            # 'e' was unused; this form works on both 2 and 3
            setDefaultOptions()
            log('Error loading or parsing config file, loaded default options.')
Ejemplo n.º 35
0
def loadOptions():
  """Read wilocate.conf into the global options dict, creating the config
  directory and default options when needed.
  """
  global options

  if not os.path.exists(confdir):
    os.makedirs(confdir)

  if not os.path.exists(confdir + 'wilocate.conf'):
    setDefaultOptions()
    log('No config founded, loaded default options.')
    saveOptions()
  else:
    try:
      # context manager closes the file even on parse errors (the original
      # never closed it)
      with open(confdir + 'wilocate.conf', 'r') as f:
        options = json.loads(f.read())
    except Exception:
      # bug fix: replaced Python-2-only 'except Exception, e' (unused 'e')
      setDefaultOptions()
      log('Error loading or parsing config file, loaded default options.')
Ejemplo n.º 36
0
def setup(args):
   """Configure global MemoizeIt paths and filter flags from parsed CLI
   *args*, then fill the global 'programs' registry.
   """
   # experiment output location: <path>/experiments/<folder>
   experiments_path = '%s/experiments/%s' % (args.path, args.folder)
   #
   commons.log('Experiment path' + '=' + experiments_path)
   #
   config.set_options_file('%s/memoizeit/options.json' % args.path)
   # directory layout rooted at the working path
   commons.set_general_path(args.path)
   commons.set_programs_path('%s/programs' % args.path)
   commons.set_libs_path('%s/memoizeit/libs' % args.path)
   commons.set_jars_path('%s/memoizeit/jars' % args.path)
   commons.set_profiles_path('%s/profiles' % args.path)
   commons.set_callgraphs_path('%s/callgraphs' % args.path)
   #
   commons.set_log_depth(True)
   # candidate-filtering toggles come straight from the CLI flags
   commons.set_filter_using_time(args.time)
   commons.set_filter_using_fields(args.fields)
   #
   commons.log('Setting working path to' + '=' + args.path)
   commons.log('Setting to use log depths' + '=' + str(commons.log_depth()))
   commons.log('Setting to filter methods based on time' + '=' + str(commons.filter_using_time()))
   commons.log('Setting to filter methods based on field accesses' + '=' + str(commons.filter_using_fields()))
   # registry of analyzable programs, keyed by CLI name
   programs['Soot'] = soot.SootProgram.create(experiments_path, 'toy')
   programs['Checkstyle'] = checkstyle.CheckStyleProgram.create(experiments_path, 'original')
   # DaCapo benchmarks, all run with the 'default' workload
   programs['DaCapo-antlr'] = dacapo.DacapoProgram.create(experiments_path, 'antlr', 'default')
   programs['DaCapo-bloat'] = dacapo.DacapoProgram.create(experiments_path, 'bloat', 'default')
   programs['DaCapo-chart'] = dacapo.DacapoProgram.create(experiments_path, 'chart', 'default')
   programs['DaCapo-fop'] = dacapo.DacapoProgram.create(experiments_path, 'fop', 'default')
   programs['DaCapo-luindex'] = dacapo.DacapoProgram.create(experiments_path, 'luindex', 'default')
   programs['DaCapo-pmd'] = dacapo.DacapoProgram.create(experiments_path, 'pmd', 'default')
   # Apache programs
   programs['Apache-POI'] = poi.ApachePoiProgram.create(experiments_path, 'excel_extractor')
   programs['Apache-Tika-Jar'] = tika.ApacheTikaProgram.create(experiments_path, 'jars')
   programs['Apache-Tika-Excel'] = tika.ApacheTikaProgram.create_pkg(experiments_path, 'excel', 'org.apache.poi')
Ejemplo n.º 37
0
async def on_tweet(data):
    '''
    Function to be called when a new tweet is sent.

    @param data: a dictionary containing information about the tweet; see
        https://dev.twitter.com/overview/api/tweets for more information
    '''

    # bump the running tweet counter in the persisted stats
    stats = commons.get_data("stats")
    previous = int(stats["tweets"]) if "tweets" in stats else 0
    stats["tweets"] = previous + 1
    commons.set_data("stats", stats)

    # broadcast the tweet to every subscriber as HTML
    tweet_message = "<b>" + data['user']['screen_name'] + "</b>: " + data['text']
    subscribers = commons.get_data("subscribers")
    commons.log(LOG_TAG,
                "sending tweet to " + str(len(subscribers)) + " subscribers")
    for subscriber_id in subscribers:
        await bot_delegator.sendMessage(int(subscriber_id),
                                        tweet_message,
                                        parse_mode='HTML')
Ejemplo n.º 38
0
    def refine_candidates(self):
        """Iteratively deepen the tuple profiling until no candidates remain
        or the maximum depth is reached.

        @raise Exception: when the configured increment function is unknown
            and exhaustive profiling is not enabled
        """
        self._filter_with_black_list()
        depth = 1
        use_max_depth = not commons.profile_exaustive()

        # choose how the exploration depth grows between iterations
        if commons.increment_function() == 'inc1':
            get_next_depth = lambda x: x + 1
        elif commons.increment_function() == 'pow2':
            get_next_depth = lambda x: 2 * x
        else:
            if not commons.profile_exaustive():
                # bug fix: the original raised the misspelled 'Exeception',
                # which itself fails with a NameError
                raise Exception(
                    'Increment function parameter has wrong value -- ' +
                    commons.increment_function())
            get_next_depth = None

        while True:
            commons.log('Exploring depth ' + str(depth) + ' -- ' + '"' +
                        self.program.prefix + '"')
            self._save_white_list(depth)
            tuple_options = options.TuplesOptions(use_max_depth, depth, True,
                                                  False)
            self.program.tuples(tuple_options)
            self._write_options_to_file(
                '%s/%s' % (self.folder, config.tuples_dir()), tuple_options)
            stop = self._max_depth_reached()
            if not commons.profile_exaustive():
                self._filter_tuples(depth)
                self._save_current_depth_directory(depth)
            candidates_list_new = '%s/%s' % (self.folder,
                                             config.white_list_file())
            number_of_candidates = self._count_lines(candidates_list_new)
            if number_of_candidates == 0:
                commons.log('No caching candidates left to explore' + ' -- ' +
                            str(depth) + ' -- ' + '"' + self.program.prefix +
                            '"')
                break
            if stop:
                self._create_tuples_file()
                # bug fix: added the missing space before 'reached'
                commons.log('Max depth ' + str(depth) + ' reached' + ' -- ' +
                            '"' + self.program.prefix + '"')
                break
            depth = get_next_depth(depth)
Ejemplo n.º 39
0
def main(ip):
    try:
        reverse_ip = ip.split('.')
        response = json.loads(
            query(
                f"{reverse_ip[3]}.{reverse_ip[2]}.{reverse_ip[1]}.{reverse_ip[0]}"
            ))
        response['info']['IP'] = ip
        return response
    except Exception as e:
        log(e, 'error')
        log(f'Try split to reverse: {ip}', 'error')
        log(f'Query function response: {response}', 'error')
Ejemplo n.º 40
0
 def refine_candidates(self):
    """Explore increasing tuple depths until no candidates remain or the
    maximum depth is hit, filtering candidates after each round.
    """
    self._filter_with_black_list()
    depth = 1
    use_max_depth = not commons.profile_exaustive()

    # depth-growth strategy between iterations
    if commons.increment_function() == 'inc1':
        get_next_depth = lambda x: x + 1
    elif commons.increment_function() == 'pow2':
        get_next_depth = lambda x: 2 * x
    else:
        if not commons.profile_exaustive():
            # bug fix: 'Exeception' typo raised NameError instead of the
            # intended error message
            raise Exception('Increment function parameter has wrong value -- ' + commons.increment_function())
        get_next_depth = None

    while True:
        commons.log('Exploring depth ' + str(depth) + ' -- ' + '"' + self.program.prefix + '"')
        self._save_white_list(depth)
        tuple_options = options.TuplesOptions(use_max_depth, depth, True, False)
        self.program.tuples(tuple_options)
        self._write_options_to_file('%s/%s' % (self.folder, config.tuples_dir()), tuple_options)
        stop = self._max_depth_reached()
        if not commons.profile_exaustive():
            self._filter_tuples(depth)
            self._save_current_depth_directory(depth)
        candidates_list_new = '%s/%s' % (self.folder, config.white_list_file())
        number_of_candidates = self._count_lines(candidates_list_new)
        if number_of_candidates == 0:
            commons.log('No caching candidates left to explore' + ' -- ' + str(depth) + ' -- ' + '"' + self.program.prefix + '"')
            break
        if stop:
            self._create_tuples_file()
            # bug fix: missing space before 'reached' in the log message
            commons.log('Max depth ' + str(depth) + ' reached' + ' -- ' + '"' + self.program.prefix + '"')
            break
        depth = get_next_depth(depth)
Ejemplo n.º 41
0
# -*- coding: utf-8 -*-

from threading import Thread, Lock
import SimpleHTTPServer, SocketServer, os, sys, urllib2, socket, mimetypes, time
from commons import log

try: import json
except ImportError: import simplejson as json
try:
  import wx
  #import wx.lib.newevent
except ImportError:
  log('! Install wxPython library version 2.6 with \'sudo apt-get install python-wxgtk2.6\'')
  sys.exit(1)


#WebStateUpdateEvent, WEB_STATE_EVENT = wx.lib.newevent.NewEvent()

data = None


class ExitableSocketServer(SocketServer.TCPServer):
  # SO_REUSEADDR: lets the server rebind the port immediately after a restart
  allow_reuse_address = True

class httpRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
  # Request handler that silences SimpleHTTPServer's default stderr logging.

  def log_request(self, code='-', size='-'):
    # intentionally empty: suppress the per-request log line
    pass

  def log_error(self, *args):
    # intentionally empty: suppress error log lines
    pass
Ejemplo n.º 42
0
def __login__():
    """Drive the Selenium login flow for freightsmart.oocl.com.

    Loads saved cookies, dismisses the cookie-notice dialog when shown, and —
    if the auth container is present — submits the stored credentials.
    """
    login_driver.get("https://freightsmart.oocl.com/")

    __load_cookies__()

    # reload so the restored cookies take effect
    login_driver.get("https://freightsmart.oocl.com/")

    if login_driver.title == "403 Forbidden":
        raise Exception(login_driver.title)

    __store_cookies__()

    element = __cookie_notice_dialog__()
    if element is not None:
        log("Cookie Notice Dialog...")
        element.find_element_by_class_name("el-button--danger").click()
        log("Allow All button is clicked.")
    else:
        log("Cookie Notice is skipped.")

    element = __auth_container__()

    # no auth container means we are already logged in — nothing more to do
    if element is None:
        return

    element.click()
    log("Auth container is clicked.")
    login_driver.find_element_by_name("login_dialog_username").send_keys(
        login_account[0])
    log("Username is entered.")
    login_driver.find_element_by_id("login-password-input").send_keys(
        login_account[1])
    log("Password is entered.")
    login_driver.find_element_by_name("login_dialog_btn_login").click()
    log("Login button is clicked.")