def main():

    # filter for the sparse message exchange between the auctioneer and buyers
    f = Filter()
    f.performative = ACLMessage.INFORM

    agents = list()
    port = int(argv[1])

    logger = Logger()

    # creating the object to be auctioned
    objeto = ObjetoLeiloado('Vaso Antigo', 40)

    # creating the auctioneer agent
    agente_leiloeiro = AgenteLeiloeiro(
        AID(name=f'leiloeiro@localhost:{port}'), f, objeto, logger)
    agents.append(agente_leiloeiro)

    port += 1
    numero_de_compradores = 3

    for i in range(numero_de_compradores):
        # creating the buyer agents, each with a random budget
        agent_dinheiro = randint(100, 1000)
        agente_comprador = AgenteComprador(
            AID(name=f'comprador_{i}@localhost:{port+i}'), f, logger, agent_dinheiro)
        agents.append(agente_comprador)

    start_loop(agents)
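For context, a minimal entry point for this snippet might look like the sketch below; the base-port argument is an assumption read off the argv[1] access above, not part of the original file.

    if __name__ == '__main__':
        # e.g. "python auction.py 20000" -- the agents bind to localhost
        # ports counting up from the value passed on the command line
        main()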
Example 2
def main():
    log = Logger().log
    proxy_manager = ProxyManager()
    log('******************************************************\n'
'''
               )                                       )
            ( /(           )        (               ( /(
            )\())    )  ( /(    (   )\ )            )\()) (   (
   __ __ __((_)\  ( /(  )\())  ))\ (()/(   __ __ __((_)\  )\  )(    (    (     __ __ __
  / // // / _((_) )(_))((_)\  /((_) ((_)) / // // / _((_)((_)(()\   )\   )\   / // // /
 / // // / | \| |((_)_ | |(_)(_))   _| | / // // / | \| | (_) ((_) ((_) ((_) / // // /
/_//_//_/  | .` |/ _` || / / / -_)/ _` |/_//_//_/  | .` | | || '_|/ _ \/ _ \/_//_//_/
           |_|\_|\__,_||_\_\ \___|\__,_|           |_|\_| |_||_|  \___/\___/
'''
        '\nCreated by Niroo @NirooOfficial'
        '\nCredits to Shevi @Shevids1996\n\n'
        '******************************************************', color='blue', timestamp=False)
    log('starting tasks', color='green')

    threads = []
    i = 0
    for task in listdir('tasks'):
        threads.append(Naked(i, 'tasks/{}'.format(task), proxy_manager))
        threads[i].start()
        i += 1
Example 3
    def __init__(self, config):
        try:
            self.email = config['account'].split(':')[0]
            self.password = config['account'].split(':')[1]
        except (KeyError, IndexError):
            sys.exit('config "account" must be formatted as email:password')

        self.config = config
        self.randomWord = RandomWords()
        self.action = 'starting the oneCaptcha tool.'
        self.notification = None

        self.startTime = time.time()
        self.currentWatchTime = 0
        self.completedSearches = 0
        self.readStories = 0
        self.currentTranslations = 0
        self.completedEmailActions = 0

        self.maxSearches = random.randint(int(self.config['settings']['min_searches']), int(self.config['settings']['max_searches']))
        self.maxWatchTime = random.randint(int(self.config['settings']['min_watchTime']), int(self.config['settings']['max_watchTime']))
        self.maxStories = random.randint(int(self.config['settings']['min_newsStories']), int(self.config['settings']['max_newsStories']))
        self.maxTranslations = random.randint(int(self.config['settings']['min_translations']), int(self.config['settings']['max_translations']))
        self.maxEmailActions = random.randint(int(self.config['settings']['min_emailActions']), int(self.config['settings']['max_emailActions']))

        self.log = Logger(config['tid']).log
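Every key consumed above fixes the shape of the config; a hypothetical example with placeholder values (inferred from the reads, not taken from the project):

    {
        "tid": 0,
        "account": "user@example.com:hunter2",
        "settings": {
            "min_searches": 10, "max_searches": 30,
            "min_watchTime": 60, "max_watchTime": 300,
            "min_newsStories": 1, "max_newsStories": 5,
            "min_translations": 1, "max_translations": 4,
            "min_emailActions": 0, "max_emailActions": 2
        }
    }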
Example 4
 def __init__(self):
     self.elevators = [Elevator() for _ in range(ELEVATOR_NUM)]
     self.elevatorAI = ElevatorAI(self.elevators)
     self.people = []
     self.time = DAY_START_TIME
     self.day = 0
     self.logger = Logger()
Example 5
    def __init__(self, config):

        if not os.path.exists(config.log_path):
            os.mkdir(config.log_path)
        self.logger = Logger(config.log_path, 'client')

        self.stub = service_grpc.RecognitionStub(
            grpc.insecure_channel('{}:{}'.format(FLAGS.ip, FLAGS.port)))
Example 6
 def __init__(self, tid, config_filename):
     threading.Thread.__init__(self)
     self.tid = tid
     self.start_time = time()
     self.log = Logger(tid).log
     with open(config_filename) as task_file:
         self.T = load(task_file)
     with open('config.json') as config_file:
         self.C = load(config_file)
Example 7
 def __init__(self,
              worker_id,
              billing,
              dummy_variant,
              drop_time,
              retry,
              proxy=None):
     super().__init__()
     self.start_time = time()
     self.worker_id = worker_id
     lg = Logger(worker_id=worker_id)
     self.log = lg.log
     self.err = lg.err
     self.suc = lg.suc
     self.drop_time = drop_time
     self.variant = billing['target_variant']
     self.dummy_variant = dummy_variant
     self.retry_delay = retry
     self.build_ids = list()
     self.billing = billing
     self.s = requests.Session()
     self.s.headers = {
         'User-Agent':
         'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) '
         'Chrome/71.0.3578.98 Safari/537.36',
         'Accept':
         'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
         'Accept-Encoding':
         'gzip, deflate',
         'Accept-Language':
         'en-US,en;q=0.9',
         'Connection':
         'keep-alive',
         'Upgrade-Insecure-Requests':
         '1',
         'DNT':
         '1'
     }
     self.s.verify = False
     if proxy is not None:
         _ = proxy.split(':')
         if len(_) == 4:
             self.s.proxies = {
                 'http':
                 'http://{}:{}@{}:{}'.format(_[2], _[3], _[0], _[1]),
                 'https':
                 'https://{}:{}@{}:{}'.format(_[2], _[3], _[0], _[1])
             }
         elif len(_) == 2:
             self.s.proxies = {
                 'http': 'http://{}:{}'.format(_[0], _[1]),
                 'https': 'https://{}:{}'.format(_[0], _[1])
             }
         else:
             self.err('[error] Malformed proxy {}'.format(proxy))
             exit(-1)
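The proxy string is accepted in two layouts, host:port or host:port:user:pass. The same parsing as a standalone helper (a sketch, not part of the original class):

    def build_proxies(proxy):
        """Map 'host:port' or 'host:port:user:pass' to a requests proxy dict."""
        parts = proxy.split(':')
        if len(parts) == 4:
            host, port, user, pwd = parts
            auth = '{}:{}@{}:{}'.format(user, pwd, host, port)
        elif len(parts) == 2:
            auth = '{}:{}'.format(*parts)
        else:
            raise ValueError('malformed proxy: {}'.format(proxy))
        return {'http': 'http://' + auth, 'https': 'https://' + auth}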
Example 8
 def __init__(self, tid, config_filename, headless=False):
     threading.Thread.__init__(self)
     self.tid = tid
     self.start_time = time()
     self.log = Logger(tid).log
     self.web = Browser(showWindow=not headless)  # show the window unless running headless
     with open(config_filename) as task_file:
         self.T = load(task_file)
     with open('config.json') as config_file:
         self.C = load(config_file)
Example 9
    def __init__(self, tid, config_filename, headless=False):
        threading.Thread.__init__(self)
        self.tid = tid
        self.start_time = time()
        self.log = Logger(tid).log
        self.web = Browser(showWindow=not headless, incognito=True)
        self.gold_link = 'https://catalog.usmint.gov/basketball-hall-of-fame-2020-uncirculated-silver-dollar-20CD.html?cgid=silver-dollars#start=1'
        self.silver_link = ''

        with open(config_filename) as task_file:
            self.T = load(task_file)
Example 10
  def __init__(self, ev, folder):
    self._folder = folder
    self._ev = ev
    self._logger = Logger(self.__class__.__name__).get()

    self._net_file = xml.etree.ElementTree.parse(self._folder+'/osm.net.xml').getroot()

    self._edges_order = []
    self._edges_with_tl = []
    self.edges_to_reroute = []
    self.compute_adj = False
    self.staticdynamic = False
Example 11
def main():
    log = Logger('M').log
    log(colored('main bot template made by Alex Gompper @edzart', 'green'))
    threads = []
    i = 0
    for config in listdir('tasks'):
        if config in {'task.example.json'}:  # configs to ignore
            continue
        log('loading thread {} with config {}'.format(i, config))
        threads.append(Site(i, 'tasks/' + config, True))
        threads[i].start()
        i += 1
Example 12
def main():
    log = Logger().log
    proxy_manager = ProxyManager()
    log(
        '*************************\n\nSHOPIFY ATC V0.2.0 OPEN SOURCE\nBY ALEX GOMPPER @edzart\n\n**********************'
        '***',
        color='blue',
        timestamp=False)
    log('starting tasks', color='green')
    threads = []
    i = 0
    for task in listdir('tasks'):
        threads.append(Shopify(i, 'tasks/{}'.format(task), proxy_manager))
        threads[i].start()
        i += 1
Example 13
    def harvest(self, queue):
        log = Logger().log
        api_key = self.apiKey
        log('Harvesting Captcha...', 'info')

        s = requests.Session()

        captcha_id = s.post("http://2captcha.com/in.php?key={}&method=userrecaptcha&googlekey={}&pageurl={}".format(api_key, self.sitekey, self.url)).text.split('|')[1]
        recaptcha_answer = s.get("http://2captcha.com/res.php?key={}&action=get&id={}".format(api_key, captcha_id)).text
        log("solving ref captcha...", 'yellow')
        while 'CAPCHA_NOT_READY' in recaptcha_answer:
            sleep(1)
            recaptcha_answer = s.get("http://2captcha.com/res.php?key={}&action=get&id={}".format(api_key, captcha_id)).text
        recaptcha_answer = recaptcha_answer.split('|')[1]

        log('Solved Captcha: ' + str(recaptcha_answer), 'success')
        queue.put(recaptcha_answer)
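Since harvest() blocks while polling 2captcha and hands its token back through the queue, a caller would typically run it on a worker thread (usage sketch; solver is assumed to be an instance of the class defining harvest above):

    from queue import Queue
    from threading import Thread

    q = Queue()
    Thread(target=solver.harvest, args=(q,), daemon=True).start()
    token = q.get()  # blocks until the solved token is enqueued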
Example 14
def main():
    l = Logger(lid='M')
    log = l.log

    print(('=' * 20).center(80, ' '))
    print('GOAT Black Friday and Autocheckout'.center(80, ' '))
    print(
        """

                      _________  ___ ______  _______   ____
                     / ___/ __ \/ _ /_  __/ / ___/ /  /  _/
                    / (_ / /_/ / __ |/ /   / /__/ /___/ /  
                    \___/\____/_/ |_/_/    \___/____/___/  
                                           
        """
    )
    print('\u00a92019 Alexander Gompper'.center(80, ' '))
    print(('=' * 20).center(80, ' '))

    # Collection of workers
    workers = []

    # Collection of proxies
    log('Loading proxies')
    manager = Proxy(PROXY_FILE_PATH)
    log('Using {} proxies'.format(len(manager.proxies)))

    log('Loading accounts')
    with open(CSV_FILE_PATH) as csv_file:
        reader = csv.reader(csv_file)
        log('Loaded {} accounts'.format(sum(1 for _ in reader)))
        log('=' * 20)

        csv_file.seek(0)
        for idx, row in enumerate(reader):
            w = Worker(
                username=row[0],
                password=row[1],
                proxy=manager.get_proxy(),
                products=None,
                skip_to_idx=row[2]  # In case you crash or something you can skip all the previous entries
            )
            workers.append(w)
            workers[idx].start()
            sleep(0.1)  # makes output a little cleaner tbh
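The accounts file is read positionally: username, password, then a resume index. A hypothetical accounts CSV matching that layout:

    user1@example.com,hunter2,0
    user2@example.com,hunter2,0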
Example 15
    def __init__(self, config):
        super(Server, self).__init__()

        self.logger = Logger(config.log_path, 'server')

        self.logger.log("--- Initialization started ---")
        self.logger.info("--- Initialization started ---")

        start_time = time.time()

        self.classifier = Classifier(config.graph_path, config.labels_path,
                                     config.input_layer, config.output_layer,
                                     config.input_height, config.input_width,
                                     config.input_mean, config.input_std)

        self.logger.log("--- Total initialization took %s seconds ---" %
                        (time.time() - start_time))
        self.logger.info("--- Total initialization took %s seconds ---" %
                         (time.time() - start_time))
Example 16
def main():
    log = Logger('main').log

    log('starting monitor')

    with open('sites.txt') as sitelist:
        sites = sitelist.read().splitlines()
    log('loaded {} sites to monitor'.format(len(sites)))

    with open('proxies.txt') as proxylist:
        proxies = proxylist.read().splitlines()
    log('loaded {} proxies to monitor with'.format(len(proxies)))

    i = 0
    monitors = []
    for site in sites:
        monitors.append(Monitor(site))
        monitors[i].start()
        i += 1
Example 17
def serve():
    logger = Logger(FLAGS.log_path, 'server')

    server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))

    service = Server(FLAGS)

    service_grpc.add_RecognitionServicer_to_server(service, server)

    server.add_insecure_port('[::]:{}'.format(FLAGS.port))

    logger.log("--- Server has been started... ---")
    logger.info("--- Server has been started... ---")

    server.start()
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        server.stop(0)
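The sleep loop only keeps the main thread alive while the gRPC server runs in its thread pool; on grpcio releases that provide it, the same effect is achieved with:

    server.start()
    server.wait_for_termination()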
Example 18
def main():
    load_dotenv()
    db = Database()
    db.connect()
    amazonScraper = AmazonScraper()
    neweggScraper = NeweggScraper()
    logger = Logger('Create New Product')
    product = {}
    # logger.info("What is the URL of the product?")
    productURL = input("What is the URL of the product? ")

    isUnique = db.checkUniqueURL(productURL)

    if not isUnique:
        logger.error('ProductURL is already set up for scraping')
        return

    # logger.info(
    #     "What is the name of the product (this is just the name you want to give it)?")
    productName = input(
        "What is the name of the product (this is just the name you want to give it)? "
    )
    if 'amazon' in productURL:
        price = amazonScraper.getPrice(productURL)
    elif 'newegg' in productURL:
        price = neweggScraper.getPrice(productURL)
    else:
        logger.error('Unsupported retailer URL; only Amazon and Newegg are handled')
        return
    product["ProductURL"] = productURL
    product['ProductName'] = productName
    product["LatestPrice"] = price
    product["LowestPriceAllTime"] = price
    product["LowestPriceAllTimeDate"] = datetime.date.today()
    product["LowestPriceMonth"] = price
    product["LowestPriceMonthDate"] = datetime.date.today()
    product["LowestPriceWeek"] = price
    product["LowestPriceWeekDate"] = datetime.date.today()
    product["LastDatePricePulled"] = datetime.date.today()

    db.insert(product)
    db.closeConnection()
Example 19
def main():
    l = Logger(lid='M')
    log = l.log
    error = l.error

    s = requests.Session()
    s.verify = False
    s.headers = {
        'Host': 'www.goat.com',
        'Accept-Encoding': 'gzip,deflate',
        'Connection': 'keep-alive',
        'Accept': '*/*',
        'Accept-Language': 'en-US;q=1',
        'User-Agent': 'GOAT/2.7.0 (iPhone; iOS 12.1; Scale/3.00)'  # Keep this updated.
    }

    print(('=' * 20).center(80, ' '))
    print('GOAT Black Friday / Summer Raffle Tickets'.center(80, ' '))
    print(
        """

                      _________  ___ ______  _______   ____
                     / ___/ __ \/ _ /_  __/ / ___/ /  /  _/
                    / (_ / /_/ / __ |/ /   / /__/ /___/ /  
                    \___/\____/_/ |_/_/    \___/____/___/  
                                           
        """
    )
    print('\u00a92018 Alexander Gompper'.center(80, ' '))
    print(('=' * 20).center(80, ' '))

    # Collection of workers
    workers = []

    # Collection of proxies
    log('Loading proxies')
    manager = Proxy(PROXYFILE)
    log('Using {} proxies'.format(len(manager.proxies)))

    # TODO: change status code handling for 429 errors from scraping

    # Collection of products
    log('Loading products')
    products = []
    for page in range(5):
        sleep(1)
        url = 'https://www.goat.com/api/v1/contests/3?page={}'.format(page)
        try:
            r = s.get(
                url,
                timeout=5
            )
            if r.status_code == 200:
                try:
                    r = r.json()
                    for prod in r['productTemplates']:
                        products.append(prod['id'])
                        # log('{} \t\t|| {}'.format(prod['id'], prod['name'].encode('utf-8')))
                    log('scraped {} ids'.format(len(products)))
                except KeyError:
                    error('[failed] failed to scrape product ids')
                    return False
            else:
                error('got bad status code {} from pid scrape'.format(r.status_code))
                return False
        except Timeout:
            error('[error] timeout from pid scrape')
            return False
    print(products)
    sleep(3)

    # Collection of locations
    log('Loading locations')
    # Need to have a dummy worker to log in so that we don't get denied looking for locations
    # No clue why you must be logged in to get location ids but not social... nice.
    dummy_worker = Worker(username=DUMMYUSER, password=DUMMYPASS)
    dummy_worker.login()
    locations = []
    url = 'https://www.goat.com/api/v1/contests/3/locations'
    try:
        r = dummy_worker.s.get(
            url,
            timeout=5
        )
        if r.status_code == 200:
            try:
                r = r.json()
                for loc in r:
                    locations.append(loc['id'])
                    # log('{} \t {}'.format(loc['id'], loc['name'].encode('utf-8')))
                log('scraped {} loc ids'.format(len(locations)))
            except KeyError:
                error("couldn't find location ids")
                return False
        else:
            error('got bad status code {} from loc scrape'.format(r.status_code))
            return False
    except requests.exceptions.Timeout:
        error('timeout from loc scrape')
        return False
    print(locations)
    sleep(3)

    log('Loading accounts')
    with open(CSVFILE) as csvfile:
        reader = csv.reader(csvfile)
        log('Loaded {} accounts'.format(sum(1 for _ in reader)))
        log('=' * 20)

        csvfile.seek(0)
        for idx, row in enumerate(reader):
            w = Worker(
                username=row[0],
                password=row[1],
                proxy=manager.get_proxy(),
                products=products,
                locations=locations,
                skip_to_idx=row[2]  # In case you crash or something you can skip all the previous entries
            )
            w.start()
            workers.append(w)
            sleep(0.05)
Example 20
                    default='covid',
                    help="Set name contract tag")
parser.add_argument("--connection",
                    default='tgwnew',
                    help="Set name of connection")
parser.add_argument("--max_part",
                    default=10000,
                    help="Set max part of uuids list")
parser.add_argument("--verbose",
                    default=True,
                    help="Set dublicate output to stdout")
args = parser.parse_args()

config = dict()
logfile = 'covid_maker.log'
logger = Logger(logfile)
#logger.set_param('covid_maker.log')
conn = args.connection

# Press the green button in the gutter to run the script.
if __name__ == '__main__':
    #args.date_start, args.date_end = ('2021-06-01','2021-06-02')
    t0 = timer.get_time()
    report = DIT_report(args.date_start, args.date_end, args.config,
                        args.output, args.contract)
    report.tunnels.start()
    #tunnel_cfg = report.get_config_db('sshtunnel')

    # list of templates
    query = "SELECT message_template_id FROM message_template_tag WHERE tag_id=(select id from tag where text = %s ) ORDER BY message_template_id;"
    logger.log(f"Query: {query}, {args.contract}", 'INFO', args.verbose)
Example 21
#!/usr/bin/env python3

import requests
import os
from bs4 import BeautifulSoup
from classes.logger import Logger
from classes.tools import Tools
from classes.proxies import Proxy
import webbrowser
import time
from requests.adapters import HTTPAdapter
import re

log = Logger().log
tools = Tools()
empty_warn = 0


class Supreme:
    proxy = Proxy()

    def __init__(self):
        self.override = False
        self.chromepath = 'C:/Program Files (x86)/Google/Chrome/Application/chrome.exe %s --incognito'
        self.jackets = 'http://www.supremenewyork.com/shop/all/jackets'
        self.shirts = 'http://www.supremenewyork.com/shop/all/shirts'
        self.sweaters = 'http://www.supremenewyork.com/shop/all/tops_sweaters'
        self.tshirts = 'http://www.supremenewyork.com/shop/all/t-shirts'
        self.sweatshirts = 'http://www.supremenewyork.com/shop/all/sweatshirts'
        self.hats = 'http://www.supremenewyork.com/shop/all/hats'
        self.pants = 'http://www.supremenewyork.com/shop/all/pants'
Example 22
    def __init__(self,
                 graph_path,
                 labels_path,
                 input_layer,
                 output_layer,
                 input_height=224,
                 input_width=224,
                 input_mean=0,
                 input_std=255):

        self.logger = Logger()
        """ -------------------------------------- Original session -------------------------------------- """

        start_time = time.time()

        # We load the protobuf file from the disk and parse it to retrieve the unserialized graph_def
        with tf.gfile.GFile(graph_path, "rb") as f:
            self.graph = tf.Graph()
            graph_def = tf.GraphDef()
            graph_def.ParseFromString(f.read())

        # Then, we import the graph_def into a new Graph and return it
        with self.graph.as_default():
            # The name var will prefix every op/nodes in your graph
            # Since we load everything in a new graph, this is not needed
            tf.import_graph_def(graph_def)

        # Creating a session one time to reduce the time for serving a lot of images
        self.session = tf.Session(graph=self.graph)

        self.logger.log(
            "--- Deep Neural Network session initialization took %s seconds ---"
            % (time.time() - start_time))
        self.logger.info(
            "--- Deep Neural Network session initialization took %s seconds ---"
            % (time.time() - start_time))
        """ -------------------------------------- Input & output -------------------------------------- """
        # Access input and output nodes
        self.input_operation = self.graph.get_tensor_by_name('import/' +
                                                             input_layer)
        self.output_operation = self.graph.get_tensor_by_name('import/' +
                                                              output_layer)
        """ --------------------------------- Image preprocessing session ---------------------------------- """

        start_time = time.time()

        # Image processing graph
        self.image_graph = tf.Graph()
        with self.image_graph.as_default():
            self.image_path = tf.placeholder(tf.string)
            file_reader = tf.read_file(self.image_path, "file_reader")

            # Define image extension
            ext = tf.string_split([self.image_path], '.').values[1]

            def read_jpg(fr):
                return tf.image.decode_jpeg(fr, channels=3, name="jpeg_reader")

            def read_png(fr):
                return tf.image.decode_png(fr, channels=3, name="png_reader")

            def read_bmp(fr):
                return tf.image.decode_bmp(fr, name="bmp_reader")

            def read_gif(fr):
                return tf.image.decode_gif(fr, name="gif_reader")

            # Load image bytes
            image = tf.case(
                {
                    tf.equal(ext, tf.constant('jpg', dtype=tf.string)):
                    lambda: read_jpg(file_reader),
                    tf.equal(ext, tf.constant('png', dtype=tf.string)):
                    lambda: read_png(file_reader),
                    tf.equal(ext, tf.constant('bmp', dtype=tf.string)):
                    lambda: read_bmp(file_reader),
                    tf.equal(ext, tf.constant('gif', dtype=tf.string)):
                    lambda: read_gif(file_reader)
                },
                default=lambda: read_jpg(file_reader),
                exclusive=True)

            # Convert input image and transform to [0; 1) range
            if image.dtype != tf.float32:
                image = tf.image.convert_image_dtype(image, dtype=tf.float32)

            # Central crop does not work without that
            image.set_shape((None, None, None))

            # Incidentally, this also increases overall accuracy
            image = tf.image.central_crop(image, central_fraction=0.875)

            # Resize the image to the specified height and width.
            if input_height and input_width:
                image = tf.expand_dims(image, 0)
                image = tf.image.resize_bilinear(image,
                                                 [input_height, input_width],
                                                 align_corners=False)

            # Transform to [-1; 1) range
            self.image_output = tf.multiply(tf.subtract(image, 0.5), 2.0)

            # float_caster = tf.cast(image, tf.float32)
            # dims_expander = tf.expand_dims(float_caster, 0)
            # resized = tf.image.resize_bilinear(dims_expander, [input_height, input_width])
            # self.image_output = tf.divide(tf.subtract(resized, input_mean), input_std)

        self.image_sess = tf.Session(graph=self.image_graph)

        self.logger.log(
            "--- Image preprocessing session initialization took %s seconds ---"
            % (time.time() - start_time))
        self.logger.info(
            "--- Image preprocessing session initialization took %s seconds ---"
            % (time.time() - start_time))
        """ -------------------------------------- Labels loading -------------------------------------- """
        # Loading captions of labels
        self.labels = []
        self.load_labels(labels_path)
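With both sessions and the input/output tensors prepared above, inference reduces to two run() calls; a hypothetical classify method built on those attributes (the class's real inference method is not shown here):

    def classify(self, image_path):
        # preprocess through the dedicated image graph
        image = self.image_sess.run(self.image_output,
                                    feed_dict={self.image_path: image_path})
        # score the preprocessed tensor with the classification graph
        preds = self.session.run(self.output_operation,
                                 feed_dict={self.input_operation: image})
        return sorted(zip(self.labels, preds[0]), key=lambda p: p[1], reverse=True)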
Example 23
 def __init__(self, bot):
     super().__init__(bot)
     self.logger = Logger("CryBot", "logs")
Example 24
    """)
    parser.add_argument('-wf',
                        '--wfolder',
                        help='Working Folder',
                        type=str,
                        default='./')
    args = parser.parse_args()

    # Initialization
    error_found = False
    tic = time.time()
    path = Parameters(os.path.join(args.wfolder, 'PAR/path.par'),
                      fsource='dict',
                      how='to_dict',
                      multiple=False)
    log = Logger(os.path.join(args.wfolder, 'logs', 'e_bbt_creator.log'),
                 prepend_timestamp=True)
    dbpar = Parameters(os.path.join(args.wfolder, 'resources', 'db.par'),
                       fsource='list',
                       how='to_list',
                       multiple=True)
    par = Parameters(os.path.join(args.wfolder, 'resources', 'par.par'),
                     fsource='dict',
                     how='to_dict',
                     multiple=False)
    fg = Parameters(os.path.join(args.wfolder, 'resources', 'fg.par'),
                    fsource='list',
                    how='to_list',
                    multiple=True)
    calcfg = Parameters(os.path.join(args.wfolder, 'resources', 'calcfg.par'),
                        fsource='list',
                        how='to_list',
Example 25
import time, inquirer
from threading import Thread
from classes.utils import *
from classes.logger import Logger
from classes.openfile import Open
from modules.account import Account

if __name__ == '__main__':
    log = Logger("M").log
    read = Open().read
    logo()

    git = Github()
    git.printCommit(amount=1)

    log("Loading oneCaptcha settings...\n", 'note')
    accounts = read('config/accounts.json')

    for account in accounts:
        questions = [
            inquirer.List('selection', message=f"Action choice for {str(account['account']).split(':')[0]}",
                          choices=['Start oneCaptcha Engine', 'Regular Browsing Session', 'Update Cookie Session'],
                          ),
        ]

        answers = inquirer.prompt(questions)
        acc = Account(account)

        if answers['selection'] == 'Start oneCaptcha Engine':
            Thread(target=acc.startProcess, daemon=True).start()
        elif answers['selection'] == "Regular Browsing Session":
Example 26
    optParser.add_option("--withpreemption", action="store_true",
                         default=False, help="Indicates if preemption solution will be used")
    (options, args) = optParser.parse_args()
    return options


# this is the main entry point of this script
if __name__ == "__main__":
    options = get_options()

    if not options.scenario_folder:
        sys.exit("You must specify the Scenario Folder using the '--scenario' option")

    Logger.set_globals(options.scenario_folder, 'desenv', logging.INFO, options.withpreemption)

    logger = Logger('Runner').get()

    logger.info(options)

    #scenario = options.scenario

    # this script has been called from the command line. It will start sumo as a
    # server, then connect and run
    if options.nogui:
        sumoBinary = checkBinary('sumo')
    else:
        sumoBinary = checkBinary('sumo-gui')

    # this is the normal way of using traci. sumo is started as a
    # subprocess and then the python script connects and runs
    #traci.start([sumoBinary, "-c", './' + scenario + '/osm.sumocfg',
Example 27
import time
import yaml

from pyasesm import ActiveLists

from classes.logger import Logger
from classes.configuration import Configuration
from classes.helper import Helper
from classes.misp import Misp
from classes.arcsight import ArcSight

LOGLEVEL='info'

if __name__ == '__main__':
    logger = Logger()
    logger.customize(LOGLEVEL)
    config = Configuration('Script synchronize MISP with ArcSight.')
    config.run_parser()
    #logger.customize(config.get('loglevel'))

    proxy = {'http': config.get('proxy'), 'https': config.get('proxy')}

    misp = Misp(config.get('misp-url'), config.get('misp-key'), not config.get('misp-no-verify-cert'))

    misp.loadRules(config.get('rules-file'))
    misp.downloadOrganisations()
    misp.loadPriorities(config.get('priorities-file'))

    if config.get('input'):
Example 28
        '--wfolder',
        help='Working Folder (default folder from which this script is run)',
        type=str,
        default='./')
    parser.add_argument(
        '-ts',
        '--timestamp',
        help='date in which the bbt_creator was run (format is YYYYMMDD)',
        type=str,
        default=None)
    args = parser.parse_args()

    # Initialization
    tic = time.time()
    log = Logger(
        os.path.join(args.wfolder, 'logs',
                     args.timestamp + '_bbt_analyzer.log'))
    log.update(version)
    log.update('reading BBTs...')
    with open(os.path.join(args.wfolder, 'data', args.timestamp + '_BBTs.pic'),
              'rb') as f:
        BBTs = pic.load(f)
    pBBTs = process_bbts(BBTs)
    log.update('performing nbbs by natoms and multi analysis...')
    df = get_bbs_by_natoms(pBBTs)
    df.to_csv(os.path.join(
        args.wfolder, 'data',
        args.timestamp + '_nbbs_by_natoms_and_multi_analysis.csv'),
              index=False)
    log.update('performing bbts by bbtype analysis...')
    df = get_bbts_by_bbtype(pBBTs, minbbs=0)
Example 29
 error_found = False
 path = Parameters(os.path.join(args.wfolder, 'resources', 'path.par'), fsource='dict', how='to_dict', multiple=False)
 par = Parameters(os.path.join(args.wfolder, 'resources', 'par.par'), fsource='dict', how='to_dict', multiple=False)
 fg = Parameters(os.path.join(args.wfolder, 'resources', 'fg.par'), fsource='list', how='to_list', multiple=True)
 reaction = Parameters(os.path.join(args.wfolder, 'resources', 'reaction.par'), fsource='list', how='to_list',
                       multiple=True)
 enum_reaction = Parameters(os.path.join(args.wfolder, 'resources', 'enum_reaction.par'), fsource='list',
                            how='to_list', multiple=True)
 deprotection = Parameters(os.path.join(args.wfolder, 'resources', 'deprotection.par'), fsource='list',
                           how='to_list', multiple=True)
 enum_deprotection = Parameters(os.path.join(args.wfolder, 'resources', 'enum_deprotection.par'), fsource='list',
                                how='to_list', multiple=True)
 headpieces = Parameters(os.path.join(args.wfolder, 'resources', 'headpieces.par'), fsource='list', how='to_list',
                         multiple=True)
 token = '_'.join([path.par['Database_Run'], path.par['run']])
 log = Logger(os.path.join(args.wfolder, 'logs', token + '_e_designer.log'))
 log.update(version)
 log.insert_file_in_log(os.path.join(args.wfolder, 'resources', 'path.par'), 'path parameters')
 log.insert_file_in_log(os.path.join(args.wfolder, 'resources', 'par.par'), 'par parameters')
 log.insert_file_in_log(os.path.join(args.wfolder, 'resources', 'fg.par'), 'fg parameters')
 log.insert_file_in_log(os.path.join(args.wfolder, 'resources', 'reaction.par'), 'reaction parameters')
 log.insert_file_in_log(os.path.join(args.wfolder, 'resources', 'enum_reaction.par'), 'enum_reaction parameters')
 log.insert_file_in_log(os.path.join(args.wfolder, 'resources', 'deprotection.par'), 'deprotection parameters')
 log.insert_file_in_log(os.path.join(args.wfolder, 'resources', 'enum_deprotection.par'),
                        'enum_deprotection parameters')
 log.insert_file_in_log(os.path.join(args.wfolder, 'resources', 'headpieces.par'), 'headpieces parameters')
 for item in [path, par, fg, reaction, enum_reaction, deprotection, enum_deprotection, headpieces]:
     if item.success is None:
         error_found = True
         log.update('    Error(s) found while reading parameters: ' + item.path)
         for error in item.errors:
Example 30
from classes.parameter_reader import Parameters

if __name__ == '__main__':
    # Arg parser
    parser = argparse.ArgumentParser(description="""incompatibility_mapper: 
    This script creates files to visualize incompatibilities in reactions and functional groups.
    """)
    parser.add_argument('-wf',
                        '--wfolder',
                        help='Working Folder',
                        type=str,
                        default='./')
    args = parser.parse_args()
    # Initialization
    tic = time.time()
    log = Logger(
        os.path.join(args.wfolder, 'logs', 'incompatibility_mapper.log'))
    log.update(version)
    fg = Parameters(os.path.join(args.wfolder, 'resources/fg.par'),
                    fsource='list',
                    how='to_list',
                    multiple=True)
    reaction = Parameters(os.path.join(args.wfolder, 'resources/reaction.par'),
                          fsource='list',
                          how='to_list',
                          multiple=True)
    deprotection = Parameters(os.path.join(args.wfolder,
                                           'resources/deprotection.par'),
                              fsource='list',
                              how='to_list',
                              multiple=True)