class CommandErrorHandler(Cog):
    def __init__(self, bot):
        super().__init__(bot)
        self.logger = Logger("CryBot", "logs")

    async def cog_command_error(self, ctx, error):
        # str(error) instead of error.message: Python 3 exceptions have no
        # .message attribute, and ctx.send is a coroutine that must be awaited.
        self.logger.error(str(error))
        await ctx.send(str(error))
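# A minimal registration sketch, assuming a standard discord.py-style bot
# object is in scope (the name "bot" is an assumption):
#
#   bot.add_cog(CommandErrorHandler(bot))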
def buildMsg(self, name, params):
    pth = mypath("mails")
    fname = os.path.join(pth, "mail_" + name + ".txt")
    params = self.buildParams(name, params)
    Logger.debug("preparing mail %s w params %s", fname, str(params))
    if not os.path.exists(fname):
        raise Mailer.MailNotFoundError(name)
    with open(fname, "r") as f:
        body = f.read()
    params = self.buildFiles(body, params, pth)
    return body.format(m=params)
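# A hypothetical template sketch for mails/mail_welcome.txt, following the
# {m[...]} placeholder convention implied by body.format(m=params) above
# (the file name and the "user"/"link" keys are assumptions):
#
#   Hello {m[user]},
#   please confirm your account at {m[link]}.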
def __init__(self, ev, folder):
    self._folder = folder
    self._ev = ev
    self._logger = Logger(self.__class__.__name__).get()
    self._net_file = xml.etree.ElementTree.parse(self._folder + '/osm.net.xml').getroot()
    self._edges_order = []
    self._edges_with_tl = []
    self.edges_to_reroute = []
    self.compute_adj = False
    self.staticdynamic = False
def readFile(self, fname, enc):
    Logger.debug("Adding file %s %s", fname, enc)
    with open(fname, 'rb') as f:
        data = f.read()
    if enc is None:
        return data
    if enc == 'base64':
        data = base64.encodestring(data)
    elif enc == 'base64l':
        # 'base64l' strips line breaks from the encoded output
        # (base64.encodestring is the Python 2 API, removed in Python 3.9)
        data = base64.encodestring(data).replace("\r", "").replace("\n", "")
    else:
        raise Exception("Unknown file encoding: " + enc)
    return data
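# A self-contained Python 3 equivalent of the 'base64l' branch above (an
# assumption that the snippet targets Python 2): base64.encodebytes replaces
# encodestring, returns bytes, and emits only '\n' line breaks, so stripping
# must use bytes literals. The helper name is hypothetical:
import base64

def read_file_b64l_py3(fname):
    # read raw bytes and return newline-free base64
    with open(fname, 'rb') as f:
        return base64.encodebytes(f.read()).replace(b"\n", b"")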
def __init__(self, thread_id, task_file, proxy_manager):
    threading.Thread.__init__(self)
    self.start_time = time()
    Logger.set_tid(thread_id)
    self.S = requests.Session()
    with open('config.json') as cfg:
        self.c = load(cfg)
    with open(task_file) as tsk:
        self.t = load(tsk)
    if self.t['exec_config']['use_proxies']:
        proxy = proxy_manager.get_next_proxy()
        log('[{}] adding proxy to task'.format(proxy), color='blue')
        p = {
            'http': 'http://{}'.format(proxy),
            'https': 'https://{}'.format(proxy)
        }
def main():
    log = Logger().log
    proxy_manager = ProxyManager()
    log('******************************************************\n'
        # (figlet-style "Naked" / "Niroo" wordmark banner omitted: its
        #  line breaks are unrecoverable from the flattened source)
        '\nCreated by Niroo @NirooOfficial'
        '\nCredits to Shevi @Shevids1996\n\n'
        '******************************************************',
        color='blue', timestamp=False)
    log('starting tasks', color='green')
    threads = []
    i = 0
    for task in listdir('tasks'):
        threads.append(Naked(i, 'tasks/{}'.format(task), proxy_manager))
        threads[i].start()
        i += 1
def main():
    # filter for sparse message exchange between auctioneer and buyers
    f = Filter()
    f.performative = ACLMessage.INFORM
    agents = list()
    port = int(argv[1])
    logger = Logger()
    # create the object to be auctioned
    objeto = ObjetoLeiloado('Vaso Antigo', 40)
    # create the auctioneer agent
    agente_leiloeiro = AgenteLeiloeiro(
        AID(name=f'leiloeiro@localhost:{port}'), f, objeto, logger)
    agents.append(agente_leiloeiro)
    port += 1
    numero_de_compradores = 3
    for i in range(numero_de_compradores):
        # create the buyer agents, each with a random budget
        agent_dinheiro = randint(100, 1000)
        agente_comprador = AgenteComprador(
            AID(name=f'comprador_{i}@localhost:{port+i}'), f, logger, agent_dinheiro)
        agents.append(agente_comprador)
    start_loop(agents)
def __init__(self, config):
    try:
        self.email = config['account'].split(':')[0]
        self.password = config['account'].split(':')[1]
    except (KeyError, IndexError):
        # malformed 'account' entry (expected "email:password")
        sys.exit(1)
    self.config = config
    self.randomWord = RandomWords()
    self.action = 'starting the oneCaptcha tool.'
    self.notification = None
    self.startTime = time.time()
    self.currentWatchTime = 0
    self.completedSearches = 0
    self.readStories = 0
    self.currentTranslations = 0
    self.completedEmailActions = 0
    settings = self.config['settings']
    self.maxSearches = random.randint(int(settings['min_searches']), int(settings['max_searches']))
    self.maxWatchTime = random.randint(int(settings['min_watchTime']), int(settings['max_watchTime']))
    self.maxStories = random.randint(int(settings['min_newsStories']), int(settings['max_newsStories']))
    self.maxTranslations = random.randint(int(settings['min_translations']), int(settings['max_translations']))
    self.maxEmailActions = random.randint(int(settings['min_emailActions']), int(settings['max_emailActions']))
    self.log = Logger(config['tid']).log
def __init__(self, tid, config_filename):
    threading.Thread.__init__(self)
    self.tid = tid
    self.start_time = time()
    self.log = Logger(tid).log
    with open(config_filename) as task_file:
        self.T = load(task_file)
    with open('config.json') as config_file:
        self.C = load(config_file)
def __init__(self, worker_id, billing, dummy_variant, drop_time, retry, proxy=None):
    super().__init__()
    self.start_time = time()
    self.worker_id = worker_id
    lg = Logger(worker_id=worker_id)
    self.log = lg.log
    self.err = lg.err
    self.suc = lg.suc
    self.drop_time = drop_time
    self.variant = billing['target_variant']
    self.dummy_variant = dummy_variant
    self.retry_delay = retry
    self.build_ids = list()
    self.billing = billing
    self.s = requests.session()
    self.s.headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) '
                      'Chrome/71.0.3578.98 Safari/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'en-US,en;q=0.9',  # fixed typo: was 'Accept-Languate'
        'Connection': 'keep-alive',
        'Upgrade-Insecure-Requests': '1',
        'DNT': '1'
    }
    self.s.verify = False
    if proxy is not None:
        _ = proxy.split(':')
        if len(_) == 4:
            # host:port:user:pass -> authenticated proxy URL
            self.s.proxies = {
                'http': 'http://{}:{}@{}:{}'.format(_[2], _[3], _[0], _[1]),
                'https': 'https://{}:{}@{}:{}'.format(_[2], _[3], _[0], _[1])
            }
        elif len(_) == 2:
            # host:port -> unauthenticated proxy URL
            self.s.proxies = {
                'http': 'http://{}:{}'.format(_[0], _[1]),
                'https': 'https://{}:{}'.format(_[0], _[1])
            }
        else:
            self.err('[error] Malformed proxy {}'.format(proxy))
            exit(-1)
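# A self-contained sketch of the proxy-string convention parsed above; the
# helper name and sample values are hypothetical:
def proxy_to_urls(proxy):
    parts = proxy.split(':')
    if len(parts) == 4:
        host, port, user, pwd = parts
        return {scheme: '{}://{}:{}@{}:{}'.format(scheme, user, pwd, host, port)
                for scheme in ('http', 'https')}
    if len(parts) == 2:
        host, port = parts
        return {scheme: '{}://{}:{}'.format(scheme, host, port)
                for scheme in ('http', 'https')}
    raise ValueError('Malformed proxy {}'.format(proxy))

# proxy_to_urls('127.0.0.1:8080')            -> unauthenticated mapping
# proxy_to_urls('127.0.0.1:8080:user:pass')  -> credentials embedded in URL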
def __init__(self, tid, config_filename, headless=False):
    threading.Thread.__init__(self)
    self.tid = tid
    self.start_time = time()
    self.log = Logger(tid).log
    # showWindow=not headless: headless=True should hide the window
    # (the original passed headless directly, inverting the intent)
    self.web = Browser(showWindow=not headless)
    with open(config_filename) as task_file:
        self.T = load(task_file)
    with open('config.json') as config_file:
        self.C = load(config_file)
def __init__(self, config):
    super(Server, self).__init__()
    self.logger = Logger(config.log_path, 'server')
    self.logger.info("--- Initialization started ---")
    start_time = time.time()
    self.classifier = Classifier(config.graph_path, config.labels_path,
                                 config.input_layer, config.output_layer,
                                 config.input_height, config.input_width,
                                 config.input_mean, config.input_std)
    self.logger.info("--- Total initialization took %s seconds ---" % (time.time() - start_time))
def __init__(self): print "starting smarthome..." self.setPid() self.threads = [] self.dispatcher = Dispatcher() self.config = Config(self.dispatcher) self.values = Values() self.hue = Hue(self.config.getHueIP(), self.dispatcher) self.hue.start() self.threads.append(self.hue) self.switch = Switch(self.dispatcher, self.config.getSwitchConfig()) self.pilight = PilightClient(self.dispatcher) self.pilight.registerCallback(self.switch.callback, 'protocol', ['arctech_screen']) self.pilight.registerCallback(self.climateCallback, 'protocol', ['alecto_ws1700']) self.pilight.start() self.threads.append(self.pilight) if self.config.hasLCD(): self.lcd = Lcd(self.values) self.lcd.start() self.threads.append(self.lcd) self.fhem = Fhem(self.config.getFhemIp(), self.config.getFhemPort(), self.dispatcher) self.fhem.registerCallback(self.fhemCallback) self.config.initDevices(self.fhem, self.values) self.fhem.start() self.threads.append(self.fhem) self.api = Api(self.values, self.dispatcher) self.events = Events(self.values, self.dispatcher) self.config.initEvents(self.events) self.events.start() self.threads.append(self.events) self.logger = Logger(self.values) self.logger.start() self.threads.append(self.logger) self.highcharts = Highcharts(self.logger, self.dispatcher) self.webserver = Webserver(self.values, self.dispatcher, self.config.getWebserverPort()) self.webserver.start() self.dispatcher.start() self.threads.append(self.dispatcher) self.serve() self.clearPid()
def __init__(self, tid, config_filename, headless=False):
    threading.Thread.__init__(self)
    self.tid = tid
    self.start_time = time()
    self.log = Logger(tid).log
    self.web = Browser(showWindow=not headless, incognito=True)
    self.gold_link = ('https://catalog.usmint.gov/basketball-hall-of-fame-2020-uncirculated-silver-dollar-20CD.html'
                      '?cgid=silver-dollars#start=1')
    self.silver_link = ''
    with open(config_filename) as task_file:
        self.T = load(task_file)
class Client(object):
    TOP_K = 3

    def __init__(self, config):
        if not os.path.exists(FLAGS.log_path):
            os.mkdir(FLAGS.log_path)
        self.logger = Logger(config.log_path, 'client')
        self.stub = service_grpc.RecognitionStub(
            grpc.insecure_channel('{}:{}'.format(FLAGS.ip, FLAGS.port)))

    def RecognizeTest(self, filepath):
        self.logger.info('--- Performing recognition ---')
        request = service_pb.Request()
        # Extract data from image file
        with open(filepath, 'rb') as file:
            request.image.data = file.read()
        # Define file extension
        request.image.format = os.path.basename(filepath).split('.')[-1]
        start_time = time.time()
        # Perform request
        responses = self.stub.Recognize(iter([request]))
        results = None
        for response in responses:
            if response.status.code != 0:
                # non-zero status signals failure (the original compared
                # == 0, which bailed out on the success code)
                print('Result = {} '.format(response.status.text))
                return False
            if len(response.label) == 0:
                return False
            results = sorted(response.label, key=lambda l: -l.probability)[:self.TOP_K]
        end_time = (time.time() - start_time)
        self.logger.info("--- Recognition took %s seconds ---" % end_time)
        print('Results:')
        for result in results:
            print("'{}' with probability {}%.".format(
                result.text, math.floor(result.probability * 100)))
        return True
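# A minimal usage sketch, assuming FLAGS is configured as above; the image
# path is a placeholder:
#
#   client = Client(FLAGS)
#   client.RecognizeTest('sample.jpg')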
def mail(self, name=0, **kwargs):
    name = str(name)
    cfg = Config.smtp
    Logger.debug("sending mail %s%s from %s to %s", name, str(kwargs), cfg['from'], cfg['to'])
    usr = cfg.get('user')
    pswd = cfg.get('password')
    msg = self.buildMsg(name, kwargs)
    dump = kwargs.get("dump")
    if dump:
        with open(dump, "w") as f:
            f.write(msg)
        Logger.info("Message %s saved to %s", name, dump)
        return
    cli = smtplib.SMTP(cfg['host'])
    if cfg.get('ssl') == True:
        cli.starttls()
    if usr and pswd:
        cli.ehlo()
        cli.login(usr, pswd)
    else:
        cli.helo()
    cli.sendmail(cfg['from'], kwargs.get('to') or cfg['to'], msg)
    cli.quit()
    Logger.info("Message %s sent", name)
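# A minimal usage sketch, assuming Config.smtp is populated and a template
# named "welcome" exists (the template name and kwargs are assumptions):
#
#   Mailer().mail("welcome", to="user@example.com", user="Ada")
#   Mailer().mail("welcome", dump="/tmp/welcome.txt")  # write to file instead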
def main():
    log = Logger('M').log
    log(colored('main bot template made by Alex Gompper @edzart', 'green'))
    threads = []
    i = 0
    for config in listdir('tasks'):
        if config in {'task.example.json'}:  # configs to ignore
            continue
        log('loading thread {} with config {}'.format(i, config))
        threads.append(Site(i, 'tasks/' + config, True))
        threads[i].start()
        i += 1
def main():
    load_dotenv()
    db = Database()
    db.connect()
    amazonScraper = AmazonScraper()
    neweggScraper = NeweggScraper()
    logger = Logger('Create New Product')
    product = {}
    # logger.info("What is the URL of the product?")
    productURL = input("What is the URL of the product? ")
    isUnique = db.checkUniqueURL(productURL)
    if not isUnique:
        logger.error('ProductURL is already setup for scraping')
        return
    # logger.info(
    #     "What is the name of the product (this is just the name you want to give it)?")
    productName = input(
        "What is the name of the product (this is just the name you want to give it)? "
    )
    if 'amazon' in productURL:
        price = amazonScraper.getPrice(productURL)
    elif 'newegg' in productURL:
        price = neweggScraper.getPrice(productURL)
    else:
        # guard added: without it, price would be unbound for other stores
        logger.error('Unsupported store URL')
        return
    product["ProductURL"] = productURL
    product['ProductName'] = productName
    product["LatestPrice"] = price
    product["LowestPriceAllTime"] = price
    product["LowestPriceAllTimeDate"] = datetime.date.today()
    product["LowestPriceMonth"] = price
    product["LowestPriceMonthDate"] = datetime.date.today()
    product["LowestPriceWeek"] = price
    product["LowestPriceWeekDate"] = datetime.date.today()
    product["LastDatePricePulled"] = datetime.date.today()
    db.insert(product)
    db.closeConnection()
def main():
    log = Logger().log
    proxy_manager = ProxyManager()
    log('*************************\n\nSHOPIFY ATC V0.2.0 OPEN SOURCE\nBY ALEX GOMPPER @edzart\n\n'
        '*************************',
        color='blue', timestamp=False)
    log('starting tasks', color='green')
    threads = []
    i = 0
    for task in listdir('tasks'):
        threads.append(Shopify(i, 'tasks/{}'.format(task), proxy_manager))
        threads[i].start()
        i += 1
def harvest(self, queue):
    log = Logger().log
    api_key = self.apiKey
    log('Harvesting Captcha..', 'info')
    s = requests.Session()
    # submit the recaptcha job, then poll until 2captcha returns a token
    captcha_id = s.post(
        "http://2captcha.com/in.php?key={}&method=userrecaptcha&googlekey={}&pageurl={}".format(
            api_key, self.sitekey, self.url)).text.split('|')[1]
    recaptcha_answer = s.get(
        "http://2captcha.com/res.php?key={}&action=get&id={}".format(api_key, captcha_id)).text
    log("solving ref captcha...", 'yellow')
    while 'CAPCHA_NOT_READY' in recaptcha_answer:
        sleep(1)
        recaptcha_answer = s.get(
            "http://2captcha.com/res.php?key={}&action=get&id={}".format(api_key, captcha_id)).text
    recaptcha_answer = recaptcha_answer.split('|')[1]
    log('Solved Captcha: ' + str(recaptcha_answer), 'success')
    queue.put(recaptcha_answer)
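# A minimal usage sketch; "harvester" stands for an instance of the class
# whose apiKey/sitekey/url fields the method above reads (an assumption):
#
#   from queue import Queue
#   q = Queue()
#   harvester.harvest(q)
#   token = q.get()  # blocks until the 2captcha token is ready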
def main():
    l = Logger(lid='M')
    log = l.log
    print(('=' * 20).center(80, ' '))
    print('GOAT Black Friday and Autocheckout'.center(80, ' '))
    print(
        """
  _________  ___  ______   _______   ____
 / ___/ __ \/ _ /_  __/   / ___/ /  / _/
/ (_ / /_/ / __ |/ /     / /__/ /___/ /
\___/\____/_/ |_/_/      \___/____/___/
        """
    )
    print('\u00a92019 Alexander Gompper'.center(80, ' '))
    print(('=' * 20).center(80, ' '))
    # Collection of workers
    workers = []
    # Collection of proxies
    log('Loading proxies')
    manager = Proxy(PROXY_FILE_PATH)
    log('Using {} proxies'.format(len(manager.proxies)))
    log('Loading accounts')
    with open(CSV_FILE_PATH) as csv_file:
        reader = csv.reader(csv_file)
        log('Loaded {} accounts'.format(sum(1 for _ in reader)))
        log('=' * 20)
        csv_file.seek(0)
        for idx, row in enumerate(reader):
            w = Worker(
                username=row[0],
                password=row[1],
                proxy=manager.get_proxy(),
                products=None,
                skip_to_idx=row[2]  # In case you crash or something you can skip all the previous entries
            )
            workers.append(w)
            workers[idx].start()
            sleep(0.1)  # makes output a little cleaner tbh
def main():
    log = Logger('main').log
    log('starting monitor')
    with open('sites.txt') as sitelist:
        sites = sitelist.read().splitlines()
    log('loaded {} sites to monitor'.format(len(sites)))
    with open('proxies.txt') as proxylist:
        proxies = proxylist.read().splitlines()
    log('loaded {} proxies to monitor with'.format(len(proxies)))
    i = 0
    monitors = []
    for site in sites:
        monitors.append(Monitor(site))
        monitors[i].start()
        i += 1
def serve():
    logger = Logger(FLAGS.log_path, 'server')
    server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))
    service = Server(FLAGS)
    service_grpc.add_RecognitionServicer_to_server(service, server)
    server.add_insecure_port('[::]:{}'.format(FLAGS.port))
    logger.info("--- Server has been started... ---")
    server.start()
    try:
        while True:
            time.sleep(1)
    except KeyboardInterrupt:
        server.stop(0)
class Simulation():
    def __init__(self):
        self.elevators = [Elevator() for _ in range(ELEVATOR_NUM)]
        self.elevatorAI = ElevatorAI(self.elevators)
        self.people = []
        self.time = DAY_START_TIME
        self.day = 0
        self.logger = Logger()

    # Create the people for the simulation
    def populate(self):
        # Create people
        building_capacity = (FLOORS - 1) * FLOOR_CAPACITY  # -1 because floor 0 is not a valid home floor
        people_slots = random.sample(
            range(building_capacity),
            math.ceil(building_capacity * BUILDING_FULLNESS))
        for ps in people_slots:
            home_floor = math.floor(ps / FLOOR_CAPACITY) + 1  # +1 because 0 is not a valid home floor
            entry_time = MEAN_ENTRY_TIME + timedelta(minutes=np.random.normal(0, 30))
            to_lunch_time = MEAN_TO_LUNCH_TIME + timedelta(minutes=np.random.normal(0, 30))
            from_lunch_time = (to_lunch_time + timedelta(minutes=MEAN_LUNCH_TIME)
                               + timedelta(minutes=np.random.normal(0, 5)))
            leave_time = MEAN_LEAVE_TIME + timedelta(minutes=np.random.normal(0, 30))
            self.people.append(
                Person(ps, home_floor, entry_time, to_lunch_time,
                       from_lunch_time, leave_time))

    def run(self, days):
        # Main loop
        while self.day < days:
            # People Act
            #############
            for person in self.people:
                person.act(self.time)
                # Check if spotted by camera
                if person.distance_to_elevator == TIME_TO_ELEVATOR:
                    # Call elevator (prediction)
                    self.elevatorAI.call_elevator_prediction(
                        person.current_floor, person.target_floor)
                # Check if waiting for elevator
                if person.waiting and not person.elevator_called:
                    # Call elevator
                    self.elevatorAI.call_elevator(person, person.current_floor,
                                                  person.get_direction())
                    person.elevator_called = True
            # ELEVATOR AI ACT
            #################
            self.elevatorAI.act()
            # ELEVATORS ACT
            ###############
            for elevator in self.elevators:
                elevator.act(self.elevatorAI, self.people)
            # TIME
            ######
            # Move time
            self.time = self.time + timedelta(seconds=TICK_DURATION)
            # Check if the day is over
            if self.time.hour >= DAY_END_TIME.hour:
                # Move to next day
                self.day += 1
                self.time = DAY_START_TIME + timedelta(days=self.day)
                # Reset people
                for person in self.people:
                    person.next_day()
                # Reset elevators
                for elevator in self.elevators:
                    elevator.reset()
            # At the end of each cycle
            self.logger.log_people_movement(self.time, self.people)
        print(self.logger.get_total_waiting_time())
        print(self.logger.get_total_elevator_time())
        self.logger.save_to_file()
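# A minimal usage sketch, assuming the module-level constants referenced
# above (ELEVATOR_NUM, FLOORS, TICK_DURATION, ...) are defined:
#
#   sim = Simulation()
#   sim.populate()
#   sim.run(days=5)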
class App:
    # Python 2 codebase (print statement, file() builtin)
    pidfile = "/tmp/smarthome.pid"

    def __init__(self):
        print "starting smarthome..."
        self.setPid()
        self.threads = []
        self.dispatcher = Dispatcher()
        self.config = Config(self.dispatcher)
        self.values = Values()
        self.hue = Hue(self.config.getHueIP(), self.dispatcher)
        self.hue.start()
        self.threads.append(self.hue)
        self.switch = Switch(self.dispatcher, self.config.getSwitchConfig())
        self.pilight = PilightClient(self.dispatcher)
        self.pilight.registerCallback(self.switch.callback, 'protocol', ['arctech_screen'])
        self.pilight.registerCallback(self.climateCallback, 'protocol', ['alecto_ws1700'])
        self.pilight.start()
        self.threads.append(self.pilight)
        if self.config.hasLCD():
            self.lcd = Lcd(self.values)
            self.lcd.start()
            self.threads.append(self.lcd)
        self.fhem = Fhem(self.config.getFhemIp(), self.config.getFhemPort(), self.dispatcher)
        self.fhem.registerCallback(self.fhemCallback)
        self.config.initDevices(self.fhem, self.values)
        self.fhem.start()
        self.threads.append(self.fhem)
        self.api = Api(self.values, self.dispatcher)
        self.events = Events(self.values, self.dispatcher)
        self.config.initEvents(self.events)
        self.events.start()
        self.threads.append(self.events)
        self.logger = Logger(self.values)
        self.logger.start()
        self.threads.append(self.logger)
        self.highcharts = Highcharts(self.logger, self.dispatcher)
        self.webserver = Webserver(self.values, self.dispatcher, self.config.getWebserverPort())
        self.webserver.start()
        self.dispatcher.start()
        self.threads.append(self.dispatcher)
        self.serve()
        self.clearPid()

    def setPid(self):
        pid = str(os.getpid())
        if os.path.isfile(self.pidfile):
            try:
                # kill a previous instance if its pidfile is still around
                os.kill(int(file(self.pidfile, 'r').readlines()[0]), 9)
            except:
                pass
            else:
                time.sleep(2)
        file(self.pidfile, 'w').write(pid)

    def clearPid(self):
        os.unlink(self.pidfile)

    def serve(self):
        print "started!\n"
        while True:
            try:
                run = False
                for thread in self.threads:
                    self.sendChanges()
                    thread.join(1)
                    if thread.isAlive():
                        run = True
                if not run:
                    self.webserver.stop()
                    for thread in self.threads:
                        if thread.isAlive():
                            thread.stop()
                    return
            except KeyboardInterrupt:
                for thread in self.threads:
                    thread.stop()

    def sendChanges(self):
        if self.values.changed:
            data = {
                'params': ['path', 'values'],
                'path': 'outputToJs',
                'values': {
                    'type': 'values',
                    'data': self.values.getValues()
                }
            }
            self.values.changed = False
            self.dispatcher.send(data)

    def fhemCallback(self, data):
        uid = data.get('id')
        for attr in data.get('values').get('attr'):
            value = data.get(attr)
            if value:
                if attr == 'state' and data.get('values').get('type') == 'climate':
                    if value.find('set_desired-temp') != -1:
                        desired = value.replace('set_desired-temp', '').strip()
                        self.values.addValue(uid, 'desired-temp', desired)
                        self.values.addValue(uid, 'info', 'Set to %s°C (Current: %s°C)' % (desired, data.get('desired-temp')))
                    else:
                        self.values.addValue(uid, 'info', '')
                else:
                    self.values.addValue(uid, attr, value)
        self.values.addValue(uid, 'device', uid)

    def climateCallback(self, data):
        code = data.get('message')
        if not code:
            return
        temperature = code.get('temperature')
        humidity = code.get('humidity')
        if temperature:
            self.values.addValue(code.get('id'), 'temperature', temperature)
        if humidity:
            self.values.addValue(code.get('id'), 'humidity', humidity)
def main():
    l = Logger(lid='M')
    log = l.log
    error = l.error
    s = requests.Session()
    s.verify = False
    s.headers = {
        'Host': 'www.goat.com',
        'Accept-Encoding': 'gzip,deflate',
        'Connection': 'keep-alive',
        'Accept': '*/*',
        'Accept-Language': 'en-US;q=1',
        'User-Agent': 'GOAT/2.7.0 (iPhone; iOS 12.1; Scale/3.00)'  # Keep this updated.
    }
    print(('=' * 20).center(80, ' '))
    print('GOAT Black Friday / Summer Raffle Tickets'.center(80, ' '))
    print(
        """
  _________  ___  ______   _______   ____
 / ___/ __ \/ _ /_  __/   / ___/ /  / _/
/ (_ / /_/ / __ |/ /     / /__/ /___/ /
\___/\____/_/ |_/_/      \___/____/___/
        """
    )
    print('\u00a92018 Alexander Gompper'.center(80, ' '))
    print(('=' * 20).center(80, ' '))
    # Collection of workers
    workers = []
    # Collection of proxies
    log('Loading proxies')
    manager = Proxy(PROXYFILE)
    log('Using {} proxies'.format(len(manager.proxies)))
    # TODO: change status code handling for 429 errors from scraping
    # Collection of products
    log('Loading products')
    products = []
    for page in range(5):
        sleep(1)
        url = 'https://www.goat.com/api/v1/contests/3?page={}'.format(page)
        try:
            r = s.get(url, timeout=5)
            if r.status_code == 200:
                try:
                    r = r.json()
                    for prod in r['productTemplates']:
                        products.append(prod['id'])
                        # log('{} \t\t|| {}'.format(prod['id'], prod['name'].encode('utf-8')))
                    log('scraped {} ids'.format(len(products)))
                except KeyError:
                    error('[failed] failed to scrape product ids')
                    return False
            else:
                error('got bad status code {} from pid scrape'.format(r.status_code))
                return False
        except Timeout:
            error('[error] timeout from pid scrape')
            return False
    print(products)
    sleep(3)
    # Collection of locations
    log('Loading locations')
    # Need to have a dummy worker to login so that we don't get denied looking for locations
    # No clue why you must be logged in to get location ids but not social... nice.
    dummy_worker = Worker(username=DUMMYUSER, password=DUMMYPASS)
    dummy_worker.login()
    locations = []
    url = 'https://www.goat.com/api/v1/contests/3/locations'
    try:
        r = dummy_worker.s.get(url, timeout=5)
        if r.status_code == 200:
            try:
                r = r.json()
                for loc in r:
                    locations.append(loc['id'])
                    # log('{} \t {}'.format(loc['id'], loc['name'].encode('utf-8')))
                log('scraped {} loc ids'.format(len(locations)))
            except KeyError:
                error("couldn't find location ids")
                return False
        else:
            error('got bad status code {} from loc scrape'.format(r.status_code))
            return False
    except requests.exceptions.Timeout:
        error('timeout from loc scrape')
        return False
    print(locations)
    sleep(3)
    log('Loading accounts')
    with open(CSVFILE) as csvfile:
        reader = csv.reader(csvfile)
        log('Loaded {} accounts'.format(sum(1 for _ in reader)))
        log('=' * 20)
        csvfile.seek(0)
        for idx, row in enumerate(reader):
            w = Worker(
                username=row[0],
                password=row[1],
                proxy=manager.get_proxy(),
                products=products,
                locations=locations,
                skip_to_idx=row[2]  # In case you crash or something you can skip all the previous entries
            )
            w.start()
            workers.append(w)
            sleep(0.05)
                    default='covid',
                    help="Set name contract tag")
parser.add_argument("--connection", default='tgwnew',
                    help="Set name of connection")
parser.add_argument("--max_part", default=10000,
                    help="Set max part of uuids list")
parser.add_argument("--verbose", default=True,
                    help="Set duplicate output to stdout")
args = parser.parse_args()
config = dict()
logfile = 'covid_maker.log'
logger = Logger(logfile)
# logger.set_param('covid_maker.log')
conn = args.connection

# Press the green button in the gutter to run the script.
if __name__ == '__main__':
    # args.date_start, args.date_end = ('2021-06-01', '2021-06-02')
    t0 = timer.get_time()
    report = DIT_report(args.date_start, args.date_end, args.config,
                        args.output, args.contract)
    report.tunnels.start()
    # tunnel_cfg = report.get_config_db('sshtunnel')
    # list of templates
    query = ("SELECT message_template_id FROM message_template_tag "
             "WHERE tag_id=(select id from tag where text = %s ) "
             "ORDER BY message_template_id;")
    logger.log(f"Query: {query}, {args.contract}", 'INFO', args.verbose)
"--date_end", default=f"{today.year:04d}-{today.month:02d}-{today.day:02d}", help="Set end date, example 2020-01-01. ") parser.add_argument("--config", default="./config/config_ditrep.xml", help="Set path to config file") parser.add_argument("--output", default=None, help="Set path to output file") parser.add_argument("--contract", default='COVID', help="Set path to output file") parser.add_argument("--verbose", default=True, help="Set path to output file") args = parser.parse_args() config = dict() logfile = 'gen_daily.log' logger = Logger(logfile) #logger.set_param('covid_maker.log') # Press the green button in the gutter to run the script. if __name__ == '__main__': t0 = timer.get_time() report = DIT_report(args.date_start, args.date_end, args.config, args.output, args.contract) date_list = report.get_time_period('10min') logger.log(len(date_list), 'INFO', args.verbose) res_data = dict(select=0, all=0, insert=0, update=0) for item in date_list: date_start = item['date_start'] date_end = item['date_end'] query = f"SELECT * FROM insert_messages_by_period('{date_start}','{date_end}')" logger.log(query, 'INFO', True)
#!/usr/bin/env python3
import requests
import os
from bs4 import BeautifulSoup
from classes.logger import Logger
from classes.tools import Tools
from classes.proxies import Proxy
import webbrowser
import time
from requests.adapters import HTTPAdapter
import re

log = Logger().log
tools = Tools()
empty_warn = 0


class Supreme:
    proxy = Proxy()

    def __init__(self):
        self.override = False
        self.chromepath = 'C:/Program Files (x86)/Google/Chrome/Application/chrome.exe %s --incognito'
        self.jackets = 'http://www.supremenewyork.com/shop/all/jackets'
        self.shirts = 'http://www.supremenewyork.com/shop/all/shirts'
        self.sweaters = 'http://www.supremenewyork.com/shop/all/tops_sweaters'
        self.tshirts = 'http://www.supremenewyork.com/shop/all/t-shirts'
        self.sweatshirts = 'http://www.supremenewyork.com/shop/all/sweatshirts'
        self.hats = 'http://www.supremenewyork.com/shop/all/hats'
        self.pants = 'http://www.supremenewyork.com/shop/all/pants'