def test_multiple_ping(self):
    subject = PingSensor()
    with TemporaryDirectory() as d:
        with ChangeToDir(d):
            # when: multiple pings should be sent
            subject.execute('1234', '8.8.8.8',
                            {'method': PingSensor.MULTIPLE_PING_ID},
                            {'executable': RECORD_ARGS})

            # then: the correct arguments are passed on the command line
            args = arguments(d)
            self.assertEqual(['-c', '3', '8.8.8.8'], args)
def test_ping_with_different_packet_size(self):
    subject = PingSensor()
    with TemporaryDirectory() as d:
        with ChangeToDir(d):
            # when: a single ping with a custom packet size should be sent
            subject.execute('1234', '8.8.8.8',
                            {'method': PingSensor.SINGLE_PING_ID, 'size': 300},
                            {'executable': RECORD_ARGS})

            # then: the correct arguments are passed on the command line
            args = arguments(d)
            self.assertEqual(['-c', '1', '-s', '300', '8.8.8.8'], args)
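# The two tests above lean on a small recording harness that is not shown here:
# RECORD_ARGS points at a stub executable run in place of the real `ping`, and
# arguments(d) reads back the command line that stub was invoked with. The
# sketch below is one plausible implementation, under the assumption that the
# stub dumps its argv into a JSON file in the current working directory (which
# the tests chdir into via ChangeToDir); the file name is hypothetical.
import json
import os
import sys

def arguments(directory):
    """Return the argument list recorded by the RECORD_ARGS stub."""
    with open(os.path.join(directory, 'recorded_args.json')) as f:
        return json.load(f)

# Hypothetical body of the RECORD_ARGS stub script itself:
if __name__ == '__main__':
    with open('recorded_args.json', 'w') as f:
        json.dump(sys.argv[1:], f)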
def eval_input(self, s):
    namespace = {}
    exec PREEXEC in {}, namespace

    def plot(f=None, **kwargs):
        """Plot functions. Not the same as SymPy's plot.

        This plot function is specific to Gamma. It has the following syntax::

            plot([x^2, x^3, ...])

        or::

            plot(y=x, y1=x^2, r=sin(theta), r1=cos(theta))

        ``plot`` accepts either a list of single-variable expressions to
        plot or keyword arguments indicating expressions to plot. If
        keyword arguments are used, the plot will be polar if the keyword
        argument starts with ``r`` and will be an xy graph otherwise.

        Note that Gamma will cut off plot values above and below a
        certain value, and that it will **not** warn the user if so.
        """
        pass

    namespace.update({
        'plot': plot,  # prevent textplot from printing stuff
        'help': lambda f: f
    })

    evaluator = Eval(namespace)
    # change to True to spare the user from exceptions:
    if not len(s):
        return None

    transformations = []
    transformations.append(synonyms)
    transformations.extend(standard_transformations)
    transformations.extend((convert_xor, custom_implicit_transformation))
    parsed = stringify_expr(s, {}, namespace, transformations)

    try:
        evaluated = eval_expr(parsed, {}, namespace)
    except SyntaxError:
        raise
    except Exception as e:
        raise ValueError(str(e))
    input_repr = repr(evaluated)
    namespace['input_evaluated'] = evaluated

    return parsed, arguments(parsed, evaluator), evaluator, evaluated
def main():
    args = arguments()
    browser = args.browser

    if browser.lower() == 'c' or browser.lower() == 'chrome':
        driver = driver_chrome()
    elif browser.lower() == 'f' or browser.lower() == 'firefox':
        driver = driver_firefox()
    else:
        # avoid falling through with an unbound `driver`
        raise SystemExit('Unsupported browser: {!r}'.format(browser))

    print('\n-- RECLAME AQUI SCRAPER --')

    file = args.file
    id_page = args.id
    pages = args.pages

    conn, cursor = db_conn()
    coletor = url_collector(driver, file, id_page, pages, conn, cursor)
    scraper(driver, coletor, id_page, conn, cursor)
    driver.quit()
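# main() relies on an arguments() helper that exposes .browser, .file, .id and
# .pages. Its real definition lives elsewhere in the project; the argparse-based
# sketch below only illustrates the expected shape (argument names, help texts
# and defaults are assumptions inferred from the attributes used above).
import argparse

def arguments():
    parser = argparse.ArgumentParser(description='Reclame Aqui scraper')
    parser.add_argument('browser', help="'c'/'chrome' or 'f'/'firefox'")
    parser.add_argument('file', help='output file for the collected URLs')
    parser.add_argument('id', help='Reclame Aqui company/page id')
    parser.add_argument('pages', type=int, help='number of complaint pages to collect')
    return parser.parse_args()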
def eval_input(self, s):
    namespace = {}
    exec PREEXEC in {}, namespace
    evaluator = Eval(namespace)
    # change to True to spare the user from exceptions:
    if not len(s):
        return None

    transformations = []
    transformations.append(synonyms)
    transformations.extend(standard_transformations)
    transformations.extend((convert_xor, custom_implicit_transformation))

    local_dict = {
        'plot': lambda *args: None  # prevent textplot from printing stuff
    }
    global_dict = {}
    exec 'from sympy import *' in global_dict

    parsed = stringify_expr(s, local_dict, global_dict, transformations)
    evaluated = eval_expr(parsed, local_dict, global_dict)
    input_repr = repr(evaluated)
    namespace['input_evaluated'] = evaluated

    return parsed, arguments(parsed, evaluator), evaluator, evaluated
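# Both eval_input variants build the same kind of transformation pipeline before
# parsing. The self-contained example below shows the effect using plain SymPy;
# the Gamma-specific `synonyms` and `custom_implicit_transformation` helpers are
# project code not shown here and are approximated by SymPy's built-in implicit
# multiplication/application transformation.
from sympy.parsing.sympy_parser import (parse_expr, standard_transformations,
                                        convert_xor,
                                        implicit_multiplication_application)

transformations = standard_transformations + (
    convert_xor,                          # treat ^ as exponentiation
    implicit_multiplication_application,  # allow "2x" and "sin x"
)

expr = parse_expr("2x^2 + sin x", transformations=transformations)
print(expr)  # 2*x**2 + sin(x)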
        # print('\n'*5)
        adam.zero_grad()
        loss.backward()
        for p in self.q.parameters():
            p.grad.data.clamp_(-1., 1.)  # clip gradients to [-1, 1]
        adam.step()

        self.update_counter += 1
        if self.update_counter % self.target_net_update_freq == 0:
            # periodically sync the target network with the online network
            self.update_counter = 0
            self.target.load_state_dict(self.q.state_dict())


args = utils.arguments()

bins = 2
env = BranchingTensorEnv(args.env, bins, 500_000, 500_000)

config = AgentConfig()
memory = ExperienceReplayMemory(config.memory_size)
agent = BranchingDQN(env.env.observation_space(), env.env.action_space(), bins, config)
adam = optim.Adam(agent.q.parameters(), lr=config.lr)

s = env.reset()
ep_reward = 0.
recap = []

p_bar = tqdm(total=config.max_frames)
#!/usr/bin/env python
import argparse

from utils import Output, is_valid_user, arguments
from classes import Model

parser = argparse.ArgumentParser(
    description='Performs a data modeling pipeline',
    usage='python model.py user password [OPTIONS]')

input_username, input_password, db_name, db_host = arguments(parser)
output = Output()


def model_data():
    model = Model(input_username, input_password, db_name, db_host)

    # Create course content similarity DataFrame
    output.write('Create a course content similarity DataFrame')
    output.warning('This process can take a long time')
    output.start_spinner('Creating a course content similarity DataFrame')
    try:
        model.create_course_content_similarity_df()
        output.spinner_success()
    except Exception as err:
        output.spinner_fail(str(err))
        exit(1)

    # Save courses content similarities to database
            booking.rating = _host['rating']
            booking.reviews = _host['evaluations']

            session.add(booking)
            session.commit()
            logging.info(host)
        except:
            logging.error(_host)
            logging.error("Exception occurred", exc_info=True)
    except:
        logging.error("Exception occurred", exc_info=True)


if __name__ == "__main__":
    try:
        args = arguments()
        checkin = args['checkin']
        checkout = args['checkout']
        max_price = args['maxprice']
        min_price = args['minprice']
        num_pages = args['pages']
        path = args['path']
        host = args['host']
        username = args['username']
        password = args['password']

        db = "hosts.db"
        log_file = 'webscraping.log'
        # logging.basicConfig(level=logging.WARNING, filename=os.path.join(path, log_file),
        #                     format='%(asctime)s :: %(message)s',
        #                     datefmt='%Y-%m-%d %H:%M:%S', filemode='w+')
        logging.basicConfig(level=logging.WARNING)
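# The __main__ block expects arguments() to return a dict-like object keyed by
# 'checkin', 'checkout', 'maxprice', 'minprice', 'pages', 'path', 'host',
# 'username' and 'password'. One common way to get that shape from argparse is
# to wrap parse_args() in vars(); the option definitions below are assumptions
# inferred from the keys used above, not the scraper's actual CLI.
import argparse

def arguments():
    parser = argparse.ArgumentParser(description='host/booking web scraper')
    parser.add_argument('--checkin', required=True, help='check-in date')
    parser.add_argument('--checkout', required=True, help='check-out date')
    parser.add_argument('--maxprice', type=int, help='maximum nightly price')
    parser.add_argument('--minprice', type=int, help='minimum nightly price')
    parser.add_argument('--pages', type=int, default=1, help='number of result pages')
    parser.add_argument('--path', default='.', help='directory for database and log file')
    parser.add_argument('--host', help='database host')
    parser.add_argument('--username', help='database username')
    parser.add_argument('--password', help='database password')
    return vars(parser.parse_args())  # dict-style access: args['checkin'], ...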