def relay(semaphore: mp.Semaphore, queue: mp.Queue, output_lock: mp.Lock,
          bmsg: bytes, addr: tuple, relay_dict, recv_time: datetime):
    semaphore.acquire()
    bmsg = bytearray(bmsg)
    header = DNSHeader(bmsg[:12])
    header.aa = 1
    bmsg = header.bmsg + bmsg[12:]
    assert header.qdcount == 1
    question = DNSQuestion(bmsg, offset=12)
    with output_lock:
        cprint(f'[{recv_time}][recv query {bytes_to_int(bmsg[:2])}]: {bmsg} from {addr}', fore='green', style='reverse')
        cprint_header(header, fore='green')
        cprint_question(question, fore='green')
    if question.qname in relay_dict:
        if relay_dict[question.qname] == '0.0.0.0':
            header.rcode = 3
            answer = header.bmsg + bmsg[12:]
            mode = 'intercept  '
        elif question.qtype == 1:
            answer = fake_bmsg(bmsg, relay_dict[question.qname])
            mode = 'local resolve '
        else:
            answer = forward(bmsg)
            if answer is None:
                semaphore.release()  # avoid leaking the slot when the upstream query fails
                return
            mode = 'relay msg  '
    else:
        answer = forward(bmsg)
        mode = 'relay msg  '
    queue.put((answer, addr, recv_time, mode))
    semaphore.release()
def receiver(queue, socket_lock, output_lock, sock):
    config_path = osp.join(osp.dirname(__file__), 'etc', 'config')
    last_read_config_time = -1
    semaphore = mp.Semaphore(7)
    while True:
        with socket_lock:
            if osp.getmtime(config_path) > last_read_config_time:
                last_read_config_time = osp.getmtime(config_path)
                with open(config_path) as config_file:
                    relay_dict = {}
                    for line in config_file:
                        if line == '\n':
                            continue
                        addr, name = line.strip('\n').split(' ')
                        relay_dict[name] = addr
                print(relay_dict)
            sock.settimeout(0.1)
            try:
                bmsg, addr = sock.recvfrom(1024)
                mp.Process(target=relay, args=(semaphore, queue, output_lock,
                                               bmsg, addr, relay_dict, datetime.now())).start()
            except socket.timeout:
                ...
            except ConnectionResetError:
                cprint("ConnectionResetError", fore='red')
                ...
            except Exception:
                ...
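For reference, a hypothetical etc/config file that the receiver above would parse (format inferred from the parsing loop: one 'ADDRESS NAME' pair per line, where the address 0.0.0.0 triggers the NXDOMAIN intercept branch in relay; the entries below are made up):

0.0.0.0 ads.example.com
10.0.0.8 fileserver.example.com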
Example #3
 def bind(self):
     try:
         self.s.bind(self.addr)
         print("INFO: Server %s:%s will run on - IP : %s, PORT : %s."%(cprint(self.name,'yellow',False),\
                 cprint(self.role,'sky',False),cprint(self.addr[0],'Green',False),cprint(self.addr[1],'Green',False)))
     except Exception as e:
         print("Bind Error.\n", e)
         sys.exit()
Example #4
    def __init__(self):

        # Credentials are now being stored in the ~/.aws folder where boto knows to look for them
        # still need to find a better way to set them without having them in the repo
        self.client = boto3.client('s3',
                                   config=Config(signature_version='s3v4'))

        self.BUCKET = BUCKET
        cprint('Using bucket: %s' % self.BUCKET, 'info')
Example #5
    def create_mem(self, name, size):
        if os.path.exists(self.mem_dir + name):
            utils.cprint("mem_handlers", "create_mem",
                         "memory file " + name + " exists. Will replace!")
            os.remove(self.mem_dir + name)

        buf_fd = open(self.mem_dir + name, 'w+b')
        buf_fd.seek(size - 1)
        buf_fd.write(b'\0')
        buf_fd.close()
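The seek-and-write trick above materialises a file of exactly size bytes; judging by the class name, the file is presumably mapped into memory elsewhere (not shown in this listing). A self-contained sketch of the same trick plus an mmap of the result, using a made-up path:

import mmap
import os

path, size = 'demo_mem', 4096  # hypothetical buffer file and size
with open(path, 'w+b') as fd:
    fd.seek(size - 1)          # jump to the last byte...
    fd.write(b'\0')            # ...and write it, which fixes the file length
with open(path, 'r+b') as fd:
    buf = mmap.mmap(fd.fileno(), 0)  # map the whole file
    buf[:5] = b'hello'               # the buffer is now shared, writable memory
    buf.close()
os.remove(path)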
Example #6
def print_actions (to_process, ACTION):
    print()

    cprint(('<White>Here are the modifications that will be done</White> '
            '(action will be <Magenta>{}</Magenta>):').format(ACTION),
           end='\n\n')

    for item in to_process:
        print(os.path.basename(item.source))
        cprint('<Magenta>--></Magenta> <White>{}</White>'.format(item.new_name))

    if len(to_process) == 0:
        print('Nothing to do :-(')
Example #7
File: yasR.py Project: Niols/yasR
def print_title ():
    """
    Prints the title.
    """

    cprint('''<White>################################################################################
##                                           </White><Magenta>___</Magenta><White>                              ##
##                              </White><Magenta>_  _ __ _ __| _ \\</Magenta><White>                             ##
##                             </White><Magenta>| || / _` (_-<   /</Magenta><White>                             ##
##                              </White><Magenta>\_, \__,_/__/_|_\\</Magenta><White>                             ##
##                       yet an-</White><Magenta>|__/</Magenta><White>-other series renamer                     ##
##                                                                 v1.0-alpha ##
################################################################################
</White>''')
Example #8
 def render(self, mode='human'):
     if mode != 'human':
         print(self.pos)
         return
     for x in range(self.shape[0]):
         for y in range(self.shape[1]):
             end = " " if y < self.shape[0] - 1 else ""
             bg_color = self.altitude_colors[self.altitudes[x][y]]
             color = "white" if bg_color == "black" else "black"
             if self.pos == (x, y):
                 cprint(" P ", "red", bg_color)
             else:
                 cprint(f" {self.altitudes[x][y]} ", color, bg_color)
         print()
Example #9
    def store(self, filename_keyword):
        """Store collected data in (hard) disk:
        Dump data to \data\proxy\%filename_keyword%_%time%.pkl
        Save data as txt to \data\proxy\%filename_keyword%_%time%.pxy
        """
        if not os.path.exists('data'):
            os.mkdir('data')
        if not os.path.exists('data\proxy'):
            os.mkdir('data\proxy')

        tag = time.strftime('%Y%m%d%H%M%S')
        
        filename_pkl = '%s_%s.pkl' % (filename_keyword, tag)
        try:
            output_pkl = open('data\proxy\%s' % filename_pkl, 'wb')
        except IOError as errinfo:
            cprint(self, 'Proxy data dump failed: %s' % errinfo)
            return
        with output_pkl:
            cPickle.dump(self.proxyinfo, output_pkl, cPickle.HIGHEST_PROTOCOL)
            cprint(self, 'Proxy data saved to data\proxy\%s' % filename_pkl)

        filename_pxy = '%s_%s.pxy' % (filename_keyword, tag)
        try:
            output_pxy = open('data\proxy\%s' % filename_pxy, 'w')
        except IOError as errinfo:
            cprint(self, 'Proxy data save failed: %s' % errinfo)
            return
        with output_pxy:
            for info in self.proxyinfo.values():
                output_pxy.write("%s:%s\n" % (info['ip'], info['port']))
            cprint(self, 'Proxy data saved to data\proxy\%s' % filename_pxy)
Example #10
    def visualize(self, step, ax=None):
        scores = np.array([each.fitness_score() for each in self.population])
        best_individual = self.population[scores.argmin()]
        best_fit = best_individual.fitness_score(0)
        feasibility = best_individual.total_violation()

        if ax is None and len(plt.get_fignums()) == 0:
            fig, (ax0, ax1) = plt.subplots(1, 2)
        else:
            ax0, ax1 = plt.gcf().get_axes()

        best_individual.visualize(ax=ax0, title=f'best fit ({best_fit:6.2f})')
        utils.cprint(
            f'step [y]{step}[w], best score: [y]{best_individual.fitness_score(0):.5} ({feasibility})[w] mean score: [y]{scores.mean():.5}. [w]Penalty multiplier is[y] {float(self.penalty_multiplier):.3}'
        )  # noqa
        plt.pause(0.05)
Example #11
def backsender(queue: mp.Queue, socket_lock: mp.Lock, output_lock: mp.Lock, sock: socket.socket):
    while True:
        with socket_lock:
            sock.settimeout(5)
            for answer_count in range(queue.qsize()):
                if queue.qsize() <= 0:
                    break
                answer, addr, recv_time, mode = queue.get()
                if answer is None:
                    continue
                with output_lock:
                    cprint(f'[{datetime.now()}][{mode}{bytes_to_int(answer[:2])}]: {answer}', fore='cyan', style='reverse')
                    answer = parse_msg(answer, fore='cyan')
                sock.sendto(answer, addr)
                send_time = datetime.now()
                time_cost = send_time - recv_time
                with output_lock:
                    cprint(f'[{send_time}][time cost  {bytes_to_int(answer[:2])}]: {time_cost}', fore='blue', style='reverse')
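Together with the relay/receiver pair at the top of this listing, backsender completes a small multiprocess DNS relay: receiver pulls queries off the socket and spawns relay workers, which push answers onto the shared queue that backsender drains and sends back. A minimal wiring sketch (assumptions, not shown in the listing: a fork start method so the bound socket is inherited by the child process, an etc/config file next to the script, and a hypothetical port):

if __name__ == '__main__':
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.bind(('0.0.0.0', 5353))  # hypothetical port; a real DNS relay would bind 53
    queue, socket_lock, output_lock = mp.Queue(), mp.Lock(), mp.Lock()
    mp.Process(target=backsender, args=(queue, socket_lock, output_lock, sock)).start()
    receiver(queue, socket_lock, output_lock, sock)  # blocks, spawning relay workers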
Example #12
    def find_target_portfolio(self, exchange):
        '''
        populates `self.target_portfolio` of weights that the agent will
        try to reach.
        '''
        self.target_portfolio = {}

        for sector, g in itertools.groupby(exchange.portfolio.get_stocks(),
                                           lambda x: x.company.sector):
            g = np.array(list(g))  # make an array, so it can be reused

            pe_values = [tu.get_historical_pe(stock,
                                              orderbook=exchange.orderbooks[stock],
                                              periods=self.theta,
                                              decay=1) for stock in g]
            pe_values = np.array(pe_values)

            mean_pe = np.mean(pe_values)
            long_indices = np.argwhere(pe_values < mean_pe).flatten()

            weights = np.random.uniform(0, np.maximum(0, pe_values - mean_pe))
            # weights = np.random.uniform(size=long_indices.size)
            # print(pe_values)
            import utils
            utils.cprint(f'mean: {mean_pe:.2f} ' + ' '.join(['{}{:.2f}'.format('[g]' if e < mean_pe else '[r]', e) for e in pe_values]))

            short_stocks = np.delete(g, long_indices)  # complement of long_indices
            long_stocks = g[long_indices]  # get those we will go long
            for stock, weight in zip(long_stocks, weights):
                self.target_portfolio[stock] = weight

            for stock in short_stocks:
                self.target_portfolio[stock] = 0.0
            # import ipdb; ipdb.set_trace()

        V = sum(self.target_portfolio.values())
        if V == 0: return  # in this special case, all are above mean.
        self.target_portfolio = {k: v / V for (k, v) in self.target_portfolio.items()}
        X = np.array(list(self.target_portfolio.values()))
Example #13
def start(player_turn=1):
    state = State()
    screen = Screen()
    agent = Agent()
    agent_turn = 3 - player_turn
    while not state.winner():
        screen.render(state)
        if state.turn == agent_turn:
            choice = agent.choice(state, agent_turn)
            try:
                state.move(choice)
            except ValueError:
                cprint('Invalid position, AI is broken!', RED)
                input()
                continue
        elif state.turn == player_turn:
            inp = input()
            if inp == '':
                continue
            inp = int(inp)
            if inp < 1 or inp > 9:
                cprint('Invalid position', RED)
                input()
                continue
            pos = map_move[inp]
            try:
                state.move(pos)
            except ValueError:
                cprint('Invalid position', RED)
                input()
                continue

    screen.render(state)
    winner = state.winner()

    if winner == -1:
        cprint(f'Tie!\n', YELLOW)
    else:
        cprint(f'Player {winner} wins!\n', GREEN)
Example #14
 async def paywalls(self, ctx, *args):
     if args[0] == "add":
         # Add new domains to list of paywalled domains
         # Format: `!paywalls add DOMAIN_1 DOMAIN_2 ... DOMAIN_n` will add DOMAIN_1 thru DOMAIN_n to list
         #     of paywalled sites and respond with a confirmation message.
         self.paywalled_sites = list(set(self.paywalled_sites).union(set(args[1:])))
         with open("paywalled", "w") as file:
             sites = "\n".join(self.paywalled_sites)
             file.write(sites)
             await ctx.send(
                 "**Added the following domains:**" + "\n" + cprint(args[1:])
             )
     elif args[0] == "list":
         # List all paywalled sites currently tracking
         # Format: `!paywalls list` will list all paywalled sites
         await ctx.send(
             "**Paywalled sites:**" + "\n" + cprint(sorted(self.paywalled_sites))
         )
     elif args[0] == "delete":
         # Delete domains from the list of paywalled domains
         # Format: `!paywalls delete DOMAIN_1 DOMAIN_2 ... DOMAIN_n` will remove DOMAIN_1 thru DOMAIN_n from the list
         #     of paywalled sites and respond with a confirmation message.
         self.paywalled_sites = list(
             set(self.paywalled_sites).difference(set(args[1:]))
         )
         with open("paywalled", "w") as file:
             sites = "\n".join(self.paywalled_sites)
             file.write(sites)
             await ctx.send(
                 "**Deleted the following domains:**" + "\n" + cprint(args[1:])
             )
     elif args[0] == "link":
         # Manually link to archive.is
         # Format: `!paywalls link URL` will link to archive.is/URL
         if url_validator([args[1]]):
             await ctx.send(f"https://www.archive.is/{args[1]}")
         else:
             await ctx.send(f"**{args[1]}** is an invalid url.")
Example #15
 def __init__(
     self,
     *,
     density_fn: Callable,
     num_samples: int,
     init_weights: List[float] = None,
 ):
     self.density_fn = density_fn
     if init_weights is None:
         cprint("Initializing samples weights...")
         self.weights = [density_fn(1, 0.5) for _ in range(num_samples)]
     else:
         cprint("Using obtained weights...")
         self.weights = init_weights
     if len(self.weights) < num_samples:
         diff = num_samples - len(self.weights)
         cprint(f"Extending weights by {diff} samples...")
         self.weights += [density_fn(1, 0.5) for _ in range(diff)]
     if len(self.weights) > num_samples:
         diff = len(self.weights) - num_samples
         cprint(f"Shrinking weights by {diff} samples...")
         self.weights = self.weights[:num_samples]
     cprint("Samples weights initialized!")
Example #16
    def choice(self, state: State, turn: int):
        # Minimax decision
        choices = state.choices()
        random.shuffle(choices)

        best_move, best_value = 0, -2
        percent = 0

        for i, choice in enumerate(choices):
            new_state = state.clone()
            new_state.move(choice)

            child_value = self.min_value(new_state, turn)
            percent += 100 / len(choices)
            cprint(f'Thinking... {percent}%\n', CYAN)
            if child_value > best_value:
                best_value = child_value
                best_move = choice

        if best_value > 1000:
            cprint('mmmm... I\'m about to win :)\n', YELLOW)
            input()
        return best_move
Example #17
File: yasR.py Project: Niols/yasR
    def check_action (self):
        available_actions = [a[:-6] for a in dir(action) if re.match('[^_].*Action', a)]

        print()
        cprint('Actions available: ', 'white', attrs=['bold'])
        for act in available_actions:
            print(' - ' + act)
        print()

        if 'action' in PARAMS:
            ACTION = PARAMS['action']
            log.ok('Found action in parameters: %s.' % ACTION)
        else:
            log.info('No action found in parameters. Asking user.')
            ACTION = cinput('Enter action')

        while ACTION not in available_actions:
            log.warn('Action not available: %s.' % ACTION)
            ACTION = cinput('Enter action')
        log.ok('Action available.')

        PARAMS['action'] = ACTION
        save_params()
Example #18
def frPath(start_thread,goal_thread,T,s_t = None):    
    global matlab
    matlab = Wormhole()      

    # generate a sequence of fantasy states Sf
    # minimize energy to get Sr
    # calculate jacobians
    # solve linear prob for changes in u
    
    #s_start = calcFeats(start_thread.getXYZ().flatten())
    s_start = start_thread.getConstraints()
    #s_goal = calcFeats(goal_thread.getXYZ().flatten())    
    s_goal = goal_thread.getConstraints()
    
    #s_t = ndinterp(linspace(0,1,T),np.r_[s_start,s_goal],[0,1])
    if s_t is None: s_t = ndinterp(np.linspace(0,1,T),[0,1],np.array([s_start,s_goal]))

    #import mayavi.mlab as mlab
    for i in xrange(10):
        
        #fwd_states1,_ = calcStatesJacs(start_thread,s_t)
        #fwd_states2 = calcStates(start_thread,s_t)
        #print np.linalg.norm(fwd_states1-fwd_states2)

        #rev_states1,_ = calcStatesJacs(goal_thread,s_t[::-1])
        #rev_states2 = calcStates(goal_thread,s_t[::-1])
        #print np.linalg.norm(rev_states1-rev_states2)
        
        
        print "forwards"
        fwd_states,fwd_jacs = calcStatesJacs(start_thread,s_t)
        print "backwards"
        rev_states,rev_jacs = [li[::-1] for li in calcStatesJacs(goal_thread,s_t[::-1])]
        #fwd_jacs = [jacobian(state) for state in fwd_states]
        #rev_jacs = [jacobian(state) for state in rev_states]
        #mlab.clf()
        #for s in fwd_states:
            #x,y,z = s.reshape(3,-1)
            #mlab.plot3d(x,y,z,tube_radius=.1,color=(1,0,0))   
            #mlab.points3d(x[[0,-1]],y[[0,-1]],z[[0,-1]],.5*np.ones(2),scale_factor=1,color=(1,1,1))
        #mlab.show()
        
        diff_states = fwd_states - rev_states
        #print "diff", evalDiff(start_thread,goal_thread,s_t)                
        
        cprint("states diff: %.2f"%np.linalg.norm(diff_states),"blue")
        cprint("jac diff: %.2f"%np.linalg.norm((fwd_jacs - rev_jacs).flatten()),"blue")
        cprint("end error: %.2ef"%np.linalg.norm(diff_states[-1]),"blue")
        #ds_t = frLinear(diff_states,fwd_jacs,rev_jacs)
        ds_t = frLinear2(diff_states,fwd_jacs - rev_jacs,s_t)
        step_sizes = np.array([-.5,-.1,.01,.1,.5,1])
        dists = [evalDiff(start_thread,goal_thread,s_t+step*ds_t) for step in step_sizes]
        print "dists",dists
        best_step = step_sizes[np.argmin(dists)]
        s_t += best_step*ds_t
        print "best step",best_step
        if i in [0,9]: fb(fwd_states.reshape(T,3,-1),goal_thread.getXYZ()).start()
Example #19
    def crawl(self, save = True):
        """Crawl necessary info from nntime.com
        Regular expression pattern and decrypt encrypted-port code
        maybe change depend on url source

        - if param save set to False, collected info will not be saved in memory

        Here's a simply explanation for decrypt encrypted-port:
            Ports at nntime are encrypted as fake random-string,
            as it random - String is really random
            as it fake - same number at per page is same random string
            So we can find regular pattern at those string
            Key:
                1.Per ip:port slot has specific string, and that string has
                port value at bottom.
                2.Each single number at port itself is encrypted as '+alpha'
                3.So that we can get len of encrypted port and capture the
                real port by Key.1
        """
        ProxySpider.crawl(self)
        p_port1 = re.compile(r'value="([0-9.]{0,36})" onclick', re.DOTALL)
        p_port2 = re.compile(r'\(":"([a-zA-Z+]{0,8})\)', re.DOTALL)
        p_ip = re.compile(r'</td><td>([0-9.]{0,16})<script', re.DOTALL)
        for link in self.links:
            self.view(link)
            #get decrypt port
            encrypt_ports1 = [
                port1.group(1) for port1 in re.finditer(p_port1, self.cont)]
            encrypt_ports2 = [
                port2.group(1) for port2 in re.finditer(p_port2, self.cont)]
            ports = []
            for i in range(len(encrypt_ports1)):
                if len(encrypt_ports2) != len(encrypt_ports1):
                    cprint(self, 'Crawled port count looks odd,')
                    cprint(self, 'there may be some issues')
                    pause()
                ports.append(encrypt_ports1[i][-len(encrypt_ports2[i])/2:])
            #get ip    
            ips = [ip.group(1) for ip in re.finditer(p_ip, self.cont)]
            #exception tick
            if len(ips) != len(ports):
                cprint(self, 'Number of ports and IPs collected differs,')
                cprint(self, 'there may be some issues')
                pause()

            if not save is False:
                self.save_collected(ips, ports)
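A toy illustration (made-up values) of the slicing trick used above: the letter-encoded capture is two characters per digit, so half its length says how many trailing characters of the numeric capture form the real port:

value_str = '209.85.51.1528080'  # hypothetical p_port1 capture; the port digits sit at the end
enc_port = '+h+j+h+j'            # hypothetical p_port2 capture; '+<letter>' per digit
n_digits = len(enc_port) // 2    # 4
print(value_str[-n_digits:])     # -> 8080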
Example #20
def list_all_styles(all_models, base_url, years):
    '''
    - all_styles {make: models}
        - models [model1, model2, ...]
            - model {name: 'name', year: styles}
                - styles [style1, style2, ...]
                    - style {'name', 'id', 'attr1', 'attr2', ...}
    '''
    all_styles = {}

    for make in all_models:
        models = []

        for m in all_models[make]:
            model_name = m.get('name')

            model = {}
            model['name'] = model_name
            for year in years:
                url = '{}{}/{}/{}/styles/?intent=buy-used'.format(base_url, make, model_name, year)
                try:
                    page = urllib.urlopen(url)
                except:
                    cprint("Failed to open url: {}".format(url), 'err')
                    continue

                soup = BeautifulSoup(page, 'lxml')
                if page_not_found(soup):
                    cprint("Model {} {} {} does not exist".format(year, make, model_name), 'r')
                    continue

                cprint("Retrive style for {} {} {}".format(year, make, model_name), 'g')
                styles = []

                style_containers = soup.find_all('div', {'class': 'vehicle-styles-container'})
                for sc in style_containers:
                    style = {}

                    for t in sc.contents:
                        if div_class_vehicle_style_head(t):
                            link = t.a['href']
                            style['url'] = link
                            style['name'] = t.div.text.strip(' \r\n\t')
                            style['id'] = parse_qs(urlparse(link).query)['vehicleid'][0]
                        if div_class_vehicle_style_body(t):
                            for tr in t.table.find_all('tr'):
                                tds = tr.find_all('td')
                                style[tds[0].text] = tds[1].text

                    if not style.get('name') is None:
                        styles.append(style)
                model[year] = styles
            models.append(model)
        all_styles[make] = models

    return all_styles
Example #21
def save_files(files, basedir, verbose=True):
    saved = 0
    for f in files:
        filename = basedir + "/" + f['name']
        mode = f.get('mode', '644')

        try:
            if os.path.exists(filename):
                os.chmod(filename, 0o644)

            verbose and cprint(LGREEN, "W %s" % filename)
            save_file(filename, f['content'], mode)
            saved += 1

        except IOError as e:
            cprint(LRED, e)
            cprint(LRED, "Failed saving file: '%s'" % f['name'])

        except OSError as e:
            cprint(YELLOW, e)
            cprint(LRED, "Failed setting file mode: '%s'" % f['name'])

    return saved
Example #22
def read_specification(filename=None, verbose=False):
    # deal with a custom specification file name
    if filename:
        _assert(os.path.exists(filename), "File %s not found" % filename)
        cprint(LCYAN, "Reading specification file: %s" % filename)
        return JsonFile(filename, array2map="tests")

    # deal with default specification file names
    tstyaml_exists = os.path.exists('tst.yaml')
    tstjson_exists = os.path.exists('tst.json')
    if verbose and tstyaml_exists and tstjson_exists:
        cprint(YELLOW, "Found both tst.yaml and tst.json: using tst.yaml")

    if tstyaml_exists:
        try:
            specification = JsonFile('tst.yaml', array2map="tests")
        except CorruptedJsonFile:
            _assert(False, "Corrupted specification file")
        return specification

    elif tstjson_exists:
        try:
            specification = JsonFile('tst.json', array2map="tests")
        except CorruptedJsonFile:
            _assert(False, "Corrupted specification file")
        return specification

    # neither tst.yaml, nor tst.json exist
    candidates = glob.glob("*.yaml")
    if len(candidates) == 0:
        candidates = glob.glob("*.json")

    if len(candidates) == 1:
        cprint(YELLOW, "Using %s as specification file" % candidates[0])
        try:
            specification = JsonFile(candidates[0], array2map="tests")
        except:
            _assert(False, "Invalid specification file")
        return specification

    cprint(LRED, "No tst tests found")
    sys.exit(1)
Example #23
def save_assignment(activity, dir_name, etag, url, repo):

    # move into directory
    os.chdir(dir_name)

    # save the original activity data
    dirname = './.tst' 
    if not os.path.exists(dirname):
        os.makedirs(dirname)

    with codecs.open('./.tst/activity.json', mode='w', encoding='utf-8') as f:
        f.write(data2json({
            "url": url,
            "name": activity.get('name'),
            "activity": activity,
            "etag": etag,
            "repo": repo,
            "updated_at": dt.datetime.utcnow().isoformat().split(".").pop(0) + "Z"
        }))

    # save activity files
    files = activity['files']
    for file in files:
        if os.path.exists(file['name']):
            contents = open(file['name']).read().decode('utf-8')
            if contents != file['data']:
                cprint(LRED, "skipping modified file: '%s' (use --overwrite)" % file['name'])
            else:
                cprint(RESET, "skipping unmodified file: '%s'" % file['name'])
            continue

        try:
            with codecs.open(file['name'], mode='w', encoding='utf-8') as f:
                f.write(file['data'])
            cprint(LCYAN, "Adding file '%s'" % file['name'])
        except:
            print("tst: fatal: Can't save file '%s'" % file['name'], file=sys.stderr)
            sys.exit(1)
Example #24
def solve_tsp(X,
              start_index=None,
              circular_indices=False,
              run_2_opt=False,
              plot=False,
              verbose=False):
    '''
    X: np.array of shape (N, 2) where each row corresponds to a xy-coordinate.
    start_index: int or None; if given, the returned tour starts with `start_index`
    run_2_opt: Boolean, run additional tweaking to tsp. Takes some more time

    plot: Boolean, if True then shows a plot of the TSP
    verbose: Boolean, if True then adds a tqdm progress bar. Nice for large problems

    returns: np.array of shape (N,) with unique elements, each being an index to
    one of the rows.

    Example:

     X = array([[85,  7],
               [19, 69],
               [99, 50],
               [ 4,  2],
               [66,  6],
               [42, 86],
               [70, 82],
               [76, 67],
               [57,  7],
               [55, 39]])

    returns array([8, 4, 1, 2, 3, 6, 7, 9, 8]) (for example)
    '''
    '''
    pt. 1: Insertion heuristic (convex hull)
    '''
    if X.shape[0] <= 3:
        # utils.cprint('[y]Warning: cannot solve TSP; too few points. Aborting')
        return np.hstack((np.arange(X.shape[0]), 0))
        # return np.arange(X.shape[0])

    try:
        hull = ConvexHull(X)
    except:
        utils.cprint('[y]Warning: cannot solve TSP; too few points. Aborting')
        return np.arange(X.shape[0])

    D = utils.all_euclidean_dist(X)

    all_vertices = np.arange(X.shape[0])
    unvisited = np.setdiff1d(all_vertices, hull.vertices)
    vertices = np.hstack((hull.vertices, hull.vertices[0]))

    def cheapest_insertion(i, seq):
        '''
        returns (cost, j, k)
        where cost, int -- the cost of the insertion (positive is bad)
              j, int -- the index of the element that i will come after
              k, int -- the index of the element that i will go to

        (so we end up with something like this: ... -> j -> i -> k -> ...
        '''
        L = []
        for j, k in zip(seq, seq[1:]):
            old_edge = utils.euclidean_dist(X[[j, k]])
            new_edge = D[j, i] + D[i, k]
            cost = -old_edge + new_edge
            L.append((cost, j, k))
        return min(L, key=lambda x: x[0])

    if verbose:
        pbar = tqdm(total=len(unvisited))
    while len(unvisited) > 0:
        if verbose:
            pbar.update(1)
        data = []
        for i in unvisited:
            distances = D[i, np.setdiff1d(all_vertices, unvisited)]
            min_distance = distances.min()
            idx = D[i, :] == min_distance

            cost, j, k = cheapest_insertion(i, vertices)
            data.append((cost, j, i, k))

        cost, j, i, k = min(data, key=lambda x: x[0])

        idx = np.argwhere(vertices == j)[0][0]
        vertices = np.hstack((vertices[:idx + 1], i, vertices[idx + 1:]))

        unvisited = utils.delete_by_value(unvisited, i)

    if verbose:
        pbar.close()
    '''
    pt. 2  -- tour improvement
    '''

    if plot:
        if not plt.fignum_exists(1):
            fig, (ax1, ax2) = plt.subplots(1, 2)
            ax1.plot(X[:, 0], X[:, 1], '.')
        else:
            ax1, ax2 = plt.gcf().get_axes()
            ax1.cla()
            ax2.cla()

        ax1.plot(X[vertices, 0], X[vertices, 1], 'r--', alpha=0.3, label='pt1')
        ax1.plot(X[vertices[0], 0], X[vertices[0], 1], 'ro')
        ax1.set_title('at step 1')

    if run_2_opt:

        def two_opt(i, k, seq):
            new_seq = seq[:i + 1], seq[k:i:-1], seq[k + 1:]
            return np.hstack(new_seq)

        best_cost = D[vertices[:-1], vertices[1:]].sum()
        for i, k in itertools.combinations(np.arange(1, len(vertices) - 1), 2):
            # iterate over every pair of positions, except the start and end points

            new_path = two_opt(i, k, np.array(vertices, copy=True))
            cost = D[new_path[:-1], new_path[1:]].sum()
            if cost < best_cost:
                if verbose:
                    print('hey found shorter')
                vertices = np.array(new_path, copy=True)
                best_cost = cost

    if plot:
        ax1.plot(X[:, 0], X[:, 1], '.')
        ax1.plot(X[vertices, 0], X[vertices, 1], 'g--', alpha=0.3, label='pt2')
        ax1.plot(X[vertices[0], 0], X[vertices[0], 1], 'ro')

    # get rid of the 'end' of the path, because we know it's the same as the first
    n_points = X.shape[0]
    # import ipdb; ipdb.set_trace()
    # vertices = utils.delete_by_value(vertices, n_points-1)

    if start_index is not None:
        if start_index == vertices[0]:
            pass
        else:
            '''
            example:
                vertices =     [6, 7, 0, 5, 2, 4, 8, 3, 9, 1, 6]
                start_index = 3
                --> vertices = [3, 9, 1, 6, 7, 0, 5, 2, 4, 8, 3]
            '''
            idx = np.argwhere(vertices == start_index)[0][0]
            vertices = np.concatenate((vertices[idx:-1], vertices[:idx + 1]))
            pass

    if circular_indices:
        return np.hstack((vertices, vertices[0]))
    else:
        return vertices
Example #25
    daemon_msg = "Not Running."
    out, err = utils.check_pidfile(DAEMON_PIDFILE)
    if not err:
        daemon_msg = "Running. PID(s): %s" % str(out)

    # logging
    import logging
    log = logging.getLogger(__name__)
    out_hdlr = logging.StreamHandler(sys.stdout)
    out_hdlr.setFormatter(logging.Formatter('%(asctime)s %(message)s'))
    out_hdlr.setLevel(logging.INFO)
    log.addHandler(out_hdlr)
    log.setLevel(logging.INFO)

    print("\n".join([
        r"##############################################################",
        r"#                                         _              _   #",
        r"#   _ __ ___   ___  __ _  __ _   ___  ___| |__   ___  __| |  #",
        r"#  | '_ ` _ \ / _ \/ _` |/ _` | / __|/ __| '_ \ / _ \/ _` |  #",
        r"#  | | | | | |  __/ (_| | (_| | \__ \ (__| | | |  __/ (_| |  #",
        r"#  |_| |_| |_|\___|\__, |\__,_| |___/\___|_| |_|\___|\__,_|  #",
        r"#                  |___/                                     #",
        r"#------------------------------------------------------------#",
    ]))
    print("#", utils.cprint("MegaTools: ", megatools_status()), megatools_msg)
    print("#", utils.cprint("Scheduler: ", daemon_status()), daemon_msg)
    print("##############################################################\n")

    # do cli stuff
    cli()
Example #26
 def __init__(self):
     utils.cprint("mem_handlers", "__init__", "inputs=self:" + str(self))
     self.mem_dir = 'memories/'
Example #27
    parser.add_argument("--length", type=int)
    parser.add_argument("--on_cuda",
                        type=bool,
                        default=False,
                        const=True,
                        nargs="?")
    parser.add_argument("--gripper", type=str)
    parser.add_argument("--aux_gripper", type=str)
    parser.add_argument("--forces", type=str)
    parser.add_argument("--thrower", type=str)
    parser.add_argument("--workdir_path", type=str)
    parser.add_argument("--output_path", type=str)
    args = parser.parse_args()
    if args.forces is not None:
        args.forces = json.loads(args.forces)
    cprint(args)

    os.makedirs(args.output_path, exist_ok=False)

    gripper = get_gripper_by_name(args.gripper)
    thrower = get_thrower_by_name(args.thrower)
    gripper.load(os.path.join(args.workdir_path, "gripper"))
    thrower.load(os.path.join(args.workdir_path, "thrower"))
    image_transformer = load_obj(
        os.path.join(args.workdir_path, "image_transformer"))

    aux_grippers: List[AuxGripperModule] = []
    if not args.forces:
        aux_gripper = get_aux_gripper_by_name(args.aux_gripper)
        aux_gripper.load(os.path.join(args.workdir_path, "aux-gripper"))
        aux_grippers.append(aux_gripper)
Example #28
 def _na(self, desc_rows=10):
     cprint("\n1. 统计缺失率...")
     self.s_na = self.df.isnull().sum()[lambda x: x > 0].sort_values(0, False) \
                 / self.df.__len__() * 100
     print(self.s_na.head(desc_rows))
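A tiny self-contained illustration of the missing-rate calculation above, on a toy DataFrame:

import pandas as pd

df = pd.DataFrame({'a': [1, None, None], 'b': [1, 2, 3]})
s_na = df.isnull().sum()[lambda x: x > 0].sort_values(ascending=False) / len(df) * 100
print(s_na)  # a    66.666667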
Example #29
    def request(self, method, path, headers={}, payload=None, exit_on_fail=False):
        curl_command = [
            'curl',
            '-q', # don't use ~/.curlrc (must be first arg)
            '-X', method.upper(), # http verb
            '-v', # be verbose: print report to stderr
            '-s', # don't print progress meter
            '-L'  # follow redirects
        ]

        headers['TST-CLI-Release'] = self.config.get('release', 'unknown')
        if 'Authorization' not in headers:
            headers['Authorization'] = 'Bearer %s' % self.token
        for hname, hvalue in headers.items():
            curl_command.append('-H')
            curl_command.append('%s: %s' % (hname, hvalue))

        url = self.config['url'] + path
        curl_command.append(url)
        if payload is not None:
            curl_command.append('-d')
            data = "%s" % json.dumps(payload)
            curl_command.append(data)

        signal.alarm(20000) # timeout in seconds
        process = Popen(curl_command, stdout=PIPE, stderr=PIPE) 
        try:
            stdout, stderr = map(to_unicode, process.communicate())
            signal.alarm(0) # reset alarm for future use...
            process.wait()
        except: # timeout!!!
            process.terminate()
            raise

        # raw data
        response = self.Response()
        response.stderr = stderr
        response.stdout = stdout
        response.exit_status = process.returncode

        # curl messages
        lines = [l[2:] for l in stderr.splitlines() if l and l[0] == '*']
        response.curl_messages = "\n".join(lines)

        # request headers
        lines = [l[2:] for l in stderr.splitlines() if l and l[0] == '>']
        response.request_headers = "\n".join(lines)

        # response headers
        lines = [l[2:] for l in stderr.splitlines() if l and l[0] == '<']
        response.headers = "\n".join(lines)

        if not response.headers:
            if exit_on_fail:
                msg = "tst: can't connect to server"
                _assert(False, msg)
                
            raise ConnectionFail("can't connect to tst online")

        # body
        response_lines = response.headers.splitlines()
        response.status_code = None
        for i in xrange(len(response_lines)-1, -1, -1):
            if response_lines[i].startswith("HTTP"):
                status_line = response_lines[i]
                response.status_code = int(status_line.split()[1])
                break
            
        # exit_on_fail
        if exit_on_fail and not (200 <= response.status_code < 300):
            msg = 'Request to server failed'
            try:
                data = json.loads(response.stdout)
                if 'messages' in data and isinstance(data['messages'], list):
                    msg += "\nServer message: " + str(data['messages'][0])
            except:
                data = {}
                msg += ('\n' + "Couldn't parse server response")

            cprint(LRED, msg)
            if 'messages' in data and data['messages'][0] == 'invalid token':
                print("---")
                print("Use `tst login` to log in to the server")

            sys.exit(1)
        
        response.body = stdout if response.status_code else None
        
        return response
Example #30
 def test(self, ss):
     cprint(ss, 'red')
Example #31
def retrieve_prices_by_compare(all_styles, compare_url, years, out_dir):
    '''
    - all_styles {make: models}
        - models [model1, model2, ...]
            - model {name: 'name', year: styles}
                - styles [style1, style2, ...]
                    - style {'name', 'id', 'attr1', 'attr2', ...}
                             (this function adds 'price' attr to it)
    '''

    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

    for make in all_styles:
        models = all_styles[make]
        for model in models:
            model_name = model.get('name')
            if model_name is None:
                cprint("Empty model name in list for make {}".format(make),
                       'err')
                continue

            for year in years:
                styles = model.get(year)
                if styles is None:
                    continue
                for style in styles:
                    vehicle_id = style.get('id')
                    if vehicle_id is None:
                        if style.get('name') is None:
                            cprint(
                                "Empty vehicle id for {} {} {} UNKNOWN TYPE".
                                format(year, make, model_name), 'r')
                        else:
                            cprint(
                                "Empty vehicle id for {} {} {} style {}".
                                format(year, make, model_name,
                                       style['name']), 'r')
                        continue

                    url = '{}{}-{}-{}-{}/'.format(compare_url, year, make,
                                                  model_name, vehicle_id)
                    try:
                        page = urllib.urlopen(url)
                    except:
                        cprint("Failed to open url: {}".format(url), 'err')
                        continue

                    soup = BeautifulSoup(page, 'lxml')
                    if page_not_found(soup):
                        cprint(
                            "Compare page for {} {} {} {} does not exist".
                            format(year, make, model_name, vehicle_id), 'r')
                        continue

                    for td in soup.find_all('td', {'class': ''}):
                        spans = td.find_all('span')
                        if len(spans) == 2 and spans[
                                0].text == 'KBB Suggested Retail':
                            style['price'] = spans[1].text
                            cprint(
                                "Suggested price {} for {} {} {} style {}".
                                format(style['price'], year, make, model_name,
                                       style['name']), 'g')
        cprint("Saving data for make {}".format(make), 'hi')
        out_file = out_dir + make + '.json'
        dump_to_json(all_styles[make], out_file)
Example #32
        )
        throw_loss = run_offline(
            sampler=construct_sampler(sampling_name="shuf",
                                      dataset=tra_dataset,
                                      batch_size=2,
                                      num_workers=0),
            gripper=heur_gripper,
            aux_gripper=aux_gripper,
            thrower=thrower,
            train=True,
            lr=(0.1 if epoch_idx < 100 else 0.001),
        )
        cprint(
            "epoch_idx=",
            epoch_idx,
            "| grip_loss=",
            grip_loss,
            "| throw_loss=",
            throw_loss,
        )

    gripper.save("/tmp/grip-and-throw/example_gripper")
    thrower.save("/tmp/grip-and-throw/example_thrower")

    gripper.load("/tmp/grip-and-throw/example_gripper")
    thrower.load("/tmp/grip-and-throw/example_thrower")

    eval_dataset = generate_data(
        gripper=gripper,
        aux_gripper=aux_gripper,
        thrower=thrower,
        image_transformer=image_transformer,
Example #33
async def on_message(message: Message):

    global paywalled_sites  # include list of paywalled site inside this function
    global last_check_in

    #rate limiter
    if message:
        try:
            #TODO: Put some random fuzz on the checkin timedelta
            #TODO: Lower the checkin time delta based on the subsequent frequency
            if not last_check_in or last_check_in < (message.created_at -
                                                     timedelta(seconds=60)):
                #grab the non-bot members
                memb_ls = [
                    m async for m in message.guild.fetch_members(limit=None)
                    if not m.bot
                ]
                #grab the last ten minutes of messages, up to 600 messages
                last_check_in = message.created_at
                ten_min_ago = message.created_at - timedelta(seconds=600)
                messages = await message.channel.history(
                    limit=600, after=ten_min_ago).flatten()
                #get the history of message authors who aren't bots
                human_authors_history = [
                    m.author for m in messages if m.author in memb_ls
                ]
                #get the unique authors
                human_author_set = set(human_authors_history)
                #if two users are talking
                prefix = None
                if len(human_author_set) == 2:
                    prefix = f"{list(human_author_set)[0].mention} and {list(human_author_set)[1].mention} are "
                #if one user is talking to themself
                elif len(human_author_set) == 1:
                    prefix = f"{list(human_author_set)[0].mention} is "
                if prefix:
                    if len(messages) > 100:
                        await message.channel.send(prefix +
                                                   "going at it. Wow!")
                    if len(messages) > 200:
                        await message.channel.send(
                            prefix + "getting into some serious behavior.")
                    if len(messages) > 300:
                        await message.channel.send(prefix +
                                                   "setting a record!")
                    if len(messages) > 400:
                        await message.channel.send(prefix +
                                                   "very serious about this!")
                    if len(messages) > 500:
                        await message.channel.send(prefix +
                                                   ", shut up. Please.")
        except:
            pass

    if message.content.startswith('!paywall'):
        # Manually link to archive.is
        # Format: `!paywall URL` will link to archive.is/URL
        words = message.content.split(" ")
        await message.channel.send(f"https://www.archive.is/{words[1]}")

    if and_includes(message.content, 'thank', 'soros'):
        # Responds to Sal when he says 'Thanks Soros'
        # (note: not antisemitic joke, used to mock the antisemitic globalist Soros stories)
        await message.channel.send(
            'No problemo buckaroo, anything for a fellow reptile.')

    if and_includes(message.content, 'who', 'horrible'):
        # You know what this does
        await message.channel.send(f"Why, {message.author.mention} of course!")

    if or_includes(message.content, 'socialis', 'communis'):
        # You know what this does
        await message.channel.send(f"AJ is the real commie here!")

    if or_includes(message.content, 'shane', 'metricity',
                   'the best') and (message.author != client.user):
        await message.channel.send(f"Shane really is the best.")

    if or_includes(message.content, "suck",
                   "sux") and (message.author != client.user):
        # ya know what this does too
        await message.channel.send("You know what else sucks? Salex Bexman.")

    if url_validator(message.content):
        # Checks if message is a valid URL and a paywalled domain.  If it is, returns the archive.is link.
        raw_url = message.content
        url = tldextract.extract(message.content)
        if url.domain in paywalled_sites:
            await message.channel.send(f"https://www.archive.is/{raw_url}")

    if message.content.startswith('!add'):
        # Add new domains to list of paywalled domains
        # Format: `!add DOMAIN_1 DOMAIN_2 ... DOMAIN_n` will add DOMAIN_1 thru DOMAIN_n to list
        #     of paywalled sites and respond with a confirmation message.
        new_paywalls = message.content.split(" ")[1:]
        paywalled_sites += new_paywalls
        paywalled_sites = list(set(paywalled_sites))
        paywalled_sites = [i for i in paywalled_sites if i != ""]
        with open('paywalled', 'w') as file:
            sites = "\n".join(paywalled_sites)
            file.write(sites)
            await message.channel.send('**Added the following domains:**' +
                                       "\n" + cprint(new_paywalls))

    if message.content.startswith('!delete'):
        # Delete domains from the list of paywalled domains
        # Format: `!delete DOMAIN_1 DOMAIN_2 ... DOMAIN_n` will remove DOMAIN_1 thru DOMAIN_n from the list
        #     of paywalled sites and respond with a confirmation message.
        new_paywalls = message.content.split(" ")[1:]
        paywalled_sites = [i for i in paywalled_sites if i not in new_paywalls]
        with open('paywalled', 'w') as file:
            sites = "\n".join(paywalled_sites)
            file.write(sites)
            await message.channel.send('**Deleted the following domains:**' +
                                       "\n" + cprint(new_paywalls))

    if message.content.startswith("!list paywalls"):
        # Displays list of all sites on the current paywall list
        await message.channel.send("**Paywalled sites:**" + "\n" +
                                   cprint(sorted(paywalled_sites)))

    if message.content.startswith("!test"):
        await message.channel.send(
            "Stop spamming the f*****g chat with your damn tests u chode.")

    if message.content.startswith("!gif"):
        async with message.channel.typing(
        ):  #makes the channel say the bot is typing
            scope = 1
            melee = False
            num_gifs = 1
            parsed = message.content.split(" ")
            if parsed[1] == 'melee':
                melee = True
                stripped = [strip(word) for word in parsed[2:]]
            else:
                stripped = [strip(word) for word in parsed[1:]]
            search = "+".join(stripped)
            try:
                scope_str = parsed[0][4:]
                scope = int(scope_str)
                if melee:
                    num_gifs = scope
            except:
                pass
            choice = random.randint(1, scope)
            response = requests.get(
                f"https://api.giphy.com/v1/gifs/search?q={search}&api_key=WiLstLIo2SInusTmGDDkhhY0tU6xKNEl&limit={num_gifs}&offset={choice}"
            )
            if response.status_code != 200:
                await message.channel.send("U stupid bruh, bad request.")
            else:
                gifs = response.json()['data']
                gif_urls = [gif['url'] for gif in gifs]
                for url in gif_urls:
                    await message.channel.send(url)

    if message.content.startswith("!calc"):
        async with message.channel.typing(
        ):  #makes the channel say the bot is typing
            terms = " ".join(message.content.split(" ")[1:])
            await message.channel.send(eval(terms))
Example #34
    def get_directory(self, key):
        s = requests.session()
        s = CacheControl(s, cache=FileCache(os.path.expanduser('~/.tst/cache')))

        url = "%s/%s/tst.yaml" % (self.url, key)
        headers = {}
        tokens = JsonFile(os.path.expanduser('~/.tst/tokens.json'))
        token = tokens.get(self.name)
        if token:
            headers['Authorization'] = 'Bearer %s' % token

        try:
            response = s.get(url, headers=headers, allow_redirects=True)
        except requests.ConnectionError:
            _assert(False, "Connection failed... check your internet connection (1)")

        # TODO: Split method in two. The first part performs a fetch,
        #       while the second part (below) processes the response and
        #       possibly fetches further files. We could have a simple
        #       fetch method/funcion and a more high level get_directory
        #       that uses such method. In fact, the get() method above can
        #       also be improved with such a refactoring.

        # process response
        if not response.ok:
            self.last_error = response.status_code
            self.last_response = response
            return None

        response.encoding = 'utf-8'
        try:
            import yaml
            resource = yaml.load(response.text, Loader=yaml.FullLoader)
            resource['_response'] = response

        except Exception as e:
            cprint(YELLOW, "Failed parsing yaml: %s" % url)
            cprint(YELLOW, str(e))
            raise e

        # gather files
        files = resource.get('files') or []
        files.append({
            "name": "tst.yaml",
            "content": response.text,
            "mode": "ro"
        })

        ## add text file if required
        if 'text' in resource and is_single_line_string(resource['text']):
            files.append({
                "name": resource['text'],
                "content": '%s/%s/%s' % (self.url, key, resource['text']),
                "mode": "ro"
            })

        ## add included files
        files_filenames = [f['name'] for f in files]
        for fspec in resource['include']:
            filename, category, mode = parse_file_spec(fspec)
            if filename not in files_filenames:
                files.append({
                    'name': filename,
                    'content': '%s/%s/%s' % (self.url, key, filename),
                    'mode': mode
                })
            else:
                entry = next(e for e in files if e['name'] == filename)
                entry['mode'] = mode

        ## fetch missing files
        for f in files:
            if f['content'].startswith('http://') or f['content'].startswith('https://'):
                f['content'] = fetch_file('%s/%s/%s' % (self.url, key, f['name']), encoding='utf-8')

        return {
            'kind': 'activity',
            'files': files,
        }
Example #35
    def crawl(self, save = True):
        """Crawl necessary info from spysru.com
        Regular expression pattern and decrypt encrypted-port code
        maybe change depend on url source

        - if param save set to False, collected info will not be saved in memory
        
        Here's a simply explanation for decrypt encrypted-port:
            Ports at spysru are encrypted as fake random-string,
            as it random - String is really random
            as it fake - same number at per page is same random string
            So we can find regular pattern at those string
            eg. 8080 has same number with position 1&3, 2&4
        """
        ProxySpider.crawl(self)
        p_port = re.compile(r'font>"\+(.*?)\)</script>', re.DOTALL)
        p_ip = re.compile((r'class=spy14>([0-9.]{0,20})<script'), re.DOTALL)
        
        for link in self.links:
            self.view(link)
            
            x_ports = re.finditer(p_port, self.cont)
            encrypt_ports = [x_port.group(1) for x_port in x_ports]
            #decrypt number
            cout_3128 = 0
            realnum = {3128: None, 8080: None, 80: None, 8909: None}
            for encrypt_port in encrypt_ports:
                num = encrypt_port.split('+')
                num_cout = encrypt_port.count('+') + 1
                
                if num_cout == 4:
                    #Detect 8080
                    if num[0] == num[2] and num[1] == num[3] and \
                       num[0] != num[1]:
                        realnum[num[0]], realnum[8] = 8, num[0]
                        realnum[num[1]], realnum[0] = 0, num[1]
                    #Detect 8909
                    elif num[1] == num[3] and num[0] != num[1] and \
                         num[0] != num[2] and num[1] != num[2]:
                        realnum[num[0]], realnum[8] = 8, num[0]
                        realnum[num[1]], realnum[9] = 9, num[1]
                        realnum[num[2]], realnum[0] = 0, num[2]
                    #Detect 3128
                    elif num[0] != num[1] and num[0] != num[2] and \
                         num[0] != num[3] and num[1] != num[2] and \
                         num[1] != num[3] and num[2] != num[3]:
                        if realnum[3128] is None:
                            realnum[3128] = num
                        else:
                            if num == realnum[3128]:
                                cout_3128 += 1
                        #Recognize as 3128 if True
                        if cout_3128 >= 3:
                            realnum[num[0]], realnum[3] = 3, num[0]
                            realnum[num[1]], realnum[1] = 1, num[1]
                            realnum[num[2]], realnum[2] = 2, num[2]
                            realnum[num[3]], realnum[8] = 8, num[3]
                elif num_cout == 2:
                    #Detect as 80 - only detect as 80 after 8080 found
                    if realnum.has_key(8) is True and realnum[8] == num[0] and \
                       realnum.has_key(0) is True and realnum[0] == num[1]:
                        realnum[num[0]], realnum[8] = 8, num[0]
                        realnum[num[1]], realnum[0] = 0, num[1]
            #decrypt port
            try:
                for i in range(len(encrypt_ports)):
                    encrypt_ports[i] = encrypt_ports[i].replace('+', '')
                    for n in range(0, 10):
                        if realnum.has_key(n):
                            encrypt_ports[i] = encrypt_ports[i].replace(
                                realnum[n], str(n))
            except KeyError:
                cprint(self, 'Cannot detect some number, may bugged.')
                pause()
            ports = encrypt_ports

            ips = [x_ip.group(1) for x_ip in re.finditer(p_ip, self.cont)]

            if not save is False:
                self.save_collected(ips, ports)
Example #36
    def request(self,
                method,
                path,
                headers={},
                payload=None,
                exit_on_fail=False):
        headers = dict(headers or {})  # copy: avoid mutating a shared default or the caller's dict
        curl_command = [
            'curl',
            '-q',  # don't use ~/.curlrc (must be first arg)
            '-X',
            method.upper(),  # http verb
            '-v',  # be verbose: print report to stderr
            '-s',  # don't print progress meter
            '-L'  # follow redirects
        ]

        headers['TST-CLI-Release'] = self.config.get('release', 'unknown')
        if 'Authorization' not in headers:
            headers['Authorization'] = 'Bearer %s' % self.token
        for hname, hvalue in headers.items():
            curl_command.append('-H')
            curl_command.append('%s: %s' % (hname, hvalue))

        url = self.config['url'] + path
        curl_command.append(url)
        if payload is not None:
            curl_command.append('-d')
            data = "%s" % json.dumps(payload)
            curl_command.append(data)

        signal.alarm(20000)  # timeout in seconds
        process = Popen(curl_command, stdout=PIPE, stderr=PIPE)
        try:
            stdout, stderr = map(to_unicode, process.communicate())
            signal.alarm(0)  # reset alarm for future use...
            process.wait()
        except:  # timeout!!!
            process.terminate()
            raise

        # raw data
        response = self.Response()
        response.stderr = stderr
        response.stdout = stdout
        response.exit_status = process.returncode

        # curl messages
        lines = [l[2:] for l in stderr.splitlines() if l and l[0] == '*']
        response.curl_messages = "\n".join(lines)

        # request headers
        lines = [l[2:] for l in stderr.splitlines() if l and l[0] == '>']
        response.request_headers = "\n".join(lines)

        # response headers
        lines = [l[2:] for l in stderr.splitlines() if l and l[0] == '<']
        response.headers = "\n".join(lines)

        if not response.headers:
            if exit_on_fail:
                msg = "tst: can't connect to server"
                _assert(False, msg)

            raise ConnectionFail("can't connect to tst online")

        # body
        response_lines = response.headers.splitlines()
        response.status_code = None
        for i in range(len(response_lines) - 1, -1, -1):
            if response_lines[i].startswith("HTTP"):
                status_line = response_lines[i]
                response.status_code = int(status_line.split()[1])
                break

        # exit_on_fail
        if exit_on_fail and not (response.status_code and 200 <= response.status_code < 300):
            msg = 'Request to server failed'
            try:
                data = json.loads(response.stdout)
                if 'messages' in data and isinstance(data['messages'], list):
                    msg += "\nServer message: " + str(data['messages'][0])
            except:
                data = {}
                msg += ('\n' + "Couldn't parse server response")

            cprint(LRED, msg)
            if 'messages' in data and data['messages'][0] == 'invalid token':
                print("---")
                print("Use `tst login` to log in to the server")

            sys.exit(1)

        response.body = stdout if response.status_code else None

        return response
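Since curl runs with -L, the joined '<' lines can contain one header block per redirect, which is why the loop above scans backwards for the last "HTTP" status line. A small standalone sketch of that step (the helper name is ours, not part of the original):
def final_status_code(response_headers: str):
    """Return the status code of the last HTTP status line, or None."""
    for line in reversed(response_headers.splitlines()):
        if line.startswith("HTTP"):
            return int(line.split()[1])
    return None

# Two header blocks, as produced by a single redirect followed with -L:
redirected = ("HTTP/1.1 302 Found\n"
              "Location: /login\n"
              "HTTP/1.1 200 OK\n"
              "Content-Type: application/json")
assert final_status_code(redirected) == 200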
Example #37
                continue
            pos = map_move[inp]
            try:
                state.move(pos)
            except ValueError:
                cprint('Invalid position', RED)
                input()
                continue

    screen.render(state)
    winner = state.winner()

    if winner == -1:
        cprint('Tie!\n', YELLOW)
    else:
        cprint(f'Player {winner} wins!\n', GREEN)


if __name__ == '__main__':
    parser = ArgumentParser()
    parser.add_argument('--turn',
                        default=1,
                        type=int,
                        help='Your turn (1 or 2)')
    args = parser.parse_args()

    if args.turn not in [1, 2]:
        cprint('Invalid turn\n', RED)
    else:
        start(player_turn=args.turn)
Example #38
 def _unique(self, desc_rows=10):
     cprint("\n2. 统计类别数...")
     self.s_unique = self.df.nunique(dropna=False)[lambda x: x < 1024].sort_values()
     print(self.s_unique.head(desc_rows))
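A toy frame (column names invented for illustration) shows what this step computes: per-column unique counts with NaN included (dropna=False), columns with fewer than 1024 distinct values kept, sorted ascending.
import pandas as pd

df = pd.DataFrame({
    'gender': ['m', 'f', 'f', None],     # 3 distinct values (NaN counted)
    'city':   ['NY', 'LA', 'NY', 'NY'],  # 2 distinct values
    'uid':    [1, 2, 3, 4],              # 4 distinct values
})
s_unique = df.nunique(dropna=False)[lambda x: x < 1024].sort_values()
print(s_unique)
# city      2
# gender    3
# uid       4
# dtype: int64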
Example #39
 def view(self, link):
     """Get link content
     """
     self.link = link
     cprint(self, 'Start viewing at %r' % link)
     self.cont = openurlex(link).read()
Example #40
 def __init__(self):
     self.cont = ''
     self.link = ''
     cprint(self, 'Web spider initialized: %r' % self)
Example #41
def generate_data(
    *,
    gripper: "GripperModule",
    aux_gripper: Union["AuxGripperModule", None] = None,
    thrower: "ThrowerModule",
    image_transformer: "ImageTransformer",
    seeds: List[int],
    length: int,
    on_cuda: bool,
    workdir_path: str,
    forces: Union[None, List[float]] = None,
) -> DataPoints:
    """
        Generuje zbiór przykładów o zadanych parametrach:
        (seeds, length) z użyciem zadanych modułów.
        Parametr on_cuda kontroluje użycie akceleracji
        przy pomocy karty graficznej.
        Zwracany jest obiekt typu Dataset 
        zawierający wygenerowane przykłady.

        Jeśli podano parametr forces, to jest on
        użyty jako lista sił chwytu do przetestowania.
    """

    if (forces is None
            and aux_gripper is None) or (forces is not None
                                         and aux_gripper is not None):
        raise Exception(
            "Pass exactly one of the parameters: aux_gripper, forces")

    cprint("Generating data...")

    gripper.save(os.path.join(workdir_path, "gripper"))
    if aux_gripper is not None:
        aux_gripper.save(os.path.join(workdir_path, "aux-gripper"))
    thrower.save(os.path.join(workdir_path, "thrower"))
    save_obj(os.path.join(workdir_path, "image_transformer"),
             image_transformer)

    # We spawn a new Python process for each seed.
    # This is not optimal, but it sidesteps the problem of forking
    # child processes after PyTorch's internal structures
    # have already been initialized.
    for seed_idx, seed in enumerate(seeds):
        src_dir = os.path.dirname(__file__)
        os.system(f"""
cd {src_dir} ; 
python3 ./data.py \
    --seed {seed} \
    --length {length} \
    {"--on_cuda" if on_cuda else ""} \
    --gripper {gripper.name()} \
    {"--aux_gripper " + aux_gripper.name() if aux_gripper is not None else ""} \
    {"--forces '" + json.dumps(forces) + "'" if forces is not None else ""} \
    --thrower {thrower.name()} \
    --workdir_path {workdir_path} \
    --output_path {os.path.join(workdir_path, str(seed_idx))} &
        """)

    while True:
        time.sleep(5)
        if all([
                os.path.isfile(
                    os.path.join(workdir_path, str(seed_idx), "metadata"))
                for seed_idx in range(len(seeds))
        ]):
            break

    metadata = {
        "dataset_type": "composed",
        "part_dirpaths": [str(seed_idx) for seed_idx in range(len(seeds))],
        "seeds": seeds,
        "length": length,
        "thrower": thrower.name(),
        "gripper": gripper.name(),
        "image_transformer": image_transformer.name(),
    }
    if forces:
        metadata["forces"] = forces
    else:
        metadata["aux_gripper"] = aux_gripper.name()

    with open(os.path.join(workdir_path, "metadata"), "w") as f:
        json.dump(metadata, f)

    cprint("Data generated!")

    return construct_dataset(workdir_path)
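The completion check above is a plain poll-for-files loop; as a hedged refactoring sketch (the helper name is an assumption, not part of this module), it could be extracted like this:
import os
import time

def wait_for_files(paths, poll_seconds=5):
    """Block until every path in `paths` exists on disk."""
    paths = list(paths)
    while not all(os.path.isfile(p) for p in paths):
        time.sleep(poll_seconds)

# e.g. wait for every per-seed worker to write its metadata file:
# wait_for_files(os.path.join(workdir_path, str(i), "metadata")
#                for i in range(len(seeds)))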
Example #42
 def crawl(self):
     """Collect specific info from viewing
     different spider has different crawl way
     """
     cprint(self, 'Start crawling at %r' % self.link)
Example #43
 def __call__(self, *args, **kwargs):
     utils.cprint("Error: obsolete function '{}' was called!\n".format(self.func.__name__), "y")
     raise utils.ObsoleteError("'{}' is obsolete!".format(self.func.__name__))
Example #44
    def render(self, state: State):
        clear_screen()
        m = state.matrix

        dmap = lambda c: 'X' if c == 1 else 'O' if c == 2 else ' '

        for i in [0, 3, 6]:
            line = [dmap(m[i]), dmap(m[i + 1]), dmap(m[i + 2])]
            cprint(' | '.join(line) + '\n')
            if i < 6:
                cprint('--+---+--\n')

        print('\n\n')
        cprint('moves:\n\n', BLACK)

        cprint(' 7 | 8 | 9\n', BLACK)
        cprint('---|---|---\n', BLACK)
        cprint(' 4 | 5 | 6\n', BLACK)
        cprint('---|---|---\n', BLACK)
        cprint(' 1 | 2 | 3\n\n', BLACK)
Example #45
 def __call__(self, *args, **kwargs):
     utils.cprint("Warning: incomplete function '{}' was called.".format(self.func.__name__), "y")
     print()  # needed because cprint doesn't print newline char
     return self.func(*args, **kwargs)
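Examples #43 and #45 are __call__ methods of what look like class-based decorators; the surrounding class is not shown, so the sketch below (class name, plain print instead of utils.cprint) is an assumption about how such a decorator is typically wired up.
class incomplete:
    """Decorator that warns whenever a not-yet-finished function is called."""

    def __init__(self, func):
        self.func = func

    def __call__(self, *args, **kwargs):
        print("Warning: incomplete function '{}' was called.".format(self.func.__name__))
        return self.func(*args, **kwargs)

@incomplete
def parse_header(raw_bytes):
    ...  # body still to be written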