Example no. 1
    def init_data(self):
        print('Data initialization...')
        data = self.load_data()
        for d in data:
            req = Request('PUT', d)
            rdm_sockets = random.sample(list(self._sockets.keys()),
                                        self._options.k)

            for i in rdm_sockets:
                cur_socket = self._sockets[i]

                # Send request's OPTIONS
                cur_socket.send(req.get_options())
                cur_socket.recv(100)

                # Send request
                cur_socket.send(req.get_request())

                # Receive response's OPTIONS
                res_options = pickle.loads(cur_socket.recv(100))
                # Send confirmation
                confirm = Response()
                cur_socket.send(confirm.get_response())

                # Receive response
                res = pickle.loads(cur_socket.recv(res_options['res_size']))
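For context, a minimal sketch of what this Request wrapper might look like is shown below. The real class is not part of the example, so the attribute names and the pickled wire format are assumptions inferred from how get_options() and get_request() are used above (the peer apparently replies with pickled OPTIONS that include a 'res_size' field).

# Hypothetical sketch only; the actual Request class is not shown in the example.
import pickle

class Request:
    def __init__(self, req_type, payload=''):
        self.req_type = req_type
        self.payload = payload

    def get_request(self):
        # Pickled request body; the receiving side is assumed to unpickle it.
        return pickle.dumps({'type': self.req_type, 'payload': self.payload})

    def get_options(self):
        # OPTIONS go out first, presumably so the peer knows how many bytes to
        # read for the body (mirroring the 'res_size' field seen on responses).
        return pickle.dumps({'req_size': len(self.get_request())})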
Example no. 2
 def retrieve_data(self):
     try:
         request = Request()
         result = request.get(SERVICE_ENDPOINT)
         if result:
             return json.loads(result)
     except ConnectionError as e:
         msg = str(e)
         logging.error(msg)
Example no. 3
def add_uptime_information_entry():
    data = {'source': 'lasvegas'}

    try:
        request = Request()
        result = request.post(AWS_ENDPOINT, data)
        print(result)
    except ConnectionError as e:
        msg = str(e)
Example no. 4
    def test_pass(self, params_init):
        """
        Test case description: positive (happy-path) login case
        """
        self.request = Request()
        self.params = params_init.get_params_list('login')
        self.test_assert = Assertions()

        response = self.request.post_request(self.params['Login'][0]['url'],
                                             self.params['Login'][0]['data'],
                                             self.params['Login'][0]['header'])

        assert self.test_assert.assert_code(response['code'], 200)
        assert self.test_assert.assert_body(response['body'], 'msg', 'OK')
Example no. 5
    def _is_root_key_duplicated(self, payload):
        root_key, temp_result = payload.split(':', 1)[0].strip('"'), {}
        req = Request('GET', root_key)
        for ip in self._sockets.keys():
            self._stream(req, ip, temp_result)

        # If there is a record with a given key, it will be removed
        if 200 in temp_result:
            if not self._is_all_servers_available():
                raise Exception(
                    '[WARNING] Cannot perform PUT operation in order to update a record. One or more servers are unavailable!'
                )
            else:
                req = Request('DELETE', root_key)
                for ip in self._sockets.keys():
                    self._stream(req, ip, {})
Example no. 6
 def getThreadsInTopic(self, topicUrl):
     '''
     Get all threads in a topic
     '''
     
     res = []
     #html = urllib.urlopen(topicUrl).read()
     html = Request.get_page_content(topicUrl)
     soup = BeautifulSoup(html)
     
     threads = soup.find('ol', {'id' : 'threads'})        
     if threads:                    
         for thread in threads.findAll('li', {'class' : 'threadbit '}):                            
             tLink = thread.find('a', {'class' : 'title'})  
             if tLink :                        
                 tUrl  = tLink['href']
                 if tUrl:
                     pos = tUrl.find('?s=') # strip the trailing '?s=...' query string
                     if pos != -1:
                         tUrl = tUrl[0:pos]                        
                     # prepend the base URL
                     tUrl = 'http://www.lamchame.com/forum/' + tUrl
                     
                     res.append(tUrl.encode('utf-8'))
                     #print tUrl 
                     #print '-----------'
     return res
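A side note on BeautifulSoup(html): if these crawlers run on bs4 (BeautifulSoup 4) rather than the legacy BeautifulSoup 3 API, the single-argument call emits a "no parser was explicitly specified" warning. A hedged sketch of the explicit form:

# Only relevant if bs4 is the library behind these examples; with the legacy
# BeautifulSoup 3 API the single-argument call above is the normal form.
from bs4 import BeautifulSoup

sample_html = '<ol id="threads"><li class="threadbit "></li></ol>'  # made-up markup
soup = BeautifulSoup(sample_html, 'html.parser')  # explicit parser, no warning
print(soup.find('ol', {'id': 'threads'}) is not None)  # True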
Example no. 7
 def getThreadsInTopic(self, topicUrl):
     '''
     Get all the root threads in a topic
     '''
     res = []
     try:
         html = Request.get_page_content(topicUrl)
         soup = BeautifulSoup(html)
         ulThread = soup.find('ul', {'id' : 'threads'})
         #print ulThread
         if ulThread:
             threads = ulThread.findAll('li', {'class' : 'threadbit_nam_fix_select'})    
             if threads:                 
                 for thread in threads:
                     try:
                         #import pdb
                         #pdb.set_trace()
                         #print thread
                         tLink = thread.find('a', {'class' : 'title'})
                         if (tLink) :
                             tUrl = tLink['href']
                             res.append(tUrl)
                             #print tUrl
                             #print '--------------------'
                     except Exception, e:
                         print e.message
                         tb = traceback.format_exc()
                         print tb
     except Exception, e:
         print e.message
         tb = traceback.format_exc()
         print tb
     return res
Example no. 8
    def getThreadsInTopic(self, topicUrl):
        '''
        Get all threads in a topic
        '''

        res = []
        try:
            html = Request.get_page_content(topicUrl)
            soup = BeautifulSoup(html)
            
            threads = soup.find('ol', {'id' : 'threads'})        
            if threads:
                #find thread
                for thread in threads.findAll('h3', {'class' : 'threadtitle'}):
                    #print thread
                    tLink = thread.find('a', {'class' : 'title'})
                    if (tLink) :
                        tUrl  = tLink['href']
                        pos = tUrl.find('?s=')
                        if pos != -1:
                            tUrl = tUrl[0:pos]
                        tUrl = 'http://www.otofun.net/forums/' + tUrl
                        if tUrl not in res:
                            res.append(tUrl)
        except:
            print 'Error when getting threads in topic'
        return res               
Example no. 9
 def getAllTopics(self):
     '''
     Get the list of topics in the forum
     '''
     res = []
     try:
         baseUrl = 'http://www.webtretho.com/forum/f'
         
         url = 'http://www.webtretho.com/forum/search.php?search_type=1&contenttype=vBForum_Post'
         html = Request.get_page_content(url)
         soup = BeautifulSoup(html)
         
         soupCates = soup.find('select', {'id' : 'forumchoice'})
         cates = soupCates.findAll('option')
         for cate in cates:
             topicNumber = cate['value']
             
             if topicNumber.isdigit():                
                 topicUrl = baseUrl + topicNumber + '/'
                 res.append(topicUrl)
     except Exception, e:
         print e.message
         tb = traceback.format_exc()
         print tb
     return res
Example no. 10
 def getTotalPageInTopic(self, topicUrl):
     '''
         Get the total number of pages in a topic
     '''        
     total = 0
     try:
         html = Request.get_page_content(topicUrl)
         soup = BeautifulSoup(html)
         nav = soup.find('div', {'class' : 'threadpagenav'})
         if nav:
             lastPage = nav.find('span', {'class' : 'first_last'})
             if lastPage:
                 aLink = lastPage.find('a')
                 if aLink:
                     url = aLink['href']
                     pos1 = url.find('/page')
                     pos2 = url.find('?s=')
                     if pos1 != -1:
                         if pos2 != -1: # the '?s=' query string is present
                             page = url[pos1+5:pos2]
                         else :
                             page = url[pos1+5:]
                         total = int(page)
     except:
         print 'Error when getting total pages in topic'
     return total
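To make the slicing above concrete, here is a small worked example with a made-up URL; the real forum URLs are not shown, so the shape is an assumption based on the '/page' and '?s=' markers the code looks for.

# Worked example of the page-number extraction; the URL is invented.
url = 'showthread.php/12345-some-thread/page7?s=abcdef'
pos1 = url.find('/page')       # start of the '/page' marker
pos2 = url.find('?s=')         # start of the session query string, or -1
page = url[pos1 + 5:pos2]      # characters between '/page' and '?s=' -> '7'
print(int(page))               # 7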
Example no. 11
    def getThreadDetail(self, url, fileContent = '', page = 1):

        res = ()
        html = ''
        try:
            html = Request.get_page_content(url);
        except Exception, e:
            return None
Example no. 12
    def _heart_beat(self):
        req = Request('HEART_BEAT')

        ip_address = list(self._sockets.keys())
        for ip in ip_address:
            try:
                # Send request's OPTIONS
                self._sockets[ip].send(req.get_options())
                pickle.loads(self._sockets[ip].recv(100))

                # Send request
                self._sockets[ip].send(req.get_request())
                # Receive response
                pickle.loads(self._sockets[ip].recv(100))

            except socket.error as e:
                print(f'[Error] {e}')
            except EOFError as e:
                del self._sockets[ip]
Example no. 13
def is_reachable():
    status = None
    success = False

    try:
        request = Request()
        status = retrieve_status(request)
        success = status['success']
    except ConnectionError as e:
        msg = str(e)

    return success
Example no. 14
    def getThreadDetail(self, url):
        res = {}
        
        try :
            html = Request.get_page_content(url)
            soup = BeautifulSoup(html)
            #print soup
            postContainer = soup.find('ol', {'id' : 'posts'})
            
            title = soup.find('span', {'class' : 'threadtitle'}).get_text().strip()
            
            posts = postContainer.findAll('li', {'class' : 'postbit postbitim postcontainer'})
            
            count = 0
            comments = []
            for post in posts:
                
                #print post
                count += 1 
                                   
                postContent = post.find('blockquote', {'class' : 'postcontent restore'})
                postContent = postContent.get_text()
                #postContent = re.sub('<br/>+', '_NEW_LINE_', postContent)
                postContent = re.sub('[\t]+', ' ', postContent)
                postContent = re.sub('[ ]+', ' ', postContent)
                postContent = re.sub('[\\r\\n]+', '\n', postContent)
                postContent = postContent.strip()            
                
                # date information
                dateInfo = post.find('span', {'class' : 'postdate'}).get_text().strip()
                dateInfo = re.sub('\s+', ' ', dateInfo)

                # user information
                userInfo = post.find('div', {'class' : 'username_container'}).find('strong').get_text().strip()
                
                info = {'user' : userInfo, 'date': dateInfo, 'content' : postContent}
                
                if count == 1: # post
                    postInfo = info;
                    postInfo['title'] = title
                    res['post']= postInfo
                else :  # comment
                    comments.append(info)
                    
            res['comments'] = comments
            
        except:
            #print 'ERROR when crawling URL : ' , url
            print 'ERROR when getting thread detail'
        
        return res    
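The dictionary returned above roughly has the following shape (a sketch based on the keys assigned in the loop; all values are placeholders, not real data):

# Illustrative shape of the result built above; all values are placeholders.
res = {
    'post': {
        'user': 'original_poster',
        'date': '01-01-2015, 10:00',
        'content': 'first post text',
        'title': 'thread title',
    },
    'comments': [
        {'user': 'commenter', 'date': '01-01-2015, 11:00', 'content': 'reply text'},
    ],
}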
Example no. 15
 def getAllTopics(self):
     res = []
     
     baseUrl = 'http://www.lamchame.com/forum/forumdisplay.php/'
     
     searchUrl = 'http://www.lamchame.com/forum/search.php?search_type=1&contenttype=vBForum_Post'
     html = Request.get_page_content(searchUrl)
     soup = BeautifulSoup(html)
     
     soupCates = soup.find('select', {'id' : 'forumchoice'})
     cates = soupCates.findAll('option')
     for cate in cates:
         topicNumber = cate['value']            
         if topicNumber.isdigit():                
             title = cate.string.strip()
             topicUrl = baseUrl + topicNumber + '-' + title.replace(' ', '-')
             res.append(topicUrl.encode('utf-8'))
     return res
Example no. 16
class TestLogin():
    @allure.severity('登陆')  # severity label: 'login'
    @allure.story('登陆正向用例')  # story: positive login case
    @pytest.mark.skip('不执行登录用例')  # skip reason: do not run the login case
    def test_pass(self, params_init):
        """
        Test case description: positive (happy-path) login case
        """
        self.request = Request()
        self.params = params_init.get_params_list('login')
        self.test_assert = Assertions()

        response = self.request.post_request(self.params['Login'][0]['url'],
                                             self.params['Login'][0]['data'],
                                             self.params['Login'][0]['header'])

        assert self.test_assert.assert_code(response['code'], 200)
        assert self.test_assert.assert_body(response['body'], 'msg', 'OK')
Example no. 17
 def getTotalPageInTopic(self, topicUrl):
     '''
     Get the total number of pages in a topic
     '''
     total = 0
     try:
         html = Request.get_page_content(topicUrl)
         soup = BeautifulSoup(html)
         pageNav = soup.find('div', {'class' : 'threadpagenav'})
         if pageNav:
             lastPage = pageNav.find('span', {'class' : 'first_last1'})
             if lastPage:
                 #print lastPage.string
                 total = int(lastPage.string)
     except:
         print 'Error when getting total pages'
     
     return total
Example no. 18
 def getAllTopics(self):
     res = []
     
     baseUrl = 'http://www.otofun.net/'
     url = 'http://www.otofun.net/forum.php'
     
     html = Request.get_page_content(url)
     soup = BeautifulSoup(html)
     links = soup.findAll('a')
     for link in links:
         if link['href'].startswith('forums/'):
             u = link['href']
             pos = u.find('?s=')
             if pos != -1:
                 u = u[0:pos]
             u = baseUrl + u
             
             res.append(u)
     return res  
Example no. 19
 def getAllTopics(self):
     res = []
     try:
         baseUrl = 'http://www.otofun.net/'
         url = 'http://www.otofun.net/forum.php'
         
         html = Request.get_page_content(url)
         soup = BeautifulSoup(html)
         links = soup.findAll('a')
         for link in links:
             if link['href'].startswith('forums/'):
                 u = link['href']
                 pos = u.find('?s=')
                 if pos != -1:
                     u = u[0:pos]
                 u = baseUrl + u                
                 res.append(u.encode('utf-8'))
     except:
         print 'Error when getting all topics'
     return res  
Example no. 20
 def getTotalPageInThread(self, url):
     '''
         Get the total number of pages in a thread
     '''
     total = 1
     html = Request.get_page_content(url);
     soup = BeautifulSoup(html)
     pageInfo = soup.find('div', {'class' : 'pageRefix'})
     if pageInfo:
         lastPage = pageInfo.find('a', {'class' : 'arrowLstPage'})
         if lastPage:
             #print lastPage
             link = lastPage['href']
             #print link
             pos = link.find('.html')  # find() returns -1 if '.html' is absent

             if pos != -1:
                 p = re.compile("index(\d+).html")
                 a =  p.search(link)
                 #print link[pos-1:]
                 total = a.group(1)
                 total = int(total)
                 #print total
     return total
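A quick check of the regex above with an invented link (the real pagination URLs are not shown, so the 'indexN.html' pattern is taken from the compile call itself):

# Worked example of the last-page extraction; the link is invented.
import re

link = 'some-topic/index12.html'
match = re.compile(r"index(\d+).html").search(link)
print(int(match.group(1)))  # 12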
Example no. 21
    def getThreadDetail(self, url):
        '''
            Get detailed information about a thread
        '''
        #res = { 'post' : {'user' : '', 'title': '', 'date' : '', 'content' : ''}, 'comments' : [{'user' : '', 'date': '', 'content' : ''}] }
        res = {}
        try:
            html = Request.get_page_content(url);
            soup = BeautifulSoup(html)        
            soupPost = soup.find('ol', {'id' : 'posts'})
            
            title = soup.find('div', {'id' : 'widgetRefix'}).find('h1').get_text().strip()
            #print title
                
            # list posts
            posts = soupPost.findAll('li', {'class' : 'postbit postbitim postcontainer'})
            #print posts
            #print len(posts)
            #print posts.size()            
            count = 0
            comments = []
                        
            for post in posts:
                
                #print post
                count += 1 
                                   
                postContent = post.find('blockquote', {'class' : 'postcontent restore'})
                #postContent = postContent.renderContents()
                postContent = postContent.get_text()
                #print postContent
                #print '----------------------'
                postContent = re.sub('[\t]+', ' ', postContent)
                postContent = re.sub('[ ]+', ' ', postContent)
                postContent = re.sub('[\\r\\n]+', '\n', postContent)
                postContent = postContent.strip()
                #print soup2.get_text().strip()
                
                #print postContent
                
                # date information
                dateInfo = post.find('span', {'class' : 'postdate'}).get_text().strip()
                dateInfo = re.sub('\s+', ' ', dateInfo)
                #print dateInfo

                # user information
                userInfo = post.find('div', {'class' : 'username_container'}).get_text().strip()
                #print userInfo
                
                info = {'user' : userInfo, 'date': dateInfo, 'content' : postContent}
                
                if count == 1: # post
                    postInfo = info;
                    postInfo['title'] = title
                    res['post']= postInfo
                else :
                    comments.append(info)
                #print '--------------------------------'
                
            res['comments'] = comments
            #print res
            
        except Exception, e:
            print e.message
            tb = traceback.format_exc()
            print tb

        return res
Example no. 22
def request_init():
    return Request()
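If request_init is meant to be injected into tests the way params_init is consumed in the login examples above, it would typically be registered as a pytest fixture; the decorator, the conftest.py placement and the import path below are assumptions, since the surrounding file is not shown.

# Hypothetical conftest.py sketch; fixture registration and import path are guesses.
import pytest
from common.request import Request  # assumed import path for the Request wrapper


@pytest.fixture
def request_init():
    return Request()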
Example no. 23

if len(sys.argv) < 2:
    print Colors.FAIL + "Please enter a hero name. Eg. 'Faceless Void'" + Colors.ENDC
else:
    hero_name = sys.argv[1]

    client = MongoClient('localhost', 27017)
    print Colors.OKGREEN + "Connected to MongoDB." + Colors.ENDC
    db = client.assignment_1

    heroes_coll = db.heroes
    benchmarks_coll = db.hero_benchmarks
    matchups_coll = db.hero_matchups

    # time.sleep(5) # hack for the mongoDb database to get running

    scrapper = Request()
    heroes = scrapper.getHeroes()

    get_heroes(db)

    selected_hero = get_selected_hero()
    if selected_hero is not None:
        print Colors.OKGREEN + "\nDisplaying Hero Data for " + hero_name + ": " + Colors.ENDC
        print pd.DataFrame(selected_hero)
        selected_hero_id = selected_hero['id']

        get_hero_benchmark()
        get_hero_matchups()
Example no. 24
    def start_shell(self):
        while True:
            cmd = input('kvStore> ').strip()

            if not cmd:
                continue

            if cmd == '!DISCONNECT':
                sys.exit('kvBroker has been disconnected! Bye!')

            else:
                self._heart_beat()
                is_replication_valid = self._is_replication_valid()
                try:
                    req_type, payload = cmd.split(' ', 1)
                except ValueError as e:
                    print('[Error] Payload is missing!')
                    continue

                if req_type == 'PUT':
                    if not self._is_replication_supported():
                        print(
                            '[WARNING] Not enough servers to support replication! Please restart the servers!'
                        )
                        continue

                    # Check if a record with the given root key already exists
                    try:
                        self._is_root_key_duplicated(payload)
                    except Exception as e:
                        print(e)
                        continue

                    # Create request
                    req = Request(req_type, payload)
                    # Select random sockets
                    rdm_sockets = random.sample(list(self._sockets.keys()),
                                                self._options.k)

                    # Send request to the selected servers
                    result = {}
                    for ip in rdm_sockets:
                        self._stream(req, ip, result)

                    self.print_result(payload, result)

                elif req_type == 'GET' or req_type == 'QUERY':
                    result = {}
                    req = Request(req_type, payload)

                    for ip in self._sockets.keys():
                        self._stream(req, ip, result)

                    if not is_replication_valid:
                        print(
                            f'[WARNING] Data may be inconsistent. {self._options.k} or more servers are unavailable!'
                        )

                    self.print_result(payload, result)

                elif req_type == 'DELETE':
                    if not self._is_all_servers_available():
                        print(
                            '[WARNING] Cannot perform delete operation. One or more servers are unavailable!'
                        )
                        continue

                    result = {}
                    req = Request(req_type, payload)
                    for ip in self._sockets.keys():
                        self._stream(req, ip, result)

                    self.print_result(payload, result)

                else:
                    print('[INFO] Invalid request type!')
                    print('[INFO] Supported types: PUT | GET | QUERY | DELETE')
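Based on the parsing above (cmd.split(' ', 1) into a request type and a payload), a session with this shell could look roughly like the lines below. The exact key/value payload syntax is an assumption, apart from the quoted root key implied by payload.split(':', 1)[0].strip('"') in _is_root_key_duplicated and the literal !DISCONNECT command.

kvStore> PUT "person1": {"name": "John", "age": 22}
kvStore> GET "person1"
kvStore> QUERY "person1"
kvStore> DELETE "person1"
kvStore> !DISCONNECT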
Example no. 25
    def getThreadsInTopic(self, topicUrl, page = 1):
        '''
        Get all threads in a topic
        '''

        res = {}
        html = Request.get_page_content(topicUrl)
        soup = BeautifulSoup(html)
        
        threads = soup.find('ol', {'id' : 'threads'})        
        if threads:
            #find thread
            for thread in threads.findAll('h3', {'class' : 'threadtitle'}):
                #print thread
                tLink = thread.find('a', {'class' : 'title'})
                if (tLink) :
                    tUrl  = tLink['href']
                    pos = tUrl.find('?s=')
                    if pos != -1:
                        tUrl = tUrl[0:pos]
                    tUrl = 'http://www.otofun.net/forums/' + tUrl
                    if tUrl not in self.listUrl:
                        self.listUrl.append(tUrl)
                        print 'Thread ',tUrl
                        print tLink.string
                        print '==========================='

                        query = u"INSERT INTO url SET url = '%s', domain = '%s', created_date = UNIX_TIMESTAMP(NOW()), modified_date = UNIX_TIMESTAMP(NOW()), done = 0, running = 0" %(tUrl, 'otofun.net')
                        self.dbCursor.execute(query)
                        self.connection.commit()

                        # if self.COUNT_TOPIC % 100 == 0:
                        #     import os;
                        #     if not os.path.exists(self.dataDir + str(self.COUNT_TOPIC) + '-' + str(self.COUNT_TOPIC + 100)):
                        #         os.makedirs(self.dataDir + str(self.COUNT_TOPIC) + '-' + str(self.COUNT_TOPIC + 100))
                        #     self.dataFolder = self.dataDir + str(self.COUNT_TOPIC) + '-' + str(self.COUNT_TOPIC + 100)
                        #
                        # self.getThreadDetail(tUrl)
                        # self.COUNT_TOPIC += 1
                        #
                        # print 'Topic number: ', self.COUNT_TOPIC
                        # print 'Post number: ', self.COUNT_POST
                    time.sleep(1)
                print '-----------'

        #find sub-topic
        topics = soup.find('ol', {'class' : 'subforumlist'})
        if topics:
            for topic in topics.findAll('li', {'class' : 'subforum'}):
                tLink = topic.find('a')
                if (tLink) :
                    tUrl  = tLink['href']
                    if tUrl not in self.listUrl:
                        self.listUrl.append(tUrl)
                        if 'http://' not in tUrl:
                            tUrl = self.baseUrl + tUrl
                        print 'Topic Level- ', page, tUrl
                        print tLink.string
                        self.getThreadsInTopic(tUrl)
                print '-----------'
        page = page + 1
        if '?' in topicUrl:
            topicUrl = topicUrl + '&page=' + str(page)
        else:
            topicUrl = topicUrl + '?page=' + str(page)
        self.getThreadsInTopic(topicUrl, page)
        return res
Example no. 26
import asyncio
from config import TEAM_ID
from utils.Request import Request

request = Request()

class Api:
  @staticmethod
  async def getPlayer(nickname):
    return await request.get('/players?nickname=' + nickname + '&game=csgo')

  @staticmethod
  async def getPlayerStats(playerId):
    return await request.get('/players/' + playerId + '/stats/csgo')

  @staticmethod
  async def getTeamMembers():
    return await request.get('/teams/' + TEAM_ID)

  @staticmethod
  async def getPlayerMatch(playerId, rightBound):
    return await request.get('/players/' + playerId + '/history?game=csgo&from=' + str(rightBound) + '&offset=0&limit=1')

  @staticmethod
  async def getMatchInfo(gameId):
    return await request.get('/matches/' + gameId)

  @staticmethod
  async def getMatchStats(gameId):
    return await request.get('/matches/' + gameId + '/stats')
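A small usage sketch for the Api class above; asyncio.run drives one of the coroutines, and the nickname value is an invented placeholder.

# Hypothetical usage; the nickname is a placeholder, not a real player.
async def main():
    player = await Api.getPlayer('some_nickname')
    print(player)

if __name__ == '__main__':
    asyncio.run(main())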