Code example #1
 def load(self, loadUrl):
     o = urlparse(loadUrl)
     print o.scheme
     print o.port
     print o
     o = urlsplit(loadUrl)
     print o.geturl()
     reader = Fetch()
     htmlFile = reader.get(loadUrl)
     soup = BeautifulSoup(htmlFile)
     form = soup.find('form')
     print form
     print form['action']
     print form['method']
     #print form['onsubmit']
     if 'action' in form:
         print form['action']
     if 'method' in form:
         print form['method']
     if 'onsubmit' in form:
         print form['onsubmit']
     data = {"username": "******", "password": "******"}
     response = reader.post('https://210.154.183.61:4443/mantis/login.php',
                            data)
     print response
Code example #2
    def create_socket(self):
        # Get info about our authentication
        self.id = 1234
        self.token = 1234
        parent_self = self
        parent_self.callbacks = 2

        def callback():
            parent_self.callbacks -= 1
            if parent_self.callbacks == 0:
                parent_self.socket = MessageSocket(lambda message: parent_self.handle_message(message),
                                                   parent_self.token, parent_self.id)

        def data_callback(data):
            parent_self.id = data['id']
            callback()

        def auth_callback(data):
            parent_self.token = data['token']
            callback()

        Fetch.get("data/") \
            .then(lambda response: response.json()) \
            .then(lambda data: data_callback(data))
        Fetch.get("auth/") \
            .then(lambda response: response.json()) \
            .then(lambda data: auth_callback(data))
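
Code example #2 above assumes a promise-style Fetch.get() that returns an object exposing .then(), which plain Python does not provide. The sketch below shows one minimal, synchronous way such a wrapper could look; the Promise helper and the use of the requests library are illustrative assumptions, not the project's actual implementation.

import requests

class Promise:
    """Tiny synchronous stand-in for a JS-style promise chain."""
    def __init__(self, value):
        self._value = value

    def then(self, func):
        # Apply func to the wrapped value and return a new Promise so calls can chain.
        return Promise(func(self._value))

class Fetch:
    @staticmethod
    def get(url):
        # Perform the HTTP GET eagerly and wrap the response for chaining.
        return Promise(requests.get(url))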
Code example #3
 def __init__(self, auth:str, logger=logging.getLogger()):
     self.auth = auth
     self.logger = logger
     self.fetch = Fetch(auth, logger)
     self.user = self.fetch.fetch("/v1/me")
     self.userId = self.user["id"]
     self.imageUrl = self.user["images"][0]["url"]
     self.market = self.user["country"]
     logger.debug("Initialized Spotify: {0} @ {1}".format(self.userId, self.market))
     self.fetch.fetch("/v1/me/tracks", limit=1, market=self.market)
Code example #4
    def __init__(self):

        self.config = {
            "max_result": 1000,
            "topic_score": 10.0,
            "threshold": 1.0,
            "base_url": "http://export.arxiv.org/api/query?",
        }
        self.config = Bunch(self.config)
        self.fetch = Fetch(self.config)
Code example #5
    def subscribe_to_feed(self, feed_url):
        feed_id = db.get_feed_id(feed_url)

        if feed_id < 0:
            feed_fetch = Fetch(feed_url)
            feed_id = db.create_new_feed(feed_fetch.get_feed_details())
            db.add_new_item(feed_fetch.get_entries())

        user_feed_id = db.add_user_to_feed(self.uid, feed_id)

        return feed_id
Code example #6
File: user.py Project: argvk/Rsswala
    def subscribe_to_feed(self, feed_url):
        feed_id = db.get_feed_id(feed_url)

        if feed_id < 0:
            feed_fetch = Fetch(feed_url)
            feed_id = db.create_new_feed(feed_fetch.get_feed_details())
            db.add_new_item(feed_fetch.get_entries())

        user_feed_id = db.add_user_to_feed(self.uid, feed_id)

        return feed_id
Code example #7
File: fetch_test.py Project: mew7wo/task_api
class FetchTestCase(unittest.TestCase):
    def setUp(self):
        self.fetch = Fetch(username='******', pw='liumengchao')

    def test_get(self):
        resp = self.fetch.get('http://www.douban.com/update/')
        print resp
        resp = self.fetch.get('http://www.douban.com/fdsafdsafdsa/')
        print resp
        resp = self.fetch.get('dajfdsoajfeowaof')
        print resp
Code example #8
File: camera.py Project: nickklein/life-automation
    def handle(self, deviceJobId):
        cameraFolder = os.listdir(configCamera['CAMERA_FROM_FOLDER'])
        # make sure it's not empty
        if cameraFolder:
            hashname = self.compressFiles()
            checksum = self.checksum(configCamera['CAMERA_TEMP_FOLDER'] +
                                     hashname + ".7z")
            self.uploadFiles(hashname, checksum)
            self.cleanFolders(cameraFolder, hashname)

        Fetch.patch(config['API_URL'] + "/api/device/" + str(deviceJobId) +
                    "/jobs/update?status=done")
Code example #9
File: execute.py Project: ddusek/everyday_sheets
def execute():
    """Run script every X minutes.
    """
    # Get google api credentials.
    my_creds = Creds()

    # Construct a service for interacting with sheets api.
    service = build('sheets', 'v4', credentials=my_creds.creds)

    print(CONSOLE_COLOR_START_DONE + f'{datetime.now()} running script' +
          CONSOLE_ENDC)
    main_start = timer()

    # Create new spreadsheet.
    start = timer()
    print('creating spreadsheet...')
    sheet = Spreadsheet(service, f'everyday_sheet{datetime.now()}')
    print(CONSOLE_COLOR +
          f'new sheet created in {round(timer() - start, 3)}s' + CONSOLE_ENDC)

    # Fetch data from all data sources defined in json file.
    fetcher = Fetch('reddit_inputs.json')
    start = timer()
    print('fetching data...')
    data = fetcher.fetch_all()
    print(CONSOLE_COLOR + f'data fetched in {round(timer() - start, 3)}s' +
          CONSOLE_ENDC)

    # Convert data into format needed for spreadsheet.
    sheet.convert_data(data)

    # Insert data into spreadsheet.
    start = timer()
    print('inserting data into spreadsheet...')
    sheet.insert_data()
    print(CONSOLE_COLOR +
          f'data inserted into a sheet in {round(timer() - start, 3)}s' +
          CONSOLE_ENDC)

    # Adjust columns size.
    sheet.set_col_size()

    print(CONSOLE_COLOR +
          f'elapsed time since start {round(timer() - main_start, 3)}s' +
          CONSOLE_ENDC)

    print('added ', datetime.now())
    print(CONSOLE_COLOR_START_DONE + f'{datetime.now()} script finished' +
          CONSOLE_ENDC)

    return f'https://docs.google.com/spreadsheets/d/{sheet.sheet_id}/edit#gid=0'
Code example #10
File: document.py Project: leonardleonard/spyder
    def fetchListPages(self, listtype="html"):
        print "Start to fetch and parse List"
        urls = self.listRule.getListUrls()
        for url in urls:
            print "Fetching list page: ", url, "charset:", safestr(self.seed["charset"]), "timeout:", safestr(self.seed["timeout"])
            f = Fetch(url, charset=self.seed["charset"], timeout=self.seed["timeout"])
            if f.isReady():
                doc = f.read()

                if listtype == "html":
                    self.parseListPage(f, doc, url)
                elif listtype == "json":
                    self.parseJsonPage(f, doc, url)

        print "List has finished parsing. It has %s docs." % ansicolor.red(self.__len__())
Code example #11
class ClusterTopic:
    def __init__(self):

        self.config = {
            "max_result": 1000,
            "topic_score": 10.0,
            "threshold": 1.0,
            "base_url": "http://export.arxiv.org/api/query?",
        }
        self.config = Bunch(self.config)
        self.fetch = Fetch(self.config)

    @staticmethod
    def format_json(papers):
        json_out = {"payload": []}
        for topic, paper_data in papers.items():
            _paper_dict = []
            for paper, score in paper_data:
                _paper_dict.append({
                    "title": paper.title,
                    "url": paper.id,
                    "summary": paper.summary,
                    "author": "",
                    "score": score,
                })
            json_out["payload"].append({topic: _paper_dict})
        return json_out

    def __call__(self, profile):
        papers = self.fetch._get_clusterd_papers(profile)
        papers_json = self.format_json(papers)
        return papers_json
Code example #12
 def fetchMedia(self):
     f = Fetch(self.mediaUrl)
     if f.connected:
         self.media = f.site
         self.mediaData = self.media.read()
         self.urlinfo = self.media.info()
         self.fetched = True
Code example #13
    def start_backup(self, info, deviceJobId):
        self.clean_up()
        # Only execute if there have been changes in one of the folders
        if info[0] != 0:
            Fetch.patch(config['API_URL'] + "/api/device/" + str(deviceJobId) +
                        "/jobs/update?status=inprogress")
            self.compress_files(info[0])
            if info[2] == 'usb':
                self.copy_to_usb()
            if info[2] == 'ftp':
                self.upload_files()
            self.clean_up()

            # Save updated folder sizes to pickle file for next run
            folders_with_stat = self.get_folder_stats()
            self.pickle_dump(info[1], folders_with_stat)
Code example #14
 def get_one_page(self, keyword, page):
     task_manager = TaskManager()
     task_manager.task_begin(keyword, page)
     res = Fetch.get_one_page_by_keyword(keyword, page)
     if res:
         task_manager.task_end(keyword, page)
     else:
         task_manager.task_failed(keyword, page)
Code example #15
File: books_client.py Project: mew7wo/task_api
 def __init__(self):
     self.__reset()
     self.__read_info()
     self._fetch = Fetch(username='******', pw='liumengchao')
     self._tasks_url = 'http://localhost:8080/id/books/'
     self._url = 'https://api.douban.com/v2/book/user/%s/collections?count=%d&start=%d'
     self._upload_url = 'http://localhost:8080/upload/'
     logging.basicConfig(filename='user_books_error.log', filemode='a+', level=logging.ERROR)
Code example #16
File: followed_client.py Project: mew7wo/task_api
 def __init__(self):
     self.__reset()
     self.__read_info()
     self._fetch = Fetch(username='******', pw='liumengchao')
     self._tasks_url = 'http://localhost:8080/id/followed/'
     self._url = 'http://www.douban.com/people/%s/contacts'
     self._upload_url = 'http://localhost:8080/upload/'
     logging.basicConfig(filename='followed_error.log', filemode='a+', level=logging.ERROR)
Code example #17
    def parseFeed(self):
        print "Start to fetch and parse Feed list"
        seed = self.seed
        f = Fetch(seed.prefixurl, seed.charset, self.seed.timeout)
        if f.isReady():
            feed = feedparser.parse(f.read())
            items = feed["entries"]
            if len(items) > 0:
                for item in items:
                    _item = Item({"url": item["link"], "type": self.seed_type})

                    if self.guid_rule is None:
                        self.guid_rule = "url"

                    guid = self.getItemGUID(item)
                    self.items[guid] = _item

        print "List has finished parsing. It has %s docs." % ansicolor.red(
            self.__len__())
Code example #18
    def fetchListPages(self, listtype="html"):
        print "Start to fetch and parse List"
        urls = self.listRule.getListUrls()
        for url in urls:
            print "Fetching list page: ", url, "charset:", safestr(
                self.seed["charset"]), "timeout:", safestr(
                    self.seed["timeout"])
            f = Fetch(url,
                      charset=self.seed["charset"],
                      timeout=self.seed["timeout"])
            if f.isReady():
                doc = f.read()

                if listtype == "html":
                    self.parseListPage(f, doc, url)
                elif listtype == "json":
                    self.parseJsonPage(f, doc, url)

        print "List has finished parsing. It has %s docs." % ansicolor.red(
            self.__len__())
Code example #19
File: document.py Project: leonardleonard/spyder
    def parseFeed(self):
        print "Start to fetch and parse Feed list"
        seed = self.seed
        f = Fetch(seed.prefixurl, seed.charset, self.seed.timeout)
        if f.isReady():
            feed = feedparser.parse(f.read())
            items = feed["entries"]
            if len(items) > 0:
                for item in items:
                    _item = Item({
                        "url": item["link"],
                        "type": self.seed_type
                    })

                    if self.guid_rule is None:
                        self.guid_rule = "url"

                    guid = self.getItemGUID(item)
                    self.items[guid] = _item

        print "List has finished parsing. It has %s docs." % ansicolor.red(self.__len__())
Code example #20
File: dollybs.py Project: kavinsivak/sandbox
 def load(self, loadUrl):
     o = urlparse(loadUrl)
     print o.scheme
     print o.port
     print o
     o = urlsplit(loadUrl)
     print o.geturl()
     reader = Fetch()
     htmlFile = reader.get(loadUrl)
     soup = BeautifulSoup(htmlFile)
     form = soup.find("form")
     print form
     print form["action"]
     print form["method"]
     # print form['onsubmit']
     if "action" in form:
         print form["action"]
     if "method" in form:
         print form["method"]
     if "onsubmit" in form:
         print form["onsubmit"]
Code example #21
File: dollybs.py Project: gauryn/sandbox
 def load(self, loadUrl):
     o = urlparse(loadUrl)
     print o.scheme
     print o.port
     print o
     o = urlsplit(loadUrl)
     print o.geturl()
     reader = Fetch()
     htmlFile = reader.get(loadUrl)
     soup = BeautifulSoup(htmlFile)
     form = soup.find('form')
     print form
     print form['action']
     print form['method']
     #print form['onsubmit']
     if 'action' in form:
         print form['action']
     if 'method' in form:
         print form['method']
     if 'onsubmit' in form:
         print form['onsubmit']
Code example #22
File: camera.py Project: nickklein/life-automation
 def uploadFiles(self, hashname, checksum):
     files = {
         'file':
         open(configCamera['CAMERA_TEMP_FOLDER'] + hashname + '.7z', 'rb')
     }
     data = {
         'device_id': config['CLIENT_ID'],
         'type': 'camera',
         'filename': hashname + '.7z',
         'checksum': checksum,
         'status': 'done'
     }
     response = Fetch.post(config['API_URL'] + "/api/files/store", files,
                           data)
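
Code example #22 above (and the other life-automation snippets) invoke Fetch.post(url, files, data) and Fetch.patch(url) as static methods. The sketch below shows a thin wrapper over the requests library with that call shape; it is an assumption made for illustration, not the project's actual Fetch class.

import requests

class Fetch:
    """Hypothetical thin HTTP wrapper matching the call shapes used in these examples."""

    @staticmethod
    def post(url, files=None, data=None):
        # Multipart file upload plus form fields, as in Fetch.post(url, files, data).
        return requests.post(url, files=files, data=data)

    @staticmethod
    def patch(url, data=None):
        # Simple PATCH used for the job-status update endpoints.
        return requests.patch(url, data=data)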
Code example #23
    def set_wallpaper(self, img_path):
        if system == "Linux":
            if "~" in img_path:
                img_path = img_path.replace("~", HOME)
            if img_path == ".":
                img_path = start_path
            if os.path.isfile(img_path):
                self.linux_wallpaper(img_path)
            elif os.path.isfile(join(start_path, img_path)):
                self.linux_wallpaper(join(start_path, img_path))
            elif os.path.isdir(img_path):
                Fetch().custom_folder([img_path])
            elif os.path.isdir(join(start_path, img_path)):
                Fetch().custom_folder([join(start_path, img_path)])
            else:
                print(
                    f"{colors['red']}Error, file path not recognized{colors['normal']}"
                )

        else:
            print(
                f"{colors['red']}Sorry, your system is not supported yet.{colors['normal']}"
            )
Code example #24
    def __init__(self, deviceJobId):
        # The 'reset' argument resets the active pickle (in case uploads were aborted somehow).
        if len(sys.argv) > 1 and sys.argv[1] == 'reset':
            print('Reset pickle...')
            self.pickle_dump(configBackup["PICKLE_ACTIVE_FILEPATH"], 0)

        active_script = self.pickle_load(
            configBackup["PICKLE_ACTIVE_FILEPATH"])

        if active_script == 0:
            # Backup is now active. Prevent it from running again
            self.pickle_dump(configBackup["PICKLE_ACTIVE_FILEPATH"], 1)

            #FTP backup
            if self.check_conditions('ftp'):
                backup_info = self.check_filesizes(
                    configBackup["PICKLE_FTP_FILEPATH"], 'ftp')
                self.start_backup(backup_info, deviceJobId)

            #USB backup
            if self.check_conditions('usb'):
                backup_info = self.check_filesizes(
                    configBackup["PICKLE_USB_FILEPATH"], 'usb')
                self.start_backup(backup_info, deviceJobId)

            if self.check_conditions('ftp') or self.check_conditions('usb'):
                Fetch.patch(config['API_URL'] + "/api/device/" +
                            str(deviceJobId) + "/jobs/update?status=done")
                files = {}
                data = {'value': time.strftime('%Y-%m-%d %H:%M:%S')}
                Fetch.post(
                    config['API_URL'] + "/api/device/" +
                    str(config['CLIENT_ID']) +
                    "/settings/backup_last_synced/update", files, data)

            self.pickle_dump(configBackup["PICKLE_ACTIVE_FILEPATH"], 0)
            print('All done!')
Code example #25
File: dolly.py Project: kavinsivak/sandbox
 def load(self, loadUrl):
     o = urlparse(loadUrl)
     print o.scheme
     print o.port
     print o
     o = urlsplit(loadUrl)
     print o.geturl()
     reader = Fetch()
     htmlFile = reader.get(loadUrl)
     soup = BeautifulSoup(htmlFile)
     form = soup.find("form")
     print form
     print form["action"]
     print form["method"]
     # print form['onsubmit']
     if "action" in form:
         print form["action"]
     if "method" in form:
         print form["method"]
     if "onsubmit" in form:
         print form["onsubmit"]
     data = {"username": "******", "password": "******"}
     response = reader.post("https://210.154.183.61:4443/mantis/login.php", data)
     print response
Code example #26
    def sequetial(self, go_back):
        try:
            with open(wall_data_file, encoding="utf-8") as data:
                saved_walls = json.load(data)
        except (FileNotFoundError, ValueError):
            Fetch().wall_dl()
        """chooses the wallpaper in the order in which they were downloaded"""
        with open(wall_data_file, "r") as data:
            saved_walls = json.load(data)

        with open("point.pickle", "rb+") as wall_point:
            # selection_point stores the value of the current wallpaper
            # it is necessary so that wallpapers don't repeat
            selection_point = pickle.load(wall_point)

            if selection_point > len(saved_walls):
                selection_point = 1
            elif selection_point == len(saved_walls) and go_back == 1:
                selection_point -= 1
            elif selection_point == len(saved_walls) and go_back == 0:
                selection_point = 1
            elif (selection_point < len(saved_walls) and selection_point != 1
                  and go_back == 1):
                selection_point -= 1
            elif selection_point < len(saved_walls) and go_back == 0:
                selection_point += 1
            elif (selection_point < len(saved_walls) and selection_point == 1
                  and go_back == 0):
                selection_point += 1
            elif (selection_point < len(saved_walls) and selection_point == 1
                  and go_back == 1):
                selection_point = len(saved_walls)
            elif (selection_point < len(saved_walls) and selection_point == 0
                  and go_back == 0):
                selection_point = 1
            elif (selection_point < len(saved_walls) and selection_point == 0
                  and go_back == 1):
                selection_point = len(saved_walls)
            img_name = str(saved_walls.get(str(selection_point)))
        # the new value of selection point is stored for the next run
        print(f"selection point is {selection_point}")
        with open("point.pickle", "wb") as point:
            pickle.dump(selection_point, point)
        return join(pictures, str(img_name))
Code example #27
 def main_menu(self, message=message):
     refresh(message)
     choice = input(f"""{colors['green']}
             Welcome to Redpaper. This is a TUI used to
             control the underlying Redpaper program.
             Select an option:\n{colors['normal']}
         {colors['red']} 1 {colors['normal']}: {colors['blue']} Download wallpapers {colors['normal']} \n
         {colors['red']} 2 {colors['normal']}: {colors['blue']} Next wallpaper{colors['normal']}\n
         {colors['red']} 3 {colors['normal']}: {colors['blue']} Previous wallpaper{colors['normal']}\n
         {colors['red']} 4 {colors['normal']}: {colors['blue']} Settings{colors['normal']}\n
         {colors['red']} 5 {colors['normal']}: {colors['blue']} Help {colors['normal']}\n
         {colors['red']} x {colors['normal']}: {colors['blue']} exit {colors['normal']}\n
             >>>  """)
     if choice == "1":
         refresh(message)
         Fetch().wall_dl()
     elif choice == "2":
         message = f"{colors['green']} Changed wallpaper {colors['normal']}\n"
         refresh(message)
         img_path = WallSet().sequetial(0)
         WallSet().set_wallpaper(img_path)
         self.main_menu()
     elif choice == "3":
         message = f"{colors['green']} Changed wallpaper {colors['normal']}\n"
         refresh(message)
         img_path = WallSet().sequetial(1)
         WallSet().set_wallpaper(img_path)
         self.main_menu()
     elif choice == "4":
         message = ""
         Settings().main_settings()
     elif choice == "5":
         # TODO: create a help page
         message = "HELP\n"
         refresh(message)
         print(f"""
             {colors['green']}You can check the wiki for help:
             https://github.com/keystroke3/redpaper/wiki{colors['normal']}"""
               )
         self.main_menu()
     elif choice == "x" or choice == "X":
         clear()
     else:
         Home().main_menu()
Code example #28
 async def on_message(self, message):
     channel = message.channel
     channel.id = 842475416160698379
     if message.content.startswith('tag everyone'):
         self.members_to_ping = Fetch().return_data()
         print(self.members_to_ping)
         self.ids, self.members_to_tag = [], []
         for member in self.members:
             if member[2] in self.members_to_ping or member[
                     0] in self.members_to_ping:
                 self.ids.append(member[1])
                 self.members_to_tag.append(member[2] if member[2] in self.
                                            members_to_ping else member[0])
         print(self.members_to_tag)
         s = ''
         for members in self.ids:
             s += '<@{}>'.format(
                 members) if members == self.ids[-1] else '<@{}>,'.format(
                     members)
         await channel.send("{} Please report for your shift".format(s))
Code example #29
    def __init__(self, item, seed):
        '''
        document base url
        '''
        self.url = item["url"]

        self.data = item

        self.seed = seed

        item["tags"] = ",".join(self.seed.tags)

        # Article scraping rule
        self.articleRule = seed.getRule().getArticleRule()

        print "Document %s is fetcing" % ansicolor.green(self.url)
        firstContent = Fetch(self.url,
                             charset=seed["charset"],
                             timeout=seed["timeout"]).read()
        if firstContent:
            self.parseDocument(firstContent)
Code example #30
 async def on_ready(self):
     self.members = []
     self.required_members = []
     # for guild in self.guilds:
     #     for channel in guild.channels:
     #         print(channel.id,channel.name)
     for guild in self.guilds:
         data = guild.members
         for x in data:
             if x.name == 'REAPER':
                 print(x.name, x.id, x.nick)
             self.members.append([x.name, x.id, x.nick])
     print('Logged on as {0}!'.format(self.user))
     # await asyncio.sleep(60 * 50 * 2)
     self.members_to_ping = Fetch().return_data()
     print(self.members_to_ping)
     self.ids, self.members_to_tag = [], []
     for member in self.members:
         if member[2] in self.members_to_ping or member[
                 1] in self.members_to_ping:
             self.ids.append(member[1])
             self.members_to_tag.append(member[2])
Code example #31
from network import Network
from fetch import Fetch

co = Network()
co.send("Test\n")
num = co.receive()
dataFetching = Fetch(num)

data = co.receive()
print(dataFetching.fetchMessage(data))
Code example #32
File: kernel.py Project: veghen/Y86
    lst.write('E', cur.regE)
    lst.write('M', cur.regM)
    lst.write('W', cur.regW)


mem = Memory()
InsCode = {}
Init(InsCode, mem)
reg = Register()
pipereg = PipeRegister()
tmp_pipereg = PipeRegister()
CC = ConditionCode()
Stat = Status()

PC = 0
while Stat.stat == 'AOK':
    print 'Current Time:', PC
    tmp_pipereg = PipeRegister()
    Fetch(tmp_pipereg, InsCode[hex(PC)], PC)
    Decode(pipereg, tmp_pipereg, reg)
    Execute(pipereg, tmp_pipereg)
    Memory(pipereg, tmp_pipereg)
    WriteBack(pipereg, tmp_pipereg)
    PC = pipereg.regF['predPC']
    Update(cur=tmp_pipereg, lst=pipereg)
    print 'RegF:', reg.regF
    print 'RegD:', reg.regD
    print 'RegE:', reg.regE
    print 'RegM:', reg.regM
    print 'RegW:', reg.regW
Code example #33
def main(args=None):
	parser = argparse.ArgumentParser(description='Simple training script.')
	parser.add_argument('--cls_id', help='class id', type=int)
	parser.add_argument('--version', help='model version', type=float)
	parser.add_argument('--gamma', help='gamma for the SoftL1Loss', type=float, default=9.0)
	parser.add_argument('--lr', help='lr for optimization', type=float, default=1e-4)
	parser.add_argument('--epoches', help='num of epoches for optimization', type=int, default=4)
	parser.add_argument('--resume_epoch', help='trained model for resume', type=int, default=0)
	parser.add_argument('--batch_size', help='batch size for optimization', type=int, default=10)
	parser.add_argument('--checkpoints', help='checkpoints path', type=str, default='voc_checkpoints')
	parser = parser.parse_args(args)

	cls_name = classes[parser.cls_id]
	parser.checkpoints = '_'.join([parser.checkpoints,cls_name])
	if not os.path.isdir(parser.checkpoints):
		os.mkdir(parser.checkpoints)
	print('will save checkpoints in '+parser.checkpoints)
	cls_dir = "../context_profile/voc_detection_{:s}_p10/"\
		.format(cls_name)
	batch_size = parser.batch_size
	print('[data prepare]....')
	dataloader_train = DataLoader(Fetch('train_benign', root_dir=cls_dir), batch_size=batch_size, num_workers=2, shuffle=True)

	print('[model prepare]....')
	use_gpu = torch.cuda.device_count()>0

	model = AutoEncoder(parser.gamma)
	if use_gpu:
		model = torch.nn.DataParallel(model).cuda()
	optimizer = torch.optim.Adam(model.parameters(), lr=parser.lr)
	scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, patience=2, verbose=True)
	if parser.resume_epoch > 0 :
		checkpoint_name = os.path.join(parser.checkpoints, 'model_{:1.1f}_epoch{:d}.pt'.format(parser.version, parser.resume_epoch))
		if not os.path.isfile(checkpoint_name):
			raise ValueError('No checkpoint file {:s}'.format(checkpoint_name))
		model.load_state_dict(torch.load(checkpoint_name))
		print('model loaded from {:s}'.format(checkpoint_name))

	print('[model training]...')
	loss_hist = []
	epoch_loss = []
	num_iter = len(dataloader_train)
	for epoch_num in range(parser.resume_epoch, parser.epoches):
		model.train()
		for iter_num, sample in enumerate(dataloader_train):
			if True:#try:
				optimizer.zero_grad()
				if use_gpu:
					data = sample['data'].cuda().float()
				else:
					data = sample['data'].float()
					
				loss = model(data).mean()
				if bool(loss==0):
					continue 
				loss.backward()
				torch.nn.utils.clip_grad_norm_(model.parameters(), 0.1)
				optimizer.step()
				epoch_loss.append(float(loss))
				loss_hist.append(float(loss))
				if iter_num % 30 == 0:
					print('Epoch {:d}/{:d} | Iteration: {:d}/{:d} | loss: {:1.5f}'.format(
						epoch_num+1, parser.epoches, iter_num+1, num_iter, float(loss)))
				if iter_num % 3000 == 0:
					scheduler.step(np.mean(epoch_loss))
					epoch_loss = []
		if epoch_num < 1:
			continue
		checkpoint_name = os.path.join(parser.checkpoints, 'model_{:1.1f}_epoch{:d}.pt'.format(parser.version, epoch_num+1))
		torch.save(model.state_dict(), checkpoint_name)
		print('Model saved as {:s}'.format(checkpoint_name))

	np.save('loss_hist.npy', loss_hist)
Code example #34
File: main.py Project: AugustineLe0501/Fetch_Demo
import rospy
import time
import math
import cv2
import tf

from sensor_msgs.msg import Image
from ar_track_alvar_msgs.msg import AlvarMarkers
from geometry_msgs.msg import PoseStamped
from tf.transformations import euler_from_quaternion, quaternion_from_euler
from fetch import Fetch

if ((__name__ == "__main__") and (not rospy.is_shutdown())):
    rospy.init_node("fetch_builder",anonymous=True)

    Fetch_Robot = Fetch()
    rospy.loginfo("Initialization")

    Fetch_Robot.Head.look_at(0.7,0,0.5,"base_link")
    rospy.loginfo("Till head")

    Fetch_Robot.Gripper.Open()
    rospy.loginfo("Gripper Open")

    Fetch_Robot.Arm.Tuck()
    rospy.loginfo("Tuck Arm")

    rospy.sleep(rospy.Duration(2))
    #Take images
    RGB_image = Fetch_Robot.GetRGBImage()
    rospy.loginfo("Get RGB image")
Code example #35
File: books_client.py Project: mew7wo/task_api
class BooksTask:
    ''' get books '''

    def __init__(self):
        self.__reset()
        self.__read_info()
        self._fetch = Fetch(username='******', pw='liumengchao')
        self._tasks_url = 'http://localhost:8080/id/books/'
        self._url = 'https://api.douban.com/v2/book/user/%s/collections?count=%d&start=%d'
        self._upload_url = 'http://localhost:8080/upload/'
        logging.basicConfig(filename='user_books_error.log', filemode='a+', level=logging.ERROR)

    def __del__(self):
        self.__save_info()

    def __reset(self):
        self._status = 'free'
        self._free_tasks = set()
        self._done_tasks = set()

    def __read_info(self):
        if os.path.exists('books_task_config.cfg'):
            with open('books_task_config.cfg', 'r') as f:
                cfg = json.loads(f.read())
                self._status = cfg.get('status')
                self._free_tasks = set(cfg.get('free_tasks'))
                self._done_tasks = set(cfg.get('done_tasks'))

    def __save_info(self):
        with open('books_task_config.cfg', 'w') as f:
            cfg = {}
            cfg['status'] = self._status
            cfg['free_tasks'] = list(self._free_tasks)
            cfg['done_tasks'] = list(self._done_tasks)
            f.write(json.dumps(cfg))

    def __get_tasks(self):
        if self._status == 'free':
            print 'getting task....'
            resp = requests.get(self._tasks_url)
            js = resp.json()
            for t in js.get('tasks'):
                self._free_tasks.add(t)
            self._status = 'running'


    def __do_tasks(self):
        with open('books.txt', 'a') as f:
            for t in self._free_tasks:
                if t not in self._done_tasks:
                    print 'fetch %s....' % t
                    books = self.__get_books(t)
                    obj = {'_id':t, 'books':books}
                    f.write(json.dumps(obj) + '\n')
                    self._done_tasks.add(t)
                     

    def __get_books(self, user):
        books = []
        count = 100
        for i in range(20):
            url = self._url % (user, count, i*count)
            content = self._fetch.get(url, sleeptime=6)
            js = json.loads(content.decode('utf-8', 'ignore'))
            books.extend(js.get('collections'))
            if (i+1)*count >= js.get('total'):
                break

        return books


    def __upload_tasks(self):
        tasks = {'type':'books', 'data':[]}
        with open('books.txt', 'r') as f:
            for line in f:
                obj = json.loads(line.rstrip('\n'))
                tasks['data'].append(obj)

        while True:
            print 'uploading task ....'
            data = json.dumps(tasks)
            headers = {'Content-type':'application/json; charset=utf8'}
            resp = requests.put(self._upload_url, data=data, headers=headers)
            js = resp.json()
            if js.get('code') == 200:
                self.__reset()
                os.remove('books.txt')
                break

    def run(self):
        while True:
            try:
                self.__get_tasks()
                self.__do_tasks()
                self.__upload_tasks()
            except KeyboardInterrupt:
                break
            except Exception, e:
                logging.error(repr(e))
Code example #36
def newsapi(topic):
    """Return newsapi json data according to topic.
    """
    fetcher = Fetch()
    data = fetcher.fetch_newsapi(topic)
    return jsonify(data)
Code example #37
def coinpaprika():
    """Return coinpaprika json data.
    """
    fetcher = Fetch()
    data = fetcher.fetch_coinpaprika()
    return jsonify(data)
Code example #38
File: server.py Project: housne/tucao
        data[i]['date'] = time.mktime(item['date'].timetuple())
    return jsonResponse(data=data, extra_data=[{'total': total}])

@app.route('/api/news/<int:id>')
def news(id):
    news = News(news_id=id)
    data = news.get()
    if data is None:
        return jsonResponse(type='404')
    return jsonResponse(data=data)

@app.route('/rss')
def rss():
    news = News()
    data = news.sort()
    response = make_response(render_template('rss.xml', data=data, site_url=site_url))
    response.headers['Content-Type'] = 'application/atom+xml; charset=utf-8'
    return response


@app.route('/', defaults={'path': ''})
@app.route('/<path:path>')
def catch_all(path):
    return render_template('index.html')


if __name__ == '__main__':
    fetch = Fetch()
    fetch.start()
    app.run()
Code example #39
File: setup.py Project: housne/tucao
	body = Column(String)
	title = Column(String)
	image_source = Column(String)
	image = Column(String)
	thumbnail = Column(String)
	date = Column(DateTime)


# def create_conf_file():
# 	access_key = input('qiniu access key')
# 	secrect_key = input('qiniu secrect key')
# 	bucket_name = input('qiniu bucket name')
# 	bucket_host = input('qiniu bucket host')
# 	site_url = input('site url include http')
# 	conf_file = open(os.path.dirname(__file__) + 'conf.py', 'w+')
# 	data = '''
# 	access_key="%s"\n
# 	secrect_key="%s"\n
# 	qiniu_bucket_name=%s\n
# 	qiniu_bucket_host=%s\n
# 	site_url=%s
# 	''' %(access_key, secrect_key, bucket_name, qiniu_bucket_host, site_url)
# 	conf_file.write(data)
# 	conf_file.close()


if __name__ == '__main__':
	#create_conf_file()
	Base.metadata.create_all(engine)
	fetch = Fetch()
	fetch.init_fetch()
Code example #40
File: followed_client.py Project: mew7wo/task_api
class FollowedTask:
    ''' get douban user followed'''
    def __init__(self):
        self.__reset()
        self.__read_info()
        self._fetch = Fetch(username='******', pw='liumengchao')
        self._tasks_url = 'http://localhost:8080/id/followed/'
        self._url = 'http://www.douban.com/people/%s/contacts'
        self._upload_url = 'http://localhost:8080/upload/'
        logging.basicConfig(filename='followed_error.log', filemode='a+', level=logging.ERROR)

    def __del__(self):
        self.__save_info()

    def __reset(self):
        self._status = 'free'
        self._free_tasks = set()
        self._done_tasks = set()

    def __read_info(self):
        if os.path.exists('user_followed_config.cfg'):
            with open('user_followed_config.cfg', 'r') as f:
                cfg = json.loads(f.read())
                self._status = cfg.get('status')
                self._free_tasks = set(cfg.get('free_tasks'))
                self._done_tasks = set(cfg.get('done_tasks'))

    def __save_info(self):
        with open('user_followed_config.cfg', 'w') as f:
            cfg = {}
            cfg['status'] = self._status
            cfg['free_tasks'] = list(self._free_tasks)
            cfg['done_tasks'] = list(self._done_tasks)
            f.write(json.dumps(cfg))

    def __get_followed(self, user):
        page = self._fetch.get(self._url % user, sleeptime=2.1)
        followed = user_followed_parser(page)
        return followed

    def __get_tasks(self):
        if self._status == 'free':
            print 'get tasks.....'
            resp = requests.get(self._tasks_url)
            js = resp.json() 
            self._free_tasks = js.get('tasks')    
            self._status = 'running'

    def __do_tasks(self):
        with open('followed.txt', 'a') as f:
            for t in self._free_tasks:
                if t not in self._done_tasks:
                    print 'fetching %s' % t
                    obj = {'_id':t}
                    obj['followed'] = self.__get_followed(t)
                    f.write(json.dumps(obj) + '\n')
                    self._done_tasks.add(t)

    def __upload_tasks(self):
        tasks = {'type':'followed', 'data':[]}
        with open('followed.txt', 'r') as f:
            for line in f:
                obj = json.loads(line.rstrip('\n'))        
                tasks['data'].append(obj)

        data = json.dumps(tasks)
        headers = {'Content-type':'application/json; charset=utf8'}
        while True:
            print 'uploading '
            resp = requests.put(self._upload_url, data=data, headers=headers)
            js = resp.json()
            if js.get('code') == 200:
                os.remove('followed.txt')
                self.__reset()
                break
            
    def run(self):
        while True:
            try:
                self.__get_tasks()
                self.__do_tasks()
                self.__upload_tasks()
            except KeyboardInterrupt:
                break
            except Exception, e:
                logging.error(repr(e))
Code example #41
import datetime as dt
from fetch import Fetch
from display import Display
import pprint

if __name__ == '__main__':

    # Performs queries:
    fetchy = Fetch()
    #fetchy.fetchTweets("prayforsyria", 1000, begindate=dt.date(2011,1,21), enddate=dt.date.today())

    # Defines words that we want to use in the visualization. All other words will be ignored:
    chosenWords = [
        "Syria", "people", "you", "all", "no", "we", "world", "will", "about",
        "innocent", "Allah", "paz", "just", "pray", "heart", "Syrian", "mundo",
        "children", "who", "one", "don't", "please", "help", "Pray", "more",
        "like", "apoyo", "oración", "#Aleppo", "going", "war", "God", "uno",
        "need", "prayers", "ayuno", "happening", "killed", "being", "peace",
        "sad", "NO", "know", "now", "because", "stop", "many", "everyone",
        "live", "unimos", "really", "lives", "orar", "much", "love",
        "suffering", "protect", "guerra", "morning", "even", "todos", "hope",
        "country", "#LLAP", "forget", "never", "over", "every", "still",
        "brothers", "think", "llamado", "ayunar", "human", "time", "feel",
        "praying", "keep", "bomb", "bombing", "kids", "Virgen", "today",
        "Assad", "better", "diligencia", "killing", "breaks", "trabajo",
        "video", "life", "firmemente", "rezar", "where", "lost"
        "kill", "pidiendo", "humanity", "deserve", "always", "gente", "little",
        "take", "safe", "end", "say", "sisters", "doesn't", "any", "dying",
        "dead", "after", "things", "good", "Trump", "some", "child", "needs",
        "heartbreaking", "other", "Dios", "care", "Syrians", "US", "personas",
        "news", "civilians", "não", "against", "inocentes", "unidos", "media",
Code example #42
#import sys
#sys.path.append('/home/ubuntu/projects/financial')
from fetch import Fetch

fetch = Fetch()
params = [('sc', 431)]

stocklist = fetch.fetch_stocks(params)
print stocklist
Code example #43
File: fetch_test.py Project: mew7wo/task_api
 def setUp(self):
     self.fetch = Fetch(username='******', pw='liumengchao')