Code Example #1
File: test_fullrun.py  Project: sriiora/tcf
 def test_02_power_on(self):
     # Power on the target through the tcf command line client and
     # check the command exits cleanly
     sp = commonl.subpython(
         self.srcdir + "/tcf --config-path : --url http://localhost:%d "
         "power-on %s"
         % (self.port, self.target))
     self.assertEqual(sp.join(), 0, msg=sp.output_str)
     logi("letting it run three seconds")
     time.sleep(3)
Code Example #2
File: test_fullrun.py  Project: intel/tcf
 def test_02_power_on(self):
     sp = commonl.subpython(
         self.srcdir + "/tcf --config-path : --url http://localhost:%d "
         "power-on %s"
         % (self.port, self.target))
     self.assertEqual(sp.join(), 0, msg=sp.output_str)
     logi("letting it run three seconds")
     time.sleep(3)
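Every snippet on this page calls `logi(...)` without showing its definition. A minimal sketch, assuming `logi` is simply a thin wrapper around the standard logging module (the wrapper body is an assumption, not taken from either project):

import logging

def logi(msg):
    # Assumed helper: emit an informational message via the root logger
    logging.info(msg)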
Code Example #3
File: episodes.py  Project: sina-cb/fardaastationapi
def create_database():
    """
    If this script is run directly, create all the tables necessary to run the
    application.
    """
    try:
        Episodes.create_table()
    except Exception as e:
        # Log the actual failure instead of silently swallowing it
        logi("Error creating tables: %s" % e)
    else:
        print("All tables created")
Code Example #4
    def get_update():
        timestamp = request.args.get('timestamp', '')

        if timestamp == '':
            logi('Default timestamp')
            timestamp = 0
        else:
            timestamp = int(timestamp)

        result = find_updates(timestamp)

        return jsonify(result)
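A hypothetical client call for this handler, assuming it is registered as a Flask route at /get_update on a local development server (the URL, port, and response shape are assumptions):

import requests

# Ask for everything published after the given Unix timestamp;
# omitting the parameter makes the handler fall back to 0 (all episodes)
resp = requests.get('http://localhost:5000/get_update',
                    params={'timestamp': '1500000000'})
print(resp.json())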
Code Example #5
def update_episodes(create_db=False):
    # Get the last episodes posted in the webpage.
    radio_farda_base_url = "https://www.radiofarda.com/"
    fardaa_station_base_url = urljoin(radio_farda_base_url, "z/20317")
    page = requests.get(fardaa_station_base_url)
    soup = BeautifulSoup(page.content, 'html.parser')
    episodes = (soup.find_all(id='episodes')[0]
                .find_all(id='items')[0]
                .find_all('a', class_='img-wrap'))

    if create_db:
        episodes = read_from_file()

    # Download the corresponding pages and get the
    # information for each episode and add to the
    # database.
    timestamp = int(time.time())
    for base_uri in episodes:
        if not create_db:
            base_uri = base_uri['href']

        # Actually try to add the episode to MySQL
        try:
            episode_page = requests.get(urljoin(radio_farda_base_url,
                                                base_uri))
            episode_page = BeautifulSoup(episode_page.content, 'html.parser')

            image_uri = episode_page.find_all(
                'a', class_='html5PlayerImage')[0].find('img')['src']

            both_download_links = (
                episode_page.find_all('div', class_='media-download')[0]
                .find_all('ul', class_='subitems')[0]
                .find_all('li', 'subitem'))

            low_quality = both_download_links[0].find('a')['href']

            high_quality = ''
            if len(both_download_links) > 1:
                high_quality = both_download_links[1].find('a')['href']

            publish_date = episode_page.find_all(
                'div', 'published')[0].find('time').text

            timestamp_aired = episode_page.find_all(
                'div', 'published')[0].find('time')['datetime']
            # Strip the UTC offset, then convert the aired time to a Unix epoch
            k = timestamp_aired.rfind('+')
            timestamp_aired = timestamp_aired[:k]
            timestamp_aired = int(
                datetime.strptime(timestamp_aired,
                                  '%Y-%m-%dT%H:%M:%S').strftime('%s'))

            title = episode_page.find_all('div',
                                          'hdr-container')[0].find('h1').text

            import peewee
            try:
                episode = Episodes.insert(timestamp=timestamp,
                                          title=title.strip(),
                                          date=publish_date.strip(),
                                          low_quality=low_quality.strip(),
                                          high_quality=high_quality.strip(),
                                          image_uri=image_uri.strip(),
                                          timestamp_aired=timestamp_aired,
                                          base_uri=base_uri.strip())
                episode.execute()
            except peewee.IntegrityError as e:
                logi(e)
                logi('Duplicate entry found, updating the entry.')
                episode = Episodes.update(
                    timestamp=timestamp,
                    title=title.strip(),
                    date=publish_date.strip(),
                    low_quality=low_quality.strip(),
                    high_quality=high_quality.strip(),
                    image_uri=image_uri.strip(),
                    timestamp_aired=timestamp_aired).where(
                        Episodes.base_uri == base_uri.strip())
                episode.execute()
        except IndexError:
            logi("Index Error at: " + base_uri)
        else:
            logi("Fetched: " + base_uri)