Example #1
 def pythonvalue(self, value):
     if value.startswith('PT-'):
         value = value.replace('PT-', 'PT')
         result = isodate.parse_duration(value)
         # timedelta()'s first positional argument is days, so pass seconds explicitly
         return datetime.timedelta(seconds=-result.total_seconds())
     else:
         return isodate.parse_duration(value)
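For "PT…" values like the ones this converter handles, isodate.parse_duration returns a plain datetime.timedelta, so the negation can be done through total_seconds(). A minimal sketch of that branch, using a hypothetical input value:

import datetime
import isodate

value = "PT-5M"                                                   # hypothetical non-standard negative value
positive = isodate.parse_duration(value.replace("PT-", "PT"))     # timedelta(minutes=5)
negated = datetime.timedelta(seconds=-positive.total_seconds())   # timedelta(minutes=-5)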
Example #2
    def test_get_prep_value_is_isomorphic(self):

        duration = isodate.parse_duration('P1M')
        self.assertEqual('P1M', self.field.get_prep_value(duration))

        duration = isodate.parse_duration('P1.0M')
        self.assertEqual('P1.0M', self.field.get_prep_value(duration))
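The round trip works because month-bearing strings such as 'P1M' come back from isodate as isodate.Duration objects rather than datetime.timedelta (a month has no fixed length), and isodate can serialise them again. A small illustration:

import isodate

d = isodate.parse_duration('P1M')
print(type(d).__name__)               # Duration, not timedelta
print(isodate.duration_isoformat(d))  # P1M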
Example #3
 def check_epsilon(self):
     epsilon = self.get_epsilon()
     try:
         isodate.parse_duration(epsilon)
     except isodate.ISO8601Error:
         return False, 'The specified epsilon value "%s" does not conform to the ISO8601 format.' % epsilon
     return True, ''
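isodate.parse_duration raises isodate.ISO8601Error for strings that are not valid ISO 8601 durations, which is exactly what the check above relies on. For example:

import isodate

try:
    isodate.parse_duration("10 minutes")   # not ISO 8601
except isodate.ISO8601Error as err:
    print("invalid duration:", err)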
Example #4
def iso_duur_naar_seconden(string):
    """
    Vertaal een ISO tijdsduur naar seconden.
    Deze functie houdt rekening met negatieve duur
    (in tegenstelling tot isodate).
    """

    if len(string) > 0:
        if string[0] == '-':
            return isodate.parse_duration(string[1:]).seconds * -1

    return isodate.parse_duration(string).seconds
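A hedged usage sketch of the helper above (the inputs are hypothetical):

print(iso_duur_naar_seconden('-PT90S'))   # -90.0
print(iso_duur_naar_seconden('PT90S'))    #  90.0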
Example #5
def create_mathtestcase(dur1, dur2, resadd, ressub, resge):
    """
    Create a TestCase class for a specific test.

    This allows having a separate TestCase for each test tuple from the
    MATH_TEST_CASES list, so that a failed test won't stop other tests.
    """

    dur1 = parse_duration(dur1)
    dur2 = parse_duration(dur2)
    resadd = parse_duration(resadd)
    ressub = parse_duration(ressub)

    class TestMathDuration(unittest.TestCase):
        """
        A test case template to test the addition, subtraction and >
        operators for Duration objects.
        """

        def test_add(self):
            """
            Test operator + (__add__, __radd__)
            """
            self.assertEqual(dur1 + dur2, resadd)

        def test_sub(self):
            """
            Test operator - (__sub__, __rsub__)
            """
            self.assertEqual(dur1 - dur2, ressub)

        def test_ge(self):
            """
            Test operator > and <
            """

            def dogetest():
                """ Test greater than."""
                return dur1 > dur2

            def doletest():
                """ Test less than."""
                return dur1 < dur2

            if resge is None:
                self.assertRaises(TypeError, dogetest)
                self.assertRaises(TypeError, doletest)
            else:
                self.assertEqual(dogetest(), resge)
                self.assertEqual(doletest(), not resge)

    return unittest.TestLoader().loadTestsFromTestCase(TestMathDuration)
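A hypothetical test tuple and how the factory might be driven (the values below are illustrative, not taken from the real MATH_TEST_CASES list; unittest is assumed to be imported as in the snippet above):

# dur1, dur2, dur1 + dur2, dur1 - dur2, dur1 > dur2
suite = create_mathtestcase('PT1H30M', 'PT45M', 'PT2H15M', 'PT45M', True)
unittest.TextTestRunner().run(suite)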
Example #6
def scrape_recipes():
    for recipe in recipes:
        ## NAME ##
        name = recipe.find("meta",itemprop='name')
        if name:
            recipe_name = name["content"]
        else:
            name = recipe.find(itemprop='name')
            recipe_name = extract_text(name)

        print "recipe name %s"%recipe_name

        ## INGREDIENTS ##
        ingredients = recipe.find_all(itemprop='ingredients')
        recipe_ingredients = []
        for ingredient in ingredients:
           recipe_ingredients.append(extract_text(ingredient).replace("\n",''))

        ## INSTRUCTIONS ##
        instructions = recipe.find_all(itemprop='recipeInstructions')
        recipe_instructions = ""
        for instruction in instructions:
            recipe_instructions = recipe_instructions + extract_text(instruction) + "\n"
   
        ## TIME ##
        # initialise so the combined check below cannot hit an unbound name
        total_time = prep_time = cook_time = None
        # (tag, attribute) pairs to try; a dict literal cannot hold the 'time' key twice
        for content_type, key in [('meta', 'content'), ('time', 'content'), ('time', 'datetime')]:
            try:
                total_time = recipe.find(content_type, itemprop='totalTime')
                if total_time is not None:
                    total_time = isodate.parse_duration(total_time[key]).total_seconds()
                else:
                    prep_time = recipe.find(content_type, itemprop="prepTime")
                    if prep_time is not None:
                        prep_time = isodate.parse_duration(prep_time[key]).total_seconds()
                    cook_time = recipe.find(content_type, itemprop="cookTime")
                    if cook_time is not None:
                        cook_time = isodate.parse_duration(cook_time[key]).total_seconds()

                if total_time or (prep_time and cook_time):
                    total_time = int(prep_time) + int(cook_time) if total_time is None else int(total_time)
                    break
            except Exception:
                continue

        if total_time:
            quick_meal = True if total_time <= int(quick_meal_minutes*60) else False
        else:
            quick_meal = False

        return (recipe_name,recipe_ingredients,recipe_instructions,quick_meal)
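Schema.org recipe times (totalTime, prepTime, cookTime) are ISO 8601 durations, so the conversion to seconds amounts to something like the sketch below (the 30-minute cutoff is an assumption):

import isodate

total_time = isodate.parse_duration("PT1H30M").total_seconds()   # 5400.0
quick_meal = total_time <= 30 * 60                               # False for a 90-minute recipe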
Example #7
    def create_pleblist_song_info(youtube_id):
        if PleblistManager.youtube is None:
            log.warning("youtube was not initialized")
            return False

        try:
            video_response = (
                PleblistManager.youtube.videos().list(id=str(youtube_id), part="snippet,contentDetails").execute()
            )
        except HttpError as e:
            log.exception("???")
            log.info(e.content)
            log.info(e.resp)
            log.info(e.uri)
            return False  # video_response is undefined past this point

        log.debug(video_response)

        if len(video_response.get("items", [])) == 0:
            log.warning("FeelsBadMan")
            return False

        video = video_response["items"][0]

        title = video["snippet"]["title"]
        duration = int(isodate.parse_duration(video["contentDetails"]["duration"]).total_seconds())
        default_thumbnail = video["snippet"]["thumbnails"]["default"]["url"]

        return PleblistSongInfo(youtube_id, title, duration, default_thumbnail)
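YouTube's contentDetails.duration field is an ISO 8601 duration string such as "PT4M13S", which is why a single parse_duration call plus total_seconds() is enough here:

import isodate

int(isodate.parse_duration("PT4M13S").total_seconds())   # 253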
Example #8
def call(salty_inst, c_msg, balancer, **kwargs):
    video_ids = re.findall(regexes.YOUTUBE_URL, c_msg["message"])
    if not video_ids:
        return False, "No video ids"
    seen_ids = set()
    seen_add = seen_ids.add
    video_ids = [x for x in video_ids if not (x in seen_ids or seen_add(x))]
    parts = ["snippet", "statistics", "contentDetails"]
    final_list = []
    success, response = salty_inst.youtube_api.get_videos(video_ids, parts, **kwargs)
    if not success:
        return False, \
            "Error retrieving info from youtube API ({0})".format(response.status_code)

    if len(response["items"]) == 0:
        return False, "No valid ID's found."

    for i in response["items"]:
        final_list.append("[{0}] {1} uploaded by {2}. Views: {3}".format(
            time_formatter.format_time(isodate.parse_duration(i["contentDetails"]["duration"]).seconds),
            i["snippet"]["title"],
            i["snippet"]["channelTitle"],
            i["statistics"]["viewCount"]
        ))
    return True, " | ".join(final_list)
Example #9
    def to_message(video):
        """
        Converts the JSON returned from the YouTube API to a Video message.

        :param video: JSON data of a video.
        :return: Video message data object.
        """
        videoMessage = Video()

        # IDs appear in various structures in the YouTube JSON data.
        try:
            id = video['id']['videoId']
        except (KeyError, TypeError):
            id = video['id']

        videoMessage.id = id
        videoMessage.title = video['snippet']['title']
        videoMessage.url = 'https://www.youtube.com/watch?v=%s' % id
        videoMessage.channel_id = video['snippet']['channelId']
        videoMessage.published_at = video['snippet']['publishedAt']
        videoMessage.live_broadcast_content = video['snippet']['liveBroadcastContent']
        videoMessage.channel_title = video['snippet']['channelTitle']
        videoMessage.description = video['snippet']['description']

        thumbnail = ThumbnailMessage()
        thumbnail.default = video['snippet']['thumbnails']['default']['url']
        thumbnail.high = video['snippet']['thumbnails']['high']['url']
        thumbnail.medium = video['snippet']['thumbnails']['medium']['url']
        videoMessage.thumbnail = thumbnail

        if 'contentDetails' in video:
            videoMessage.duration_ISO = video['contentDetails']['duration']
            videoMessage.duration = int(isodate.parse_duration(video['contentDetails']['duration']).total_seconds())

        return videoMessage
Example #10
 def _step_loop(self, step_xml, tasklists_env, var_env={}):
     num_repeat_str = step_xml.get("repeat")
     if num_repeat_str is not None:
         try:
             num_repeat = int(num_repeat_str)
         except ValueError:
             logging.error("counted loop has malformed attribute iterations (%s), skipping", num_repeat_str)
             return
         self.schedule_loop_counted(step_xml, tasklists_env, num_repeat, var_env)
         return
     duration = step_xml.get("duration")
     if duration is not None:
         deadline = time.time() + isodate.parse_duration(duration).total_seconds()
         self.schedule_loop_until(step_xml, tasklists_env, deadline, var_env)
         return
     listing = step_xml.get("list")
     listParam = step_xml.get("param")
     if listing is not None and listParam is not None:
         self.schedule_loop_listing(step_xml, tasklists_env, listing, listParam, var_env)
         return
     if listing is None and listParam is not None:
         raise Exception("missing list definition")
     if listParam is None and listing is not None:
         raise Exception("missing parameter definition")
     until = step_xml.get("until")
     if until is not None:
         deadline = parse(until)
         deadline = time.mktime(deadline.timetuple())
         self.schedule_loop_until(step_xml, tasklists_env, deadline, var_env)
         return
     raise Exception("not implemented")
Example #11
        def create_invite(self):
            '''
            ``POST /invitation``

            creates a new invitation

            Args:

                subject: the title of the invitation
                length: the duration of the meeting to plan

            Returns:

                uuid of the event (Code: 201)
            '''
            try:
                subject = request.args['subject']
                length = parse_duration(request.args['length'])
            except KeyError as err:
                raise ParseError('Missing {} parameter for the method.'.format(err.args[0])) from err

            except ISO8601Error as err:
                raise MisformattedError('Wrong date format. It shall be ISO8601 compliant.')

            return calenvite.create_invite(subject, length), 201
Example #12
File: views.py Project: amtopel/epi
def render_feed(request, playlist_data, channel_data):
    """Render the RSS feed from the playlist data,
    channel data, and desired podcast type (audio or video)."""

    video_ids = [item['snippet']['resourceId']['videoId'] for item in playlist_data['items']]
    videos_data = yt_api_call('videos', 'snippet,contentDetails', 'id', ','.join(video_ids))

    for item in videos_data['items']:

        #Reformat publication dates for RSS (RFC 2822)
        date_ISO = item['snippet']['publishedAt']
        parsed = parse_datetime(date_ISO)
        date_RFC = email.utils.format_datetime(parsed)
        item['snippet']['publishedAt'] = date_RFC

        #Reformat file duration for iTunes
        duration_iso = item['contentDetails']['duration']
        parsed = isodate.parse_duration(duration_iso)
        item['contentDetails']['duration'] = str(parsed)

    #Add channel data to video data to make single context
    videos_data['channel_data'] = channel_data

    #Add media type
    podcast_type = 'audio' if 'a' in request.GET else 'video'
    videos_data['podcast_type'] = podcast_type

    #Add media extension
    videos_data['media_extension'] = 'm4a' if podcast_type == 'audio' else 'mp4'

    return render(request, 'epi/feed.xml', videos_data)
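str() of the parsed timedelta gives the H:MM:SS form that iTunes accepts for episode durations, e.g.:

import isodate

str(isodate.parse_duration("PT1H5M20S"))   # '1:05:20'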
Example #13
    def parse_duration(self, tstring):
        tdelta = isodate.parse_duration(tstring)
        d = {}
        fmt = []
        if tdelta.days == 1:
            fmt.append("{days} day")
            d["days"] = tdelta.days
        if tdelta.days > 1:
            fmt.append("{days} days")
            d["days"] = tdelta.days

        hours, rem = divmod(tdelta.seconds, 3600)
        if hours == 1:
            fmt.append("{hours} hour")
            d["hours"] = hours
        if hours > 1:
            fmt.append("{hours} hours")
            d["hours"] = hours

        minutes, _ = divmod(rem, 60)
        if minutes == 1:
            fmt.append("{minutes} minute")
            d["minutes"] = minutes
        if minutes > 1:
            fmt.append("{minutes} minutes")
            d["minutes"] = minutes

        fmt = ', '.join(fmt)
        return fmt.format(**d)
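A hedged usage example, assuming the method above lives on some helper object named formatter (a hypothetical name):

formatter.parse_duration("P2DT3H5M")   # '2 days, 3 hours, 5 minutes'
formatter.parse_duration("PT1M")       # '1 minute'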
Example #14
    def __init__(self, url):
        if youtube_key is None: raise RuntimeError("youtube_key is not configured")
        id = re_youtube.search(url).group(1)
        data = json.load(urllib2.urlopen(youtube_api_url.format(id=id, key=youtube_key)))["items"][0]
        title = data["snippet"]["title"]
        try: description = data["snippet"]["localized"]["description"]
        except: description = ""
        try:
            likes = data["statistics"]["likeCount"]
            dislikes = data["statistics"]["dislikeCount"]
            rating = num_to_km(int(likes)) + "/" + num_to_km(int(dislikes))
        except Exception as e:
            rating = u"—"
        viewcount = int(data["statistics"].get("viewCount", 0))
        viewcount = num_to_km(viewcount)
        try:
            duration = data["contentDetails"]["duration"]
            duration = isodate.parse_duration(duration)
            duration = u"∞" if duration == datetime.timedelta(0) else s_to_ms(duration.total_seconds())
        except Exception as e:
            duration = u"∞"


        date = datetime.datetime.strptime(data["snippet"]["publishedAt"], "%Y-%m-%dT%H:%M:%S.000Z")
        date = date_to_readabledate(date)

        self.shortargs = (title, duration, rating, viewcount, date[0], date[1])
        if description: self.longargs = (title, duration, rating, viewcount, date[0], date[1], clean(description))
        else: self.longtemplate, self.longargs = self.shorttemplate, self.shortargs
Example #15
    def create_or_update(self, channel, data):
        '''
        Creates or updates a `Video` object with the data given, for the
        channel given.
        '''
        # Fetch video details, if it exists.
        duration = parse_duration(data['contentDetails']['duration'])

        return Video.objects.update_or_create(
            youtubeid=data['id'],
            defaults={
                'uploader': channel,
                'title': data['snippet']['title'],
                'category_id': data['snippet']['categoryId'],
                'description': data['snippet']['description'],
                'duration': duration.total_seconds(),
                'view_count': data.get('statistics', {}).get('viewCount'),
                'favorite_count': data.get(
                    'statistics', {}).get('favoriteCount'),
                'uploaded': dateutil.parser.parse(
                    data['snippet']['publishedAt']),
                'updated': dateutil.parser.parse(
                    data['snippet']['publishedAt']),
            },
        )[0]
Example #16
def await_resources(ec2, resource_function, resource_type, state_field, timeout, delay):
    log = ec2_log
    duration = None
    end = None
    states = {}

    if timeout:
        duration = isodate.parse_duration(timeout)
        end = datetime.now() + duration

    while True:
        states = {}
        resources = resource_function()
        for resource in resources:
            states.setdefault(getattr(resource, state_field), []).append(resource)

        # TODO: this should allow arbitrary resources via respective target state(s).
        if not "pending" in states:
            log.info("... {0} transitioned ({1})".format(resource_type, format_states(states)))
            break

        if end and datetime.now() > end:
            message = "FAILED to transition all {0} after {2} ({1})!".format(resource_type, format_states(states),
                                                                             isodate.duration_isoformat(duration))
            log.info("... " + message)
            raise bc.BotocrossAwaitTimeoutError(message)

        log.info("... {0} still transitioning ({1}) ...".format(resource_type, format_states(states)))
        time.sleep(delay)

    return states
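The timeout argument is expected to be an ISO 8601 duration; adding the parsed timedelta to datetime.now() yields the cut-off, roughly:

from datetime import datetime
import isodate

end = datetime.now() + isodate.parse_duration("PT5M")   # e.g. timeout="PT5M" -> five minutes from now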
Example #17
    def test_converts_given_relativedelta_to_a_duration(self):
        utils.convert_relativedelta_to_duration = Mock(return_value=isodate.parse_duration('P1M'))
        delta = relativedelta(months=1)
        self.field.get_prep_value(delta)

        self.assertTrue(utils.convert_relativedelta_to_duration.called)
        self.assertTrue(utils.convert_relativedelta_to_duration.called_with(delta))
Example #18
    def update_tracks(self):
        self.logger.info('Checking tracks for radio %s', self.radio_id)

        session = self.database.get_session(self.session_id)
        tracks = session['playlists'][self.playlist_name]['tracks']
        # Only allow youtube tracks for now
        tracks = [track for track in tracks if track['link'].startswith('youtube:')]

        # Have the tracks changed?
        if [t['link'] for t in self.tracks] != [t['link'] for t in tracks]:
            # Update stats
            yt_api_key = self.config.get('sources', 'youtube_api_key')
            sc_api_key = self.config.get('sources', 'soundcloud_api_key')

            for track in tracks:
                url = YOUTUBE_STATS_URL.format(api_key=yt_api_key, id=track['link'][8:])
                response = yield get_request(url)
                response_dict = response.json
                track['duration'] = int(isodate.parse_duration(response_dict['items'][0]['contentDetails']['duration']).total_seconds())
                track['musicinfo'] = track.get('musicinfo', {})
                track['musicinfo']['playback_count'] = response_dict['items'][0]['statistics']['viewCount']
                track['musicinfo']['comment_count'] = response_dict['items'][0]['statistics']['commentCount']

            # Update tracks
            self.tracks = tracks
            self.start_time = int(time.time())

            returnValue(True)
        returnValue(False)
Example #19
    def create_pleblist_song_info(youtube_id):
        import isodate
        from apiclient.errors import HttpError

        if PleblistManager.youtube is None:
            log.warning('youtube was not initialized')
            return False

        try:
            video_response = PleblistManager.youtube.videos().list(
                    id=str(youtube_id),
                    part='snippet,contentDetails'
                    ).execute()
        except HttpError as e:
            log.exception('???')
            log.info(e.content)
            log.info(e.resp)
            log.info(e.uri)
            return False  # video_response is undefined past this point

        if len(video_response.get('items', [])) == 0:
            log.warning('Got no valid responses for {}'.format(youtube_id))
            return False

        video = video_response['items'][0]

        title = video['snippet']['title']
        duration = int(isodate.parse_duration(video['contentDetails']['duration']).total_seconds())
        default_thumbnail = video['snippet']['thumbnails']['default']['url']

        return PleblistSongInfo(
                youtube_id,
                title,
                duration,
                default_thumbnail,
                )
Example #20
	def post(self):
		url = self.get_argument('url')
		match = re.match('^https?://www.youtube.com/watch\?v=([^?&]*)', url)
		error = ''

		if match:
			id = match.group(1)

			if not any(x['id'] == id for x in queue):
				movie = api.videos().list(part = 'snippet,contentDetails', id = id).execute()

				if len(movie['items']):
					movie = movie['items'][0]
					title = movie['snippet']['title']
					duration = str(isodate.parse_duration(movie['contentDetails']['duration']))

					queue.append({'id': id, 'title': title, 'duration': duration,})
					broadcast()
				else:
					error = 'Not found'
			else:
				error = 'The movie is already in queue'
		else:
			error = 'Invalid URL'

		return self.render(os.path.join('templates', 'index.html'), error = error)
Example #21
 def populate(self, day, sleepless, begin, end, amount, alone, place):
     self.day = isodate.parse_date(day)
     self.alone = alone
     self.place = place
     self.sleepless = sleepless
     if self.sleepless:
         self.to_bed = None
         self.to_rise = None
         self.amount = isodate.parse_duration("PT0H0M")
     else:
         self.to_bed = isodate.parse_datetime(begin)
         self.to_rise = isodate.parse_datetime(end)
         if amount == "":
             self.amount = self.to_rise - self.to_bed
         else:
             self.amount = isodate.parse_duration(amount)
Example #22
	def connectionLost(self, reason):
		if environ.get("INSIDE_NOSETEST"):
			print "connectionLost: %d %s" % (self.response.code, self.response.phrase)
			print "Headers: %s" % repr(self.response.headers)
			print self.body

		if test_hook_exception is not None:
			self.finished.errback(VideoError("Test hook error", test_hook_exception))
			return

		try:
			response = json.loads(self.body)
			video_data = response["items"][0]

			if not video_data["status"]["embeddable"]:
				self.finished.errback(VideoError("Cannot add that video, embedding is disabled."))
			elif video_data["status"]["privacyStatus"] == "private":
				self.finished.errback(VideoError("Cannot add that video, it is private."))
			elif "regionRestriction" in video_data["contentDetails"]:
				self.finished.errback(VideoError("Cannot add that video, it has regional restrictions."))
			else:
				self.video_info.title = video_data["snippet"]["title"]
				self.video_info.duration = parse_duration(video_data["contentDetails"]["duration"]).seconds

				self.finished.callback(self.video_info)
		except (ValueError, KeyError, IndexError) as e:
			self.finished.errback(VideoError("Unable to find youtube video info.", e))
Example #23
def get_video_description(video_id):
    global time_last_request
    time_elapsed = time.time() - time_last_request
    if time_elapsed > 10:

        time_last_request = time.time()
    else:
        #return "This looks like a YouTube video. However, the YT api have been called too much, I'm sorry I won't be able to fetch details for you."
        return None
    json = requests.get(api_url.format(video_id, dev_key)).json()

    if json.get('error'):
        if json['error']['code'] == 403:
            return err_no_api
        else:
            return

    data = json['items']
    snippet = data[0]['snippet']
    statistics = data[0]['statistics']
    content_details = data[0]['contentDetails']

    out = '\x02{}\x02'.format(snippet['title'])

    if not content_details.get('duration'):
        return out

    length = isodate.parse_duration(content_details['duration'])
    out += ' - length \x02{}\x02'.format(timeformat.format_time(int(length.total_seconds()), simple=True))
    total_votes = float(statistics['likeCount']) + float(statistics['dislikeCount'])

    if total_votes != 0:
        # format
        likes = pluralize(int(statistics['likeCount']), "like")
        dislikes = pluralize(int(statistics['dislikeCount']), "dislike")

        percent = 100 * float(statistics['likeCount']) / total_votes
        likes = parse("$(dark_green)" + likes + "$(clear)")
        dislikes = parse("$(dark_red)" + dislikes + "$(clear)")
        out += ' - {}, {} (\x02{:.1f}\x02%)'.format(likes,
                                                    dislikes, percent)

    if 'viewCount' in statistics:
        views = int(statistics['viewCount'])
        out += ' - \x02{:,}\x02 view{}'.format(views, "s"[views == 1:])

    uploader = snippet['channelTitle']

    upload_time = time.strptime(snippet['publishedAt'], "%Y-%m-%dT%H:%M:%S.000Z")
    out += ' - \x02{}\x02 on \x02{}\x02'.format(uploader,
                                                time.strftime("%Y.%m.%d", upload_time))

    if 'contentRating' in content_details:
        out += ' - \x034NSFW\x02'

    # return re.sub(
    #		r'(?i)\b((?:https?://|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/)(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))+(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:\'".,<>?«»“”‘’]))',
    #		'[URL]', out)

    return out.replace("youtu", "you tu") #nup. No spam please
Example #24
def update_matrix(filepath):
    with open(filepath, 'r') as filedata:
        for line in filedata:
            video = json.loads(line.rstrip())
            if video['insights']['dailyWatch'] == 'N':
                continue
            duration = isodate.parse_duration(video['contentDetails']['duration']).seconds
            # for videos shorter than 1 hour
            if duration < 3600:
                published_at = video['snippet']['publishedAt'][:10]
                if published_at[:4] == '2016':
                    start_date = video['insights']['startDate']
                    time_diff = (
                    datetime(*map(int, start_date.split('-'))) - datetime(*map(int, published_at.split('-')))).days
                    days = read_as_int_array(video['insights']['days']) + time_diff
                    views = read_as_int_array(video['insights']['dailyView'])
                    watches = read_as_float_array(video['insights']['dailyWatch'])
                    watch_percent = safe_div(watches * 60, views * duration)
                    for idx, age in enumerate(days):
                        # for videos before age 400 days
                        if 0 <= age < 400:
                            # age idx (floor division keeps these usable as integer bucket indices)
                            age_idx = age // 10
                            # duration idx
                            duration_idx = duration // 60
                            if watch_percent[idx] <= 1:
                                wp_value = watch_percent[idx]
                            else:
                                wp_value = 1
                            if duration_idx not in age_duration_matrix[age_idx].keys():
                                age_duration_matrix[age_idx][duration_idx] = []
                            age_duration_matrix[age_idx][duration_idx].append(wp_value)
Example #25
    def create_or_update(self, channel, data):
        # Fetch video details, if it exists.
        youtubeid = data['id']
        video = Video.objects.filter(youtubeid=youtubeid).first()
        duration = parse_duration(data['contentDetails']['duration'])

        if not video:
            # Create the video.
            video = Video.objects.create(
                youtubeid=youtubeid,
                uploader=channel,
                title=data['snippet']['title'],
                category_id=data['snippet']['categoryId'],
                description=data['snippet']['description'],
                duration=duration.total_seconds(),
                view_count=data['statistics']['viewCount'],
                favorite_count=data['statistics']['favoriteCount'],
                comment_count=data['statistics']['commentCount'],
                uploaded=dateutil.parser.parse(data['snippet']['publishedAt']),
                updated=dateutil.parser.parse(data['snippet']['publishedAt']),
            )
        else:
            video.title = data['snippet']['title']
            video.description = data['snippet']['description']
            video.view_count = data['statistics']['viewCount']
            video.favorite_count = data['statistics']['favoriteCount']
            video.comment_count = data['statistics']['commentCount']
            video.updated = dateutil.parser.parse(
                data['snippet']['publishedAt'])
            video.save()

        return video
Example #26
    def _set_properties(self, properties, overwrite=False):
        resource_attrs = self.get_resource_attributes()
        for name, value in properties.items():
            name = self.from_camel_case(name)

            # if this attribute is already set, don't overwrite it
            if not overwrite:
                if name in self.writable_attrs and name in self.__dict__:
                    continue

            if name in resource_attrs:
                value = self._wrap_resource_attr(resource_attrs[name],
                    value)
                if hasattr(resource_attrs[name], '_set_parent_and_name'):
                    value._set_parent_and_name(self, name)
            elif isinstance(value, dict) and 'href' in value:
                # No idea what kind of resource it is, but let's load it
                # anyway.
                value = Resource(self._client, href=value['href'])
            elif name in ['created_at', 'modified_at']:
                value = parse(value)
            elif name in self.timedelta_attrs:
                value = parse_duration(value)

            self.__dict__[name] = value
Example #27
def generate_user(config, user):
    url = "https://www.soundcloud.com/" + user

    data = rssit.util.download(url)

    soup = bs4.BeautifulSoup(data, 'lxml')

    author = html.unescape(soup.find("meta", attrs={"property": "og:title"})["content"])

    if config["author_username"]:
        author = user

    description = html.unescape(soup.find("p", attrs={"itemprop": "description"}).text).strip()
    if len(description) <= 0:
        description = "%s's soundcloud" % user

    feed = {
        "title": author,
        "description": description,
        "url": url,
        "author": user,
        "entries": []
    }

    tracks = soup.findAll("article", attrs={"itemprop": "track"})
    for track in tracks:
        tracka = track.find("a", attrs={"itemprop": "url"})
        trackname = tracka.text
        trackurl = urllib.parse.urljoin(url, tracka["href"])
        date = parse(track.find("time").text)

        title = trackname
        duration_delta = isodate.parse_duration(track.find("meta", attrs={"itemprop": "duration"})["content"])
        duration_seconds = duration_delta.total_seconds()
        duration_text = "[%s:%s:%s]" % (
            str(int(duration_seconds / 3600)).zfill(2),
            str(int((duration_seconds % 3600) / 60)).zfill(2),
            str(int(duration_seconds % 60)).zfill(2)
        )

        content = "<p>%s <a href='%s'>%s</a> by <a href='%s'>%s</a></p>" % (
            duration_text,

            trackurl,
            trackname,

            url,
            author
        )

        feed["entries"].append({
            "url": trackurl,
            "title": title,
            "content": content,
            "author": user,
            "date": date,
        })

    return ("feed", feed)
Example #28
def parse_isoduration(iso_duration):
    """parse the given iso8601 duration string into a python timedelta object"""
    delta = None

    try:
        delta = isodate.parse_duration(iso_duration)
    except Exception as e:
        log.msg(str(e), level=log.WARNING)

    return delta
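A hedged usage of the helper above:

parse_isoduration("PT36H")    # datetime.timedelta(days=1, seconds=43200)
parse_isoduration("not-iso")  # logs a warning and returns None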
Example #29
 def extract_signals(self):
     """Extract eiEventSignals from the received eiEvent, populating the temporary EiEvent."""
     if not self.ei_event.eiEventSignals:
         raise OpenADRInterfaceException('At least one event signal is required.', OADR_BAD_SIGNAL)
     if not self.ei_event.eiEventSignals.eiEventSignal:
         raise OpenADRInterfaceException('At least one event signal is required.', OADR_BAD_SIGNAL)
     signals_dict = {s.signalID: self.extract_signal(s) for s in self.ei_event.eiEventSignals.eiEventSignal}
     self.event.signals = json.dumps(signals_dict)
     # Sum of all signal interval durations must equal the event duration.
     signals_duration = timedelta(seconds=0)
     for signal in self.ei_event.eiEventSignals.eiEventSignal:
         for interval in signal.intervals.interval:
             signals_duration += isodate.parse_duration(interval.duration.duration)
     event_duration = isodate.parse_duration(self.event.duration)
     if signals_duration != event_duration:
         err_msg = 'Total signal interval durations {} != event duration {}'.format(signals_duration, event_duration)
         raise OpenADRException(err_msg, OADR_BAD_SIGNAL)
Example #30
 def parse_isoduration(self, datetime):
     try:
         return int(parse_duration(datetime).total_seconds())
     except:
         try:
             return int(datetime)
         except:
             return None
Example #31
#!/usr/bin/env python3

import sys
from isodate import parse_duration

duration = parse_duration(sys.stdin.read())

hours, remainder = divmod(duration.total_seconds(), 3600)
minutes, seconds = divmod(remainder, 60)
if hours > 0:
    print('%dh %dm' % (hours, minutes))
elif minutes > 0:
    print('%dm' % (minutes))
else:
    print('%ds' % (seconds))
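The same arithmetic the script performs, shown for a hypothetical input of "PT1H5M30S" (the script would print "1h 5m"):

from isodate import parse_duration

hours, remainder = divmod(parse_duration("PT1H5M30S").total_seconds(), 3600)
minutes, seconds = divmod(remainder, 60)   # hours=1.0, minutes=5.0, seconds=30.0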
Example #32
 def test_xmlvalue(self):
     instance = builtins.Duration()
     value = isodate.parse_duration('P0Y1347M0D')
     assert instance.xmlvalue(value) == 'P1347M'
Example #33
def parse_video_duration(duration):
    """ Returns a datetime object for a video duration in ISO8601 format
    
        duration: the length of the video
    """
    return parse_duration(duration)
Example #34
 def is_iso8601_duration(self, to_validate: str) -> bool:
     try:
         return bool(isodate.parse_duration(to_validate))
     except Exception:
         return False
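One caveat with the bool() check above: a zero-length duration is falsy, so the valid string "PT0S" would be reported as invalid.

import isodate

bool(isodate.parse_duration("PT0S"))   # False, although "PT0S" is valid ISO 8601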
Example #35
 def pythonvalue(self, value):
     return isodate.parse_duration(value)
Example #36
    def fetch_youtube(self, address):
        username = None
        channel_id = None
        list_id = None

        if 'gdata.youtube.com' in address:
            try:
                username_groups = re.search(
                    'gdata.youtube.com/feeds/\w+/users/(\w+)/', address)
                if not username_groups:
                    return
                username = username_groups.group(1)
            except IndexError:
                return
        elif 'youtube.com/feeds/videos.xml?user=' in address:
            try:
                username = urlparse.parse_qs(
                    urlparse.urlparse(address).query)['user'][0]
            except IndexError:
                return
        elif 'youtube.com/feeds/videos.xml?channel_id=' in address:
            try:
                channel_id = urlparse.parse_qs(
                    urlparse.urlparse(address).query)['channel_id'][0]
            except (IndexError, KeyError):
                return
        elif 'youtube.com/playlist' in address:
            try:
                list_id = urlparse.parse_qs(
                    urlparse.urlparse(address).query)['list'][0]
            except IndexError:
                return
        elif 'youtube.com/feeds/videos.xml?playlist_id' in address:
            try:
                list_id = urlparse.parse_qs(
                    urlparse.urlparse(address).query)['playlist_id'][0]
            except IndexError:
                return

        if channel_id:
            video_ids_xml = requests.get(
                "https://www.youtube.com/feeds/videos.xml?channel_id=%s" %
                channel_id,
                verify=False)
            channel_json = requests.get(
                "https://www.googleapis.com/youtube/v3/channels?part=snippet&id=%s&key=%s"
                % (channel_id, settings.YOUTUBE_API_KEY))
            channel = json.decode(channel_json.content)
            try:
                username = channel['items'][0]['snippet']['title']
                description = channel['items'][0]['snippet']['description']
            except (IndexError, KeyError):
                return
        elif list_id:
            playlist_json = requests.get(
                "https://www.googleapis.com/youtube/v3/playlists?part=snippet&id=%s&key=%s"
                % (list_id, settings.YOUTUBE_API_KEY))
            playlist = json.decode(playlist_json.content)
            try:
                username = playlist['items'][0]['snippet']['title']
                description = playlist['items'][0]['snippet']['description']
            except (IndexError, KeyError):
                return
            channel_url = "https://www.youtube.com/playlist?list=%s" % list_id
        elif username:
            video_ids_xml = requests.get(
                "https://www.youtube.com/feeds/videos.xml?user=%s" % username,
                verify=False)
            description = "YouTube videos uploaded by %s" % username
        else:
            return

        if list_id:
            playlist_json = requests.get(
                "https://www.googleapis.com/youtube/v3/playlistItems?part=snippet&playlistId=%s&key=%s"
                % (list_id, settings.YOUTUBE_API_KEY))
            playlist = json.decode(playlist_json.content)
            try:
                video_ids = [
                    video['snippet']['resourceId']['videoId']
                    for video in playlist['items']
                ]
            except (IndexError, KeyError):
                return
        else:
            if video_ids_xml.status_code != 200:
                return
            video_ids_soup = BeautifulSoup(video_ids_xml.content)
            channel_url = video_ids_soup.find('author').find('uri').getText()
            video_ids = []
            for video_id in video_ids_soup.findAll('yt:videoid'):
                video_ids.append(video_id.getText())

        videos_json = requests.get(
            "https://www.googleapis.com/youtube/v3/videos?part=contentDetails%%2Csnippet&id=%s&key=%s"
            % (','.join(video_ids), settings.YOUTUBE_API_KEY))
        videos = json.decode(videos_json.content)
        if 'error' in videos:
            logging.debug(" ***> ~FRYoutube returned an error: ~FM~SB%s" %
                          (videos))
            return

        data = {}
        data['title'] = ("%s's YouTube Videos" %
                         username if 'Uploads' not in username else username)
        data['link'] = channel_url
        data['description'] = description
        data['lastBuildDate'] = datetime.datetime.utcnow()
        data[
            'generator'] = 'NewsBlur YouTube API v3 Decrapifier - %s' % settings.NEWSBLUR_URL
        data['docs'] = None
        data['feed_url'] = address
        rss = feedgenerator.Atom1Feed(**data)

        for video in videos['items']:
            thumbnail = video['snippet']['thumbnails'].get('maxres')
            if not thumbnail:
                thumbnail = video['snippet']['thumbnails'].get('high')
            if not thumbnail:
                thumbnail = video['snippet']['thumbnails'].get('medium')
            duration_sec = isodate.parse_duration(
                video['contentDetails']['duration']).seconds
            if duration_sec >= 3600:
                hours = (duration_sec / 3600)
                minutes = (duration_sec - (hours * 3600)) / 60
                seconds = duration_sec - (hours * 3600) - (minutes * 60)
                duration = "%s:%s:%s" % (hours, '{0:02d}'.format(minutes),
                                         '{0:02d}'.format(seconds))
            else:
                minutes = duration_sec / 60
                seconds = duration_sec - (minutes * 60)
                duration = "%s:%s" % ('{0:02d}'.format(minutes),
                                      '{0:02d}'.format(seconds))
            content = """<div class="NB-youtube-player"><iframe allowfullscreen="true" src="%s?iv_load_policy=3"></iframe></div>
                         <div class="NB-youtube-stats"><small>
                             <b>From:</b> <a href="%s">%s</a><br />
                             <b>Duration:</b> %s<br />
                         </small></div><hr>
                         <div class="NB-youtube-description">%s</div>
                         <img src="%s" style="display:none" />""" % (
                ("https://www.youtube.com/embed/" + video['id']),
                channel_url,
                username,
                duration,
                linkify(linebreaks(video['snippet']['description'])),
                thumbnail['url'] if thumbnail else "",
            )

            link = "http://www.youtube.com/watch?v=%s" % video['id']
            story_data = {
                'title': video['snippet']['title'],
                'link': link,
                'description': content,
                'author_name': username,
                'categories': [],
                'unique_id': "tag:youtube.com,2008:video:%s" % video['id'],
                'pubdate':
                dateutil.parser.parse(video['snippet']['publishedAt']),
            }
            rss.add_item(**story_data)

        return rss.writeString('utf-8')
Example #37
def youtime(text, reply):
    """<query> - Gets the total run time of the first YouTube search result for <query>."""
    if not dev_key:
        return "This command requires a Google Developers Console API key."

    try:
        request = requests.get(search_api_url,
                               params={
                                   "q": text,
                                   "key": dev_key,
                                   "type": "video"
                               })
        request.raise_for_status()
    except Exception:
        reply("Error performing search.")
        raise

    json = request.json()

    if json.get('error'):
        if json['error']['code'] == 403:
            return err_no_api
        else:
            return 'Error performing search.'

    if json['pageInfo']['totalResults'] == 0:
        return 'No results found.'

    video_id = json['items'][0]['id']['videoId']

    request = requests.get(api_url.format(video_id, dev_key))
    request.raise_for_status()

    json = request.json()

    if json.get('error'):
        return
    data = json['items']
    snippet = data[0]['snippet']
    content_details = data[0]['contentDetails']
    statistics = data[0]['statistics']

    if not content_details.get('duration'):
        return

    length = isodate.parse_duration(content_details['duration'])
    l_sec = int(length.total_seconds())
    views = int(statistics['viewCount'])
    total = int(l_sec * views)

    length_text = timeformat.format_time(l_sec, simple=True)
    total_text = timeformat.format_time(total, accuracy=8)

    return 'The video \x02{}\x02 has a length of {} and has been viewed {:,} times for ' \
           'a total run time of {}!'.format(snippet['title'], length_text, views,
                                            total_text)
Example #38
    def get(self, key: str, convert=True) -> typing.Any:
        key = key.lower()
        if key not in self.all_keys:
            raise InvalidConfigError(f'Configuration "{key}" is invalid.')
        if key not in self._cache:
            self._cache[key] = deepcopy(self.defaults[key])
        value = self._cache[key]

        if not convert:
            return value

        if key in self.colors:
            try:
                return int(value.lstrip("#"), base=16)
            except ValueError:
                logger.error("Invalid %s provided.", key)
            value = int(self.remove(key).lstrip("#"), base=16)

        elif key in self.time_deltas:
            if not isinstance(value, isodate.Duration):
                try:
                    value = isodate.parse_duration(value)
                except isodate.ISO8601Error:
                    logger.warning(
                        "The {account} age limit needs to be a "
                        'ISO-8601 duration formatted duration, not "%s".',
                        value,
                    )
                    value = self.remove(key)

        elif key in self.booleans:
            try:
                value = strtobool(value)
            except ValueError:
                value = self.remove(key)

        elif key in self.enums:
            if value is None:
                return None
            try:
                value = self.enums[key](value)
            except ValueError:
                logger.warning("Invalid %s %s.", key, value)
                value = self.remove(key)

        elif key in self.force_str:
            # Temporary: as we saved in int previously, leading to int32 overflow,
            #            this is transitioning IDs to strings
            new_value = {}
            changed = False
            for k, v in value.items():
                new_v = v
                if isinstance(v, list):
                    new_v = []
                    for n in v:
                        if n != -1 and not isinstance(n, str):
                            changed = True
                            n = str(n)
                        new_v.append(n)
                new_value[k] = new_v

            if changed:
                # transition the database as well
                self.set(key, new_value)

            value = new_value

        return value
Example #39
check = simulation_api.check_simulation_setup(project_id, simulation_id)
warnings = [entry for entry in check.entries if entry.severity == 'WARNING']
print(f'Simulation check warnings: {warnings}')
errors = [entry for entry in check.entries if entry.severity == 'ERROR']
if errors:
    raise Exception('Simulation check failed', check)

# Estimate simulation
try:
    estimation = simulation_api.estimate_simulation_setup(
        project_id, simulation_id)
    print(f'Simulation estimation: {estimation}')
    too_expensive = estimation.compute_resource.value > 10.0
    if too_expensive:
        raise Exception('Too expensive', estimation)
    max_runtime = isodate.parse_duration(
        estimation.duration.interval_max).total_seconds()
    max_runtime = max_runtime + 600  # 10 min buffer
except ApiException as ae:
    if ae.status == 422:
        max_runtime = 36000
        print(
            f'Simulation estimation not available, assuming max runtime of {max_runtime} seconds'
        )
    else:
        raise ae

# Create simulation run
simulation_run = SimulationRun(name='Run 1')
simulation_run = simulation_run_api.create_simulation_run(
    project_id, simulation_id, simulation_run)
run_id = simulation_run.run_id
Example #40
    async def process_modmail(self, message):
        """Processes messages sent to the bot."""
        sent_emoji, blocked_emoji = await self.retrieve_emoji()

        account_age = self.config.get('account_age')
        if account_age is None:
            account_age = isodate.duration.Duration()
        else:
            try:
                account_age = isodate.parse_duration(account_age)
            except isodate.ISO8601Error:
                logger.warning(
                    'The account age limit needs to be an '
                    'ISO-8601 formatted duration string '
                    'greater than 0 days, not "%s".', str(account_age))
                del self.config.cache['account_age']
                await self.config.update()
                account_age = isodate.duration.Duration()

        reason = self.blocked_users.get(str(message.author.id))
        if reason is None:
            reason = ''
        try:
            min_account_age = message.author.created_at + account_age
        except ValueError as e:
            logger.warning(e.args[0])
            del self.config.cache['account_age']
            await self.config.update()
            min_account_age = message.author.created_at

        if min_account_age > datetime.utcnow():
            # user account has not reached the required time
            reaction = blocked_emoji
            changed = False
            delta = human_timedelta(min_account_age)

            if str(message.author.id) not in self.blocked_users:
                new_reason = f'System Message: New Account. Required to wait for {delta}.'
                self.config.blocked[str(message.author.id)] = new_reason
                await self.config.update()
                changed = True

            if reason.startswith('System Message: New Account.') or changed:
                await message.channel.send(embed=discord.Embed(
                    title='Message not sent!',
                    description=f'You must wait for {delta} '
                    f'before you can contact {self.user.mention}.',
                    color=discord.Color.red()))

        elif str(message.author.id) in self.blocked_users:
            reaction = blocked_emoji
            if reason.startswith('System Message: New Account.'):
                # Met the age limit already
                reaction = sent_emoji
                del self.config.blocked[str(message.author.id)]
                await self.config.update()
            else:
                end_time = re.search(r'%(.+?)%$', reason)
                if end_time is not None:
                    after = (datetime.fromisoformat(end_time.group(1)) -
                             datetime.utcnow()).total_seconds()
                    if after <= 0:
                        # No longer blocked
                        reaction = sent_emoji
                        del self.config.blocked[str(message.author.id)]
                        await self.config.update()
        else:
            reaction = sent_emoji

        if reaction != 'disable':
            try:
                await message.add_reaction(reaction)
            except (discord.HTTPException, discord.InvalidArgument):
                pass

        if str(message.author.id) not in self.blocked_users:
            thread = await self.threads.find_or_create(message.author)
            await thread.send(message)
Example #41
    def __init__(self, instance_name, time, engine, update_callback, context,
                 params):
        super(Disruptive, self).__init__(instance_name, time, engine,
                                         update_callback, context, params)
        self.sensor_type = params["disruptive"].get("sensor_type", None)
        self.site_prefix = params["disruptive"].get("site_prefix", "Fridge ")
        if params["disruptive"].get("site_prefix", None) is not None:
            self.set_property(
                "site", self.site_prefix + str(Disruptive.site_count)
            )  # This mechanism is superseded now by Models and should be removed
        if self.sensor_type is None:
            if not Disruptive.odd_site:  # Alternate type
                self.sensor_type = "temperature"
            else:
                self.sensor_type = "proximity"
            # self.sensor_type = weighted_choice([("ccon",5), ("temperature",38), ("proximity",33), ("touch",2)])
        self.set_property(
            "sensorType",
            self.sensor_type)  # DT's official property for sensor type
        self.set_property(
            "device_type", "DT_" +
            self.sensor_type)  # In DP demos we tend to use this property

        if (self.sensor_type != "ccon"):
            engine.register_event_in(BATTERY_INTERVAL, self.tick_battery, self,
                                     self)
            if (params["disruptive"].get("send_network_status", False)):
                engine.register_event_in(NETWORK_INTERVAL, self.tick_network,
                                         self, self)
        if (self.sensor_type == "ccon"):
            engine.register_event_in(CELLULAR_INTERVAL, self.tick_cellular,
                                     self, self)
        if (self.sensor_type == "temperature"):
            self.nominal_temperature = params["disruptive"].get(
                "nominal_temp", DEFAULT_NOMINAL_TEMP_C)
            if isinstance(self.nominal_temperature, list):
                choice = random.randint(0, len(self.nominal_temperature) - 1)
                self.nominal_temperature = self.nominal_temperature[choice]
            if "nominal_temp" in params["disruptive"]:
                self.set_property("nominal_temp", self.nominal_temperature)
            self.set_temperature(self.nominal_temperature)
            self.temperature_deviation = params["disruptive"].get(
                "nominal_temp_deviation", DEFAULT_TEMP_DEVIATION)
            if isinstance(self.temperature_deviation, list):
                self.temperature_deviation = self.temperature_deviation[
                    choice]  # Matches temp choice above
            if "site_type" in params["disruptive"]:
                site_type = params["disruptive"]["site_type"][choice]
                self.set_property("site_type", site_type)
            engine.register_event_in(TEMPERATURE_INTERVAL,
                                     self.tick_temperature, self, self)
            self.having_cooling_failure = False
        if (self.sensor_type == "proximity"):
            self.set_property("objectPresent", "PRESENT")  # Door starts closed
            self.schedule_next_presence_event()

        self.cooling_mtbf = params["disruptive"].get("cooling_mtbf", None)
        if self.cooling_mtbf:
            self.cooling_mtbf = isodate.parse_duration(
                self.cooling_mtbf).total_seconds()
        self.cooling_ttf = isodate.parse_duration(params["disruptive"].get(
            "cooling_TTF", "P3D")).total_seconds()

        Disruptive.odd_site = not Disruptive.odd_site
        if not Disruptive.odd_site:
            Disruptive.site_count += 1
Example #42
 def objectify(cls, v, _):
     return parse_duration(v)
Example #43
def duration_format_checker(instance):
    if isinstance(instance, (datetime.timedelta, int, float)):
        return True
    if isinstance(instance, six.string_types):
        return isodate.parse_duration(instance)
Example #44
 def get_duration_seconds(self):
     duration = parse_duration(self.get_duration())
     return duration.total_seconds()
Example #45
def youtube_list(type, key, filter, keyword):

    client = get_authenticated_service()
    lstVideo = []
    lstComment = []
    nextPageToken = ''
    number = 0

    while nextPageToken is not None:
        if type == "channel_id":
            channel_response = client.search().list(
                channelId=key,
                part='id,snippet',
                order='viewCount',
                maxResults=50,
                pageToken=nextPageToken,
            ).execute()
        elif type == "channel_name":
            channel_response = client.channels().list(
                forUsername=key,
                part='id,snippet',
                order='viewCount',
                maxResults=50,
                pageToken=nextPageToken,
            ).execute()
        elif type == "keyword":
            channel_response = client.search().list(
                q=key,
                part='id,snippet',
                order='viewCount',
                maxResults=50,
                pageToken=nextPageToken,
            ).execute()
        else:
            print(
                "ERROR -- Please Enter argument Channel ID or Search Keyword")

        # print(json.dumps(channel_response,indent=2))

        if (channel_response['items']):

            if 'nextPageToken' not in channel_response:
                nextPageToken = None
            else:
                nextPageToken = channel_response['nextPageToken']

            for channel_result in channel_response.get('items', []):

                # print(channel_result)
                if (channel_result['id']['kind'] == 'youtube#video'):
                    number += 1
                    videoTitle = channel_result['snippet']['title']
                    videoTitle = videoTitle.replace("&quot;", "")
                    regexIgnore = re.compile(pattern=r'(?:' + keyword + ')')
                    videoTitle_keyword = regexIgnore.findall(videoTitle)

                    if (filter == "include" and videoTitle_keyword) or (
                            filter == "exclude" and not videoTitle_keyword):
                        # if 'ทาโร' in videoTitle:
                        videoId = channel_result['id']['videoId']
                        print(videoTitle)
                        # print(videoId)
                        videoList = videos_list_by_id(
                            client,
                            part='snippet,contentDetails,statistics',
                            id=videoId,
                            maxResults=50)
                        publishedTime = iso8601.parse_date(
                            channel_result['snippet']['publishedAt'])
                        publishedTime = str(publishedTime)
                        publishedTime = str(
                            datetime.strptime(publishedTime,
                                              "%Y-%m-%d %H:%M:%S+00:00"))
                        channelTitle = videoList['items'][0]['snippet'][
                            'channelTitle']
                        duration = isodate.parse_duration(
                            videoList['items'][0]['contentDetails']
                            ['duration'])

                        snippet = videoList['items'][0]['snippet']
                        if 'tags' in snippet:
                            tags = snippet['tags']
                            tags = '|'.join(tags)
                        else:
                            tags = 'NONE'

                        if 'dislikeCount' in videoList['items'][0][
                                'statistics']:
                            dislikeCount = videoList['items'][0]['statistics'][
                                'dislikeCount']
                        else:
                            dislikeCount = 0

                        if 'likeCount' in videoList['items'][0]['statistics']:
                            likeCount = videoList['items'][0]['statistics'][
                                'likeCount']
                        else:
                            likeCount = 0

                        if 'commentCount' in videoList['items'][0][
                                'statistics']:
                            commentCount = videoList['items'][0]['statistics'][
                                'commentCount']
                        else:
                            commentCount = 0

                        if 'favoriteCount' in videoList['items'][0][
                                'statistics']:
                            favoriteCount = videoList['items'][0][
                                'statistics']['favoriteCount']
                        else:
                            favoriteCount = 0

                        results_json_video = {
                            'ID':
                            videoId,
                            'URL':
                            'https://www.youtube.com/watch?v=' + videoId,
                            'message':
                            videoTitle,
                            'post_time':
                            publishedTime,
                            'definition':
                            videoList['items'][0]['contentDetails']
                            ['definition'],
                            'duration':
                            duration,
                            'liveBroadcastContent':
                            videoList['items'][0]['snippet']
                            ['liveBroadcastContent'],
                            'channel_name':
                            channelTitle,
                            'categoryId':
                            videoList['items'][0]['snippet']['categoryId'],
                            'viewCount':
                            videoList['items'][0]['statistics']['viewCount'],
                            'commentCount':
                            commentCount,
                            'dislikeCount':
                            dislikeCount,
                            'favoriteCount':
                            favoriteCount,
                            'likeCount':
                            likeCount,
                            'tags':
                            tags,
                            'data_from':
                            'Youtube',
                            'get_by':
                            'Channel'
                        }
                        lstVideo.append(results_json_video)
                        print(commentCount)
                        # if commentCount == '0':
                        #     print("IF")
                        if commentCount != '0':
                            # print("ELSE")
                            commentNextPageToken = ''
                            numberComment = 0
                            while commentNextPageToken is not None:
                                comment = comment_threads_list_by_video_id(
                                    client,
                                    part='snippet,replies',
                                    videoId=videoId,
                                    pageToken=commentNextPageToken)
                                if (comment['items']):

                                    if 'nextPageToken' not in comment:
                                        commentNextPageToken = None
                                    else:
                                        commentNextPageToken = comment[
                                            'nextPageToken']

                                    for comment_result in comment.get(
                                            'items', []):
                                        # print(json.dumps(comment_result,indent=2))
                                        commentText = comment_result[
                                            'snippet']['topLevelComment'][
                                                'snippet']['textDisplay']
                                        # commentText_with_KW = regEXComment(commentText)
                                        # if commentText_with_KW:
                                        commentId = comment_result['snippet'][
                                            'topLevelComment']['id']
                                        numberComment += 1
                                        commentText = remove_tags(
                                            str(commentText))
                                        updatedAt = iso8601.parse_date(
                                            comment_result['snippet']
                                            ['topLevelComment']['snippet']
                                            ['updatedAt'])
                                        updatedAt = str(updatedAt)
                                        updatedAt = str(
                                            datetime.strptime(
                                                updatedAt,
                                                "%Y-%m-%d %H:%M:%S+00:00"))
                                        if (commentText):
                                            results_json = {
                                                'ID':
                                                videoId,
                                                'URL':
                                                'https://www.youtube.com/watch?v='
                                                + videoId,
                                                'message':
                                                commentText,
                                                'message_id':
                                                commentId,
                                                'authorDisplayName':
                                                comment_result['snippet']
                                                ['topLevelComment']['snippet']
                                                ['authorDisplayName'],
                                                'authorChannelUrl':
                                                comment_result['snippet']
                                                ['topLevelComment']['snippet']
                                                ['authorChannelUrl'],
                                                'likeCount':
                                                comment_result['snippet']
                                                ['topLevelComment']['snippet']
                                                ['likeCount'],
                                                'comment_time':
                                                updatedAt,
                                                'data_from':
                                                'Youtube',
                                                'get_by':
                                                'Channel'
                                            }
                                            lstComment.append(results_json)

                                        else:
                                            break

                                else:
                                    break

        else:
            break
    # print(json.dumps(lstVideo,indent=2))
    # return [channelTitle,lstVideo,typeOption]
    return [channelTitle, lstVideo, lstComment]
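
YouTube's contentDetails.duration values are ISO 8601 strings such as "PT4M13S"; a small hedged check (standard library plus isodate only) of what the parse_duration call above yields for one:

import datetime
import isodate

assert isodate.parse_duration("PT4M13S") == datetime.timedelta(minutes=4, seconds=13)
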
Example #46
0
    async def process_modmail(self, message: discord.Message) -> None:
        """Processes messages sent to the bot."""
        sent_emoji, blocked_emoji = await self.retrieve_emoji()
        now = datetime.utcnow()

        account_age = self.config.get("account_age")
        guild_age = self.config.get("guild_age")
        if account_age is None:
            account_age = isodate.duration.Duration()
        else:
            try:
                account_age = isodate.parse_duration(account_age)
            except isodate.ISO8601Error:
                logger.warning(
                    "The account age limit needs to be a "
                    "ISO-8601 duration formatted duration string "
                    'greater than 0 days, not "%s".',
                    str(account_age),
                )
                del self.config.cache["account_age"]
                await self.config.update()
                account_age = isodate.duration.Duration()

        if guild_age is None:
            guild_age = isodate.duration.Duration()
        else:
            try:
                guild_age = isodate.parse_duration(guild_age)
            except isodate.ISO8601Error:
                logger.warning(
                    "The guild join age limit needs to be a "
                    "ISO-8601 duration formatted duration string "
                    'greater than 0 days, not "%s".',
                    str(guild_age),
                )
                del self.config.cache["guild_age"]
                await self.config.update()
                guild_age = isodate.duration.Duration()

        reason = self.blocked_users.get(str(message.author.id))
        if reason is None:
            reason = ""

        try:
            min_account_age = message.author.created_at + account_age
        except ValueError as exc:
            logger.warning(exc.args[0])
            del self.config.cache["account_age"]
            await self.config.update()
            min_account_age = now

        try:
            member = self.guild.get_member(message.author.id)
            if member:
                min_guild_age = member.joined_at + guild_age
            else:
                min_guild_age = now
        except ValueError as exc:
            logger.warning(exc.args[0])
            del self.config.cache["guild_age"]
            await self.config.update()
            min_guild_age = now

        if min_account_age > now:
            # User account has not reached the required time
            reaction = blocked_emoji
            changed = False
            delta = human_timedelta(min_account_age)

            if str(message.author.id) not in self.blocked_users:
                new_reason = (
                    f"System Message: New Account. Required to wait for {delta}."
                )
                self.config.blocked[str(message.author.id)] = new_reason
                await self.config.update()
                changed = True

            if reason.startswith("System Message: New Account.") or changed:
                await message.channel.send(embed=discord.Embed(
                    title="Message not sent!",
                    description=f"Your must wait for {delta} "
                    f"before you can contact {self.user.mention}.",
                    color=discord.Color.red(),
                ))

        elif min_guild_age > now:
            # User has not stayed in the guild for long enough
            reaction = blocked_emoji
            changed = False
            delta = human_timedelta(min_guild_age)

            if str(message.author.id) not in self.blocked_users:
                new_reason = (
                    f"System Message: Recently Joined. Required to wait for {delta}."
                )
                self.config.blocked[str(message.author.id)] = new_reason
                await self.config.update()
                changed = True

            if reason.startswith(
                    "System Message: Recently Joined.") or changed:
                await message.channel.send(embed=discord.Embed(
                    title="Message not sent!",
                    description=f"Your must wait for {delta} "
                    f"before you can contact {self.user.mention}.",
                    color=discord.Color.red(),
                ))

        elif str(message.author.id) in self.blocked_users:
            reaction = blocked_emoji
            if reason.startswith(
                    "System Message: New Account.") or reason.startswith(
                        "System Message: Recently Joined."):
                # Met the age limit already
                reaction = sent_emoji
                del self.config.blocked[str(message.author.id)]
                await self.config.update()
            else:
                end_time = re.search(r"%(.+?)%$", reason)
                if end_time is not None:
                    after = (datetime.fromisoformat(end_time.group(1)) -
                             now).total_seconds()
                    if after <= 0:
                        # No longer blocked
                        reaction = sent_emoji
                        del self.config.blocked[str(message.author.id)]
                        await self.config.update()
        else:
            reaction = sent_emoji

        if reaction != "disable":
            try:
                await message.add_reaction(reaction)
            except (discord.HTTPException, discord.InvalidArgument):
                pass

        if str(message.author.id) not in self.blocked_users:
            thread = await self.threads.find_or_create(message.author)
            await thread.send(message)
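
A minimal sketch of the age-limit arithmetic above (isodate and the standard library only; the config lookup and Discord objects are omitted, and the "P7D" value is just an illustrative stand-in): the parsed duration is added to the account's creation time to get the earliest allowed contact time.

from datetime import datetime
import isodate

created_at = datetime(2021, 1, 1)                # stand-in for message.author.created_at
account_age = isodate.parse_duration("P7D")      # e.g. a configured limit of seven days
assert created_at + account_age == datetime(2021, 1, 8)
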
Example #47
0
    def check_schedule(self):
        msgs = []
        schedule = self.get_schedule()

        if schedule is not None:
            if not CronSlices.is_valid(schedule):
                try:
                    repeat, start_time, interval = schedule.split(
                        "/")  # the parts have separate validators
                except ValueError:
                    return (
                        False,
                        ('The specified schedule "%s" is neither a valid cron schedule nor a valid'
                         " ISO 8601 schedule" % schedule),
                    )

                # an empty start time is not valid ISO8601 but Chronos accepts it: '' == current time
                if start_time == "":
                    msgs.append(
                        'The specified schedule "%s" does not contain a start time'
                        % schedule)
                else:
                    # Check if start time contains time zone information
                    try:
                        dt = isodate.parse_datetime(start_time)
                        if not hasattr(dt, "tzinfo"):
                            msgs.append(
                                'The specified start time "%s" must contain a time zone'
                                % start_time)
                    except isodate.ISO8601Error as exc:
                        msgs.append(
                            'The specified start time "%s" in schedule "%s" does '
                            "not conform to the ISO 8601 format:\n%s" %
                            (start_time, schedule, str(exc)))

                parsed_interval = None
                try:
                    # 'interval' and 'duration' are interchangeable terms
                    parsed_interval = isodate.parse_duration(interval)
                except isodate.ISO8601Error:
                    msgs.append('The specified interval "%s" in schedule "%s" '
                                "does not conform to the ISO 8601 format." %
                                (interval, schedule))

                # Don't allow schedules more frequent than every minute. We have
                # to be careful here, since the isodate library returns
                # different datatypes according to whether there is a
                # yearly/monthly period (and whether that year or month period is
                # 0). Unfortunately, the isodate library *is* okay with you
                # specifying fractional and negative periods. Chronos's parser
                # will barf at a fractional period, but be okay with a negative
                # one, so if someone does try to do something like "R1//P0.01M"
                # then the API request to upload the job will fail. TODO:
                # detect when someone is trying to add a fractional period?
                if (parsed_interval
                        and isinstance(parsed_interval, datetime.timedelta)
                        and parsed_interval < datetime.timedelta(seconds=60)):
                    msgs.append(
                        'Unsupported interval "%s": jobs must be run at an interval of > 60 seconds'
                        % interval)

                if not self._check_schedule_repeat_helper(repeat):
                    msgs.append('The specified repeat "%s" in schedule "%s" '
                                "does not conform to the ISO 8601 format." %
                                (repeat, schedule))

        return len(msgs) == 0, "\n".join(msgs)
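
A small illustration of the datatype caveat described in the comment above (hedged; assumes only isodate and the standard library): durations without a year or month component come back as datetime.timedelta, while month-bearing ones come back as isodate.Duration, which is why the 60-second floor is only applied to timedelta results.

import datetime
import isodate

assert isinstance(isodate.parse_duration("PT30S"), datetime.timedelta)   # no year/month part
assert isinstance(isodate.parse_duration("P1M"), isodate.Duration)       # month part -> Duration
assert isodate.parse_duration("PT30S") < datetime.timedelta(seconds=60)  # would trigger the check above
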
Example #48
0
def getDurationInSecs(time):
  duration = isodate.parse_duration(time)
  return int(duration.total_seconds())
Example #49
0
 def duration_from_period_and_units(period, retention_period_unit):
     iso8601_str = "P{}{}".format(period,
                                  retention_period_unit.iso8601_symbol)
     duration = isodate.parse_duration(iso8601_str)
     return duration
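
For example (a hypothetical illustration; RetentionUnit is only a stand-in for whatever object supplies iso8601_symbol in the original project), a period of 90 with a day unit builds "P90D" and parses to 90 days:

import collections
import datetime
import isodate

RetentionUnit = collections.namedtuple("RetentionUnit", "iso8601_symbol")  # hypothetical stand-in
iso8601_str = "P{}{}".format(90, RetentionUnit(iso8601_symbol="D").iso8601_symbol)  # -> "P90D"
assert isodate.parse_duration(iso8601_str) == datetime.timedelta(days=90)
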
Example #50
0
 def duration(duration):
     return parse_duration(duration)
Example #51
0
    def _get_initial_info(self, video_id, params):
        info = {}
        max_attempts = params.get('max_attempts')
        retry_timeout = params.get('retry_timeout')

        # TODO remove duplication - many similar methods
        json_data = self._attempt_fb_retrieve(
            self._VIDEO_PAGE_TAHOE_TEMPLATE.format(video_id),
            max_attempts,
            retry_timeout,
            True,
            headers=self._FB_HEADERS,
            data=self.data)
        # print(json_data)

        markup = multi_get(json_data, 'payload', 'video', 'markup', '__html')
        video_markup = ET.fromstring(markup)
        tags = [
            x.text for x in video_markup.findall('.//span[@class="_50f7"]')
        ]
        if len(tags) >= 2:
            info['title'] = tags[0]
            info['username'] = tags[1]
        else:
            video_page_url = self._VIDEO_URL_FORMAT.format(video_id)
            for attempt_number in attempts(max_attempts):
                try:
                    html = self._session_get(video_page_url).text
                    match = get_title_of_webpage(html)
                    if match:
                        title_info = match.split(' - ', 1)
                        if len(title_info) == 2:
                            info['username'] = title_info[0]
                            info['title'] = title_info[1]
                    break
                except RequestException as e:
                    self.retry(attempt_number, max_attempts, e, retry_timeout)

        instances = multi_get(json_data, 'jsmods', 'instances')

        video_data = {}
        for item in instances:
            if try_get(item, lambda x: x[1][0]) == 'VideoConfig':
                video_item = item[2][0]
                if video_item.get('video_id'):
                    video_data = video_item['videoData'][0]
                    # print(video_data)
                    break
        # print(video_data)
        if not video_data:
            raise Exception('unable to get video data')

        dash_manifest = video_data.get('dash_manifest')

        if dash_manifest:  # when not live, this returns
            dash_manifest_xml = ET.fromstring(dash_manifest)
            info['duration'] = isodate.parse_duration(
                dash_manifest_xml.attrib['mediaPresentationDuration']
            ).total_seconds()

        info['is_live'] = video_data['is_live_stream']
        return info
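
DASH manifests express mediaPresentationDuration in the same ISO 8601 form, often with fractional seconds; a hedged sketch of the conversion used above (the attribute value here is made up for illustration):

import isodate

media_presentation_duration = "PT1H1M1.5S"  # example value; real manifests carry this attribute
assert isodate.parse_duration(media_presentation_duration).total_seconds() == 3661.5
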
Example #52
0
 def get_child_iso_duration(self, element, childtag):
     s = self.get_child_text(element, childtag)
     if s is None:
         return -1
     return (isodate.parse_duration(s)).total_seconds()
Example #53
0
 def test_pythonvalue(self):
     instance = builtins.Duration()
     expected = isodate.parse_duration('P0Y1347M0D')
     value = 'P0Y1347M0D'
     assert instance.pythonvalue(value) == expected
Example #54
0
    async def search_(self, ctx, *args):
        """Search song by keyword and do start song selection"""

        # get keyword from args
        keyword = "".join([word + " " for word in args])

        print(ctx.guild.name, ctx.author.name, keyword)

        # check if input keyword is a url
        if 'www' in keyword or 'youtu' in keyword or 'http' in keyword:
            # handle url
            await self.handle_url(ctx, keyword)
            return

        # search video by keyword
        print(self.ayt.session)
        response = await self.ayt.search(q=keyword)
        search_result = []
        for item in response['items']:
            search_result.append(YoutubeVideo().parse(item))

        # build embed
        embed = discord.Embed(
            title='Song Selection | Reply the song number to continue',
            description='prefix: n> | search_limit: 7',
            color=discord.Colour(value=11735575).orange())

        # Converts search_result into a string
        song_list = "".join([
            "{}. **[{}]({})**\n".format(i + 1, video.title, video.url)
            for i, video in enumerate(search_result)
        ])

        # fill embed
        embed.add_field(name='search result for ' + keyword,
                        value=song_list,
                        inline=False)
        embed.set_thumbnail(url=search_result[0].thumbnails['high']['url'])
        embed.set_footer(
            text='Song selection | Type the entry number to continue')
        embedded_list = await ctx.send(embed=embed)

        # wait for author response
        request_channel = ctx.message.channel
        request_author = ctx.author

        def check(m):
            try:  # '/^*[0-9][0-9 ]*$/' -- accept only integer replies
                int(m.content)
                return m.channel == request_channel and m.author == request_author
            except ValueError:
                return False

        try:
            msg = await self.client.wait_for('message',
                                             check=check,
                                             timeout=10.0)
        except:
            # TIMEOUT ERROR EXCEPTION
            await embedded_list.delete()
            return

        # Check duration.
        choosen_video = search_result[int(msg.content) - 1]
        try:
            content_details = await self.ayt.get_video_detail(
                video_id=choosen_video.id)
        except Exception as e:
            await embedded_list.delete()
            await ctx.send(':x: | Cannot extract content details')
            return

        duration = parse_duration(
            content_details['items'][0]['contentDetails']['duration'])

        if duration.seconds > 900:
            await ctx.send(
                ':x: | Cannot play video with duration longer than 15 minutes.'
            )
            await embedded_list.delete()
            return

        duration = str(timedelta(seconds=duration.seconds))
        choosen_video.duration = duration
        await self.play(ctx=ctx, video=choosen_video)
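
Note that the check above uses timedelta.seconds, which only counts the time-of-day part of the duration; a hedged illustration (isodate only) of how that differs from total_seconds() once a duration reaches a day or more:

import isodate

short = isodate.parse_duration("PT12M")        # a typical video length
over_a_day = isodate.parse_duration("P1DT5M")  # longer than a day
assert short.seconds == short.total_seconds() == 720
assert over_a_day.seconds == 300               # .seconds ignores the day component
assert over_a_day.total_seconds() == 86700
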
Example #55
0
            videoCount = videoCount + 1  # fragment: continuation of a loop from earlier in the source file

for videos in videoList:
    try:
        api_key = "AIzaSyCAExHHkEP_iB9RYmHXlaXC-pjPkj9RWQw"
        searchUrl = "https://www.googleapis.com/youtube/v3/videos?id=" + videos + "&key=" + api_key + "&part=contentDetails"
        response = urllib.request.urlopen(searchUrl)
        bla = response.read()
        encoding = response.info().get_content_charset('utf-8')
        data = json.loads(bla.decode(encoding))
        all_data = data['items']
        contentDetails = all_data[0]['contentDetails']
        duration = contentDetails['duration']

        duration = isodate.parse_duration(duration)
        durationseconds = duration.total_seconds()
        if maxLength != -1:
            if durationseconds > maxLength:
                durationseconds = maxLength
        video_dur = str(datetime.timedelta(seconds=durationseconds))
        print(video_dur)

        totalTimeSeconds = totalTimeSeconds + durationseconds
        videoProcessed = videoProcessed + 1
        print("Video", videoProcessed, "out of", len(videoList),
              "videos processed")
    except:
        print("Skipped video", videoList.index(videos))
        videoSkipped = videoSkipped + 1
        videoProcessed = videoProcessed + 1
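
The str(datetime.timedelta(...)) call above is what turns the accumulated seconds into the H:MM:SS display string printed for each video; a small hedged check:

import datetime

assert str(datetime.timedelta(seconds=3725.0)) == "1:02:05"
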
Example #56
0
def search_func(request):

    #default search keyword is "Google", and the maximum number of results is 25
    search_keywords = "Google"
    max_results = 25

    #making connection to database rds with the following configurations
    USERNAME = ''
    PASSWORD = ''
    DB_NAME = ''

    print 'connecting to rds instance'

    #connecting to MySQL database with above configurations
    conn = MySQLdb.connect(host='',
                           user=USERNAME,
                           passwd=PASSWORD,
                           db=DB_NAME,
                           port=3306,
                           use_unicode=False,
                           charset='utf8')

    print 'connected to rds'
    # get the cursor
    cursor = conn.cursor()

    # set the developer key, etc for Youtube API
    DEVELOPER_KEY = ""
    YOUTUBE_API_SERVICE_NAME = "youtube"
    YOUTUBE_API_VERSION = "v3"

    # prepare for searching by Youtube API
    youtube = build(YOUTUBE_API_SERVICE_NAME,
                    YOUTUBE_API_VERSION,
                    developerKey=DEVELOPER_KEY)

    rows = ''
    checkbox_viewCount = 0
    checkbox_relevance = 0
    checkbox_age = 0
    # Call the search.list method to retrieve results matching the specified
    # query term.
    if request.method == 'POST':
        print 'come to post'
        # check which check box is clicked and which check box is not checked
        if len(request.POST.getlist('check_box_viewCount')) != 0:
            checkbox_viewCount = 1
        if len(request.POST.getlist('check_box_relevance')) != 0:
            checkbox_relevance = 1
        if len(request.POST.getlist('check_box_age')) != 0:
            checkbox_age = 1

        # use the database to store the check box value
        cursor.execute(
            """UPDATE check_box set viewCount = %s, relevance=%s,age=%s""",
            (checkbox_viewCount, checkbox_relevance, checkbox_age))
        conn.commit()

        # do a search by Youtube API
        search_response = youtube.search().list(
            q=request.POST['search'],
            part="id,snippet",
            maxResults=max_results).execute()

        # delete all rows in the original table video, which helps to keep search results up to date every time
        cursor.execute('TRUNCATE video')
        conn.commit()

        # get the results by search().list and videos().list, and insert them into the MySQL database
        for search_result in search_response.get("items", []):
            if search_result["id"]["kind"] == "youtube#video":
                search_response_statistics = youtube.videos().list(
                    part="statistics,contentDetails,topicDetails",
                    id=search_result["id"]["videoId"]).execute()

                ts = search_result["snippet"]["publishedAt"]
                dt = datetime.datetime.strptime(
                    ts[:-5], '%Y-%m-%dT%H:%M:%S'
                )  #+ datetime.timedelta(hours=int(ts[-5:-3]), minutes=int(ts[-2:]))*int(ts[-6:-5]+'1')
                PublishedAtInseconds = time.mktime(dt.timetuple())

                for search_result_statistics in search_response_statistics.get(
                        "items", []):
                    # if the desired attribute (topicID) does not exist, set null to this value
                    if len(search_result_statistics.get(
                            "topicDetails", [])) == 0 or len(
                                search_result_statistics.get(
                                    "topicDetails", []).get("topicIds",
                                                            [])) == 0:
                        cursor.execute(
                            """INSERT INTO video VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)""",
                            (search_result["id"]["videoId"],
                             search_result["snippet"]["title"],
                             search_result["snippet"]["publishedAt"],
                             int(PublishedAtInseconds),
                             search_result["snippet"]["thumbnails"]["default"]
                             ["url"], search_result_statistics["statistics"]
                             ["viewCount"],
                             search_result_statistics["statistics"]
                             ["likeCount"],
                             search_result_statistics["statistics"]
                             ["dislikeCount"],
                             str(
                                 parse_duration(
                                     search_result_statistics["contentDetails"]
                                     ["duration"])), 'no'))
                        conn.commit()
                    else:
                        cursor.execute(
                            """INSERT INTO video VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)""",
                            (search_result["id"]["videoId"],
                             search_result["snippet"]["title"],
                             search_result["snippet"]["publishedAt"],
                             int(PublishedAtInseconds),
                             search_result["snippet"]["thumbnails"]["default"]
                             ["url"], search_result_statistics["statistics"]
                             ["viewCount"],
                             search_result_statistics["statistics"]
                             ["likeCount"],
                             search_result_statistics["statistics"]
                             ["dislikeCount"],
                             str(
                                 parse_duration(
                                     search_result_statistics["contentDetails"]
                                     ["duration"])),
                             search_result_statistics["topicDetails"]
                             ["topicIds"][0]))
                        conn.commit()

        # set ready_to_process bit, letting the Java code know that it is time to collect data from the database
        cursor.execute("""UPDATE processing_tags set ready_to_process = %s""",
                       1)
        conn.commit()

        # select the ready_to_display. If this bit is set, the video data has been processed and can be shown to the page
        cursor.execute('SELECT ready_to_display FROM processing_tags')
        row = cursor.fetchone()
        print row[0]
        #    while row[0] == 0:
        #      print ''

        if checkbox_viewCount == 1:
            cursor.execute('SELECT * FROM video ORDER BY viewCount DESC')
            rows = cursor.fetchall()
        else:
            if checkbox_age == 1:
                cursor.execute(
                    'SELECT * FROM video ORDER BY publishedAtInseconds DESC')
                rows = cursor.fetchall()
            else:
                if checkbox_relevance == 1:
                    cursor.execute(
                        'SELECT * FROM video_results INNER JOIN video_trend_results ON video_results.video_id = video_trend_results.video_id ORDER BY weight DESC'
                    )
                    rows = cursor.fetchall()
                else:
                    cursor.execute('SELECT * FROM video')
                    rows = cursor.fetchall()

        cursor.execute('TRUNCATE video_results')
        conn.commit()

        # set all bits to initial value, which is zero
        cursor.execute("""UPDATE processing_tags set ready_to_display = %s""",
                       0)
        cursor.execute("""UPDATE processing_tags set ready_to_process = %s""",
                       0)
        conn.commit()
    # send the video data to page
    return render(request, 'search_videos.html', {'videos': rows})
Example #57
0
def youtube_search():
    youtube = build(YOUTUBE_API_SERVICE_NAME,
                    YOUTUBE_API_VERSION,
                    developerKey=DEVELOPER_KEY)

    #this searches for 5 youtube vids with a random string of the music
    #category and places the result in search_response. We're only using
    #a three-letter string; this length could be experimented with to find what
    #will give you better or more "random" results
    search_response = youtube.search().list(q=randomword(3),
                                            part="id, snippet",
                                            type="video",
                                            videoCategoryId=MUSIC,
                                            maxResults=5,
                                            videoEmbeddable='true',
                                            order='date').execute()

    videos = []
    channelIds = []
    userVids = []
    userUploads = []

    for search_result in search_response.get("items", []):
        #pprint.pprint(search_result)
        if search_result["id"]["kind"] == "youtube#video":

            channelId = search_result["snippet"]["channelId"]
            #print("\n\nchannelId: " + channelId + "\n\n")

            channelIDDetails = youtube.channels().list(
                part="contentDetails", id=channelId).execute().get("items")

            uploads = channelIDDetails[0]["contentDetails"][
                "relatedPlaylists"]["uploads"]

            uploadData = youtube.playlistItems().list(
                part="id, snippet", maxResults=5,
                playlistId=uploads).execute().get("items", [])

            tempUserVids = []
            userVids = []
            for item in uploadData:
                if item["kind"] == "youtube#playlistItem" and item["snippet"][
                        "position"] > 0:
                    tempUserVids.append(
                        item["snippet"]["resourceId"]["videoId"])
            userVids.append(random.sample(tempUserVids, len(tempUserVids)))

            #flatten the user vids list
            flatUserVids = []
            for sublist in userVids:
                for item in sublist:
                    flatUserVids.append(item)

            #getting the video information
            videoStat = []
            for vid in flatUserVids:
                videoStat.append(youtube.videos().list(
                    part='status, snippet, contentDetails',
                    id=vid).execute().get("items"))

            for stat in videoStat:
                length = isodate.parse_duration(
                    stat[0]["contentDetails"]["duration"])
                #print(length)
                privacy = stat[0]["status"]["privacyStatus"]
                category = stat[0]["snippet"]["categoryId"]
                maxMins = dt.timedelta(minutes=8)
                minMins = dt.timedelta(minutes=1)
                if privacy == "public" and category == MUSIC and length > minMins and length < maxMins:
                    if stat[0] is None:
                        break
                    else:
                        return stat[0]["id"]

            continue
Example #58
0
def index(request):

    videos = []

    if request.method == 'POST':
        search_url = 'https://www.googleapis.com/youtube/v3/search'
        video_url = 'https://www.googleapis.com/youtube/v3/videos'

        search_params = {
            'part': 'snippet',
            'q': request.POST['search'],
            'key': settings.YOUTUBE_DATA_API_KEY,
            'maxResults': 9,
            'type': 'video'
        }

        r = requests.get(search_url, params=search_params)

        results = r.json()['items']

        video_ids = []

        for result in results:
            video_ids.append(result['id']['videoId'])

        if request.POST['submit'] == 'lucky':
            return redirect(
                f'https://www.youtube.com/watch?v={ video_ids[0] }')

        video_params = {
            'key': settings.YOUTUBE_DATA_API_KEY,
            'part': 'snippet,contentDetails',
            'id': ','.join(video_ids),
            'maxResults': 9,
        }

        r = requests.get(video_url, params=video_params)

        results = r.json()['items']

        for result in results:
            video_data = {
                'title':
                result['snippet']['title'],
                'id':
                result['id'],
                'url':
                f'https://www.youtube.com/watch?v={ result["id"] }',
                'duration':
                int(
                    parse_duration(
                        result['contentDetails']['duration']).total_seconds()
                    // 60),
                'thumbnail':
                result['snippet']['thumbnails']['high']['url']
            }
            videos.append(video_data)

    context = {'videos': videos}

    return render(request, 'search/index.html', context)
Example #59
0
    async def process_rp3(self):
        logger.debug("[process_rp3] Entered")

        for rp3key in list(self.data.rp3keys):
            logger.debug(f"[process_rp3] Processing key {rp3key}")
            rp3data = self.data.rp3keys[rp3key]
            api = slapi_rp3(rp3key)
            for tripname in ','.join(set(
                    rp3data["trips"].split(','))).split(','):
                logger.debug(f"[process_rp3] Processing trip {tripname}")
                newdata = self.data.rp3[tripname]
                positions = tripname.split('-')

                try:
                    apidata = await api.request(positions[0], positions[1], '',
                                                '', '', '')
                    newdata['trips'] = []

                    #Parse every trip
                    for trip in apidata["Trip"]:
                        newtrip = {'fares': [], 'legs': []}

                        # Loop all fares and add
                        for fare in trip['TariffResult']['fareSetItem'][0][
                                'fareItem']:
                            newfare = {}
                            newfare['name'] = fare['name']
                            newfare['desc'] = fare['desc']
                            newfare['price'] = int(fare['price']) / 100
                            newtrip['fares'].append(newfare)

                        # Add legs to trips
                        for leg in trip['LegList']['Leg']:
                            newleg = {}
                            #Walking is done by humans. And robots. Robots are scary.
                            if leg["type"] == "WALK":
                                newleg['name'] = leg['name']
                                newleg['line'] = 'Walk'
                                newleg['direction'] = 'Walk'
                                newleg['category'] = 'WALK'
                            else:
                                newleg['name'] = leg['Product']['name']
                                newleg['line'] = leg['Product']['line']
                                newleg['direction'] = leg['direction']
                                newleg['category'] = leg['category']
                            newleg['from'] = leg['Origin']['name']
                            newleg['to'] = leg['Destination']['name']
                            newleg[
                                'time'] = f"{leg['Origin']['date']} {leg['Origin']['time']}"
                            newtrip['legs'].append(newleg)

                        #Make some shortcuts for data
                        newtrip['first_leg'] = newtrip['legs'][0]['name']
                        newtrip['time'] = newtrip['legs'][0]['time']
                        newtrip['price'] = newtrip['fares'][0]['price']
                        newtrip['duration'] = str(
                            isodate.parse_duration(trip['duration']))
                        newtrip['transfers'] = trip['transferCount']
                        newdata['trips'].append(newtrip)

                    #Add shortcuts to info in the first trip if it exists
                    newdata[
                        'transfers'] = newdata['trips'][0]['transfers'] or 0
                    newdata['price'] = newdata['trips'][0]['price'] or ''
                    newdata['time'] = newdata['trips'][0]['time'] or ''
                    newdata['duration'] = newdata['trips'][0]['duration'] or ''
                    newdata[
                        'first_leg'] = newdata['trips'][0]['first_leg'] or ''

                    newdata['attribution'] = "Stockholms Lokaltrafik"
                    newdata['last_updated'] = now().strftime(
                        '%Y-%m-%d %H:%M:%S')
                    newdata['api_result'] = "Success"
                except Exception as e:
                    logger.debug(f"[process_rp3] Error occured: {str(e)}")
                    newdata['api_result'] = "Error"
                    newdata['api_error'] = str(e)

                newdata['api_lastrun'] = now().strftime('%Y-%m-%d %H:%M:%S')
                self.data.rp3[tripname] = newdata

                logger.debug(f"[process_rp3] Completed trip {tripname}")

            logger.debug(f"[process_rp3] Completed key {rp3key}")

        logger.debug("[process_rp3] Completed")
Example #60
0
 def test_without_minutes(self):
     self.assertEqual(isodate.parse_duration('PT10S').total_seconds(), 10)