From 04b5a5f9ac44b184434f8af4d62ba344738ba1da Mon Sep 17 00:00:00 2001
From: Draper <27962761+Drapersniper@users.noreply.github.com>
Date: Sat, 15 Feb 2020 05:06:03 +0000
Subject: [PATCH] [Streams] Significantly reduce YouTube Data API quota usage (#3237)

* chore

Signed-off-by: Drapersniper <27962761+drapersniper@users.noreply.github.com>

* Pre-tests

Signed-off-by: Drapersniper <27962761+drapersniper@users.noreply.github.com>

* *sigh*

Signed-off-by: Drapersniper <27962761+drapersniper@users.noreply.github.com>

* Streams + black formatting

Signed-off-by: Drapersniper <27962761+drapersniper@users.noreply.github.com>

* *sigh* lets not spam the logs shall we

Signed-off-by: Drapersniper <27962761+drapersniper@users.noreply.github.com>

* lets be extra sure

Signed-off-by: Drapersniper <27962761+drapersniper@users.noreply.github.com>

* chore

Signed-off-by: Drapersniper <27962761+drapersniper@users.noreply.github.com>

* [Streams] Fix Twitch token for streamalert (#2)

* [Streams] Fix Twitch token for streamalert

* [Streams] Fix Twitch token for streamalert

Co-authored-by: PredaaA <46051820+PredaaA@users.noreply.github.com>
---
 changelog.d/audio/3201.feature.1.rst |  1 +
 changelog.d/streams/3237.enhance.rst |  1 +
 changelog.d/streams/3237.misc.rst    |  1 +
 redbot/cogs/streams/__init__.py      |  1 -
 redbot/cogs/streams/streams.py       | 51 +++++++++++----
 redbot/cogs/streams/streamtypes.py   | 93 +++++++++++++++++++++-------
 6 files changed, 111 insertions(+), 37 deletions(-)
 create mode 100644 changelog.d/audio/3201.feature.1.rst
 create mode 100644 changelog.d/streams/3237.enhance.rst
 create mode 100644 changelog.d/streams/3237.misc.rst

diff --git a/changelog.d/audio/3201.feature.1.rst b/changelog.d/audio/3201.feature.1.rst
new file mode 100644
index 000000000..7c5beb4ca
--- /dev/null
+++ b/changelog.d/audio/3201.feature.1.rst
@@ -0,0 +1 @@
+The ``[p]remove`` command now accepts a URL or index; if a URL is used, it will remove all tracks in the queue with that URL.
\ No newline at end of file
diff --git a/changelog.d/streams/3237.enhance.rst b/changelog.d/streams/3237.enhance.rst
new file mode 100644
index 000000000..f437043f9
--- /dev/null
+++ b/changelog.d/streams/3237.enhance.rst
@@ -0,0 +1 @@
+Added the ``[p]streamset timer`` command, which can be used to control how often the cog checks for livestreams.
\ No newline at end of file
diff --git a/changelog.d/streams/3237.misc.rst b/changelog.d/streams/3237.misc.rst
new file mode 100644
index 000000000..b6c984c03
--- /dev/null
+++ b/changelog.d/streams/3237.misc.rst
@@ -0,0 +1 @@
+Changed the YouTube streams logic to use an RSS feed instead of the search endpoint, significantly reducing API quota usage.
\ No newline at end of file
diff --git a/redbot/cogs/streams/__init__.py b/redbot/cogs/streams/__init__.py
index 64cc09acd..0d3e130ee 100644
--- a/redbot/cogs/streams/__init__.py
+++ b/redbot/cogs/streams/__init__.py
@@ -3,5 +3,4 @@ from .streams import Streams
 
 async def setup(bot):
     cog = Streams(bot)
-    await cog.initialize()
     bot.add_cog(cog)
diff --git a/redbot/cogs/streams/streams.py b/redbot/cogs/streams/streams.py
index 751e7d9a3..3f413b65f 100644
--- a/redbot/cogs/streams/streams.py
+++ b/redbot/cogs/streams/streams.py
@@ -25,10 +25,7 @@ from . import streamtypes as _streamtypes
 
 from collections import defaultdict
 import asyncio
 import re
-from typing import Optional, List, Tuple
-
-CHECK_DELAY = 60
-
+from typing import Optional, List, Tuple, Union
 
 _ = Translator("Streams", __file__)
@@ -36,7 +33,7 @@ _ = Translator("Streams", __file__)
 
 @cog_i18n(_)
 class Streams(commands.Cog):
-    global_defaults = {"tokens": {}, "streams": []}
+    global_defaults = {"refresh_timer": 300, "tokens": {}, "streams": []}
 
     guild_defaults = {
         "autodelete": False,
@@ -51,12 +48,10 @@ class Streams(commands.Cog):
 
     def __init__(self, bot: Red):
         super().__init__()
-        self.db = Config.get_conf(self, 26262626)
+        self.db: Config = Config.get_conf(self, 26262626)
         self.db.register_global(**self.global_defaults)
-
         self.db.register_guild(**self.guild_defaults)
-
         self.db.register_role(**self.role_defaults)
 
         self.bot: Red = bot
 
@@ -66,7 +61,10 @@ class Streams(commands.Cog):
 
         self.yt_cid_pattern = re.compile("^UC[-_A-Za-z0-9]{21}[AQgw]$")
 
-    def check_name_or_id(self, data: str):
+        self._ready_event: asyncio.Event = asyncio.Event()
+        self._init_task: asyncio.Task = self.bot.loop.create_task(self.initialize())
+
+    def check_name_or_id(self, data: str) -> bool:
         matched = self.yt_cid_pattern.fullmatch(data)
         if matched is None:
             return True
@@ -74,12 +72,17 @@ class Streams(commands.Cog):
 
     async def initialize(self) -> None:
         """Should be called straight after cog instantiation."""
+        await self.bot.wait_until_ready()
         await self.move_api_keys()
         self.streams = await self.load_streams()
 
         self.task = self.bot.loop.create_task(self._stream_alerts())
+        self._ready_event.set()
 
-    async def move_api_keys(self):
+    async def cog_before_invoke(self, ctx: commands.Context):
+        await self._ready_event.wait()
+
+    async def move_api_keys(self) -> None:
         """Move the API keys from cog stored config to core bot config if they exist."""
         tokens = await self.db.tokens()
         youtube = await self.bot.get_shared_api_tokens("youtube")
@@ -100,8 +103,11 @@ class Streams(commands.Cog):
         await self.check_online(ctx, stream)
 
     @commands.command()
+    @commands.cooldown(1, 30, commands.BucketType.guild)
     async def youtubestream(self, ctx: commands.Context, channel_id_or_name: str):
         """Check if a YouTube channel is live."""
+        # TODO: Write a custom check to look up the cooldown set by the bot owner
+        # This check is here to avoid people spamming this command and eating up quota
         apikey = await self.bot.get_shared_api_tokens("youtube")
         is_name = self.check_name_or_id(channel_id_or_name)
         if is_name:
@@ -128,7 +134,11 @@ class Streams(commands.Cog):
         stream = PicartoStream(name=channel_name)
         await self.check_online(ctx, stream)
 
-    async def check_online(self, ctx: commands.Context, stream):
+    async def check_online(
+        self,
+        ctx: commands.Context,
+        stream: Union[PicartoStream, MixerStream, HitboxStream, YoutubeStream, TwitchStream],
+    ):
         try:
             info = await stream.is_online()
         except OfflineStream:
@@ -318,6 +328,18 @@ class Streams(commands.Cog):
         """Set tokens for accessing streams."""
         pass
 
+    @streamset.command(name="timer")
+    @checks.is_owner()
+    async def _streamset_refresh_timer(self, ctx: commands.Context, refresh_time: int):
+        """Set stream check refresh time."""
+        if refresh_time < 60:
+            return await ctx.send(_("You cannot set the refresh timer to less than 60 seconds"))
+
+        await self.db.refresh_timer.set(refresh_time)
+        await ctx.send(
+            _("Refresh timer set to {refresh_time} seconds").format(refresh_time=refresh_time)
+        )
+
     @streamset.command()
     @checks.is_owner()
     async def twitchtoken(self, ctx: commands.Context):
@@ -546,7 +568,7 @@ class Streams(commands.Cog):
                 await self.check_streams()
             except asyncio.CancelledError:
                 pass
-            await asyncio.sleep(CHECK_DELAY)
+            await asyncio.sleep(await self.db.refresh_timer())
 
     async def check_streams(self):
         for stream in self.streams:
@@ -656,7 +678,10 @@ class Streams(commands.Cog):
                     raw_stream["_messages_cache"].append(msg)
             token = await self.bot.get_shared_api_tokens(_class.token_name)
             if token:
-                raw_stream["token"] = token
+                if _class.__name__ == "TwitchStream":
+                    raw_stream["token"] = token.get("client_id")
+                else:
+                    raw_stream["token"] = token
             streams.append(_class(**raw_stream))
 
         return streams
diff --git a/redbot/cogs/streams/streamtypes.py b/redbot/cogs/streams/streamtypes.py
index 2a142aa1c..d5553f211 100644
--- a/redbot/cogs/streams/streamtypes.py
+++ b/redbot/cogs/streams/streamtypes.py
@@ -1,3 +1,13 @@
+import json
+import logging
+import xml.etree.ElementTree as ET
+from random import choice
+from string import ascii_letters
+from typing import ClassVar, Optional, List
+
+import aiohttp
+import discord
+
 from .errors import (
     StreamNotFound,
     APIError,
@@ -6,12 +16,6 @@ from .errors import (
     InvalidTwitchCredentials,
 )
 from redbot.core.i18n import Translator
-from random import choice, sample
-from string import ascii_letters
-from typing import ClassVar, Optional
-import discord
-import aiohttp
-import json
 
 TWITCH_BASE_URL = "https://api.twitch.tv"
 TWITCH_ID_ENDPOINT = TWITCH_BASE_URL + "/kraken/users?login="
@@ -22,13 +26,24 @@ YOUTUBE_BASE_URL = "https://www.googleapis.com/youtube/v3"
 YOUTUBE_CHANNELS_ENDPOINT = YOUTUBE_BASE_URL + "/channels"
 YOUTUBE_SEARCH_ENDPOINT = YOUTUBE_BASE_URL + "/search"
 YOUTUBE_VIDEOS_ENDPOINT = YOUTUBE_BASE_URL + "/videos"
+YOUTUBE_CHANNEL_RSS = "https://www.youtube.com/feeds/videos.xml?channel_id={channel_id}"
 
 _ = Translator("Streams", __file__)
 
+log = logging.getLogger("redbot.cogs.Streams")
+
 
 def rnd(url):
     """Appends a random parameter to the url to avoid Discord's caching"""
-    return url + "?rnd=" + "".join([choice(ascii_letters) for i in range(6)])
+    return url + "?rnd=" + "".join([choice(ascii_letters) for _loop_counter in range(6)])
+
+
+def get_video_ids_from_feed(feed):
+    root = ET.fromstring(feed)
+    rss_video_ids = []
+    for child in root.iter("{http://www.w3.org/2005/Atom}entry"):
+        for i in child.iter("{http://www.youtube.com/xml/schemas/2015}videoId"):
+            yield i.text
 
 
 class Stream:
@@ -69,6 +84,9 @@ class YoutubeStream(Stream):
     def __init__(self, **kwargs):
         self.id = kwargs.pop("id", None)
         self._token = kwargs.pop("token", None)
+        self.not_livestreams: List[str] = []
+        self.livestreams: List[str] = []
+
         super().__init__(**kwargs)
 
     async def is_online(self):
@@ -80,26 +98,55 @@ class YoutubeStream(Stream):
         elif not self.name:
             self.name = await self.fetch_name()
 
-        url = YOUTUBE_SEARCH_ENDPOINT
-        params = {
-            "key": self._token["api_key"],
-            "part": "snippet",
-            "channelId": self.id,
-            "type": "video",
-            "eventType": "live",
-        }
         async with aiohttp.ClientSession() as session:
-            async with session.get(url, params=params) as r:
-                data = await r.json()
-        if "items" in data and len(data["items"]) == 0:
-            raise OfflineStream()
-        elif "items" in data:
-            vid_id = data["items"][0]["id"]["videoId"]
-            params = {"key": self._token["api_key"], "id": vid_id, "part": "snippet"}
+            async with session.get(YOUTUBE_CHANNEL_RSS.format(channel_id=self.id)) as r:
+                rssdata = await r.text()
+
+        if self.not_livestreams:
+            self.not_livestreams = list(dict.fromkeys(self.not_livestreams))
+
+        if self.livestreams:
+            self.livestreams = list(dict.fromkeys(self.livestreams))
+
+        for video_id in get_video_ids_from_feed(rssdata):
+            if video_id in self.not_livestreams:
+                log.debug(f"video_id in not_livestreams: {video_id}")
+                continue
+            log.debug(f"video_id not in not_livestreams: {video_id}")
+            params = {
+                "key": self._token["api_key"],
+                "id": video_id,
+                "part": "id,liveStreamingDetails",
+            }
+            async with aiohttp.ClientSession() as session:
+                async with session.get(YOUTUBE_VIDEOS_ENDPOINT, params=params) as r:
+                    data = await r.json()
+            stream_data = data.get("items", [{}])[0].get("liveStreamingDetails", {})
+            log.debug(f"stream_data for {video_id}: {stream_data}")
+            if (
+                stream_data
+                and stream_data != "None"
+                and stream_data.get("actualEndTime", None) is None
+                and stream_data.get("concurrentViewers", None) is not None
+            ):
+                if video_id not in self.livestreams:
+                    self.livestreams.append(data["items"][0]["id"])
+            else:
+                self.not_livestreams.append(data["items"][0]["id"])
+                if video_id in self.livestreams:
+                    self.livestreams.remove(video_id)
+        log.debug(f"livestreams for {self.name}: {self.livestreams}")
+        log.debug(f"not_livestreams for {self.name}: {self.not_livestreams}")
+        # This is technically redundant since we already have the info
+        # from the RSS feed, but it avoids fully rewriting the embed code
+        # below, and it is only a 2-quota query.
+        if self.livestreams:
+            params = {"key": self._token["api_key"], "id": self.livestreams[-1], "part": "snippet"}
             async with aiohttp.ClientSession() as session:
                 async with session.get(YOUTUBE_VIDEOS_ENDPOINT, params=params) as r:
                     data = await r.json()
             return self.make_embed(data)
+        raise OfflineStream()
 
     def make_embed(self, data):
         vid_data = data["items"][0]
@@ -162,7 +209,7 @@ class TwitchStream(Stream):
             self.id = await self.fetch_id()
 
         url = TWITCH_STREAMS_ENDPOINT + self.id
-        header = {"Client-ID": str(self._token), "Accept": "application/vnd.twitchtv.v5+json"}
+        header = {"Client-ID": str(self._token)}
         async with aiohttp.ClientSession() as session:
             async with session.get(url, headers=header) as r:
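
The quota saving in this patch comes from fetching each channel's public RSS feed (which costs no API quota) and only spending YouTube Data API quota on videos.list lookups for the video IDs found in that feed, instead of issuing a far more expensive search.list call on every check. The standalone sketch below illustrates that flow outside the cog; it is a simplified approximation of YoutubeStream.is_online above, not code from the patch, and the helper name check_channel_live, the placeholder API key, and the placeholder channel ID are assumptions made for illustration.

import asyncio
import xml.etree.ElementTree as ET

import aiohttp

YOUTUBE_CHANNEL_RSS = "https://www.youtube.com/feeds/videos.xml?channel_id={channel_id}"
YOUTUBE_VIDEOS_ENDPOINT = "https://www.googleapis.com/youtube/v3/videos"


async def check_channel_live(api_key: str, channel_id: str):
    """Return the ID of a video the channel is currently live on, or None.

    The RSS fetch is free; only the videos.list calls are billed against the
    YouTube Data API quota, and they are much cheaper than the search.list
    call the old code issued on every check.
    """
    async with aiohttp.ClientSession() as session:
        # Free request: the channel's public feed of recent videos.
        async with session.get(YOUTUBE_CHANNEL_RSS.format(channel_id=channel_id)) as r:
            feed = await r.text()
        root = ET.fromstring(feed)
        video_ids = [
            el.text for el in root.iter("{http://www.youtube.com/xml/schemas/2015}videoId")
        ]
        for video_id in video_ids:
            # Quota-billed (but cheap) request: only liveStreamingDetails for this video.
            params = {"key": api_key, "id": video_id, "part": "liveStreamingDetails"}
            async with session.get(YOUTUBE_VIDEOS_ENDPOINT, params=params) as r:
                data = await r.json()
            items = data.get("items") or [{}]
            details = items[0].get("liveStreamingDetails", {})
            # Same test as YoutubeStream.is_online: live = has viewers, no end time yet.
            if details.get("concurrentViewers") and not details.get("actualEndTime"):
                return video_id
    return None


if __name__ == "__main__":
    # Both values are placeholders; substitute a real API key and channel ID.
    print(asyncio.run(check_channel_live("YOUR_API_KEY", "UC0000000000000000000000")))

Because the feed only lists a channel's most recent videos, the cog additionally caches IDs it has already classified (the livestreams and not_livestreams lists above), so repeated checks skip videos already known not to be live and avoid re-spending quota on them.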