From 911aed5fe23df9a80ffb910f9fdee9906a1a4280 Mon Sep 17 00:00:00 2001 From: palmtree5 <3577255+palmtree5@users.noreply.github.com> Date: Wed, 23 Oct 2019 19:45:25 -0800 Subject: [PATCH 01/41] [Docs] Getting Started Guide improvements (#3083) * Add MS Azure to the list of hosting providers * Fix some typos, wording, incorrect commands * towncrier * Update docs/getting_started.rst Co-Authored-By: Michael H --- changelog.d/3083.docs.rst | 1 + docs/getting_started.rst | 16 ++++++++-------- docs/host-list.rst | 3 +++ 3 files changed, 12 insertions(+), 8 deletions(-) create mode 100644 changelog.d/3083.docs.rst diff --git a/changelog.d/3083.docs.rst b/changelog.d/3083.docs.rst new file mode 100644 index 000000000..7860990c1 --- /dev/null +++ b/changelog.d/3083.docs.rst @@ -0,0 +1 @@ +Fix some typos and wording, add MS Azure to host list \ No newline at end of file diff --git a/docs/getting_started.rst b/docs/getting_started.rst index 6e45d8215..e02f355a4 100644 --- a/docs/getting_started.rst +++ b/docs/getting_started.rst @@ -115,8 +115,8 @@ to use one, do it like this: ``[p]cleanup messages 10`` Cogs ---- -Red is built with cogs, fancy term for plugins. They are -modules that enhance the Red functionalities. They contain +Red is built with cogs, a fancy term for plugins. They are +modules that add functionality to Red. They contain commands to use. Red comes with 19 cogs containing the basic features, such @@ -162,10 +162,10 @@ there are hundreds of cogs available! .. 26-cogs not available, let's use my repo :3 -Cogs comes with repositories. A repository is a container of cogs +Cogs come in repositories. A repository is a container of cogs that you can install. Let's suppose you want to install the ``say`` cog from the repository ``Laggrons-Dumb-Cogs``. You'll first need -to install the repository. +to add the repository. .. code-block:: none @@ -173,7 +173,7 @@ to install the repository. .. note:: You may need to specify a branch. If so, add its name after the link. -Then you can add the cog +Then you can install the cog .. code-block:: none @@ -195,7 +195,7 @@ the level of permission needed for a command. Bot owner ~~~~~~~~~ -The bot owner can access all commands on every guild. He can also use +The bot owner can access all commands on every guild. They can also use exclusive commands that can interact with the global settings or system files. @@ -214,7 +214,7 @@ Administrator ~~~~~~~~~~~~~ The administrator is defined by its roles. You can set multiple admin roles -with the ``[p]addadminrole`` and ``[p]removeadminrole`` commands. +with the ``[p]set addadminrole`` and ``[p]set removeadminrole`` commands. For example, in the mod cog, an admin can use the ``[p]modset`` command which defines the cog settings. @@ -224,7 +224,7 @@ Moderator ~~~~~~~~~ A moderator is a step above the average users. You can set multiple moderator -roles with the ``[p]addmodrole`` and ``[p]removemodrole`` commands. +roles with the ``[p]set addmodrole`` and ``[p]set removemodrole`` commands. For example, in the mod cog (again), a mod will be able to mute, kick and ban; but he won't be able to modify the cog settings with the ``[p]modset`` command. diff --git a/docs/host-list.rst b/docs/host-list.rst index 47398d898..3abc73022 100644 --- a/docs/host-list.rst +++ b/docs/host-list.rst @@ -56,6 +56,9 @@ Others |`Google Cloud |Same as AWS, but it's Google. | |`_| | +-------------------------------------+-----------------------------------------------------+ +|`Microsoft Azure |Same as AWS, but it's Microsoft. 
| +|`_ | | ++-------------------------------------+-----------------------------------------------------+ |`LowEndBox `_ |A curator for lower specced servers. | +-------------------------------------+-----------------------------------------------------+ From a729a474b167fb7c581fb7c7ac7d0cccca27a128 Mon Sep 17 00:00:00 2001 From: Kowlin Date: Mon, 4 Nov 2019 22:52:01 +0100 Subject: [PATCH 02/41] Added documentation for PM2 (#2105) * Added PM2 documentation * Grammar fix * Build error fix. * Just work T_T * Update docs/autostart_pm2.rst Co-Authored-By: Vexed <51716387+Vexed01@users.noreply.github.com> * Update docs/autostart_pm2.rst Co-Authored-By: Vexed <51716387+Vexed01@users.noreply.github.com> * Create 2105.docs.rst --- changelog.d/2105.docs.rst | 1 + docs/autostart_pm2.rst | 42 +++++++++++++++++++++++++++++++++++++++ docs/index.rst | 1 + 3 files changed, 44 insertions(+) create mode 100644 changelog.d/2105.docs.rst create mode 100644 docs/autostart_pm2.rst diff --git a/changelog.d/2105.docs.rst b/changelog.d/2105.docs.rst new file mode 100644 index 000000000..12b75c1d2 --- /dev/null +++ b/changelog.d/2105.docs.rst @@ -0,0 +1 @@ +Added documentation for PM2 support. diff --git a/docs/autostart_pm2.rst b/docs/autostart_pm2.rst new file mode 100644 index 000000000..b7e28e38d --- /dev/null +++ b/docs/autostart_pm2.rst @@ -0,0 +1,42 @@ +.. pm2 service guide + +============================================== +Setting up auto-restart using pm2 on Linux +============================================== + +.. note:: This guide is for setting up PM2 on a Linux environment. This guide assumes that you already have a working Red instance. + +-------------- +Installing PM2 +-------------- + +Start by installing Node.JS and NPM via your favorite package distributor. From there run the following command: + +:code:`npm install pm2 -g` + +After PM2 is installed, run the following command to enable your Red instance to be managed by PM2. Replace the brackets with the required information. +You can add additional Red based arguments after the instance, such as :code:`--dev`. + +:code:`pm2 start redbot --name "" --interpreter "" -- --no-prompt` + +.. code-block:: none + + Arguments to replace. + + --name "" + A name to identify the bot within pm2, this is not your Red instance. + + --interpreter "" + The location of your Python interpreter, to find out where that is use the following command: + which python3.6 + + + The name of your Red instance. + +------------------------------ +Ensuring that PM2 stays online +------------------------------ + +To make sure that PM2 stays online and persistence between machine restarts, run the following commands: + +:code:`pm2 save` & :code:`pm2 startup` diff --git a/docs/index.rst b/docs/index.rst index bcec8b9f2..85c3f217e 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -16,6 +16,7 @@ Welcome to Red - Discord Bot's documentation! install_linux_mac venv_guide autostart_systemd + autostart_pm2 .. 
toctree:: :maxdepth: 2 From ddd9c4c6b0380bfff8d6184afa899ef5524ebafb Mon Sep 17 00:00:00 2001 From: Michael H Date: Mon, 4 Nov 2019 17:09:01 -0500 Subject: [PATCH 03/41] [Permissions] Ensure defaults are cleared when clearing all rules (#3041) - fixes #3037 --- changelog.d/permissions/3037.bugfix.rst | 1 + redbot/cogs/permissions/permissions.py | 2 +- redbot/core/bot.py | 10 +++++++--- redbot/core/commands/requires.py | 12 ++++++++---- 4 files changed, 17 insertions(+), 8 deletions(-) create mode 100644 changelog.d/permissions/3037.bugfix.rst diff --git a/changelog.d/permissions/3037.bugfix.rst b/changelog.d/permissions/3037.bugfix.rst new file mode 100644 index 000000000..c783beaa1 --- /dev/null +++ b/changelog.d/permissions/3037.bugfix.rst @@ -0,0 +1 @@ +defaults are cleared properly when clearing all rules \ No newline at end of file diff --git a/redbot/cogs/permissions/permissions.py b/redbot/cogs/permissions/permissions.py index 2e0aa9422..e3cd995b0 100644 --- a/redbot/cogs/permissions/permissions.py +++ b/redbot/cogs/permissions/permissions.py @@ -544,7 +544,7 @@ class Permissions(commands.Cog): Handles config. """ - self.bot.clear_permission_rules(guild_id) + self.bot.clear_permission_rules(guild_id, preserve_default_rule=False) for category in (COG, COMMAND): async with self.config.custom(category).all() as all_rules: for name, rules in all_rules.items(): diff --git a/redbot/core/bot.py b/redbot/core/bot.py index acdffc957..796ed7a27 100644 --- a/redbot/core/bot.py +++ b/redbot/core/bot.py @@ -779,7 +779,7 @@ class RedBase(commands.GroupMixin, commands.bot.BotBase, RPCMixin): # pylint: d for subcommand in set(command.walk_commands()): subcommand.requires.reset() - def clear_permission_rules(self, guild_id: Optional[int]) -> None: + def clear_permission_rules(self, guild_id: Optional[int], **kwargs) -> None: """Clear all permission overrides in a scope. Parameters @@ -789,11 +789,15 @@ class RedBase(commands.GroupMixin, commands.bot.BotBase, RPCMixin): # pylint: d ``None``, this will clear all global rules and leave all guild rules untouched. + **kwargs + Keyword arguments to be passed to each required call of + ``commands.Requires.clear_all_rules`` + """ for cog in self.cogs.values(): - cog.requires.clear_all_rules(guild_id) + cog.requires.clear_all_rules(guild_id, **kwargs) for command in self.walk_commands(): - command.requires.clear_all_rules(guild_id) + command.requires.clear_all_rules(guild_id, **kwargs) def add_permissions_hook(self, hook: commands.CheckPredicate) -> None: """Add a permissions hook. diff --git a/redbot/core/commands/requires.py b/redbot/core/commands/requires.py index c5b4187a4..b6f188de1 100644 --- a/redbot/core/commands/requires.py +++ b/redbot/core/commands/requires.py @@ -398,11 +398,9 @@ class Requires: else: rules[model_id] = rule - def clear_all_rules(self, guild_id: int) -> None: + def clear_all_rules(self, guild_id: int, *, preserve_default_rule: bool = True) -> None: """Clear all rules of a particular scope. - This will preserve the default rule, if set. - Parameters ---------- guild_id : int @@ -410,6 +408,12 @@ class Requires: `Requires.GLOBAL`, this will clear all global rules and leave all guild rules untouched. + Other Parameters + ---------------- + preserve_default_rule : bool + Whether to preserve the default rule or not. 
+ This defaults to being preserved + """ if guild_id: rules = self._guild_rules.setdefault(guild_id, _RulesDict()) @@ -417,7 +421,7 @@ class Requires: rules = self._global_rules default = rules.get(self.DEFAULT, None) rules.clear() - if default is not None: + if default is not None and preserve_default_rule: rules[self.DEFAULT] = default def reset(self) -> None: From b8cbaa2fa04f7451d8a44d2308182451845f97f4 Mon Sep 17 00:00:00 2001 From: Michael H Date: Tue, 5 Nov 2019 08:13:32 -0500 Subject: [PATCH 04/41] Merge 3.1.7 (#3098) * uvloop + python3.8 * Lavalink bump to 3.2.1_846 * [Release] 3.1.7 - Handles a dependency issue for python3.8 - Updates the Lavalink jar used - This include's Nin's stat fix - Streaming from Soundcloud is working again, at least for now. * 3.1.7 --- redbot/__init__.py | 2 +- redbot/cogs/audio/manager.py | 2 +- setup.cfg | 4 +++- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/redbot/__init__.py b/redbot/__init__.py index fffb3af7c..2f62106d6 100644 --- a/redbot/__init__.py +++ b/redbot/__init__.py @@ -173,7 +173,7 @@ class VersionInfo: ) -__version__ = "3.1.6" +__version__ = "3.1.7" version_info = VersionInfo.from_str(__version__) # Filter fuzzywuzzy slow sequence matcher warning diff --git a/redbot/cogs/audio/manager.py b/redbot/cogs/audio/manager.py index 06c61d428..14175275d 100644 --- a/redbot/cogs/audio/manager.py +++ b/redbot/cogs/audio/manager.py @@ -18,7 +18,7 @@ from redbot.core import data_manager from .errors import LavalinkDownloadFailed JAR_VERSION = "3.2.1" -JAR_BUILD = 823 +JAR_BUILD = 846 LAVALINK_DOWNLOAD_URL = ( f"https://github.com/Cog-Creators/Lavalink-Jars/releases/download/{JAR_VERSION}_{JAR_BUILD}/" f"Lavalink.jar" diff --git a/setup.cfg b/setup.cfg index ae033b1e1..929144bab 100644 --- a/setup.cfg +++ b/setup.cfg @@ -49,7 +49,9 @@ install_requires = Red-Lavalink==0.4.0 schema==0.7.0 tqdm==4.35.0 - uvloop==0.13.0; sys_platform != "win32" and platform_python_implementation == "CPython" + # Below is due to an issue with uvloop < 0.14 with python 3.8, move both to 0.14 at full release + uvloop==0.13.0; sys_platform != "win32" and platform_python_implementation == "CPython" and python_version<"3.8" + uvloop==0.14.0rc2; sys_platform != "win32" and platform_python_implementation == "CPython" and python_version>="3.8" websockets==6.0 yarl==1.3.0 From 7e9b1b87e6f2ec64eeb8d5f15730509a033ce8da Mon Sep 17 00:00:00 2001 From: Toby Harradine Date: Thu, 7 Nov 2019 04:24:54 +1100 Subject: [PATCH 05/41] Allow keeping data in `redbot-setup delete` (#2965) * Allow keeping data in `redbot-setup delete` Signed-off-by: Toby Harradine * Add changelog entry Signed-off-by: Toby Harradine --- changelog.d/2962.enhance.rst | 1 + redbot/setup.py | 30 ++++++++++++++++++++++++++---- 2 files changed, 27 insertions(+), 4 deletions(-) create mode 100644 changelog.d/2962.enhance.rst diff --git a/changelog.d/2962.enhance.rst b/changelog.d/2962.enhance.rst new file mode 100644 index 000000000..1323dd867 --- /dev/null +++ b/changelog.d/2962.enhance.rst @@ -0,0 +1 @@ +```redbot-setup delete`` now has the option to leave Red's data untouched on database backends. 
diff --git a/redbot/setup.py b/redbot/setup.py index b67e3282f..2655b0961 100644 --- a/redbot/setup.py +++ b/redbot/setup.py @@ -301,12 +301,18 @@ async def create_backup(instance: str) -> None: async def remove_instance( instance, interactive: bool = False, + delete_data: Optional[bool] = None, _create_backup: Optional[bool] = None, drop_db: Optional[bool] = None, remove_datapath: Optional[bool] = None, ): data_manager.load_basic_configuration(instance) + if interactive is True and delete_data is None: + delete_data = click.confirm( + "Would you like to delete this instance's data?", default=False + ) + if interactive is True and _create_backup is None: _create_backup = click.confirm( "Would you like to make a backup of the data for this instance?", default=False @@ -321,7 +327,8 @@ async def remove_instance( else: driver_cls = drivers.get_driver_class(backend) - await driver_cls.delete_all_data(interactive=interactive, drop_db=drop_db) + if delete_data is True: + await driver_cls.delete_all_data(interactive=interactive, drop_db=drop_db) if interactive is True and remove_datapath is None: remove_datapath = click.confirm( @@ -376,6 +383,16 @@ def cli(ctx, debug): default=True, help="Don't ask for user input during the process.", ) +@click.option( + "--delete-data/--no-delete-data", + "delete_data", + is_flag=True, + default=None, + help=( + "Delete this instance's data. " + "If these options and --no-prompt are omitted, you will be asked about this." + ), +) @click.option( "--backup/--no-backup", "_create_backup", @@ -392,7 +409,8 @@ def cli(ctx, debug): default=None, help=( "Drop the entire database constaining this instance's data. Has no effect on JSON " - "instances. If these options and --no-prompt are omitted, you will be asked about this." + "instances, or if --no-delete-data is set. If these options and --no-prompt are omitted," + "you will be asked about this." ), ) @click.option( @@ -401,19 +419,23 @@ def cli(ctx, debug): default=None, help=( "Remove this entire instance's datapath. If these options and --no-prompt are omitted, " - "you will be asked about this." + "you will be asked about this. NOTE: --remove-datapath will override --no-delete-data " + "for JSON instances." ), ) def delete( instance: str, interactive: bool, + delete_data: Optional[bool], _create_backup: Optional[bool], drop_db: Optional[bool], remove_datapath: Optional[bool], ): loop = asyncio.get_event_loop() loop.run_until_complete( - remove_instance(instance, interactive, _create_backup, drop_db, remove_datapath) + remove_instance( + instance, interactive, delete_data, _create_backup, drop_db, remove_datapath + ) ) From e79a08e392f1a7614b6ba1cb6bc639d4727f2ab0 Mon Sep 17 00:00:00 2001 From: Jeremiah Boby Date: Wed, 6 Nov 2019 17:29:01 +0000 Subject: [PATCH 06/41] Add autostart documentation for venv users (#3028) * Add documentation for venv users Resolves #3005 * Add changes to changelog.d * Use "redenv" over "path/to/venv" --- changelog.d/3005.docs.rst | 1 + docs/autostart_systemd.rst | 18 +++++++++++++++--- 2 files changed, 16 insertions(+), 3 deletions(-) create mode 100644 changelog.d/3005.docs.rst diff --git a/changelog.d/3005.docs.rst b/changelog.d/3005.docs.rst new file mode 100644 index 000000000..e08a0706d --- /dev/null +++ b/changelog.d/3005.docs.rst @@ -0,0 +1 @@ +Adds autostart documentation for Red users who installed it inside a virtual environment. 
diff --git a/docs/autostart_systemd.rst b/docs/autostart_systemd.rst index 1af55e478..29d404c44 100644 --- a/docs/autostart_systemd.rst +++ b/docs/autostart_systemd.rst @@ -8,11 +8,23 @@ Setting up auto-restart using systemd on Linux Creating the service file ------------------------- -Create the new service file: +In order to create the service file, you will first need the location of your :code:`redbot` binary. + +.. code-block:: bash + + # If redbot is installed in a virtualenv + source redenv/bin/activate + + # If you are using pyenv + pyenv shell + + which redbot + +Then create the new service file: :code:`sudo -e /etc/systemd/system/red@.service` -Paste the following and replace all instances of :code:`username` with the username your bot is running under (hopefully not root): +Paste the following and replace all instances of :code:`username` with the username, and :code:`path` with the location you obtained above: .. code-block:: none @@ -21,7 +33,7 @@ Paste the following and replace all instances of :code:`username` with the usern After=multi-user.target [Service] - ExecStart=/home/username/.local/bin/redbot %I --no-prompt + ExecStart=path %I --no-prompt User=username Group=username Type=idle From d85fb260e74bffcf9374b44285ad6d461d9eb7dd Mon Sep 17 00:00:00 2001 From: aikaterna <20862007+aikaterna@users.noreply.github.com> Date: Wed, 6 Nov 2019 14:41:18 -0800 Subject: [PATCH 07/41] [Audio] Expose FriendlyException on play command (#3085) * [Audio] Expose FriendlyException on play command * Add changelog --- changelog.d/audio/3085.enhance.1.rst | 1 + redbot/cogs/audio/audio.py | 2 ++ 2 files changed, 3 insertions(+) create mode 100644 changelog.d/audio/3085.enhance.1.rst diff --git a/changelog.d/audio/3085.enhance.1.rst b/changelog.d/audio/3085.enhance.1.rst new file mode 100644 index 000000000..4106637b2 --- /dev/null +++ b/changelog.d/audio/3085.enhance.1.rst @@ -0,0 +1 @@ +Expose FriendlyExceptions to users on the play command. 
diff --git a/redbot/cogs/audio/audio.py b/redbot/cogs/audio/audio.py index e25cbda83..e442b80af 100644 --- a/redbot/cogs/audio/audio.py +++ b/redbot/cogs/audio/audio.py @@ -2835,6 +2835,8 @@ class Audio(commands.Cog): if not tracks: self._play_lock(ctx, False) embed = discord.Embed(title=_("Nothing found."), colour=await ctx.embed_colour()) + if result.exception_message: + embed.set_footer(text=result.exception_message) if await self.config.use_external_lavalink() and query.is_local: embed.description = _( "Local tracks will not work " From e2c8b1100870511aaa575ce807f9009e5a5dba99 Mon Sep 17 00:00:00 2001 From: jack1142 <6032823+jack1142@users.noreply.github.com> Date: Fri, 8 Nov 2019 02:36:16 +0100 Subject: [PATCH 08/41] [V3 Downloader] Revision tracking (#2571) * feat(downloader): Install cog from specific commit in repo (initial commit) - Repo and Installable have commit property now - New class inheriting from Installable - InstalledCog (old one from converters.py removed) - New Repo.checkout() method, which is also async ctx manager ref #2527 * fix(downloader): Keep information about repo's branch in config - This is needed to make sure that repo can go back from detached state in some rare unexpected cases - current branch is determined by `git symbolic-ref` now as this command errors for detached HEAD * feat(downloader): Update repo without cogs, update single cog The most important part of issue #2527 has been added here - `[p]repo update` command added - new conf format - nested dictionary repo_name->cog_name->cog_json installed libraries are now kept in conf too - `InstalledCog` renamed to `InstalledModule` - installed libraries use this class - `Downloader.installed_libraries()` and `Downloader.installed_modules()` added - `Downloader._add_to_installed()` and `Downloader._remove_from_installed()` now accept list of modules, of both cogs and libraries - `[p]cog install` tells about fails of copying cog and installing shared libraries - `[p]cog update` will truly update only chosen cogs (if provided) or cogs that need update - pinned cogs aren't checked - before update, repos are updated - to determine if update is needed `Repo.get_modified_modules()` is used - `[p]cog pin` and `[p]cog unpin` commands for pinning/unpinning cogs added - `Repo.checkout()` allows to choose ctx manager exit's checkout revision - `Repo.install_cog()` returns `InstalledModule` now and raises CopyingError (maybe breaking?) - `Repo.install_libraries()` returns 2-tuple of installed and failed libraries (maybe breaking?) 
- `RepoManager.get_all_cogs()` added, which returns cogs from all repos - `RepoManager.repos` property added, which contains tuple of `Repo` * test(downloader): Repo.current_branch() throws an exception, when branch can't be determined * style(downloader): rename _add_to_installed to _save_to_installed This method is used for both adding and updating existing modules in Config * refactor(downloader): add ctx.typing() for few commands `[p]cog install` is nested hell, can't wait for moving install logic to separate method * fix(downloader): refactor and fix `set` usage * perf(downloader): update commits for ALL checked modules to omit diffs next time This will also disable running git diff for cogs that have the same commit as the latest one * style(downloader): few style improvements - use of mutable object in method definition - make Repo._get_full_sha1() public method - too long line - don't use len to check if sequence is empty * feat(downloader): add `[p]cog updateallfromrepos` and `[p]cog updatetoversion` commands - moved cog update logic into `Downloader._cog_update_logic()` (lack of better name) - splitted whole cog update process into smaller methods - might still need some improvements - added new methods to `Repo` class: - `is_on_branch()` to check if repo is currently checked out to branch - `is_ancestor()` to check if one commit is ancestor of the other - fix for `Downloader._available_updates()` behaviour broken by commit 5755ab08ba67556b3863e907c6f44d80f4f13d88 * feat(downloader): try to find last commit where module is still present Enhancements: - `Installable` now has `repo` attribute containing repo object or `None` if repo is missing - `Downloader._install_cogs()` and `Downloader._reinstall_libraries()` are able to install modules from different commits of repo - `Repo.checkout()` as ctx manager will now exit to commit which was active before checking out - unification of `rev` and `hash` terms: All function parameters are explicitly called `hash`, if it can only be commit's full sha1 hash or `rev` if it can be anything that names a commit object, see [link](https://git-scm.com/docs/git-rev-parse#_specifying_revisions) - new `Repo.get_last_module_occurence()` method, which gets module's Installable from last commit in which it still occurs * docs(downloader): Add basic description for `InstalledModule` * fix(downloader): cog ignored during updates if its commit was missing After config format update, commit string is empty until update and when such cog was checked and it wasn't available in repo anymore, it was ignored * refactor(downloader): Installing cogs from specific rev will pin them * perf(downloader): Don't checkout when current commit equals target hash - changes to `Repo.checkout()`: - `exit_to_rev` is now keyword only argument - added `force_checkout` to force checkout even if `Repo.commit` value is the same as target hash * refactor(downloader): Repo._run() stderr is redirected to debug log now - added two keyword arguments: - `valid_exit_codes` which specifies valid exit codes, used to determine if stderr should be sent as debug or error level in logging - `debug_only` which specifies if stderr can be sent only as debug level in logging * style(downloader): stop using `set` as arg name in `_load_repos()` * feat(downloader): pass multiple cogs to `[p]cog (un)pin` * refactor(downloader): accept module name instead of instance, fix spelling * style(downloader): few small style changes * fix(downloader): add type annotations + fixes based on them - fix wrong 
type annotations and add a lot of new ones - add checks for `Installable.repo` being `None` - fix wrong return type in `Downloader._install_requirements` - show repo names correctly when updating all repos - fix error when some requirement fails to install BREAKING CHANGE: - type of `Repo.available_modules` is now consistent (always `tuple`) * tests: use same event loop policy as in Red's code * enhance(downloader): fully handle ambiguous revisions * build(deps): add pytest-mock dependency to tests extra * fix(downloader): minor fixes * feat(downloader): add tool for editing Downloader's test repo This script aims to help update the human-readable version of repo used for git integration tests in ``redbot/tests/downloader_testrepo.export`` by exporting/importing it in/from provided directory. Note ---- Editing `downloader_git_test_repo.export` file manually is strongly discouraged, especially editing any part of commit directives as that causes a change in the commit's hash. Another problem devs could encounter when trying to manually edit that file are editors that will use CRLF instead of LF for new line character(s) and therefore break it. I also used `.gitattributes` to prevent autocrlf from breaking testrepo. Also, if Git ever changes currently used SHA-1 to SHA-256 we will have to update old hashes with new ones. But it's a small drawback, when we can have human-readable version of repo. Known limitations ----------------- ``git fast-export`` exports commits without GPG signs so this script disables it in repo's config. This also means devs shouldn't use ``--gpg-sign`` flag in ``git commit`` within the test repo. * tests(downloader): add git tests and test repo for them Also added Markdown file that is even more clear than export file on what the test repo contains. This is manually created but can be automated on later date. * test(downloader): add more tests related to RepoManager These tests use expected output that is already guaranteed by git tests. * chore(CODEOWNERS): add jack1142 to Downloader's folders I know this doesn't actually give any benefit to people that don't have write permission to the repo but I saw other big fella devs doing this, so I think this might be advisable. 
* enhance(downloader): allow easy schema updates in future * enhance(downloader): more typing fixes, add comments for clarity * feat(downloader): add python and bot version check to update process follow-up on #2605, this commit fully fixes #1866 * chore(changelog): add towncrier entries * fix(downloader): use `*args` instead of `commands.Greedy` * fix(downloader): hot-reload issue - `InstallableType` now inherits from `IntEnum` There's desync of `InstallableType` class types due to hot-reload and `IntEnum` allows for equality check between different types * enhance(downloader): ensure there's no cog with same name installed should fix #2927 * fix(downloader): last few changes before marking as ready for review --- .github/CODEOWNERS | 5 +- changelog.d/2571.misc.rst | 1 + changelog.d/downloader/1866.enhance.rst | 1 + changelog.d/downloader/2527.docs.rst | 1 + changelog.d/downloader/2527.enhance.1.rst | 1 + changelog.d/downloader/2527.enhance.2.rst | 1 + changelog.d/downloader/2527.feature.1.rst | 1 + changelog.d/downloader/2527.feature.2.rst | 1 + changelog.d/downloader/2527.feature.3.rst | 1 + changelog.d/downloader/2527.feature.4.rst | 1 + changelog.d/downloader/2527.feature.5.rst | 1 + changelog.d/downloader/2527.feature.6.rst | 1 + changelog.d/downloader/2527.misc.1.rst | 4 + changelog.d/downloader/2571.bugfix.1.rst | 1 + changelog.d/downloader/2571.bugfix.2.rst | 1 + changelog.d/downloader/2571.dep.rst | 1 + changelog.d/downloader/2571.enhance.rst | 1 + changelog.d/downloader/2571.misc.rst | 1 + changelog.d/downloader/2927.bugfix.rst | 1 + docs/framework_downloader.rst | 6 + redbot/__init__.py | 23 +- redbot/__main__.py | 14 +- redbot/cogs/downloader/checks.py | 2 +- redbot/cogs/downloader/converters.py | 6 +- redbot/cogs/downloader/downloader.py | 1114 ++++++++++++++++----- redbot/cogs/downloader/errors.py | 38 + redbot/cogs/downloader/installable.py | 107 +- redbot/cogs/downloader/json_mixins.py | 13 +- redbot/cogs/downloader/repo_manager.py | 679 +++++++++++-- redbot/pytest/.gitattributes | 1 + redbot/pytest/downloader.py | 136 ++- redbot/pytest/downloader_testrepo.export | 134 +++ redbot/pytest/downloader_testrepo.md | 102 ++ setup.cfg | 2 + tests/cogs/downloader/test_downloader.py | 335 ++++++- tests/cogs/downloader/test_git.py | 452 +++++++++ tests/cogs/downloader/test_installable.py | 8 +- tests/conftest.py | 3 + tools/edit_testrepo.py | 172 ++++ tools/primary_deps.ini | 1 + 40 files changed, 2922 insertions(+), 452 deletions(-) create mode 100644 changelog.d/2571.misc.rst create mode 100644 changelog.d/downloader/1866.enhance.rst create mode 100644 changelog.d/downloader/2527.docs.rst create mode 100644 changelog.d/downloader/2527.enhance.1.rst create mode 100644 changelog.d/downloader/2527.enhance.2.rst create mode 100644 changelog.d/downloader/2527.feature.1.rst create mode 100644 changelog.d/downloader/2527.feature.2.rst create mode 100644 changelog.d/downloader/2527.feature.3.rst create mode 100644 changelog.d/downloader/2527.feature.4.rst create mode 100644 changelog.d/downloader/2527.feature.5.rst create mode 100644 changelog.d/downloader/2527.feature.6.rst create mode 100644 changelog.d/downloader/2527.misc.1.rst create mode 100644 changelog.d/downloader/2571.bugfix.1.rst create mode 100644 changelog.d/downloader/2571.bugfix.2.rst create mode 100644 changelog.d/downloader/2571.dep.rst create mode 100644 changelog.d/downloader/2571.enhance.rst create mode 100644 changelog.d/downloader/2571.misc.rst create mode 100644 changelog.d/downloader/2927.bugfix.rst create mode 
100644 redbot/pytest/.gitattributes create mode 100644 redbot/pytest/downloader_testrepo.export create mode 100644 redbot/pytest/downloader_testrepo.md create mode 100644 tests/cogs/downloader/test_git.py create mode 100644 tools/edit_testrepo.py diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index ef2d440ba..7dac8bd23 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -33,7 +33,7 @@ redbot/cogs/audio/* @aikaterna redbot/cogs/bank/* @tekulvw redbot/cogs/cleanup/* @palmtree5 redbot/cogs/customcom/* @palmtree5 -redbot/cogs/downloader/* @tekulvw +redbot/cogs/downloader/* @tekulvw @jack1142 redbot/cogs/economy/* @palmtree5 redbot/cogs/filter/* @palmtree5 redbot/cogs/general/* @palmtree5 @@ -49,6 +49,9 @@ redbot/cogs/warnings/* @palmtree5 # Docs docs/* @tekulvw @palmtree5 +# Tests +tests/cogs/downloader/* @jack1142 + # Setup, instance setup, and running the bot setup.py @tekulvw redbot/__init__.py @tekulvw diff --git a/changelog.d/2571.misc.rst b/changelog.d/2571.misc.rst new file mode 100644 index 000000000..f071ffc3f --- /dev/null +++ b/changelog.d/2571.misc.rst @@ -0,0 +1 @@ +Tests now use same event loop policy as Red's code. \ No newline at end of file diff --git a/changelog.d/downloader/1866.enhance.rst b/changelog.d/downloader/1866.enhance.rst new file mode 100644 index 000000000..3c75d90ca --- /dev/null +++ b/changelog.d/downloader/1866.enhance.rst @@ -0,0 +1 @@ +Downloader will now check if Python and bot version match requirements in ``info.json`` during update. \ No newline at end of file diff --git a/changelog.d/downloader/2527.docs.rst b/changelog.d/downloader/2527.docs.rst new file mode 100644 index 000000000..3b4b47f61 --- /dev/null +++ b/changelog.d/downloader/2527.docs.rst @@ -0,0 +1 @@ +Added :func:`redbot.cogs.downloader.repo_manager.InstalledModule` to Downloader's framework docs. \ No newline at end of file diff --git a/changelog.d/downloader/2527.enhance.1.rst b/changelog.d/downloader/2527.enhance.1.rst new file mode 100644 index 000000000..5666bd733 --- /dev/null +++ b/changelog.d/downloader/2527.enhance.1.rst @@ -0,0 +1 @@ +User can now pass multiple cog names to ``[p]cog install``. \ No newline at end of file diff --git a/changelog.d/downloader/2527.enhance.2.rst b/changelog.d/downloader/2527.enhance.2.rst new file mode 100644 index 000000000..23b990d6d --- /dev/null +++ b/changelog.d/downloader/2527.enhance.2.rst @@ -0,0 +1 @@ +When passing cogs to ``[p]cog update`` command, it will now only update those cogs, not all cogs from the repo these cogs are from. \ No newline at end of file diff --git a/changelog.d/downloader/2527.feature.1.rst b/changelog.d/downloader/2527.feature.1.rst new file mode 100644 index 000000000..342b8910e --- /dev/null +++ b/changelog.d/downloader/2527.feature.1.rst @@ -0,0 +1 @@ +Added ``[p]repo update [repos]`` command that allows you to update repos without updating cogs from them. \ No newline at end of file diff --git a/changelog.d/downloader/2527.feature.2.rst b/changelog.d/downloader/2527.feature.2.rst new file mode 100644 index 000000000..116d40435 --- /dev/null +++ b/changelog.d/downloader/2527.feature.2.rst @@ -0,0 +1 @@ +Added ``[p]cog installversion `` command that allows you to install cogs from specified revision (commit, tag, branch) of given repo. When using this command, cog will automatically be pinned. 
\ No newline at end of file diff --git a/changelog.d/downloader/2527.feature.3.rst b/changelog.d/downloader/2527.feature.3.rst new file mode 100644 index 000000000..b9567b967 --- /dev/null +++ b/changelog.d/downloader/2527.feature.3.rst @@ -0,0 +1 @@ +Added ``[p]cog pin `` and ``[p]cog unpin `` for pinning cogs. Cogs that are pinned will not be updated when using update commands. \ No newline at end of file diff --git a/changelog.d/downloader/2527.feature.4.rst b/changelog.d/downloader/2527.feature.4.rst new file mode 100644 index 000000000..c50b44623 --- /dev/null +++ b/changelog.d/downloader/2527.feature.4.rst @@ -0,0 +1 @@ +Added ``[p]cog checkforupdates`` command that will tell which cogs can be updated (including pinned cog) without updating them. \ No newline at end of file diff --git a/changelog.d/downloader/2527.feature.5.rst b/changelog.d/downloader/2527.feature.5.rst new file mode 100644 index 000000000..dc08620a0 --- /dev/null +++ b/changelog.d/downloader/2527.feature.5.rst @@ -0,0 +1 @@ +Added ``[p]cog updateallfromrepos `` command that will update all cogs from given repos. \ No newline at end of file diff --git a/changelog.d/downloader/2527.feature.6.rst b/changelog.d/downloader/2527.feature.6.rst new file mode 100644 index 000000000..d5eb16a8e --- /dev/null +++ b/changelog.d/downloader/2527.feature.6.rst @@ -0,0 +1 @@ +Added ``[p]cog updatetoversion [cogs]`` command that updates all cogs or ones of user's choosing to chosen revision of given repo. \ No newline at end of file diff --git a/changelog.d/downloader/2527.misc.1.rst b/changelog.d/downloader/2527.misc.1.rst new file mode 100644 index 000000000..8b1d6db9e --- /dev/null +++ b/changelog.d/downloader/2527.misc.1.rst @@ -0,0 +1,4 @@ +Added :func:`redbot.cogs.downloader.installable.InstalledModule` which is used instead of :func:`redbot.cogs.downloader.installable.Installable` when we refer to installed cog or shared library. +Therefore: + - ``to_json`` and ``from_json`` methods were moved from :func:`redbot.cogs.downloader.installable.Installable` to :func:`redbot.cogs.downloader.installable.InstalledModule` + - return types changed for :func:`redbot.cogs.converter.InstalledCog.convert`, :func:`redbot.cogs.downloader.Downloader.installed_cogs`, :func:`redbot.cogs.downloader.Repo.install_cog` to use :func:`redbot.cogs.downloader.installable.InstalledModule`. \ No newline at end of file diff --git a/changelog.d/downloader/2571.bugfix.1.rst b/changelog.d/downloader/2571.bugfix.1.rst new file mode 100644 index 000000000..342f97e42 --- /dev/null +++ b/changelog.d/downloader/2571.bugfix.1.rst @@ -0,0 +1 @@ +Made regex for repo names use raw string to stop ``DeprecationWarning`` about invalid escape sequence. \ No newline at end of file diff --git a/changelog.d/downloader/2571.bugfix.2.rst b/changelog.d/downloader/2571.bugfix.2.rst new file mode 100644 index 000000000..74bf663c0 --- /dev/null +++ b/changelog.d/downloader/2571.bugfix.2.rst @@ -0,0 +1 @@ +Downloader will no longer allow to install cog that is already installed. \ No newline at end of file diff --git a/changelog.d/downloader/2571.dep.rst b/changelog.d/downloader/2571.dep.rst new file mode 100644 index 000000000..34ab16ae6 --- /dev/null +++ b/changelog.d/downloader/2571.dep.rst @@ -0,0 +1 @@ +Added ``pytest-mock`` requirement to ``tests`` extra. 
\ No newline at end of file diff --git a/changelog.d/downloader/2571.enhance.rst b/changelog.d/downloader/2571.enhance.rst new file mode 100644 index 000000000..bc65a7a84 --- /dev/null +++ b/changelog.d/downloader/2571.enhance.rst @@ -0,0 +1 @@ +Added error messages for failures during installing/reinstalling requirements and copying cogs and shared libraries. \ No newline at end of file diff --git a/changelog.d/downloader/2571.misc.rst b/changelog.d/downloader/2571.misc.rst new file mode 100644 index 000000000..0acd4199f --- /dev/null +++ b/changelog.d/downloader/2571.misc.rst @@ -0,0 +1 @@ +Added more Downloader tests for Repo logic and git integration. New git tests use a test repo file that can be generated using new tool at ``tools/edit_testrepo.py``. \ No newline at end of file diff --git a/changelog.d/downloader/2927.bugfix.rst b/changelog.d/downloader/2927.bugfix.rst new file mode 100644 index 000000000..c79b243e3 --- /dev/null +++ b/changelog.d/downloader/2927.bugfix.rst @@ -0,0 +1 @@ +Downloader will no longer allow to install cog with same name as other that is installed. \ No newline at end of file diff --git a/docs/framework_downloader.rst b/docs/framework_downloader.rst index 6d4a5a45b..e7b3de700 100644 --- a/docs/framework_downloader.rst +++ b/docs/framework_downloader.rst @@ -68,6 +68,12 @@ Installable .. autoclass:: Installable :members: +InstalledModule +^^^^^^^^^^^^^^^ + +.. autoclass:: InstalledModule + :members: + .. automodule:: redbot.cogs.downloader.repo_manager Repo diff --git a/redbot/__init__.py b/redbot/__init__.py index 2f62106d6..a7982024c 100644 --- a/redbot/__init__.py +++ b/redbot/__init__.py @@ -1,3 +1,4 @@ +import asyncio as _asyncio import re as _re import sys as _sys import warnings as _warnings @@ -15,8 +16,13 @@ from typing import ( MIN_PYTHON_VERSION = (3, 7, 0) -__all__ = ["MIN_PYTHON_VERSION", "__version__", "version_info", "VersionInfo"] - +__all__ = [ + "MIN_PYTHON_VERSION", + "__version__", + "version_info", + "VersionInfo", + "_update_event_loop_policy", +] if _sys.version_info < MIN_PYTHON_VERSION: print( f"Python {'.'.join(map(str, MIN_PYTHON_VERSION))} is required to run Red, but you have " @@ -173,6 +179,19 @@ class VersionInfo: ) +def _update_event_loop_policy(): + if _sys.platform == "win32": + _asyncio.set_event_loop_policy(_asyncio.WindowsProactorEventLoopPolicy()) + elif _sys.implementation.name == "cpython": + # Let's not force this dependency, uvloop is much faster on cpython + try: + import uvloop as _uvloop + except ImportError: + pass + else: + _asyncio.set_event_loop_policy(_uvloop.EventLoopPolicy()) + + __version__ = "3.1.7" version_info = VersionInfo.from_str(__version__) diff --git a/redbot/__main__.py b/redbot/__main__.py index a195e26a4..2947a9edc 100644 --- a/redbot/__main__.py +++ b/redbot/__main__.py @@ -13,17 +13,9 @@ import discord # Set the event loop policies here so any subsequent `get_event_loop()` # calls, in particular those as a result of the following imports, # return the correct loop object. 
-if sys.platform == "win32": - asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy()) -elif sys.implementation.name == "cpython": - # Let's not force this dependency, uvloop is much faster on cpython - try: - import uvloop - except ImportError: - uvloop = None - pass - else: - asyncio.set_event_loop_policy(uvloop.EventLoopPolicy()) +from redbot import _update_event_loop_policy + +_update_event_loop_policy() import redbot.logging from redbot.core.bot import Red, ExitCodes diff --git a/redbot/cogs/downloader/checks.py b/redbot/cogs/downloader/checks.py index cb86a8d4e..55b7327f0 100644 --- a/redbot/cogs/downloader/checks.py +++ b/redbot/cogs/downloader/checks.py @@ -21,7 +21,7 @@ REPO_INSTALL_MSG = _( _ = T_ -async def do_install_agreement(ctx: commands.Context): +async def do_install_agreement(ctx: commands.Context) -> bool: downloader = ctx.cog if downloader is None or downloader.already_agreed: return True diff --git a/redbot/cogs/downloader/converters.py b/redbot/cogs/downloader/converters.py index 54f7522cd..5b7357579 100644 --- a/redbot/cogs/downloader/converters.py +++ b/redbot/cogs/downloader/converters.py @@ -1,14 +1,14 @@ import discord from redbot.core import commands from redbot.core.i18n import Translator -from .installable import Installable +from .installable import InstalledModule _ = Translator("Koala", __file__) -class InstalledCog(Installable): +class InstalledCog(InstalledModule): @classmethod - async def convert(cls, ctx: commands.Context, arg: str) -> Installable: + async def convert(cls, ctx: commands.Context, arg: str) -> InstalledModule: downloader = ctx.bot.get_cog("Downloader") if downloader is None: raise commands.CommandError(_("No Downloader cog found.")) diff --git a/redbot/cogs/downloader/downloader.py b/redbot/cogs/downloader/downloader.py index d8a21d00d..be682431a 100644 --- a/redbot/cogs/downloader/downloader.py +++ b/redbot/cogs/downloader/downloader.py @@ -5,8 +5,8 @@ import re import shutil import sys from pathlib import Path -from sys import path as syspath -from typing import Tuple, Union, Iterable +from typing import Tuple, Union, Iterable, Optional, Dict, Set, List, cast +from collections import defaultdict import discord from redbot.core import checks, commands, Config, version_info as red_version_info @@ -20,7 +20,7 @@ from redbot.core.utils.predicates import MessagePredicate, ReactionPredicate from . 
import errors from .checks import do_install_agreement from .converters import InstalledCog -from .installable import Installable +from .installable import InstallableType, Installable, InstalledModule from .log import log from .repo_manager import RepoManager, Repo @@ -35,7 +35,7 @@ class Downloader(commands.Cog): self.conf = Config.get_conf(self, identifier=998240343, force_registration=True) - self.conf.register_global(installed=[]) + self.conf.register_global(schema_version=0, installed_cogs={}, installed_libraries={}) self.already_agreed = False @@ -50,10 +50,43 @@ class Downloader(commands.Cog): self._repo_manager = RepoManager() - async def initialize(self): + async def initialize(self) -> None: await self._repo_manager.initialize() + await self._maybe_update_config() - async def cog_install_path(self): + async def _maybe_update_config(self) -> None: + schema_version = await self.conf.schema_version() + + if schema_version == 0: + await self._schema_0_to_1() + schema_version += 1 + await self.conf.schema_version.set(schema_version) + + async def _schema_0_to_1(self): + """ + This contains migration to allow saving state + of both installed cogs and shared libraries. + """ + old_conf = await self.conf.get_raw("installed", default=[]) + if not old_conf: + return + async with self.conf.installed_cogs() as new_cog_conf: + for cog_json in old_conf: + repo_name = cog_json["repo_name"] + module_name = cog_json["cog_name"] + if repo_name not in new_cog_conf: + new_cog_conf[repo_name] = {} + new_cog_conf[repo_name][module_name] = { + "repo_name": repo_name, + "module_name": module_name, + "commit": "", + "pinned": False, + } + await self.conf.clear_raw("installed") + # no reliable way to get installed libraries (i.a. missing repo name) + # but it only helps `[p]cog update` run faster so it's not an issue + + async def cog_install_path(self) -> Path: """Get the current cog install path. Returns @@ -64,100 +97,273 @@ class Downloader(commands.Cog): """ return await self.bot._cog_mgr.install_path() - async def installed_cogs(self) -> Tuple[Installable]: + async def installed_cogs(self) -> Tuple[InstalledModule, ...]: """Get info on installed cogs. Returns ------- - `tuple` of `Installable` - All installed cogs / shared lib directories. + `tuple` of `InstalledModule` + All installed cogs. """ - installed = await self.conf.installed() + installed = await self.conf.installed_cogs() # noinspection PyTypeChecker - return tuple(Installable.from_json(v, self._repo_manager) for v in installed) + return tuple( + InstalledModule.from_json(cog_json, self._repo_manager) + for repo_json in installed.values() + for cog_json in repo_json.values() + ) - async def _add_to_installed(self, cog: Installable): - """Mark a cog as installed. + async def installed_libraries(self) -> Tuple[InstalledModule, ...]: + """Get info on installed shared libraries. + + Returns + ------- + `tuple` of `InstalledModule` + All installed shared libraries. + + """ + installed = await self.conf.installed_libraries() + # noinspection PyTypeChecker + return tuple( + InstalledModule.from_json(lib_json, self._repo_manager) + for repo_json in installed.values() + for lib_json in repo_json.values() + ) + + async def installed_modules(self) -> Tuple[InstalledModule, ...]: + """Get info on installed cogs and shared libraries. + + Returns + ------- + `tuple` of `InstalledModule` + All installed cogs and shared libraries. 
+ + """ + return await self.installed_cogs() + await self.installed_libraries() + + async def _save_to_installed(self, modules: Iterable[InstalledModule]) -> None: + """Mark modules as installed or updates their json in Config. Parameters ---------- - cog : Installable - The cog to check off. + modules : `list` of `InstalledModule` + The modules to check off. """ - installed = await self.conf.installed() - cog_json = cog.to_json() + installed_cogs = await self.conf.installed_cogs() + installed_libraries = await self.conf.installed_libraries() + for module in modules: + if module.type == InstallableType.COG: + installed = installed_cogs + elif module.type == InstallableType.SHARED_LIBRARY: + installed = installed_libraries + else: + continue + module_json = module.to_json() + repo_json = installed.setdefault(module.repo_name, {}) + repo_json[module.name] = module_json - if cog_json not in installed: - installed.append(cog_json) - await self.conf.installed.set(installed) + await self.conf.installed_cogs.set(installed_cogs) + await self.conf.installed_libraries.set(installed_libraries) - async def _remove_from_installed(self, cog: Installable): - """Remove a cog from the saved list of installed cogs. + async def _remove_from_installed(self, modules: Iterable[InstalledModule]) -> None: + """Remove modules from the saved list + of installed modules (corresponding to type of module). Parameters ---------- - cog : Installable - The cog to remove. + modules : `list` of `InstalledModule` + The modules to remove. """ - installed = await self.conf.installed() - cog_json = cog.to_json() + installed_cogs = await self.conf.installed_cogs() + installed_libraries = await self.conf.installed_libraries() + for module in modules: + if module.type == InstallableType.COG: + installed = installed_cogs + elif module.type == InstallableType.SHARED_LIBRARY: + installed = installed_libraries + else: + continue + with contextlib.suppress(KeyError): + installed[module._json_repo_name].pop(module.name) - if cog_json in installed: - installed.remove(cog_json) - await self.conf.installed.set(installed) + await self.conf.installed_cogs.set(installed_cogs) + await self.conf.installed_libraries.set(installed_libraries) - async def _reinstall_cogs(self, cogs: Iterable[Installable]) -> Tuple[Installable]: + async def _available_updates( + self, cogs: Iterable[InstalledModule] + ) -> Tuple[Tuple[Installable, ...], Tuple[Installable, ...]]: """ - Installs a list of cogs, used when updating. - :param cogs: - :return: Any cogs that failed to copy + Get cogs and libraries which can be updated. + + Parameters + ---------- + cogs : `list` of `InstalledModule` + List of cogs, which should be checked against the updates. + + Returns + ------- + tuple + 2-tuple of cogs and libraries which can be updated. + """ - failed = [] - for cog in cogs: - if not await cog.copy_to(await self.cog_install_path()): - failed.append(cog) - - # noinspection PyTypeChecker - return tuple(failed) - - async def _reinstall_libraries(self, cogs: Iterable[Installable]) -> Tuple[Installable]: - """ - Reinstalls any shared libraries from the repos of cogs that - were updated. 
- :param cogs: - :return: Any libraries that failed to copy - """ - repo_names = set(cog.repo_name for cog in cogs) - unfiltered_repos = (self._repo_manager.get_repo(r) for r in repo_names) - repos = filter(lambda r: r is not None, unfiltered_repos) - - failed = [] + repos = {cog.repo for cog in cogs if cog.repo is not None} + installed_libraries = await self.installed_libraries() + modules: Set[InstalledModule] = set() + cogs_to_update: Set[Installable] = set() + libraries_to_update: Set[Installable] = set() + # split libraries and cogs into 2 categories: + # 1. `cogs_to_update`, `libraries_to_update` - module needs update, skip diffs + # 2. `modules` - module MAY need update, check diffs for repo in repos: - if not await repo.install_libraries( - target_dir=self.SHAREDLIB_PATH, req_target_dir=self.LIB_PATH + for lib in repo.available_libraries: + try: + index = installed_libraries.index(lib) + except ValueError: + libraries_to_update.add(lib) + else: + modules.add(installed_libraries[index]) + for cog in cogs: + if cog.repo is None: + # cog had its repo removed, can't check for updates + continue + if cog.commit: + modules.add(cog) + continue + # marking cog for update if there's no commit data saved (back-compat, see GH-2571) + last_cog_occurrence = await cog.repo.get_last_module_occurrence(cog.name) + if last_cog_occurrence is not None: + cogs_to_update.add(last_cog_occurrence) + + # Reduces diff requests to a single dict with no repeats + hashes: Dict[Tuple[Repo, str], Set[InstalledModule]] = defaultdict(set) + for module in modules: + module.repo = cast(Repo, module.repo) + if module.repo.commit != module.commit and await module.repo.is_ancestor( + module.commit, module.repo.commit ): - failed.extend(repo.available_libraries) + hashes[(module.repo, module.commit)].add(module) + + update_commits = [] + for (repo, old_hash), modules_to_check in hashes.items(): + modified = await repo.get_modified_modules(old_hash, repo.commit) + for module in modules_to_check: + try: + index = modified.index(module) + except ValueError: + # module wasn't modified - we just need to update its commit + module.commit = repo.commit + update_commits.append(module) + else: + modified_module = modified[index] + if modified_module.type == InstallableType.COG: + cogs_to_update.add(modified_module) + elif modified_module.type == InstallableType.SHARED_LIBRARY: + libraries_to_update.add(modified_module) + + await self._save_to_installed(update_commits) + + return (tuple(cogs_to_update), tuple(libraries_to_update)) + + async def _install_cogs( + self, cogs: Iterable[Installable] + ) -> Tuple[Tuple[InstalledModule, ...], Tuple[Installable, ...]]: + """Installs a list of cogs. + + Parameters + ---------- + cogs : `list` of `Installable` + Cogs to install. ``repo`` property of those objects can't be `None` + Returns + ------- + tuple + 2-tuple of installed and failed cogs. 
+ """ + repos: Dict[str, Tuple[Repo, Dict[str, List[Installable]]]] = {} + for cog in cogs: + try: + repo_by_commit = repos[cog.repo_name] + except KeyError: + cog.repo = cast(Repo, cog.repo) # docstring specifies this already + repo_by_commit = repos[cog.repo_name] = (cog.repo, defaultdict(list)) + cogs_by_commit = repo_by_commit[1] + cogs_by_commit[cog.commit].append(cog) + installed = [] + failed = [] + for repo, cogs_by_commit in repos.values(): + exit_to_commit = repo.commit + for commit, cogs_to_install in cogs_by_commit.items(): + await repo.checkout(commit) + for cog in cogs_to_install: + if await cog.copy_to(await self.cog_install_path()): + installed.append(InstalledModule.from_installable(cog)) + else: + failed.append(cog) + await repo.checkout(exit_to_commit) # noinspection PyTypeChecker - return tuple(failed) + return (tuple(installed), tuple(failed)) - async def _reinstall_requirements(self, cogs: Iterable[Installable]) -> bool: + async def _reinstall_libraries( + self, libraries: Iterable[Installable] + ) -> Tuple[Tuple[InstalledModule, ...], Tuple[Installable, ...]]: + """Installs a list of shared libraries, used when updating. + + Parameters + ---------- + libraries : `list` of `Installable` + Libraries to reinstall. ``repo`` property of those objects can't be `None` + Returns + ------- + tuple + 2-tuple of installed and failed libraries. """ - Reinstalls requirements for given cogs that have been updated. - Returns a bool that indicates if all requirement installations - were successful. - :param cogs: - :return: + repos: Dict[str, Tuple[Repo, Dict[str, Set[Installable]]]] = {} + for lib in libraries: + try: + repo_by_commit = repos[lib.repo_name] + except KeyError: + lib.repo = cast(Repo, lib.repo) # docstring specifies this already + repo_by_commit = repos[lib.repo_name] = (lib.repo, defaultdict(set)) + libs_by_commit = repo_by_commit[1] + libs_by_commit[lib.commit].add(lib) + + all_installed: List[InstalledModule] = [] + all_failed: List[Installable] = [] + for repo, libs_by_commit in repos.values(): + exit_to_commit = repo.commit + for commit, libs in libs_by_commit.items(): + await repo.checkout(commit) + installed, failed = await repo.install_libraries( + target_dir=self.SHAREDLIB_PATH, req_target_dir=self.LIB_PATH, libraries=libs + ) + all_installed += installed + all_failed += failed + await repo.checkout(exit_to_commit) + + # noinspection PyTypeChecker + return (tuple(all_installed), tuple(all_failed)) + + async def _install_requirements(self, cogs: Iterable[Installable]) -> Tuple[str, ...]: + """ + Installs requirements for given cogs. + + Parameters + ---------- + cogs : `list` of `Installable` + Cogs whose requirements should be installed. + Returns + ------- + tuple + Tuple of failed requirements. 
""" # Reduces requirements to a single list with no repeats - requirements = set(r for c in cogs for r in c.requirements) - repo_names = self._repo_manager.get_all_repo_names() - repos = [(self._repo_manager.get_repo(rn), []) for rn in repo_names] + requirements = {requirement for cog in cogs for requirement in cog.requirements} + repos: List[Tuple[Repo, List[str]]] = [(repo, []) for repo in self._repo_manager.repos] # This for loop distributes the requirements across all repos # which will allow us to concurrently install requirements @@ -167,15 +373,15 @@ class Downloader(commands.Cog): has_reqs = list(filter(lambda item: len(item[1]) > 0, repos)) - ret = True + failed_reqs = [] for repo, reqs in has_reqs: for req in reqs: - # noinspection PyTypeChecker - ret = ret and await repo.install_raw_requirements([req], self.LIB_PATH) - return ret + if not await repo.install_raw_requirements([req], self.LIB_PATH): + failed_reqs.append(req) + return tuple(failed_reqs) @staticmethod - async def _delete_cog(target: Path): + async def _delete_cog(target: Path) -> None: """ Removes an (installed) cog. :param target: Path pointing to an existing file or directory @@ -191,11 +397,12 @@ class Downloader(commands.Cog): @commands.command() @checks.is_owner() - async def pipinstall(self, ctx, *deps: str): + async def pipinstall(self, ctx: commands.Context, *deps: str) -> None: """Install a group of dependencies using pip.""" if not deps: - return await ctx.send_help() - repo = Repo("", "", "", Path.cwd(), loop=ctx.bot.loop) + await ctx.send_help() + return + repo = Repo("", "", "", "", Path.cwd(), loop=ctx.bot.loop) async with ctx.typing(): success = await repo.install_raw_requirements(deps, self.LIB_PATH) @@ -211,12 +418,14 @@ class Downloader(commands.Cog): @commands.group() @checks.is_owner() - async def repo(self, ctx): + async def repo(self, ctx: commands.Context) -> None: """Repo management commands.""" pass @repo.command(name="add") - async def _repo_add(self, ctx, name: str, repo_url: str, branch: str = None): + async def _repo_add( + self, ctx: commands.Context, name: str, repo_url: str, branch: str = None + ) -> None: """Add a new repo. Repo names can only contain characters A-z, numbers, underscores, and hyphens. @@ -225,14 +434,15 @@ class Downloader(commands.Cog): agreed = await do_install_agreement(ctx) if not agreed: return - if re.match("^[a-zA-Z0-9_\-]*$", name) is None: + if re.match(r"^[a-zA-Z0-9_\-]*$", name) is None: await ctx.send( _("Repo names can only contain characters A-z, numbers, underscores, and hyphens.") ) return try: - # noinspection PyTypeChecker - repo = await self._repo_manager.add_repo(name=name, url=repo_url, branch=branch) + async with ctx.typing(): + # noinspection PyTypeChecker + repo = await self._repo_manager.add_repo(name=name, url=repo_url, branch=branch) except errors.ExistingGitRepo: await ctx.send(_("That git repo has already been added under another name.")) except errors.CloningError as err: @@ -246,7 +456,8 @@ class Downloader(commands.Cog): except OSError: await ctx.send( _( - "Something went wrong trying to add that repo. Your repo name might have an invalid character." + "Something went wrong trying to add that repo." + " Your repo name might have an invalid character." 
) ) else: @@ -255,7 +466,7 @@ class Downloader(commands.Cog): await ctx.send(repo.install_msg.replace("[p]", ctx.prefix)) @repo.command(name="delete", aliases=["remove", "del"], usage="") - async def _repo_del(self, ctx, repo: Repo): + async def _repo_del(self, ctx: commands.Context, repo: Repo) -> None: """Remove a repo and its files.""" await self._repo_manager.delete_repo(repo.name) @@ -264,107 +475,168 @@ class Downloader(commands.Cog): ) @repo.command(name="list") - async def _repo_list(self, ctx): + async def _repo_list(self, ctx: commands.Context) -> None: """List all installed repos.""" - repos = self._repo_manager.get_all_repo_names() - repos = sorted(repos, key=str.lower) + repos = self._repo_manager.repos + sorted_repos = sorted(repos, key=lambda r: str.lower(r.name)) joined = _("Installed Repos:\n\n") - for repo_name in repos: - repo = self._repo_manager.get_repo(repo_name) + for repo in sorted_repos: joined += "+ {}: {}\n".format(repo.name, repo.short or "") for page in pagify(joined, ["\n"], shorten_by=16): await ctx.send(box(page.lstrip(" "), lang="diff")) @repo.command(name="info", usage="") - async def _repo_info(self, ctx, repo: Repo): + async def _repo_info(self, ctx: commands.Context, repo: Repo) -> None: """Show information about a repo.""" - if repo is None: - await ctx.send(_("Repo `{repo.name}` not found.").format(repo=repo)) - return - msg = _("Information on {repo.name}:\n{description}").format( repo=repo, description=repo.description or "" ) await ctx.send(box(msg)) + @repo.command(name="update") + async def _repo_update(self, ctx: commands.Context, *repos: Repo) -> None: + """Update all repos, or ones of your choosing.""" + async with ctx.typing(): + updated: Set[str] + if not repos: + updated = {repo.name for repo in await self._repo_manager.update_all_repos()} + else: + updated = set() + for repo in repos: + old, new = await repo.update() + if old != new: + updated.add(repo.name) + + if updated: + message = _("Repo update completed successfully.") + message += _("\nUpdated: ") + humanize_list(tuple(map(inline, updated))) + elif repos is None: + await ctx.send(_("All installed repos are already up to date.")) + return + else: + await ctx.send(_("These repos are already up to date.")) + return + await ctx.send(message) + @commands.group() @checks.is_owner() - async def cog(self, ctx): + async def cog(self, ctx: commands.Context) -> None: """Cog installation management commands.""" pass - @cog.command(name="install", usage=" ") - async def _cog_install(self, ctx, repo: Repo, cog_name: str): + @cog.command(name="install", usage=" ") + async def _cog_install(self, ctx: commands.Context, repo: Repo, *cog_names: str) -> None: """Install a cog from the given repo.""" - cog: Installable = discord.utils.get(repo.available_cogs, name=cog_name) - if cog is None: - await ctx.send( - _( - "Error: there is no cog by the name of `{cog_name}` in the `{repo.name}` repo." 
- ).format(cog_name=cog_name, repo=repo) - ) + await self._cog_installrev(ctx, repo, None, cog_names) + + @cog.command(name="installversion", usage=" ") + async def _cog_installversion( + self, ctx: commands.Context, repo: Repo, rev: str, *cog_names: str + ) -> None: + """Install a cog from the specified revision of given repo.""" + await self._cog_installrev(ctx, repo, rev, cog_names) + + async def _cog_installrev( + self, ctx: commands.Context, repo: Repo, rev: Optional[str], cog_names: Iterable[str] + ) -> None: + if not cog_names: + await ctx.send_help() return - elif cog.min_python_version > sys.version_info: - await ctx.send( - _("This cog requires at least python version {version}, aborting install.").format( - version=".".join([str(n) for n in cog.min_python_version]) - ) + commit = None + async with ctx.typing(): + if rev is not None: + try: + commit = await repo.get_full_sha1(rev) + except errors.AmbiguousRevision as e: + msg = _( + "Error: short sha1 `{rev}` is ambiguous. Possible candidates:\n" + ).format(rev=rev) + for candidate in e.candidates: + msg += ( + f"**{candidate.object_type} {candidate.rev}**" + f" - {candidate.description}\n" + ) + for page in pagify(msg): + await ctx.send(msg) + return + except errors.UnknownRevision: + await ctx.send( + _("Error: there is no revision `{rev}` in repo `{repo.name}`").format( + rev=rev, repo=repo + ) + ) + return + cog_names = set(cog_names) + + async with repo.checkout(commit, exit_to_rev=repo.branch): + cogs, message = await self._filter_incorrect_cogs_by_names(repo, cog_names) + if not cogs: + await ctx.send(message) + return + failed_reqs = await self._install_requirements(cogs) + if failed_reqs: + message += _("\nFailed to install requirements: ") + humanize_list( + tuple(map(inline, failed_reqs)) + ) + await ctx.send(message) + return + + installed_cogs, failed_cogs = await self._install_cogs(cogs) + + installed_libs, failed_libs = await repo.install_libraries( + target_dir=self.SHAREDLIB_PATH, req_target_dir=self.LIB_PATH ) - return - ignore_max = cog.min_bot_version > cog.max_bot_version - if ( - cog.min_bot_version > red_version_info - or not ignore_max - and cog.max_bot_version < red_version_info - ): - await ctx.send( - _("This cog requires at least Red version {min_version}").format( - min_version=cog.min_bot_version + if rev is not None: + for cog in installed_cogs: + cog.pinned = True + await self._save_to_installed(installed_cogs + installed_libs) + if failed_libs: + libnames = [inline(lib.name) for lib in failed_libs] + message = ( + _("\nFailed to install shared libraries for `{repo.name}` repo: ").format( + repo=repo + ) + + humanize_list(libnames) + + message ) - + ( - "" - if ignore_max - else _(" and at most {max_version}").format(max_version=cog.max_bot_version) + if failed_cogs: + cognames = [inline(cog.name) for cog in failed_cogs] + message = _("\nFailed to install cogs: ") + humanize_list(cognames) + message + if installed_cogs: + cognames = [inline(cog.name) for cog in installed_cogs] + message = ( + _("Successfully installed cogs: ") + + humanize_list(cognames) + + ( + _( + "\nThese cogs are now pinned and won't get updated automatically." 
+ " To change this, use `{prefix}cog unpin `" + ).format(prefix=ctx.prefix) + if rev is not None + else "" + ) + + _("\nYou can load them using `{prefix}load `").format( + prefix=ctx.prefix + ) + + message ) - + _(", but you have {current_version}, aborting install.").format( - current_version=red_version_info - ) - ) - return - - if not await repo.install_requirements(cog, self.LIB_PATH): - libraries = humanize_list(tuple(map(inline, cog.requirements))) - await ctx.send( - _("Failed to install the required libraries for `{cog_name}`: {libraries}").format( - cog_name=cog.name, libraries=libraries - ) - ) - return - - await repo.install_cog(cog, await self.cog_install_path()) - - await self._add_to_installed(cog) - - await repo.install_libraries(target_dir=self.SHAREDLIB_PATH, req_target_dir=self.LIB_PATH) - - await ctx.send( - _( - "Cog `{cog_name}` successfully installed. You can load it with `{prefix}load {cog_name}`" - ).format(cog_name=cog_name, prefix=ctx.prefix) - ) - if cog.install_msg: - await ctx.send(cog.install_msg.replace("[p]", ctx.prefix)) + # "---" added to separate cog install messages from Downloader's message + await ctx.send(f"{message}\n---") + for cog in installed_cogs: + if cog.install_msg: + await ctx.send(cog.install_msg.replace("[p]", ctx.prefix)) @cog.command(name="uninstall", usage="") - async def _cog_uninstall(self, ctx, cogs: commands.Greedy[InstalledCog]): + async def _cog_uninstall(self, ctx: commands.Context, *cogs: InstalledCog) -> None: """Uninstall cogs. You may only uninstall cogs which were previously installed by Downloader. """ if not cogs: - return await ctx.send_help() + await ctx.send_help() + return async with ctx.typing(): uninstalled_cogs = [] failed_cogs = [] @@ -379,7 +651,7 @@ class Downloader(commands.Cog): uninstalled_cogs.append(inline(real_name)) else: failed_cogs.append(real_name) - await self._remove_from_installed(cog) + await self._remove_from_installed(cogs) message = "" if uninstalled_cogs: @@ -395,43 +667,432 @@ class Downloader(commands.Cog): ) await ctx.send(message) - @cog.command(name="update") - async def _cog_update(self, ctx, cog_name: InstalledCog = None): - """Update all cogs, or one of your choosing.""" - installed_cogs = set(await self.installed_cogs()) - - async with ctx.typing(): - if cog_name is None: - updated = await self._repo_manager.update_all_repos() - - else: - try: - updated = await self._repo_manager.update_repo(cog_name.repo_name) - except KeyError: - # Thrown if the repo no longer exists - updated = {} - - updated_cogs = set(cog for repo in updated for cog in repo.available_cogs) - installed_and_updated = updated_cogs & installed_cogs - - if installed_and_updated: - await self._reinstall_requirements(installed_and_updated) - await self._reinstall_cogs(installed_and_updated) - await self._reinstall_libraries(installed_and_updated) - message = _("Cog update completed successfully.") - - cognames = {c.name for c in installed_and_updated} - message += _("\nUpdated: ") + humanize_list(tuple(map(inline, cognames))) - else: - await ctx.send(_("All installed cogs are already up to date.")) - return + @cog.command(name="pin", usage="") + async def _cog_pin(self, ctx: commands.Context, *cogs: InstalledCog) -> None: + """Pin cogs - this will lock cogs on their current version.""" + if not cogs: + await ctx.send_help() + return + already_pinned = [] + pinned = [] + for cog in set(cogs): + if cog.pinned: + already_pinned.append(inline(cog.name)) + continue + cog.pinned = True + pinned.append(cog) + message = "" + if 
pinned: + await self._save_to_installed(pinned) + cognames = [inline(cog.name) for cog in pinned] + message += _("Pinned cogs: ") + humanize_list(cognames) + if already_pinned: + message += _("\nThese cogs were already pinned: ") + humanize_list(already_pinned) await ctx.send(message) - cognames &= set(ctx.bot.extensions.keys()) # only reload loaded cogs - if not cognames: - return await ctx.send( - _("None of the updated cogs were previously loaded. Update complete.") + @cog.command(name="unpin", usage="") + async def _cog_unpin(self, ctx: commands.Context, *cogs: InstalledCog) -> None: + """Unpin cogs - this will remove update lock from cogs.""" + if not cogs: + await ctx.send_help() + return + not_pinned = [] + unpinned = [] + for cog in set(cogs): + if not cog.pinned: + not_pinned.append(inline(cog.name)) + continue + cog.pinned = False + unpinned.append(cog) + message = "" + if unpinned: + await self._save_to_installed(unpinned) + cognames = [inline(cog.name) for cog in unpinned] + message += _("Unpinned cogs: ") + humanize_list(cognames) + if not_pinned: + message += _("\nThese cogs weren't pinned: ") + humanize_list(not_pinned) + await ctx.send(message) + + @cog.command(name="checkforupdates") + async def _cog_checkforupdates(self, ctx: commands.Context) -> None: + """ + Check for available cog updates (including pinned cogs). + + This command doesn't update cogs, it only checks for updates. + Use `[p]cog update` to update cogs. + """ + async with ctx.typing(): + cogs_to_check = await self._get_cogs_to_check() + cogs_to_update, libs_to_update = await self._available_updates(cogs_to_check) + message = "" + if cogs_to_update: + cognames = [cog.name for cog in cogs_to_update] + message += _("These cogs can be updated: ") + humanize_list( + tuple(map(inline, cognames)) + ) + if libs_to_update: + libnames = [cog.name for cog in libs_to_update] + message += _("\nThese shared libraries can be updated: ") + humanize_list( + tuple(map(inline, libnames)) + ) + if message: + await ctx.send(message) + else: + await ctx.send(_("All installed cogs are up to date.")) + + @cog.command(name="update") + async def _cog_update(self, ctx: commands.Context, *cogs: InstalledCog) -> None: + """Update all cogs, or ones of your choosing.""" + await self._cog_update_logic(ctx, cogs=cogs) + + @cog.command(name="updateallfromrepos", usage="") + async def _cog_updateallfromrepos(self, ctx: commands.Context, *repos: Repo) -> None: + """Update all cogs from repos of your choosing.""" + if not repos: + await ctx.send_help() + return + await self._cog_update_logic(ctx, repos=repos) + + @cog.command(name="updatetoversion", usage=" [cogs]") + async def _cog_updatetoversion( + self, ctx: commands.Context, repo: Repo, rev: str, *cogs: InstalledCog + ) -> None: + """Update all cogs, or ones of your choosing to chosen revision of one repo. + + Note that update doesn't mean downgrade and therefore revision + has to be newer than the one that cog currently has. If you want to + downgrade the cog, uninstall and install it again. 
+ """ + await self._cog_update_logic(ctx, repo=repo, rev=rev, cogs=cogs) + + async def _cog_update_logic( + self, + ctx: commands.Context, + *, + repo: Optional[Repo] = None, + repos: Optional[List[Repo]] = None, + rev: Optional[str] = None, + cogs: Optional[List[InstalledModule]] = None, + ) -> None: + async with ctx.typing(): + # this is enough to be sure that `rev` is not None (based on calls to this method) + if repo is not None: + rev = cast(str, rev) + await repo.update() + try: + commit = await repo.get_full_sha1(rev) + except errors.AmbiguousRevision as e: + msg = _( + "Error: short sha1 `{rev}` is ambiguous. Possible candidates:\n" + ).format(rev=rev) + for candidate in e.candidates: + msg += ( + f"**{candidate.object_type} {candidate.rev}**" + f" - {candidate.description}\n" + ) + for page in pagify(msg): + await ctx.send(msg) + return + except errors.UnknownRevision: + await ctx.send( + _("Error: there is no revision `{rev}` in repo `{repo.name}`").format( + rev=rev, repo=repo + ) + ) + return + await repo.checkout(commit) + cogs_to_check = await self._get_cogs_to_check(repos=[repo], cogs=cogs) + else: + cogs_to_check = await self._get_cogs_to_check(repos=repos, cogs=cogs) + + pinned_cogs = {cog for cog in cogs_to_check if cog.pinned} + cogs_to_check -= pinned_cogs + if not cogs_to_check: + message = _("There were no cogs to check.") + if pinned_cogs: + cognames = [cog.name for cog in pinned_cogs] + message += _( + "\nThese cogs are pinned and therefore weren't checked: " + ) + humanize_list(tuple(map(inline, cognames))) + await ctx.send(message) + return + cogs_to_update, libs_to_update = await self._available_updates(cogs_to_check) + + updates_available = cogs_to_update or libs_to_update + cogs_to_update, filter_message = self._filter_incorrect_cogs(cogs_to_update) + message = "" + if updates_available: + updated_cognames, message = await self._update_cogs_and_libs( + cogs_to_update, libs_to_update + ) + else: + if repos: + message = _("Cogs from provided repos are already up to date.") + elif repo: + if cogs: + message = _("Provided cogs are already up to date with this revision.") + else: + message = _( + "Cogs from provided repo are already up to date with this revision." 
+ ) + else: + if cogs: + message = _("Provided cogs are already up to date.") + else: + message = _("All installed cogs are already up to date.") + if repo is not None: + await repo.checkout(repo.branch) + if pinned_cogs: + cognames = [cog.name for cog in pinned_cogs] + message += _( + "\nThese cogs are pinned and therefore weren't checked: " + ) + humanize_list(tuple(map(inline, cognames))) + message += filter_message + await ctx.send(message) + if updates_available and updated_cognames: + await self._ask_for_cog_reload(ctx, updated_cognames) + + @cog.command(name="list", usage="") + async def _cog_list(self, ctx: commands.Context, repo: Repo) -> None: + """List all available cogs from a single repo.""" + installed = await self.installed_cogs() + installed_str = "" + if installed: + installed_str = _("Installed Cogs:\n") + "\n".join( + [ + "- {}{}".format(i.name, ": {}".format(i.short) if i.short else "") + for i in installed + if i.repo_name == repo.name + ] ) + cogs = _("Available Cogs:\n") + "\n".join( + [ + "+ {}: {}".format(cog.name, cog.short or "") + for cog in repo.available_cogs + if not (cog.hidden or cog in installed) + ] + ) + cogs = cogs + "\n\n" + installed_str + for page in pagify(cogs, ["\n"], shorten_by=16): + await ctx.send(box(page.lstrip(" "), lang="diff")) + + @cog.command(name="info", usage=" ") + async def _cog_info(self, ctx: commands.Context, repo: Repo, cog_name: str) -> None: + """List information about a single cog.""" + cog = discord.utils.get(repo.available_cogs, name=cog_name) + if cog is None: + await ctx.send( + _("There is no cog `{cog_name}` in the repo `{repo.name}`").format( + cog_name=cog_name, repo=repo + ) + ) + return + + msg = _( + "Information on {cog_name}:\n{description}\n\nRequirements: {requirements}" + ).format( + cog_name=cog.name, + description=cog.description or "", + requirements=", ".join(cog.requirements) or "None", + ) + await ctx.send(box(msg)) + + async def is_installed( + self, cog_name: str + ) -> Union[Tuple[bool, InstalledModule], Tuple[bool, None]]: + """Check to see if a cog has been installed through Downloader. + + Parameters + ---------- + cog_name : str + The name of the cog to check for. + + Returns + ------- + `tuple` of (`bool`, `InstalledModule`) + :code:`(True, InstalledModule)` if the cog is installed, else + :code:`(False, None)`. + + """ + for installed_cog in await self.installed_cogs(): + if installed_cog.name == cog_name: + return True, installed_cog + return False, None + + async def _filter_incorrect_cogs_by_names( + self, repo: Repo, cog_names: Iterable[str] + ) -> Tuple[Tuple[Installable, ...], str]: + """Filter out incorrect cogs from list. + + Parameters + ---------- + repo : `Repo` + Repo which should be searched for `cog_names` + cog_names : `list` of `str` + Cog names to search for in repo. + Returns + ------- + tuple + 2-tuple of cogs to install and error message for incorrect cogs. 
+ """ + installed_cogs = await self.installed_cogs() + cogs: List[Installable] = [] + unavailable_cogs: List[str] = [] + already_installed: List[str] = [] + name_already_used: List[str] = [] + + for cog_name in cog_names: + cog: Optional[Installable] = discord.utils.get(repo.available_cogs, name=cog_name) + if cog is None: + unavailable_cogs.append(inline(cog_name)) + continue + if cog in installed_cogs: + already_installed.append(inline(cog_name)) + continue + if discord.utils.get(installed_cogs, name=cog.name): + name_already_used.append(inline(cog_name)) + continue + cogs.append(cog) + + message = "" + + if unavailable_cogs: + message += _("\nCouldn't find these cogs in {repo.name}: ").format( + repo=repo + ) + humanize_list(unavailable_cogs) + if already_installed: + message += _("\nThese cogs were already installed: ") + humanize_list( + already_installed + ) + if name_already_used: + message += _( + "\nSome cogs with these names are already installed from different repos: " + ) + humanize_list(already_installed) + correct_cogs, add_to_message = self._filter_incorrect_cogs(cogs) + if add_to_message: + return correct_cogs, f"{message}{add_to_message}" + return correct_cogs, message + + def _filter_incorrect_cogs( + self, cogs: Iterable[Installable] + ) -> Tuple[Tuple[Installable, ...], str]: + correct_cogs: List[Installable] = [] + outdated_python_version: List[str] = [] + outdated_bot_version: List[str] = [] + for cog in cogs: + if cog.min_python_version > sys.version_info: + outdated_python_version.append( + inline(cog.name) + + _(" (Minimum: {min_version})").format( + min_version=".".join([str(n) for n in cog.min_python_version]) + ) + ) + continue + ignore_max = cog.min_bot_version > cog.max_bot_version + if ( + cog.min_bot_version > red_version_info + or not ignore_max + and cog.max_bot_version < red_version_info + ): + outdated_bot_version.append( + inline(cog.name) + + _(" (Minimum: {min_version}").format(min_version=cog.min_bot_version) + + ( + "" + if ignore_max + else _(", at most: {max_version}").format(max_version=cog.max_bot_version) + ) + + ")" + ) + continue + correct_cogs.append(cog) + message = "" + if outdated_python_version: + message += _( + "\nThese cogs require higher python version than you have: " + ) + humanize_list(outdated_python_version) + if outdated_bot_version: + message += _( + "\nThese cogs require different Red version" + " than you currently have ({current_version}): " + ).format(current_version=red_version_info) + humanize_list(outdated_bot_version) + + return tuple(correct_cogs), message + + async def _get_cogs_to_check( + self, + *, + repos: Optional[Iterable[Repo]] = None, + cogs: Optional[Iterable[InstalledModule]] = None, + ) -> Set[InstalledModule]: + if not (cogs or repos): + await self._repo_manager.update_all_repos() + cogs_to_check = {cog for cog in await self.installed_cogs() if cog.repo is not None} + else: + # this is enough to be sure that `cogs` is not None (based on if above) + if not repos: + cogs = cast(Iterable[InstalledModule], cogs) + repos = {cog.repo for cog in cogs if cog.repo is not None} + + for repo in repos: + if await repo.is_on_branch(): + exit_to_commit = None + else: + exit_to_commit = repo.commit + await repo.update() + await repo.checkout(exit_to_commit) + if cogs: + cogs_to_check = {cog for cog in cogs if cog.repo is not None and cog.repo in repos} + else: + cogs_to_check = { + cog + for cog in await self.installed_cogs() + if cog.repo is not None and cog.repo in repos + } + + return cogs_to_check + + async def 
_update_cogs_and_libs( + self, cogs_to_update: Iterable[Installable], libs_to_update: Iterable[Installable] + ) -> Tuple[Set[str], str]: + failed_reqs = await self._install_requirements(cogs_to_update) + if failed_reqs: + return ( + set(), + _("Failed to install requirements: ") + + humanize_list(tuple(map(inline, failed_reqs))), + ) + installed_cogs, failed_cogs = await self._install_cogs(cogs_to_update) + installed_libs, failed_libs = await self._reinstall_libraries(libs_to_update) + await self._save_to_installed(installed_cogs + installed_libs) + message = _("Cog update completed successfully.") + + updated_cognames: Set[str] = set() + if installed_cogs: + updated_cognames = {cog.name for cog in installed_cogs} + message += _("\nUpdated: ") + humanize_list(tuple(map(inline, updated_cognames))) + if failed_cogs: + cognames = [cog.name for cog in failed_cogs] + message += _("\nFailed to update cogs: ") + humanize_list(tuple(map(inline, cognames))) + if not cogs_to_update: + message = _("No cogs were updated.") + if installed_libs: + message += _( + "\nSome shared libraries were updated, you should restart the bot " + "to bring the changes into effect." + ) + if failed_libs: + libnames = [lib.name for lib in failed_libs] + message += _("\nFailed to install shared libraries: ") + humanize_list( + tuple(map(inline, libnames)) + ) + return (updated_cognames, message) + + async def _ask_for_cog_reload(self, ctx: commands.Context, updated_cognames: Set[str]) -> None: + updated_cognames &= ctx.bot.extensions.keys() # only reload loaded cogs + if not updated_cognames: + await ctx.send(_("None of the updated cogs were previously loaded. Update complete.")) + return if not ctx.assume_yes: message = _("Would you like to reload the updated cogs?") @@ -464,75 +1125,7 @@ class Downloader(commands.Cog): with contextlib.suppress(discord.Forbidden): await query.clear_reactions() - await ctx.invoke(ctx.bot.get_cog("Core").reload, *cognames) - - @cog.command(name="list", usage="") - async def _cog_list(self, ctx, repo: Repo): - """List all available cogs from a single repo.""" - installed = await self.installed_cogs() - installed_str = "" - if installed: - installed_str = _("Installed Cogs:\n") + "\n".join( - [ - "- {}{}".format(i.name, ": {}".format(i.short) if i.short else "") - for i in installed - if i.repo_name == repo.name - ] - ) - cogs = repo.available_cogs - cogs = _("Available Cogs:\n") + "\n".join( - [ - "+ {}: {}".format(c.name, c.short or "") - for c in cogs - if not (c.hidden or c in installed) - ] - ) - cogs = cogs + "\n\n" + installed_str - for page in pagify(cogs, ["\n"], shorten_by=16): - await ctx.send(box(page.lstrip(" "), lang="diff")) - - @cog.command(name="info", usage=" ") - async def _cog_info(self, ctx, repo: Repo, cog_name: str): - """List information about a single cog.""" - cog = discord.utils.get(repo.available_cogs, name=cog_name) - if cog is None: - await ctx.send( - _("There is no cog `{cog_name}` in the repo `{repo.name}`").format( - cog_name=cog_name, repo=repo - ) - ) - return - - msg = _( - "Information on {cog_name}:\n{description}\n\nRequirements: {requirements}" - ).format( - cog_name=cog.name, - description=cog.description or "", - requirements=", ".join(cog.requirements) or "None", - ) - await ctx.send(box(msg)) - - async def is_installed( - self, cog_name: str - ) -> Union[Tuple[bool, Installable], Tuple[bool, None]]: - """Check to see if a cog has been installed through Downloader. - - Parameters - ---------- - cog_name : str - The name of the cog to check for. 
- - Returns - ------- - `tuple` of (`bool`, `Installable`) - :code:`(True, Installable)` if the cog is installed, else - :code:`(False, None)`. - - """ - for installable in await self.installed_cogs(): - if installable.name == cog_name: - return True, installable - return False, None + await ctx.invoke(ctx.bot.get_cog("Core").reload, *updated_cognames) def format_findcog_info( self, command_name: str, cog_installable: Union[Installable, object] = None @@ -554,17 +1147,20 @@ class Downloader(commands.Cog): """ if isinstance(cog_installable, Installable): made_by = ", ".join(cog_installable.author) or _("Missing from info.json") - repo = self._repo_manager.get_repo(cog_installable.repo_name) - repo_url = _("Missing from installed repos") if repo is None else repo.url + repo_url = ( + _("Missing from installed repos") + if cog_installable.repo is None + else cog_installable.repo.url + ) cog_name = cog_installable.name else: made_by = "26 & co." repo_url = "https://github.com/Cog-Creators/Red-DiscordBot" cog_name = cog_installable.__class__.__name__ - msg = _("Command: {command}\nMade by: {author}\nRepo: {repo}\nCog name: {cog}") + msg = _("Command: {command}\nMade by: {author}\nRepo: {repo_url}\nCog name: {cog}") - return msg.format(command=command_name, author=made_by, repo=repo_url, cog=cog_name) + return msg.format(command=command_name, author=made_by, repo_url=repo_url, cog=cog_name) def cog_name_from_instance(self, instance: object) -> str: """Determines the cog name that Downloader knows from the cog instance. @@ -586,7 +1182,7 @@ class Downloader(commands.Cog): return splitted[-2] @commands.command() - async def findcog(self, ctx: commands.Context, command_name: str): + async def findcog(self, ctx: commands.Context, command_name: str) -> None: """Find which cog a command comes from. This will only work with loaded cogs. diff --git a/redbot/cogs/downloader/errors.py b/redbot/cogs/downloader/errors.py index cd8f7405b..efd31bedd 100644 --- a/redbot/cogs/downloader/errors.py +++ b/redbot/cogs/downloader/errors.py @@ -1,7 +1,16 @@ +from __future__ import annotations + +from typing import List, TYPE_CHECKING + +if TYPE_CHECKING: + from .repo_manager import Candidate + + __all__ = [ "DownloaderException", "GitException", "InvalidRepoName", + "CopyingError", "ExistingGitRepo", "MissingGitRepo", "CloningError", @@ -10,6 +19,8 @@ __all__ = [ "UpdateError", "GitDiffError", "NoRemoteURL", + "UnknownRevision", + "AmbiguousRevision", "PipError", ] @@ -37,6 +48,15 @@ class InvalidRepoName(DownloaderException): pass +class CopyingError(DownloaderException): + """ + Throw when there was an issue + during copying of module's files. + """ + + pass + + class ExistingGitRepo(DownloaderException): """ Thrown when trying to clone into a folder where a @@ -105,6 +125,24 @@ class NoRemoteURL(GitException): pass +class UnknownRevision(GitException): + """ + Thrown when specified revision cannot be found. + """ + + pass + + +class AmbiguousRevision(GitException): + """ + Thrown when specified revision is ambiguous. + """ + + def __init__(self, message: str, candidates: List[Candidate]) -> None: + super().__init__(message) + self.candidates = candidates + + class PipError(DownloaderException): """ Thrown when pip returns a non-zero return code. 
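The new ``UnknownRevision`` and ``AmbiguousRevision`` exceptions added above are raised by ``Repo.get_full_sha1`` when a user-supplied revision cannot be resolved to a single commit. A minimal sketch of how calling code might distinguish the two cases follows; the ``resolve_revision`` helper and its ``repo``/``rev`` arguments are illustrative only and are not part of this patch.

.. code-block:: python

    from redbot.cogs.downloader import errors

    async def resolve_revision(repo, rev):
        """Sketch: turn a short revision into a full sha1, or report why it failed."""
        try:
            return await repo.get_full_sha1(rev)
        except errors.AmbiguousRevision as exc:
            # Each candidate is a Candidate(rev, object_type, description) named tuple.
            for candidate in exc.candidates:
                print(candidate.object_type, candidate.rev, candidate.description)
        except errors.UnknownRevision:
            print(f"No revision {rev} in repo {repo.name}")
        return None
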
diff --git a/redbot/cogs/downloader/installable.py b/redbot/cogs/downloader/installable.py index 772aec4a9..327d571ad 100644 --- a/redbot/cogs/downloader/installable.py +++ b/redbot/cogs/downloader/installable.py @@ -1,9 +1,11 @@ +from __future__ import annotations + import json import distutils.dir_util import shutil -from enum import Enum +from enum import IntEnum from pathlib import Path -from typing import MutableMapping, Any, TYPE_CHECKING +from typing import MutableMapping, Any, TYPE_CHECKING, Optional, Dict, Union, Callable, Tuple, cast from .log import log from .json_mixins import RepoJSONMixin @@ -11,10 +13,11 @@ from .json_mixins import RepoJSONMixin from redbot.core import __version__, version_info as red_version_info, VersionInfo if TYPE_CHECKING: - from .repo_manager import RepoManager + from .repo_manager import RepoManager, Repo -class InstallableType(Enum): +class InstallableType(IntEnum): + # using IntEnum, because hot-reload breaks its identity UNKNOWN = 0 COG = 1 SHARED_LIBRARY = 2 @@ -34,6 +37,10 @@ class Installable(RepoJSONMixin): ---------- repo_name : `str` Name of the repository which this package belongs to. + repo : Repo, optional + Repo object of the Installable, if repo is missing this will be `None` + commit : `str`, optional + Installable's commit. This is not the same as ``repo.commit`` author : `tuple` of `str`, optional Name(s) of the author(s). bot_version : `tuple` of `int` @@ -58,30 +65,36 @@ class Installable(RepoJSONMixin): """ - def __init__(self, location: Path): + def __init__(self, location: Path, repo: Optional[Repo] = None, commit: str = ""): """Base installable initializer. Parameters ---------- location : pathlib.Path Location (file or folder) to the installable. + repo : Repo, optional + Repo object of the Installable, if repo is missing this will be `None` + commit : str + Installable's commit. This is not the same as ``repo.commit`` """ super().__init__(location) self._location = location + self.repo = repo self.repo_name = self._location.parent.stem + self.commit = commit - self.author = () + self.author: Tuple[str, ...] = () self.min_bot_version = red_version_info self.max_bot_version = red_version_info self.min_python_version = (3, 5, 1) self.hidden = False self.disabled = False - self.required_cogs = {} # Cog name -> repo URL - self.requirements = () - self.tags = () + self.required_cogs: Dict[str, str] = {} # Cog name -> repo URL + self.requirements: Tuple[str, ...] = () + self.tags: Tuple[str, ...] 
= () self.type = InstallableType.UNKNOWN if self._info_file.exists(): @@ -90,15 +103,15 @@ class Installable(RepoJSONMixin): if self._info == {}: self.type = InstallableType.COG - def __eq__(self, other): + def __eq__(self, other: Any) -> bool: # noinspection PyProtectedMember return self._location == other._location - def __hash__(self): + def __hash__(self) -> int: return hash(self._location) @property - def name(self): + def name(self) -> str: """`str` : The name of this package.""" return self._location.stem @@ -111,6 +124,7 @@ class Installable(RepoJSONMixin): :return: Status of installation :rtype: bool """ + copy_func: Callable[..., Any] if self._location.is_file(): copy_func = shutil.copy2 else: @@ -121,18 +135,20 @@ class Installable(RepoJSONMixin): # noinspection PyBroadException try: copy_func(src=str(self._location), dst=str(target_dir / self._location.stem)) - except: + except: # noqa: E722 log.exception("Error occurred when copying path: {}".format(self._location)) return False return True - def _read_info_file(self): + def _read_info_file(self) -> None: super()._read_info_file() if self._info_file.exists(): self._process_info_file() - def _process_info_file(self, info_file_path: Path = None) -> MutableMapping[str, Any]: + def _process_info_file( + self, info_file_path: Optional[Path] = None + ) -> MutableMapping[str, Any]: """ Processes an information file. Loads dependencies among other information into this object. @@ -145,7 +161,7 @@ class Installable(RepoJSONMixin): if info_file_path is None or not info_file_path.is_file(): raise ValueError("No valid information file path was found.") - info = {} + info: Dict[str, Any] = {} with info_file_path.open(encoding="utf-8") as f: try: info = json.load(f) @@ -174,7 +190,7 @@ class Installable(RepoJSONMixin): self.max_bot_version = max_bot_version try: - min_python_version = tuple(info.get("min_python_version", [3, 5, 1])) + min_python_version = tuple(info.get("min_python_version", (3, 5, 1))) except ValueError: min_python_version = self.min_python_version self.min_python_version = min_python_version @@ -212,14 +228,51 @@ class Installable(RepoJSONMixin): return info - def to_json(self): - return {"repo_name": self.repo_name, "cog_name": self.name} + +class InstalledModule(Installable): + """Base class for installed modules, + this is basically instance of installed `Installable` + used by Downloader. + + Attributes + ---------- + pinned : `bool` + Whether or not this cog is pinned, always `False` if module is not a cog. 
+ """ + + def __init__( + self, + location: Path, + repo: Optional[Repo] = None, + commit: str = "", + pinned: bool = False, + json_repo_name: str = "", + ): + super().__init__(location=location, repo=repo, commit=commit) + self.pinned: bool = pinned if self.type == InstallableType.COG else False + # this is here so that Downloader could use real repo name instead of "MISSING_REPO" + self._json_repo_name = json_repo_name + + def to_json(self) -> Dict[str, Union[str, bool]]: + module_json: Dict[str, Union[str, bool]] = { + "repo_name": self.repo_name, + "module_name": self.name, + "commit": self.commit, + } + if self.type == InstallableType.COG: + module_json["pinned"] = self.pinned + return module_json @classmethod - def from_json(cls, data: dict, repo_mgr: "RepoManager"): - repo_name = data["repo_name"] - cog_name = data["cog_name"] + def from_json( + cls, data: Dict[str, Union[str, bool]], repo_mgr: RepoManager + ) -> InstalledModule: + repo_name = cast(str, data["repo_name"]) + cog_name = cast(str, data["module_name"]) + commit = cast(str, data.get("commit", "")) + pinned = cast(bool, data.get("pinned", False)) + # TypedDict, where are you :/ repo = repo_mgr.get_repo(repo_name) if repo is not None: repo_folder = repo.folder_path @@ -228,4 +281,12 @@ class Installable(RepoJSONMixin): location = repo_folder / cog_name - return cls(location=location) + return cls( + location=location, repo=repo, commit=commit, pinned=pinned, json_repo_name=repo_name + ) + + @classmethod + def from_installable(cls, module: Installable, *, pinned: bool = False) -> InstalledModule: + return cls( + location=module._location, repo=module.repo, commit=module.commit, pinned=pinned + ) diff --git a/redbot/cogs/downloader/json_mixins.py b/redbot/cogs/downloader/json_mixins.py index b989e91ef..6c8a18282 100644 --- a/redbot/cogs/downloader/json_mixins.py +++ b/redbot/cogs/downloader/json_mixins.py @@ -1,5 +1,6 @@ import json from pathlib import Path +from typing import Optional, Tuple, Dict, Any class RepoJSONMixin: @@ -8,18 +9,18 @@ class RepoJSONMixin: def __init__(self, repo_folder: Path): self._repo_folder = repo_folder - self.author = None - self.install_msg = None - self.short = None - self.description = None + self.author: Optional[Tuple[str, ...]] = None + self.install_msg: Optional[str] = None + self.short: Optional[str] = None + self.description: Optional[str] = None self._info_file = repo_folder / self.INFO_FILE_NAME if self._info_file.exists(): self._read_info_file() - self._info = {} + self._info: Dict[str, Any] = {} - def _read_info_file(self): + def _read_info_file(self) -> None: if not (self._info_file.exists() or self._info_file.is_file()): return diff --git a/redbot/cogs/downloader/repo_manager.py b/redbot/cogs/downloader/repo_manager.py index 6e29032bc..39ee18dce 100644 --- a/redbot/cogs/downloader/repo_manager.py +++ b/redbot/cogs/downloader/repo_manager.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import asyncio import functools import os @@ -7,23 +9,71 @@ import shutil import re from concurrent.futures import ThreadPoolExecutor from pathlib import Path -from subprocess import run as sp_run, PIPE +from subprocess import run as sp_run, PIPE, CompletedProcess from string import Formatter from sys import executable -from typing import List, Tuple, Iterable, MutableMapping, Union, Optional +from typing import ( + Any, + AsyncContextManager, + Awaitable, + Dict, + Generator, + Iterable, + List, + NamedTuple, + Optional, + Tuple, +) -from redbot.core import data_manager, commands +import 
discord +from redbot.core import data_manager, commands, Config from redbot.core.utils import safe_delete from redbot.core.i18n import Translator from . import errors -from .installable import Installable, InstallableType +from .installable import Installable, InstallableType, InstalledModule from .json_mixins import RepoJSONMixin from .log import log _ = Translator("RepoManager", __file__) +class Candidate(NamedTuple): + rev: str + object_type: str + description: str + + +class _RepoCheckoutCtxManager( + Awaitable[None], AsyncContextManager[None] +): # pylint: disable=duplicate-bases + def __init__( + self, + repo: Repo, + rev: Optional[str], + exit_to_rev: Optional[str] = None, + force_checkout: bool = False, + ): + self.repo = repo + self.rev = rev + if exit_to_rev is None: + self.exit_to_rev = self.repo.commit + else: + self.exit_to_rev = exit_to_rev + self.force_checkout = force_checkout + self.coro = repo._checkout(self.rev, force_checkout=self.force_checkout) + + def __await__(self) -> Generator[Any, None, None]: + return self.coro.__await__() + + async def __aenter__(self) -> None: + await self + + async def __aexit__(self, exc_type, exc, tb) -> None: + if self.rev is not None: + await self.repo._checkout(self.exit_to_rev, force_checkout=self.force_checkout) + + class ProcessFormatter(Formatter): def vformat(self, format_string, args, kwargs): return shlex.split(super().vformat(format_string, args, kwargs)) @@ -38,27 +88,49 @@ class ProcessFormatter(Formatter): class Repo(RepoJSONMixin): GIT_CLONE = "git clone --recurse-submodules -b {branch} {url} {folder}" GIT_CLONE_NO_BRANCH = "git clone --recurse-submodules {url} {folder}" - GIT_CURRENT_BRANCH = "git -C {path} rev-parse --abbrev-ref HEAD" + GIT_CURRENT_BRANCH = "git -C {path} symbolic-ref --short HEAD" + GIT_CURRENT_COMMIT = "git -C {path} rev-parse HEAD" GIT_LATEST_COMMIT = "git -C {path} rev-parse {branch}" GIT_HARD_RESET = "git -C {path} reset --hard origin/{branch} -q" GIT_PULL = "git -C {path} pull --recurse-submodules -q --ff-only" - GIT_DIFF_FILE_STATUS = "git -C {path} diff --no-commit-id --name-status {old_hash} {new_hash}" - GIT_LOG = "git -C {path} log --relative-date --reverse {old_hash}.. {relative_file_path}" + GIT_DIFF_FILE_STATUS = ( + "git -C {path} diff-tree --no-commit-id --name-status" + " -r -z --line-prefix='\t' {old_rev} {new_rev}" + ) + GIT_LOG = "git -C {path} log --relative-date --reverse {old_rev}.. {relative_file_path}" GIT_DISCOVER_REMOTE_URL = "git -C {path} config --get remote.origin.url" + GIT_CHECKOUT = "git -C {path} checkout {rev}" + GIT_GET_FULL_SHA1 = "git -C {path} rev-parse --verify {rev}^{{commit}}" + GIT_IS_ANCESTOR = ( + "git -C {path} merge-base --is-ancestor {maybe_ancestor_rev} {descendant_rev}" + ) + GIT_CHECK_IF_MODULE_EXISTS = "git -C {path} cat-file -e {rev}:{module_name}/__init__.py" + # ↓ this gives a commit after last occurrence + GIT_GET_LAST_MODULE_OCCURRENCE_COMMIT = ( + "git -C {path} log --diff-filter=D --pretty=format:%H -n 1 {descendant_rev}" + " -- {module_name}/__init__.py" + ) PIP_INSTALL = "{python} -m pip install -U -t {target_dir} {reqs}" + MODULE_FOLDER_REGEX = re.compile(r"(\w+)\/") + AMBIGUOUS_ERROR_REGEX = re.compile( + r"^hint: {3}(?P[A-Za-z0-9]+) (?Pcommit|tag) (?P.+)$", re.MULTILINE + ) + def __init__( self, name: str, url: str, - branch: str, + branch: Optional[str], + commit: str, folder_path: Path, - available_modules: Tuple[Installable] = (), - loop: asyncio.AbstractEventLoop = None, + available_modules: Tuple[Installable, ...] 
= (), + loop: Optional[asyncio.AbstractEventLoop] = None, ): self.url = url self.branch = branch + self.commit = commit self.name = name @@ -73,12 +145,10 @@ class Repo(RepoJSONMixin): self._repo_lock = asyncio.Lock() - self._loop = loop - if self._loop is None: - self._loop = asyncio.get_event_loop() + self._loop = loop if loop is not None else asyncio.get_event_loop() @classmethod - async def convert(cls, ctx: commands.Context, argument: str): + async def convert(cls, ctx: commands.Context, argument: str) -> Repo: downloader_cog = ctx.bot.get_cog("Downloader") if downloader_cog is None: raise commands.CommandError(_("No Downloader cog found.")) @@ -92,26 +162,82 @@ class Repo(RepoJSONMixin): ) return poss_repo - def _existing_git_repo(self) -> (bool, Path): + def _existing_git_repo(self) -> Tuple[bool, Path]: git_path = self.folder_path / ".git" return git_path.exists(), git_path - async def _get_file_update_statuses( - self, old_hash: str, new_hash: str - ) -> MutableMapping[str, str]: + async def is_ancestor(self, maybe_ancestor_rev: str, descendant_rev: str) -> bool: """ - Gets the file update status letters for each changed file between - the two hashes. - :param old_hash: Pre-update - :param new_hash: Post-update - :return: Mapping of filename -> status_letter + Check if the first is an ancestor of the second. + + Parameters + ---------- + maybe_ancestor_rev : `str` + Revision to check if it is ancestor of :code:`descendant_rev` + descendant_rev : `str` + Descendant revision + + Returns + ------- + bool + `True` if :code:`maybe_ancestor_rev` is + ancestor of :code:`descendant_rev` or `False` otherwise + """ + valid_exit_codes = (0, 1) p = await self._run( ProcessFormatter().format( - self.GIT_DIFF_FILE_STATUS, + self.GIT_IS_ANCESTOR, path=self.folder_path, - old_hash=old_hash, - new_hash=new_hash, + maybe_ancestor_rev=maybe_ancestor_rev, + descendant_rev=descendant_rev, + ), + valid_exit_codes=valid_exit_codes, + ) + + if p.returncode in valid_exit_codes: + return not bool(p.returncode) + raise errors.GitException( + f"Git failed to determine if commit {maybe_ancestor_rev}" + f" is ancestor of {descendant_rev} for repo at path: {self.folder_path}" + ) + + async def is_on_branch(self) -> bool: + """ + Check if repo is currently on branch. + + Returns + ------- + bool + `True` if repo is on branch or `False` otherwise + + """ + return await self.latest_commit() == self.commit + + async def _get_file_update_statuses( + self, old_rev: str, new_rev: Optional[str] = None + ) -> Dict[str, str]: + """ + Gets the file update status letters for each changed file between the two revisions. 
+ + Parameters + ---------- + old_rev : `str` + Pre-update revision + new_rev : `str`, optional + Post-update revision, defaults to repo's branch if not given + + Returns + ------- + Dict[str, str] + Mapping of filename -> status_letter + + """ + if new_rev is None: + new_rev = self.branch + p = await self._run( + ProcessFormatter().format( + self.GIT_DIFF_FILE_STATUS, path=self.folder_path, old_rev=old_rev, new_rev=new_rev ) ) @@ -120,21 +246,156 @@ class Repo(RepoJSONMixin): "Git diff failed for repo at path: {}".format(self.folder_path) ) - stdout = p.stdout.strip().decode().split("\n") - + stdout = p.stdout.strip(b"\t\n\x00 ").decode().split("\x00\t") ret = {} for filename in stdout: - # TODO: filter these filenames by ones in self.available_modules - status, _, filepath = filename.partition("\t") + status, __, filepath = filename.partition("\x00") # NUL character ret[filepath] = status return ret - async def _get_commit_notes(self, old_commit_hash: str, relative_file_path: str) -> str: + async def get_last_module_occurrence( + self, module_name: str, descendant_rev: Optional[str] = None + ) -> Optional[Installable]: + """ + Gets module's `Installable` from last commit in which it still occurs. + + Parameters + ---------- + module_name : str + Name of module to get. + descendant_rev : `str`, optional + Revision from which the module's commit must be + reachable (i.e. descendant commit), + defaults to repo's branch if not given. + + Returns + ------- + `Installable` + Module from last commit in which it still occurs + or `None` if it couldn't be found. + + """ + if descendant_rev is None: + descendant_rev = self.branch + p = await self._run( + ProcessFormatter().format( + self.GIT_CHECK_IF_MODULE_EXISTS, + path=self.folder_path, + rev=descendant_rev, + module_name=module_name, + ), + debug_only=True, + ) + if p.returncode == 0: + async with self.checkout(descendant_rev): + return discord.utils.get(self.available_modules, name=module_name) + + p = await self._run( + ProcessFormatter().format( + self.GIT_GET_LAST_MODULE_OCCURRENCE_COMMIT, + path=self.folder_path, + descendant_rev=descendant_rev, + module_name=module_name, + ) + ) + + if p.returncode != 0: + raise errors.GitException( + "Git log failed for repo at path: {}".format(self.folder_path) + ) + + commit = p.stdout.decode().strip() + if commit: + async with self.checkout(f"{commit}~"): + return discord.utils.get(self.available_modules, name=module_name) + return None + + async def _is_module_modified(self, module: Installable, other_hash: str) -> bool: + """ + Checks if given module was different in :code:`other_hash`. + + Parameters + ---------- + module : `Installable` + Module to check. + other_hash : `str` + Hash to compare module to. + + Returns + ------- + bool + `True` if module was different, `False` otherwise. + + """ + if module.commit == other_hash: + return False + + for status in await self._get_file_update_statuses(other_hash, module.commit): + match = self.MODULE_FOLDER_REGEX.match(status) + if match is not None and match.group(1) == module.name: + return True + + return False + + async def get_modified_modules( + self, old_rev: str, new_rev: Optional[str] = None + ) -> Tuple[Installable, ...]: + """ + Gets modified modules between the two revisions. 
+ For every module that doesn't exist in :code:`new_rev`, + it will try to find last commit, where it still existed + + Parameters + ---------- + old_rev : `str` + Pre-update revision, ancestor of :code:`new_rev` + new_rev : `str`, optional + Post-update revision, defaults to repo's branch if not given + + Returns + ------- + `tuple` of `Installable` + List of changed modules between the two revisions. + + """ + if new_rev is None: + new_rev = self.branch + modified_modules = set() + # check differences + for status in await self._get_file_update_statuses(old_rev, new_rev): + match = self.MODULE_FOLDER_REGEX.match(status) + if match is not None: + modified_modules.add(match.group(1)) + + async with self.checkout(old_rev): + # save old modules + old_hash = self.commit + old_modules = self.available_modules + # save new modules + await self.checkout(new_rev) + modules = [] + new_modules = self.available_modules + for old_module in old_modules: + if old_module.name not in modified_modules: + continue + try: + index = new_modules.index(old_module) + except ValueError: + # module doesn't exist in this revision, try finding previous occurrence + module = await self.get_last_module_occurrence(old_module.name, new_rev) + if module is not None and await self._is_module_modified(module, old_hash): + modules.append(module) + else: + modules.append(new_modules[index]) + + return tuple(modules) + + async def _get_commit_notes(self, old_rev: str, relative_file_path: str) -> str: """ Gets the commit notes from git log. - :param old_commit_hash: Point in time to start getting messages + :param old_rev: Point in time to start getting messages :param relative_file_path: Path relative to the repo folder of the file to get messages for. :return: Git commit note log @@ -143,7 +404,7 @@ class Repo(RepoJSONMixin): ProcessFormatter().format( self.GIT_LOG, path=self.folder_path, - old_hash=old_commit_hash, + old_rev=old_rev, relative_file_path=relative_file_path, ) ) @@ -156,7 +417,47 @@ class Repo(RepoJSONMixin): return p.stdout.decode().strip() - def _update_available_modules(self) -> Tuple[str]: + async def get_full_sha1(self, rev: str) -> str: + """ + Gets full sha1 object name. + + Parameters + ---------- + rev : str + Revision to search for full sha1 object name. + + Raises + ------ + .UnknownRevision + When git cannot find provided revision. + .AmbiguousRevision + When git cannot resolve provided short sha1 to one commit. + + Returns + ------- + `str` + Full sha1 object name for provided revision. + + """ + p = await self._run( + ProcessFormatter().format(self.GIT_GET_FULL_SHA1, path=self.folder_path, rev=rev) + ) + + if p.returncode != 0: + stderr = p.stderr.decode().strip() + ambiguous_error = f"error: short SHA1 {rev} is ambiguous\nhint: The candidates are:\n" + if not stderr.startswith(ambiguous_error): + raise errors.UnknownRevision(f"Revision {rev} cannot be found.") + candidates = [] + for match in self.AMBIGUOUS_ERROR_REGEX.finditer(stderr, len(ambiguous_error)): + candidates.append(Candidate(match["rev"], match["type"], match["desc"])) + if candidates: + raise errors.AmbiguousRevision(f"Short SHA1 {rev} is ambiguous.", candidates) + raise errors.UnknownRevision(f"Revision {rev} cannot be found.") + + return p.stdout.decode().strip() + + def _update_available_modules(self) -> Tuple[Installable, ...]: """ Updates the available modules attribute for this repo. :return: List of available modules. 
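``get_modified_modules`` and ``get_last_module_occurrence`` above are what the cog-update flow uses to decide which installed packages actually changed between two revisions. A rough sketch of how they might be combined after pulling new commits; the ``changed_modules`` helper is illustrative only and assumes an already-initialised ``Repo`` instance on its tracked branch.

.. code-block:: python

    async def changed_modules(repo):
        """Sketch: list modules that differ after an update of this repo."""
        old_commit, new_commit = await repo.update()
        if old_commit == new_commit:
            # Nothing was pulled, so nothing can have changed.
            return ()
        # For modules deleted in new_commit, get_modified_modules() falls back to
        # the last commit in which they still existed (get_last_module_occurrence).
        return await repo.get_modified_modules(old_commit, new_commit)
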
@@ -175,22 +476,114 @@ class Repo(RepoJSONMixin): """ for file_finder, name, is_pkg in pkgutil.iter_modules(path=[str(self.folder_path)]): if is_pkg: - curr_modules.append(Installable(location=self.folder_path / name)) - self.available_modules = curr_modules + curr_modules.append( + Installable(location=self.folder_path / name, repo=self, commit=self.commit) + ) + self.available_modules = tuple(curr_modules) - # noinspection PyTypeChecker - return tuple(self.available_modules) + return self.available_modules - async def _run(self, *args, **kwargs): + async def _run( + self, + *args: Any, + valid_exit_codes: Tuple[int, ...] = (0,), + debug_only: bool = False, + **kwargs: Any, + ) -> CompletedProcess: + """ + Parameters + ---------- + valid_exit_codes : tuple + Specifies valid exit codes, used to determine + if stderr should be sent as debug or error level in logging. + When not provided, defaults to :code:`(0,)` + debug_only : bool + Specifies if stderr can be sent only as debug level in logging. + When not provided, defaults to `False` + """ env = os.environ.copy() env["GIT_TERMINAL_PROMPT"] = "0" kwargs["env"] = env async with self._repo_lock: - return await self._loop.run_in_executor( - self._executor, functools.partial(sp_run, *args, stdout=PIPE, **kwargs) + p: CompletedProcess = await self._loop.run_in_executor( + self._executor, + functools.partial(sp_run, *args, stdout=PIPE, stderr=PIPE, **kwargs), + ) + stderr = p.stderr.decode().strip() + if stderr: + if debug_only or p.returncode in valid_exit_codes: + log.debug(stderr) + else: + log.error(stderr) + return p + + async def _setup_repo(self) -> None: + self.commit = await self.current_commit() + self._read_info_file() + self._update_available_modules() + + async def _checkout(self, rev: Optional[str] = None, force_checkout: bool = False) -> None: + if rev is None: + return + if not force_checkout and self.commit == rev: + return + exists, __ = self._existing_git_repo() + if not exists: + raise errors.MissingGitRepo( + "A git repo does not exist at path: {}".format(self.folder_path) ) - async def clone(self) -> Tuple[str]: + p = await self._run( + ProcessFormatter().format(self.GIT_CHECKOUT, path=self.folder_path, rev=rev) + ) + + if p.returncode != 0: + raise errors.UnknownRevision( + "Could not checkout to {}. This revision may not exist".format(rev) + ) + + await self._setup_repo() + + def checkout( + self, + rev: Optional[str] = None, + *, + exit_to_rev: Optional[str] = None, + force_checkout: bool = False, + ) -> _RepoCheckoutCtxManager: + """ + Checks out repository to provided revision. + + The return value of this method can also be used as an asynchronous + context manager, i.e. with :code:`async with` syntax. This will + checkout repository to :code:`exit_to_rev` on exit of the context manager. + + Parameters + ---------- + rev : str, optional + Revision to checkout to, when not provided, method won't do anything + exit_to_rev : str, optional + Revision to checkout to after exiting context manager, + when not provided, defaults to current commit + This will be ignored, when used with :code:`await` or when :code:`rev` is `None`. + force_checkout : bool + When `True` checkout will be done even + if :code:`self.commit` is the same as target hash + (applies to exiting context manager as well) + If provided revision isn't full sha1 hash, + checkout will be done no matter to this parameter. + Defaults to `False`. + + Raises + ------ + .UnknownRevision + When git cannot checkout to provided revision. 
+ + """ + + return _RepoCheckoutCtxManager(self, rev, exit_to_rev, force_checkout) + + async def clone(self) -> Tuple[Installable, ...]: """Clone a new repo. Returns @@ -224,9 +617,9 @@ class Repo(RepoJSONMixin): if self.branch is None: self.branch = await self.current_branch() - self._read_info_file() + await self._setup_repo() - return self._update_available_modules() + return self.available_modules async def current_branch(self) -> str: """Determine the current branch using git commands. @@ -237,7 +630,7 @@ class Repo(RepoJSONMixin): The current branch name. """ - exists, _ = self._existing_git_repo() + exists, __ = self._existing_git_repo() if not exists: raise errors.MissingGitRepo( "A git repo does not exist at path: {}".format(self.folder_path) @@ -254,9 +647,33 @@ class Repo(RepoJSONMixin): return p.stdout.decode().strip() - async def current_commit(self, branch: str = None) -> str: + async def current_commit(self) -> str: """Determine the current commit hash of the repo. + Returns + ------- + str + The requested commit hash. + + """ + exists, __ = self._existing_git_repo() + if not exists: + raise errors.MissingGitRepo( + "A git repo does not exist at path: {}".format(self.folder_path) + ) + + p = await self._run( + ProcessFormatter().format(self.GIT_CURRENT_COMMIT, path=self.folder_path) + ) + + if p.returncode != 0: + raise errors.CurrentHashError("Unable to determine commit hash.") + + return p.stdout.decode().strip() + + async def latest_commit(self, branch: Optional[str] = None) -> str: + """Determine the latest commit hash of the repo. + Parameters ---------- branch : `str`, optional @@ -271,7 +688,7 @@ class Repo(RepoJSONMixin): if branch is None: branch = self.branch - exists, _ = self._existing_git_repo() + exists, __ = self._existing_git_repo() if not exists: raise errors.MissingGitRepo( "A git repo does not exist at path: {}".format(self.folder_path) @@ -282,11 +699,11 @@ class Repo(RepoJSONMixin): ) if p.returncode != 0: - raise errors.CurrentHashError("Unable to determine old commit hash.") + raise errors.CurrentHashError("Unable to determine latest commit hash.") return p.stdout.decode().strip() - async def current_url(self, folder: Path = None) -> str: + async def current_url(self, folder: Optional[Path] = None) -> str: """ Discovers the FETCH URL for a Git repo. @@ -316,7 +733,7 @@ class Repo(RepoJSONMixin): return p.stdout.decode().strip() - async def hard_reset(self, branch: str = None) -> None: + async def hard_reset(self, branch: Optional[str] = None) -> None: """Perform a hard reset on the current repo. Parameters @@ -328,7 +745,8 @@ class Repo(RepoJSONMixin): if branch is None: branch = self.branch - exists, _ = self._existing_git_repo() + await self.checkout(branch) + exists, __ = self._existing_git_repo() if not exists: raise errors.MissingGitRepo( "A git repo does not exist at path: {}".format(self.folder_path) @@ -345,7 +763,7 @@ class Repo(RepoJSONMixin): " the following path: {}".format(self.folder_path) ) - async def update(self) -> (str, str): + async def update(self) -> Tuple[str, str]: """Update the current branch of this repo. 
Returns @@ -354,10 +772,9 @@ class Repo(RepoJSONMixin): :py:code`(old commit hash, new commit hash)` """ - curr_branch = await self.current_branch() - old_commit = await self.current_commit(branch=curr_branch) + old_commit = await self.latest_commit() - await self.hard_reset(branch=curr_branch) + await self.hard_reset() p = await self._run(ProcessFormatter().format(self.GIT_PULL, path=self.folder_path)) @@ -367,14 +784,11 @@ class Repo(RepoJSONMixin): " for the repo located at path: {}".format(self.folder_path) ) - new_commit = await self.current_commit(branch=curr_branch) + await self._setup_repo() - self._update_available_modules() - self._read_info_file() + return old_commit, self.commit - return old_commit, new_commit - - async def install_cog(self, cog: Installable, target_dir: Path) -> bool: + async def install_cog(self, cog: Installable, target_dir: Path) -> InstalledModule: """Install a cog to the target directory. Parameters @@ -386,8 +800,13 @@ class Repo(RepoJSONMixin): Returns ------- - bool - The success of the installation. + `InstalledModule` + Cog instance. + + Raises + ------ + .CopyingError + When cog couldn't be copied. """ if cog not in self.available_cogs: @@ -399,11 +818,14 @@ class Repo(RepoJSONMixin): if not target_dir.exists(): raise ValueError("That target directory does not exist.") - return await cog.copy_to(target_dir=target_dir) + if not await cog.copy_to(target_dir=target_dir): + raise errors.CopyingError("There was an issue during copying of cog's files") + + return InstalledModule.from_installable(cog) async def install_libraries( - self, target_dir: Path, req_target_dir: Path, libraries: Tuple[Installable] = () - ) -> bool: + self, target_dir: Path, req_target_dir: Path, libraries: Iterable[Installable] = () + ) -> Tuple[Tuple[InstalledModule, ...], Tuple[Installable, ...]]: """Install shared libraries to the target directory. If :code:`libraries` is not specified, all shared libraries in the repo @@ -420,26 +842,30 @@ class Repo(RepoJSONMixin): Returns ------- - bool - The success of the installation. + tuple + 2-tuple of installed and failed libraries. """ - if len(libraries) > 0: + + if libraries: if not all([i in self.available_libraries for i in libraries]): raise ValueError("Some given libraries are not available in this repo.") else: libraries = self.available_libraries - if len(libraries) > 0: - ret = True + if libraries: + installed = [] + failed = [] for lib in libraries: - ret = ( - ret - and await self.install_requirements(cog=lib, target_dir=req_target_dir) + if not ( + await self.install_requirements(cog=lib, target_dir=req_target_dir) and await lib.copy_to(target_dir=target_dir) - ) - return ret - return True + ): + failed.append(lib) + else: + installed.append(InstalledModule.from_installable(lib)) + return (tuple(installed), tuple(failed)) + return ((), ()) async def install_requirements(self, cog: Installable, target_dir: Path) -> bool: """Install a cog's requirements. @@ -466,7 +892,9 @@ class Repo(RepoJSONMixin): return await self.install_raw_requirements(cog.requirements, target_dir) - async def install_raw_requirements(self, requirements: Tuple[str], target_dir: Path) -> bool: + async def install_raw_requirements( + self, requirements: Iterable[str], target_dir: Path + ) -> bool: """Install a list of requirements using pip. 
Parameters @@ -482,7 +910,7 @@ class Repo(RepoJSONMixin): Success of the installation """ - if len(requirements) == 0: + if not requirements: return True # TODO: Check and see if any of these modules are already available @@ -503,7 +931,7 @@ class Repo(RepoJSONMixin): return True @property - def available_cogs(self) -> Tuple[Installable]: + def available_cogs(self) -> Tuple[Installable, ...]: """`tuple` of `installable` : All available cogs in this Repo. This excludes hidden or shared packages. @@ -514,7 +942,7 @@ class Repo(RepoJSONMixin): ) @property - def available_libraries(self) -> Tuple[Installable]: + def available_libraries(self) -> Tuple[Installable, ...]: """`tuple` of `installable` : All available shared libraries in this Repo. """ @@ -524,11 +952,14 @@ class Repo(RepoJSONMixin): ) @classmethod - async def from_folder(cls, folder: Path): - repo = cls(name=folder.stem, branch="", url="", folder_path=folder) - repo.branch = await repo.current_branch() + async def from_folder(cls, folder: Path, branch: str = "") -> Repo: + repo = cls(name=folder.stem, url="", branch=branch, commit="", folder_path=folder) repo.url = await repo.current_url() - repo._update_available_modules() + if branch == "": + repo.branch = await repo.current_branch() + repo._update_available_modules() + else: + await repo.checkout(repo.branch, force_checkout=True) return repo @@ -537,11 +968,13 @@ class RepoManager: GITHUB_OR_GITLAB_RE = re.compile(r"https?://git(?:hub)|(?:lab)\.com/") TREE_URL_RE = re.compile(r"(?P/tree)/(?P\S+)$") - def __init__(self): - self._repos = {} + def __init__(self) -> None: + self._repos: Dict[str, Repo] = {} + self.conf = Config.get_conf(self, identifier=170708480, force_registration=True) + self.conf.register_global(repos={}) - async def initialize(self): - await self._load_repos(set=True) + async def initialize(self) -> None: + await self._load_repos(set_repos=True) @property def repos_folder(self) -> Path: @@ -583,14 +1016,17 @@ class RepoManager: url, branch = self._parse_url(url, branch) # noinspection PyTypeChecker - r = Repo(url=url, name=name, branch=branch, folder_path=self.repos_folder / name) + r = Repo( + url=url, name=name, branch=branch, commit="", folder_path=self.repos_folder / name + ) await r.clone() + await self.conf.repos.set_raw(name, value=r.branch) self._repos[name] = r return r - def get_repo(self, name: str) -> Union[Repo, None]: + def get_repo(self, name: str) -> Optional[Repo]: """Get a Repo object for a repository. Parameters @@ -606,7 +1042,11 @@ class RepoManager: """ return self._repos.get(name, None) - def get_all_repo_names(self) -> Tuple[str]: + @property + def repos(self) -> Tuple[Repo, ...]: + return tuple(self._repos.values()) + + def get_all_repo_names(self) -> Tuple[str, ...]: """Get all repo names. Returns @@ -617,7 +1057,20 @@ class RepoManager: # noinspection PyTypeChecker return tuple(self._repos.keys()) - async def delete_repo(self, name: str): + def get_all_cogs(self) -> Tuple[Installable, ...]: + """Get all cogs. + + Returns + ------- + `tuple` of `Installable` + + """ + all_cogs: List[Installable] = [] + for repo in self._repos.values(): + all_cogs += repo.available_cogs + return tuple(all_cogs) + + async def delete_repo(self, name: str) -> None: """Delete a repository and its folders. 
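+
+        The repository's folder is removed from disk with ``safe_delete``,
+        and its entry is dropped from both the saved branch configuration
+        and the in-memory repo mapping.
+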
Parameters @@ -637,41 +1090,59 @@ class RepoManager: safe_delete(repo.folder_path) + await self.conf.repos.clear_raw(repo.name) try: del self._repos[name] except KeyError: pass - async def update_repo(self, repo_name: str) -> MutableMapping[Repo, Tuple[str, str]]: + async def update_repo(self, repo_name: str) -> Tuple[Repo, Tuple[str, str]]: + """Update repo with provided name. + + Parameters + ---------- + name : str + The name of the repository to update. + + Returns + ------- + Tuple[Repo, Tuple[str, str]] + A 2-`tuple` with Repo object and a 2-`tuple` of `str` + containing old and new commit hashes. + + """ repo = self._repos[repo_name] old, new = await repo.update() - return {repo: (old, new)} + return (repo, (old, new)) - async def update_all_repos(self) -> MutableMapping[Repo, Tuple[str, str]]: + async def update_all_repos(self) -> Dict[Repo, Tuple[str, str]]: """Call `Repo.update` on all repositories. Returns ------- - dict - A mapping of `Repo` objects that received new commits to a `tuple` - of `str` containing old and new commit hashes. + Dict[Repo, Tuple[str, str]] + A mapping of `Repo` objects that received new commits to + a 2-`tuple` of `str` containing old and new commit hashes. """ ret = {} - for repo_name, _ in self._repos.items(): - repo, (old, new) = (await self.update_repo(repo_name)).popitem() + for repo_name, __ in self._repos.items(): + repo, (old, new) = await self.update_repo(repo_name) if old != new: ret[repo] = (old, new) return ret - async def _load_repos(self, set=False) -> MutableMapping[str, Repo]: + async def _load_repos(self, set_repos: bool = False) -> Dict[str, Repo]: ret = {} self.repos_folder.mkdir(parents=True, exist_ok=True) for folder in self.repos_folder.iterdir(): if not folder.is_dir(): continue try: - ret[folder.stem] = await Repo.from_folder(folder) + branch = await self.conf.repos.get_raw(folder.stem, default="") + ret[folder.stem] = await Repo.from_folder(folder, branch) + if branch == "": + await self.conf.repos.set_raw(folder.stem, value=ret[folder.stem].branch) except errors.NoRemoteURL: log.warning("A remote URL does not exist for repo %s", folder.stem) except errors.DownloaderException as err: @@ -683,7 +1154,7 @@ class RepoManager: ), ) - if set: + if set_repos: self._repos = ret return ret diff --git a/redbot/pytest/.gitattributes b/redbot/pytest/.gitattributes new file mode 100644 index 000000000..31b7e1c61 --- /dev/null +++ b/redbot/pytest/.gitattributes @@ -0,0 +1 @@ +downloader_testrepo.export -text \ No newline at end of file diff --git a/redbot/pytest/downloader.py b/redbot/pytest/downloader.py index 4c6e7d342..0ac043791 100644 --- a/redbot/pytest/downloader.py +++ b/redbot/pytest/downloader.py @@ -1,39 +1,43 @@ from collections import namedtuple from pathlib import Path import json +import subprocess as sp +import shutil import pytest -from redbot.cogs.downloader.repo_manager import RepoManager, Repo -from redbot.cogs.downloader.installable import Installable +from redbot.cogs.downloader.repo_manager import RepoManager, Repo, ProcessFormatter +from redbot.cogs.downloader.installable import Installable, InstalledModule __all__ = [ "patch_relative_to", "repo_manager", "repo", - "repo_norun", "bot_repo", "INFO_JSON", "LIBRARY_INFO_JSON", "installable", + "installed_cog", "library_installable", "fake_run_noprint", + "fake_current_commit", + "_session_git_repo", + "git_repo", + "cloned_git_repo", + "git_repo_with_remote", ] -async def fake_run(*args, **kwargs): - fake_result_tuple = namedtuple("fake_result", "returncode result") - 
res = fake_result_tuple(0, (args, kwargs)) - print(args[0]) - return res - - async def fake_run_noprint(*args, **kwargs): fake_result_tuple = namedtuple("fake_result", "returncode result") res = fake_result_tuple(0, (args, kwargs)) return res +async def fake_current_commit(*args, **kwargs): + return "fake_result" + + @pytest.fixture(scope="module", autouse=True) def patch_relative_to(monkeysession): def fake_relative_to(self, some_path: Path): @@ -50,30 +54,26 @@ def repo_manager(tmpdir_factory): @pytest.fixture -def repo(tmpdir): - repo_folder = Path(str(tmpdir)) / "repos" / "squid" +def repo(tmp_path): + repo_folder = tmp_path / "repos" / "squid" repo_folder.mkdir(parents=True, exist_ok=True) return Repo( url="https://github.com/tekulvw/Squid-Plugins", name="squid", branch="rewrite_cogs", + commit="6acb5decbb717932e5dc0cda7fca0eff452c47dd", folder_path=repo_folder, ) -@pytest.fixture -def repo_norun(repo): - repo._run = fake_run - return repo - - @pytest.fixture def bot_repo(event_loop): cwd = Path.cwd() return Repo( name="Red-DiscordBot", branch="WRONG", + commit="", url="https://empty.com/something.git", folder_path=cwd, loop=event_loop, @@ -120,6 +120,16 @@ def installable(tmpdir): return cog_info +@pytest.fixture +def installed_cog(tmpdir): + cog_path = tmpdir.mkdir("test_repo").mkdir("test_installed_cog") + info_path = cog_path.join("info.json") + info_path.write_text(json.dumps(INFO_JSON), "utf-8") + + cog_info = InstalledModule(Path(str(cog_path))) + return cog_info + + @pytest.fixture def library_installable(tmpdir): lib_path = tmpdir.mkdir("test_repo").mkdir("test_lib") @@ -128,3 +138,93 @@ def library_installable(tmpdir): cog_info = Installable(Path(str(lib_path))) return cog_info + + +# Git +TEST_REPO_EXPORT_PTH: Path = Path(__file__).parent / "downloader_testrepo.export" + + +def _init_test_repo(destination: Path): + # copied from tools/edit_testrepo.py + git_dirparams = ("git", "-C", str(destination)) + init_commands = ( + (*git_dirparams, "init"), + (*git_dirparams, "config", "--local", "user.name", "Cog-Creators"), + (*git_dirparams, "config", "--local", "user.email", "cog-creators@example.org"), + (*git_dirparams, "config", "--local", "commit.gpgSign", "false"), + ) + + for args in init_commands: + sp.run(args, check=True) + return git_dirparams + + +@pytest.fixture(scope="session") +async def _session_git_repo(tmp_path_factory, event_loop): + # we will import repo only once once per session and duplicate the repo folder + repo_path = tmp_path_factory.mktemp("session_git_repo") + repo = Repo( + name="redbot-testrepo", + url="", + branch="master", + commit="", + folder_path=repo_path, + loop=event_loop, + ) + git_dirparams = _init_test_repo(repo_path) + fast_import = sp.Popen((*git_dirparams, "fast-import", "--quiet"), stdin=sp.PIPE) + with TEST_REPO_EXPORT_PTH.open(mode="rb") as f: + fast_import.communicate(f.read()) + return_code = fast_import.wait() + if return_code: + raise Exception(f"git fast-import failed with code {return_code}") + sp.run((*git_dirparams, "reset", "--hard")) + return repo + + +@pytest.fixture +async def git_repo(_session_git_repo, tmp_path, event_loop): + # fixture only copies repo that was imported in _session_git_repo + repo_path = tmp_path / "redbot-testrepo" + shutil.copytree(_session_git_repo.folder_path, repo_path) + repo = Repo( + name="redbot-testrepo", + url=_session_git_repo.url, + branch=_session_git_repo.branch, + commit=_session_git_repo.commit, + folder_path=repo_path, + loop=event_loop, + ) + return repo + + +@pytest.fixture +async 
def cloned_git_repo(_session_git_repo, tmp_path, event_loop): + # don't use this if you want to edit origin repo + repo_path = tmp_path / "redbot-cloned_testrepo" + repo = Repo( + name="redbot-testrepo", + url=str(_session_git_repo.folder_path), + branch=_session_git_repo.branch, + commit=_session_git_repo.commit, + folder_path=repo_path, + loop=event_loop, + ) + sp.run(("git", "clone", str(_session_git_repo.folder_path), str(repo_path)), check=True) + return repo + + +@pytest.fixture +async def git_repo_with_remote(git_repo, tmp_path, event_loop): + # this can safely be used when you want to do changes to origin repo + repo_path = tmp_path / "redbot-testrepo_with_remote" + repo = Repo( + name="redbot-testrepo", + url=str(git_repo.folder_path), + branch=git_repo.branch, + commit=git_repo.commit, + folder_path=repo_path, + loop=event_loop, + ) + sp.run(("git", "clone", str(git_repo.folder_path), str(repo_path)), check=True) + return repo diff --git a/redbot/pytest/downloader_testrepo.export b/redbot/pytest/downloader_testrepo.export new file mode 100644 index 000000000..5e27ae041 --- /dev/null +++ b/redbot/pytest/downloader_testrepo.export @@ -0,0 +1,134 @@ +# THIS FILE SHOULDN'T BE EDITED MANUALLY. USE `edit_testrepo.py` TOOL TO UPDATE THE REPO. +blob +mark :1 +original-oid cfd75093008a560c1f2a09e5068e0dd1517eaa1c +data 14 +Sample file 1. +reset refs/heads/ambiguous_with_tag +commit refs/heads/ambiguous_with_tag +mark :2 +original-oid c6f0e5ec04d99bdf8c6c78ff20d66d286eecb3ea +author Cog-Creators 1571921830 +0200 +committer Cog-Creators 1571919491 +0200 +data 27 +Commit ambiguous with tag. +M 100644 :1 sample_file1.txt + +reset refs/heads/ambiguous_1 +commit refs/heads/ambiguous_1 +mark :3 +original-oid 95da0b576271cb5bee5f3e075074c03ee05fed05 +author Cog-Creators 1571777704 +0200 +committer Cog-Creators 1571777704 +0200 +data 23 +Ambiguous commit 16955 +M 100644 :1 sample_file1.txt + +reset refs/heads/ambiguous_2 +commit refs/heads/ambiguous_2 +mark :4 +original-oid 95da0b57a416d9c8ce950554228d1fc195c30b43 +author Cog-Creators 1571777704 +0200 +committer Cog-Creators 1571777704 +0200 +data 23 +Ambiguous commit 44414 +M 100644 :1 sample_file1.txt + +blob +mark :5 +original-oid f1a18139c84a82addbded8a7b5738c36fb02fce1 +data 22 +print("Hello world!") + +blob +mark :6 +original-oid 1abb7a2470722faee2175980ee202717b4158057 +data 14 +Sample file 2. +reset refs/tags/lightweight +commit refs/tags/lightweight +mark :7 +original-oid c950fc05a540dd76b944719c2a3302da2e2f3090 +author Cog-Creators 1571776887 +0200 +committer Cog-Creators 1571777047 +0200 +data 31 +Initial commit, prepare files. +M 100644 :5 mycog/__init__.py +M 100644 :1 sample_file1.txt +M 100644 :6 sample_file2.txt + +blob +mark :8 +original-oid 10ec5813415b6d7c902eee95cc13dc38c6f50917 +data 11 +Added file. +blob +mark :9 +original-oid 5ed17bf7914989db85f2e66045e62b35eed10f3b +data 42 +def setup(bot): + print("Hello world!") + +commit refs/tags/lightweight +mark :10 +original-oid fb99eb7d2d5bed514efc98fe6686b368f8425745 +author Cog-Creators 1571777140 +0200 +committer Cog-Creators 1571777140 +0200 +data 39 +Add, modify, rename and remove a file. +from :7 +M 100644 :8 added_file.txt +M 100644 :9 mycog/__init__.py +D sample_file1.txt +D sample_file2.txt +M 100644 :6 sample_file3.txt + +commit refs/tags/annotated +mark :11 +original-oid a7120330cc179396914e0d6af80cfa282adc124b +author Cog-Creators 1571777209 +0200 +committer Cog-Creators 1571777209 +0200 +data 14 +Remove mycog. 
+from :10 +D mycog/__init__.py + +blob +mark :12 +original-oid 1ba9a868ae2f65571c75681ec47d40595bea4882 +data 14 +Sample file 4. +commit refs/heads/master +mark :13 +original-oid 2db662c1d341b1db7d225ccc1af4019ba5228c70 +author Cog-Creators 1571777704 +0200 +committer Cog-Creators 1571777704 +0200 +data 32 +One commit after mycog removal. +from :11 +M 100644 :12 sample_file4.txt + +reset refs/heads/dont_add_commits +commit refs/heads/dont_add_commits +mark :14 +original-oid a0ccc2390883c85a361f5a90c72e1b07958939fa +author Cog-Creators 1571777548 +0200 +committer Cog-Creators 1571777548 +0200 +data 103 +Don't edit this, this is used for tests for current commit, latest commit, full sha1 from branch name. +M 100644 :1 sample_file1.txt + +tag annotated +from :11 +original-oid 41f6cf3b58e774d2b3414ced3ee9f2541f1c682f +tagger Cog-Creators 1571777367 +0200 +data 15 +Annotated tag. + +tag ambiguous_tag_66387 +from :2 +original-oid c6f028f843389c850e2c20d8dd1f5fa498252764 +tagger Cog-Creators 1571919491 +0200 +data 37 +Annotated tag ambiguous with commit. + diff --git a/redbot/pytest/downloader_testrepo.md b/redbot/pytest/downloader_testrepo.md new file mode 100644 index 000000000..77571a4d6 --- /dev/null +++ b/redbot/pytest/downloader_testrepo.md @@ -0,0 +1,102 @@ +# Downloader's test repo reference + +This file can be used as a reference on what repo contains +if some dev will want to add more test in future. + +Branch master +--- + +**Commit:** c950fc05a540dd76b944719c2a3302da2e2f3090 +**Commit message:** Initial commit, prepare files. +**Tree status:** +``` +downloader_testrepo/ + ├── mycog +A │ ├── __init__.py +A ├── sample_file1.txt +A └── sample_file2.txt +``` +--- +**Commit:** fb99eb7d2d5bed514efc98fe6686b368f8425745 +**Tag:** lightweight +**Commit message:** Add, modify, rename and remove a file. +**Tree status:** +``` +downloader_testrepo/ + ├── mycog/ +M │ ├── __init__.py +A ├── added_file.txt +D ├── sample_file1.txt +R └── sample_file2.txt -> sample_file3.txt +``` +--- +**Commit:** a7120330cc179396914e0d6af80cfa282adc124b +**Tag:** annotated (sha1: 41f6cf3b58e774d2b3414ced3ee9f2541f1c682f) +**Commit message:** Remove mycog. +**Tree status:** +``` +downloader_testrepo/ +D ├── mycog/ +D │ ├── __init__.py + ├── added_file.txt + └── sample_file3.txt +``` +--- +**Commit:** 2db662c1d341b1db7d225ccc1af4019ba5228c70 +**Commit message:** One commit after mycog removal. +**Tree status:** +``` +downloader_testrepo/ + ├── added_file.txt + ├── sample_file3.txt +A └── sample_file4.txt +``` + +Branch with persistent HEAD +--- + +**Commit:** a0ccc2390883c85a361f5a90c72e1b07958939fa +**Branch:** dont_add_commits +**Commit message:** Don't edit this, this is used for tests for current commit, latest commit, full sha1 from branch name. +**Tree status:** +``` +downloader_testrepo/ +A └── sample_file1.txt +``` + +Branches with ambiguous commits (95da0b57) +--- + +**Commit:** 95da0b576271cb5bee5f3e075074c03ee05fed05 +**Branch:** ambiguous_1 +**Commit message:** Ambiguous commit 16955 +**Tree status:** +``` +downloader_testrepo/ +A └── sample_file1.txt +``` + + +**Commit:** 95da0b57a416d9c8ce950554228d1fc195c30b43 +**Branch:** ambiguous_2 +**Commit message:** Ambiguous commit 44414 +**Tree status:** +``` +downloader_testrepo/ +A └── sample_file1.txt +``` + + +Branch with ambiguous tag (c6f0) +--- + +**Commit:** c6f0e5ec04d99bdf8c6c78ff20d66d286eecb3ea +**Branch:** ambiguous_with_tag +**Tag:** ambiguous_tag_66387 (sha1: c6f028f843389c850e2c20d8dd1f5fa498252764) +**Commit message:** Commit ambiguous with tag. 
+**Tree status:** + +``` +downloader_testrepo/ +A └── sample_file1.txt +``` \ No newline at end of file diff --git a/setup.cfg b/setup.cfg index 929144bab..cec4be2b5 100644 --- a/setup.cfg +++ b/setup.cfg @@ -106,6 +106,7 @@ test = pyparsing==2.4.2 pytest==5.1.2 pytest-asyncio==0.10.0 + pytest-mock==1.11.2 six==1.12.0 typed-ast==1.4.0 wcwidth==0.1.7 @@ -131,5 +132,6 @@ include = **/locales/*.po data/* data/**/* + *.export redbot.core.drivers.postgres = *.sql diff --git a/tests/cogs/downloader/test_downloader.py b/tests/cogs/downloader/test_downloader.py index 57186baaa..29d49a7e7 100644 --- a/tests/cogs/downloader/test_downloader.py +++ b/tests/cogs/downloader/test_downloader.py @@ -1,41 +1,339 @@ +import asyncio import pathlib from collections import namedtuple +from typing import Any, NamedTuple from pathlib import Path import pytest -from unittest.mock import MagicMock +from pytest_mock import MockFixture from redbot.pytest.downloader import * -from redbot.cogs.downloader.repo_manager import RepoManager, Repo -from redbot.cogs.downloader.errors import ExistingGitRepo +from redbot.cogs.downloader.repo_manager import Installable +from redbot.cogs.downloader.repo_manager import Candidate, ProcessFormatter, RepoManager, Repo +from redbot.cogs.downloader.errors import ( + AmbiguousRevision, + ExistingGitRepo, + GitException, + UnknownRevision, +) -def test_existing_git_repo(tmpdir): - repo_folder = Path(str(tmpdir)) / "repos" / "squid" / ".git" +class FakeCompletedProcess(NamedTuple): + returncode: int + stdout: bytes = b"" + stderr: bytes = b"" + + +async def async_return(ret: Any): + return ret + + +def _mock_run( + mocker: MockFixture, repo: Repo, returncode: int, stdout: bytes = b"", stderr: bytes = b"" +): + return mocker.patch.object( + repo, + "_run", + autospec=True, + return_value=async_return(FakeCompletedProcess(returncode, stdout, stderr)), + ) + + +def _mock_setup_repo(mocker: MockFixture, repo: Repo, commit: str): + def update_commit(*args, **kwargs): + repo.commit = commit + return mocker.DEFAULT + + return mocker.patch.object( + repo, + "_setup_repo", + autospec=True, + side_effect=update_commit, + return_value=async_return(None), + ) + + +def test_existing_git_repo(tmp_path): + repo_folder = tmp_path / "repos" / "squid" / ".git" repo_folder.mkdir(parents=True, exist_ok=True) r = Repo( url="https://github.com/tekulvw/Squid-Plugins", name="squid", branch="rewrite_cogs", + commit="6acb5decbb717932e5dc0cda7fca0eff452c47dd", folder_path=repo_folder.parent, ) - exists, _ = r._existing_git_repo() + exists, git_path = r._existing_git_repo() assert exists is True + assert git_path == repo_folder + + +ancestor_rev = "c950fc05a540dd76b944719c2a3302da2e2f3090" +descendant_rev = "fb99eb7d2d5bed514efc98fe6686b368f8425745" + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "maybe_ancestor_rev,descendant_rev,returncode,expected", + [(ancestor_rev, descendant_rev, 0, True), (descendant_rev, ancestor_rev, 1, False)], +) +async def test_is_ancestor(mocker, repo, maybe_ancestor_rev, descendant_rev, returncode, expected): + m = _mock_run(mocker, repo, returncode) + ret = await repo.is_ancestor(maybe_ancestor_rev, descendant_rev) + m.assert_called_once_with( + ProcessFormatter().format( + repo.GIT_IS_ANCESTOR, + path=repo.folder_path, + maybe_ancestor_rev=maybe_ancestor_rev, + descendant_rev=descendant_rev, + ), + valid_exit_codes=(0, 1), + ) + assert ret is expected + + +@pytest.mark.asyncio +async def test_is_ancestor_raise(mocker, repo): + m = _mock_run(mocker, repo, 128) + with 
pytest.raises(GitException): + await repo.is_ancestor("invalid1", "invalid2") + + m.assert_called_once_with( + ProcessFormatter().format( + repo.GIT_IS_ANCESTOR, + path=repo.folder_path, + maybe_ancestor_rev="invalid1", + descendant_rev="invalid2", + ), + valid_exit_codes=(0, 1), + ) + + +@pytest.mark.asyncio +async def test_get_file_update_statuses(mocker, repo): + old_rev = "c950fc05a540dd76b944719c2a3302da2e2f3090" + new_rev = "fb99eb7d2d5bed514efc98fe6686b368f8425745" + m = _mock_run( + mocker, + repo, + 0, + b"A\x00added_file.txt\x00\t" + b"M\x00mycog/__init__.py\x00\t" + b"D\x00sample_file1.txt\x00\t" + b"D\x00sample_file2.txt\x00\t" + b"A\x00sample_file3.txt", + ) + ret = await repo._get_file_update_statuses(old_rev, new_rev) + m.assert_called_once_with( + ProcessFormatter().format( + repo.GIT_DIFF_FILE_STATUS, path=repo.folder_path, old_rev=old_rev, new_rev=new_rev + ) + ) + + assert ret == { + "added_file.txt": "A", + "mycog/__init__.py": "M", + "sample_file1.txt": "D", + "sample_file2.txt": "D", + "sample_file3.txt": "A", + } + + +@pytest.mark.asyncio +async def test_is_module_modified(mocker, repo): + old_rev = "c950fc05a540dd76b944719c2a3302da2e2f3090" + new_rev = "fb99eb7d2d5bed514efc98fe6686b368f8425745" + FakeInstallable = namedtuple("Installable", "name commit") + module = FakeInstallable("mycog", new_rev) + m = mocker.patch.object( + repo, + "_get_file_update_statuses", + autospec=True, + return_value=async_return( + { + "added_file.txt": "A", + "mycog/__init__.py": "M", + "sample_file1.txt": "D", + "sample_file2.txt": "D", + "sample_file3.txt": "A", + } + ), + ) + ret = await repo._is_module_modified(module, old_rev) + m.assert_called_once_with(old_rev, new_rev) + + assert ret is True + + +@pytest.mark.asyncio +async def test_get_full_sha1_success(mocker, repo): + commit = "c950fc05a540dd76b944719c2a3302da2e2f3090" + m = _mock_run(mocker, repo, 0, commit.encode()) + ret = await repo.get_full_sha1(commit) + m.assert_called_once_with( + ProcessFormatter().format(repo.GIT_GET_FULL_SHA1, path=repo.folder_path, rev=commit) + ) + + assert ret == commit + + +@pytest.mark.asyncio +async def test_get_full_sha1_notfound(mocker, repo): + m = _mock_run(mocker, repo, 128, b"", b"fatal: Needed a single revision") + with pytest.raises(UnknownRevision): + await repo.get_full_sha1("invalid") + m.assert_called_once_with( + ProcessFormatter().format(repo.GIT_GET_FULL_SHA1, path=repo.folder_path, rev="invalid") + ) + + +@pytest.mark.asyncio +async def test_get_full_sha1_ambiguous(mocker, repo): + m = _mock_run( + mocker, + repo, + 128, + b"", + b"error: short SHA1 c6f0 is ambiguous\n" + b"hint: The candidates are:\n" + b"hint: c6f028f tag ambiguous_tag_66387\n" + b"hint: c6f0e5e commit 2019-10-24 - Commit ambiguous with tag.\n" + b"fatal: Needed a single revision", + ) + with pytest.raises(AmbiguousRevision) as exc_info: + await repo.get_full_sha1("c6f0") + m.assert_called_once_with( + ProcessFormatter().format(repo.GIT_GET_FULL_SHA1, path=repo.folder_path, rev="c6f0") + ) + + assert exc_info.value.candidates == [ + Candidate("c6f028f", "tag", "ambiguous_tag_66387"), + Candidate("c6f0e5e", "commit", "2019-10-24 - Commit ambiguous with tag."), + ] + + +def test_update_available_modules(repo): + module = repo.folder_path / "mycog" / "__init__.py" + submodule = module.parent / "submodule" / "__init__.py" + module.parent.mkdir(parents=True) + module.touch() + submodule.parent.mkdir() + submodule.touch() + ret = repo._update_available_modules() + assert ( + ret + == repo.available_modules + == 
(Installable(location=module.parent, repo=repo, commit=repo.commit),) + ) + + +@pytest.mark.asyncio +async def test_checkout(mocker, repo): + commit = "c950fc05a540dd76b944719c2a3302da2e2f3090" + m = _mock_run(mocker, repo, 0) + _mock_setup_repo(mocker, repo, commit) + git_path = repo.folder_path / ".git" + git_path.mkdir() + await repo._checkout(commit) + + assert repo.commit == commit + m.assert_called_once_with( + ProcessFormatter().format(repo.GIT_CHECKOUT, path=repo.folder_path, rev=commit) + ) + + +@pytest.mark.asyncio +async def test_checkout_ctx_manager(mocker, repo): + commit = "c950fc05a540dd76b944719c2a3302da2e2f3090" + m = mocker.patch.object(repo, "_checkout", autospec=True, return_value=async_return(None)) + old_commit = repo.commit + async with repo.checkout(commit): + m.assert_called_with(commit, force_checkout=False) + m.return_value = async_return(None) + + m.assert_called_with(old_commit, force_checkout=False) + + +@pytest.mark.asyncio +async def test_checkout_await(mocker, repo): + commit = "c950fc05a540dd76b944719c2a3302da2e2f3090" + m = mocker.patch.object(repo, "_checkout", autospec=True, return_value=async_return(None)) + await repo.checkout(commit) + + m.assert_called_once_with(commit, force_checkout=False) + + +@pytest.mark.asyncio +async def test_clone_with_branch(mocker, repo): + branch = repo.branch = "dont_add_commits" + commit = "a0ccc2390883c85a361f5a90c72e1b07958939fa" + repo.commit = "" + m = _mock_run(mocker, repo, 0) + _mock_setup_repo(mocker, repo, commit) + + await repo.clone() + + assert repo.commit == commit + m.assert_called_once_with( + ProcessFormatter().format( + repo.GIT_CLONE, branch=branch, url=repo.url, folder=repo.folder_path + ) + ) + + +@pytest.mark.asyncio +async def test_clone_without_branch(mocker, repo): + branch = "dont_add_commits" + commit = "a0ccc2390883c85a361f5a90c72e1b07958939fa" + repo.branch = None + repo.commit = "" + m = _mock_run(mocker, repo, 0) + _mock_setup_repo(mocker, repo, commit) + mocker.patch.object(repo, "current_branch", autospec=True, return_value=async_return(branch)) + + await repo.clone() + + assert repo.commit == commit + m.assert_called_once_with( + ProcessFormatter().format(repo.GIT_CLONE_NO_BRANCH, url=repo.url, folder=repo.folder_path) + ) + + +@pytest.mark.asyncio +async def test_update(mocker, repo): + old_commit = repo.commit + new_commit = "a0ccc2390883c85a361f5a90c72e1b07958939fa" + m = _mock_run(mocker, repo, 0) + _mock_setup_repo(mocker, repo, new_commit) + mocker.patch.object( + repo, "latest_commit", autospec=True, return_value=async_return(old_commit) + ) + mocker.patch.object(repo, "hard_reset", autospec=True, return_value=async_return(None)) + ret = await repo.update() + + assert ret == (old_commit, new_commit) + m.assert_called_once_with(ProcessFormatter().format(repo.GIT_PULL, path=repo.folder_path)) + + +# old tests @pytest.mark.asyncio async def test_add_repo(monkeypatch, repo_manager): monkeypatch.setattr("redbot.cogs.downloader.repo_manager.Repo._run", fake_run_noprint) + monkeypatch.setattr( + "redbot.cogs.downloader.repo_manager.Repo.current_commit", fake_current_commit + ) squid = await repo_manager.add_repo( url="https://github.com/tekulvw/Squid-Plugins", name="squid", branch="rewrite_cogs" ) - assert squid.available_modules == [] + assert squid.available_modules == () @pytest.mark.asyncio @@ -49,14 +347,20 @@ async def test_lib_install_requirements(monkeypatch, library_installable, repo, sharedlib_path = lib_path / "cog_shared" sharedlib_path.mkdir(parents=True, exist_ok=True) - 
result = await repo.install_libraries(target_dir=sharedlib_path, req_target_dir=lib_path) + installed, failed = await repo.install_libraries( + target_dir=sharedlib_path, req_target_dir=lib_path + ) - assert result is True + assert len(installed) == 1 + assert len(failed) == 0 @pytest.mark.asyncio async def test_remove_repo(monkeypatch, repo_manager): monkeypatch.setattr("redbot.cogs.downloader.repo_manager.Repo._run", fake_run_noprint) + monkeypatch.setattr( + "redbot.cogs.downloader.repo_manager.Repo.current_commit", fake_current_commit + ) await repo_manager.add_repo( url="https://github.com/tekulvw/Squid-Plugins", name="squid", branch="rewrite_cogs" @@ -67,17 +371,8 @@ async def test_remove_repo(monkeypatch, repo_manager): @pytest.mark.asyncio -async def test_current_branch(bot_repo): - branch = await bot_repo.current_branch() - - # So this does work, just not sure how to fully automate the test - - assert branch not in ("WRONG", "") - - -@pytest.mark.asyncio -async def test_existing_repo(repo_manager): - repo_manager.does_repo_exist = MagicMock(return_value=True) +async def test_existing_repo(mocker, repo_manager): + repo_manager.does_repo_exist = mocker.MagicMock(return_value=True) with pytest.raises(ExistingGitRepo): await repo_manager.add_repo("http://test.com", "test") diff --git a/tests/cogs/downloader/test_git.py b/tests/cogs/downloader/test_git.py new file mode 100644 index 000000000..075b6e783 --- /dev/null +++ b/tests/cogs/downloader/test_git.py @@ -0,0 +1,452 @@ +from pathlib import Path +import subprocess as sp + +import pytest + +from redbot.cogs.downloader.repo_manager import ProcessFormatter, Repo +from redbot.pytest.downloader import ( + cloned_git_repo, + git_repo, + git_repo_with_remote, + _session_git_repo, +) + + +@pytest.mark.asyncio +async def test_git_clone_nobranch(git_repo, tmp_path): + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_CLONE_NO_BRANCH, + url=git_repo.folder_path, + folder=tmp_path / "cloned_repo_test", + ) + ) + assert p.returncode == 0 + + +@pytest.mark.asyncio +async def test_git_clone_branch(git_repo, tmp_path): + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_CLONE, + branch="master", + url=git_repo.folder_path, + folder=tmp_path / "cloned_repo_test", + ) + ) + assert p.returncode == 0 + + +@pytest.mark.asyncio +async def test_git_clone_non_existent_branch(git_repo, tmp_path): + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_CLONE, + branch="non-existent-branch", + url=git_repo.folder_path, + folder=tmp_path / "cloned_repo_test", + ) + ) + assert p.returncode == 128 + + +@pytest.mark.asyncio +async def test_git_clone_notgit_repo(git_repo, tmp_path): + notgit_repo = tmp_path / "test_clone_folder" + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_CLONE, branch=None, url=notgit_repo, folder=tmp_path / "cloned_repo_test" + ) + ) + assert p.returncode == 128 + + +@pytest.mark.asyncio +async def test_git_current_branch_master(git_repo): + p = await git_repo._run( + ProcessFormatter().format(git_repo.GIT_CURRENT_BRANCH, path=git_repo.folder_path) + ) + assert p.returncode == 0 + assert p.stdout.decode().strip() == "master" + + +@pytest.mark.asyncio +async def test_git_current_branch_detached(git_repo): + await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_CHECKOUT, + path=git_repo.folder_path, + rev="c950fc05a540dd76b944719c2a3302da2e2f3090", + ) + ) + p = await git_repo._run( + ProcessFormatter().format(git_repo.GIT_CURRENT_BRANCH, 
path=git_repo.folder_path) + ) + assert p.returncode == 128 + assert p.stderr.decode().strip() == "fatal: ref HEAD is not a symbolic ref" + + +@pytest.mark.asyncio +async def test_git_current_commit_on_branch(git_repo): + # HEAD on dont_add_commits (a0ccc2390883c85a361f5a90c72e1b07958939fa) + # setup + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_CHECKOUT, path=git_repo.folder_path, rev="dont_add_commits" + ) + ) + assert p.returncode == 0 + + p = await git_repo._run( + ProcessFormatter().format(git_repo.GIT_CURRENT_COMMIT, path=git_repo.folder_path) + ) + assert p.returncode == 0 + assert p.stdout.decode().strip() == "a0ccc2390883c85a361f5a90c72e1b07958939fa" + + +@pytest.mark.asyncio +async def test_git_current_commit_detached(git_repo): + # detached HEAD state (c950fc05a540dd76b944719c2a3302da2e2f3090) + await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_CHECKOUT, + path=git_repo.folder_path, + rev="c950fc05a540dd76b944719c2a3302da2e2f3090", + ) + ) + p = await git_repo._run( + ProcessFormatter().format(git_repo.GIT_CURRENT_COMMIT, path=git_repo.folder_path) + ) + assert p.returncode == 0 + assert p.stdout.decode().strip() == "c950fc05a540dd76b944719c2a3302da2e2f3090" + + +@pytest.mark.asyncio +async def test_git_latest_commit(git_repo): + # HEAD on dont_add_commits (a0ccc2390883c85a361f5a90c72e1b07958939fa) + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_LATEST_COMMIT, path=git_repo.folder_path, branch="dont_add_commits" + ) + ) + assert p.returncode == 0 + assert p.stdout.decode().strip() == "a0ccc2390883c85a361f5a90c72e1b07958939fa" + + +@pytest.mark.asyncio +async def test_git_hard_reset(cloned_git_repo, tmp_path): + staged_file = cloned_git_repo.folder_path / "staged_file.txt" + staged_file.touch() + git_dirparams = ("git", "-C", str(cloned_git_repo.folder_path)) + sp.run((*git_dirparams, "add", "staged_file.txt"), check=True) + assert staged_file.exists() is True + p = await cloned_git_repo._run( + ProcessFormatter().format( + cloned_git_repo.GIT_HARD_RESET, path=cloned_git_repo.folder_path, branch="master" + ) + ) + assert p.returncode == 0 + assert staged_file.exists() is False + + +@pytest.mark.asyncio +async def test_git_pull(git_repo_with_remote, tmp_path): + # setup + staged_file = Path(git_repo_with_remote.url) / "staged_file.txt" + staged_file.touch() + git_dirparams = ("git", "-C", git_repo_with_remote.url) + sp.run((*git_dirparams, "add", "staged_file.txt"), check=True) + sp.run( + (*git_dirparams, "commit", "-m", "test commit", "--no-gpg-sign", "--no-verify"), check=True + ) + assert not (git_repo_with_remote.folder_path / "staged_file.txt").exists() + + p = await git_repo_with_remote._run( + ProcessFormatter().format( + git_repo_with_remote.GIT_PULL, path=git_repo_with_remote.folder_path + ) + ) + assert p.returncode == 0 + assert (git_repo_with_remote.folder_path / "staged_file.txt").exists() + + +@pytest.mark.asyncio +async def test_git_diff_file_status(git_repo): + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_DIFF_FILE_STATUS, + path=git_repo.folder_path, + old_rev="c950fc05a540dd76b944719c2a3302da2e2f3090", + new_rev="fb99eb7d2d5bed514efc98fe6686b368f8425745", + ) + ) + assert p.returncode == 0 + stdout = p.stdout.strip(b"\t\n\x00 ").decode() + assert stdout == ( + "A\x00added_file.txt\x00\t" + "M\x00mycog/__init__.py\x00\t" + "D\x00sample_file1.txt\x00\t" + "D\x00sample_file2.txt\x00\t" + "A\x00sample_file3.txt" + ) + + +# might need to add test for test_git_log, but it's unused 
method currently + + +@pytest.mark.asyncio +async def test_git_discover_remote_url(cloned_git_repo, tmp_path): + p = await cloned_git_repo._run( + ProcessFormatter().format( + cloned_git_repo.GIT_DISCOVER_REMOTE_URL, path=cloned_git_repo.folder_path + ) + ) + assert p.returncode == 0 + assert p.stdout.decode().strip() == cloned_git_repo.url + + +@pytest.mark.asyncio +async def test_git_checkout_detached_head(git_repo): + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_CHECKOUT, + path=git_repo.folder_path, + rev="c950fc05a540dd76b944719c2a3302da2e2f3090", + ) + ) + assert p.returncode == 0 + + +@pytest.mark.asyncio +async def test_git_checkout_branch(git_repo): + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_CHECKOUT, path=git_repo.folder_path, rev="dont_add_commits" + ) + ) + assert p.returncode == 0 + + +@pytest.mark.asyncio +async def test_git_checkout_non_existent_branch(git_repo): + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_CHECKOUT, path=git_repo.folder_path, rev="non-existent-branch" + ) + ) + assert p.returncode == 1 + + +@pytest.mark.asyncio +async def test_git_get_full_sha1_from_branch_name(git_repo): + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_GET_FULL_SHA1, path=git_repo.folder_path, rev="dont_add_commits" + ) + ) + assert p.returncode == 0 + assert p.stdout.decode().strip() == "a0ccc2390883c85a361f5a90c72e1b07958939fa" + + +@pytest.mark.asyncio +async def test_git_get_full_sha1_from_full_hash(git_repo): + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_GET_FULL_SHA1, + path=git_repo.folder_path, + rev="c950fc05a540dd76b944719c2a3302da2e2f3090", + ) + ) + assert p.returncode == 0 + assert p.stdout.decode().strip() == "c950fc05a540dd76b944719c2a3302da2e2f3090" + + +@pytest.mark.asyncio +async def test_git_get_full_sha1_from_short_hash(git_repo): + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_GET_FULL_SHA1, path=git_repo.folder_path, rev="c950" + ) + ) + assert p.returncode == 0 + assert p.stdout.decode().strip() == "c950fc05a540dd76b944719c2a3302da2e2f3090" + + +@pytest.mark.asyncio +async def test_git_get_full_sha1_from_too_short_hash(git_repo): + p = await git_repo._run( + ProcessFormatter().format(git_repo.GIT_GET_FULL_SHA1, path=git_repo.folder_path, rev="c95") + ) + assert p.returncode == 128 + assert p.stderr.decode().strip() == "fatal: Needed a single revision" + + +@pytest.mark.asyncio +async def test_git_get_full_sha1_from_lightweight_tag(git_repo): + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_GET_FULL_SHA1, path=git_repo.folder_path, rev="lightweight" + ) + ) + assert p.returncode == 0 + assert p.stdout.decode().strip() == "fb99eb7d2d5bed514efc98fe6686b368f8425745" + + +@pytest.mark.asyncio +async def test_git_get_full_sha1_from_annotated_tag(git_repo): + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_GET_FULL_SHA1, path=git_repo.folder_path, rev="annotated" + ) + ) + assert p.returncode == 0 + assert p.stdout.decode().strip() == "a7120330cc179396914e0d6af80cfa282adc124b" + + +@pytest.mark.asyncio +async def test_git_get_full_sha1_from_invalid_ref(git_repo): + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_GET_FULL_SHA1, path=git_repo.folder_path, rev="invalid" + ) + ) + assert p.returncode == 128 + assert p.stderr.decode().strip() == "fatal: Needed a single revision" + + +@pytest.mark.asyncio +async def test_git_get_full_sha1_from_ambiguous_commits(git_repo): 
+ # 2 ambiguous refs: + # branch ambiguous_1 - 95da0b576271cb5bee5f3e075074c03ee05fed05 + # branch ambiguous_2 - 95da0b57a416d9c8ce950554228d1fc195c30b43 + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_GET_FULL_SHA1, path=git_repo.folder_path, rev="95da0b57" + ) + ) + assert p.returncode == 128 + assert p.stderr.decode().strip() == ( + "error: short SHA1 95da0b57 is ambiguous\n" + "hint: The candidates are:\n" + "hint: 95da0b576 commit 2019-10-22 - Ambiguous commit 16955\n" + "hint: 95da0b57a commit 2019-10-22 - Ambiguous commit 44414\n" + "fatal: Needed a single revision" + ) + + +@pytest.mark.asyncio +async def test_git_get_full_sha1_from_ambiguous_tag_and_commit(git_repo): + # 2 ambiguous refs: + # branch ambiguous_with_tag - c6f0e5ec04d99bdf8c6c78ff20d66d286eecb3ea + # tag ambiguous_tag_66387 - c6f0e5ec04d99bdf8c6c78ff20d66d286eecb3ea + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_GET_FULL_SHA1, path=git_repo.folder_path, rev="c6f0" + ) + ) + assert p.returncode == 128 + assert p.stderr.decode().strip() == ( + "error: short SHA1 c6f0 is ambiguous\n" + "hint: The candidates are:\n" + "hint: c6f028f tag ambiguous_tag_66387\n" + "hint: c6f0e5e commit 2019-10-24 - Commit ambiguous with tag.\n" + "fatal: Needed a single revision" + ) + + +@pytest.mark.asyncio +async def test_git_is_ancestor_true(git_repo): + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_IS_ANCESTOR, + path=git_repo.folder_path, + maybe_ancestor_rev="c950fc05a540dd76b944719c2a3302da2e2f3090", + descendant_rev="fb99eb7d2d5bed514efc98fe6686b368f8425745", + ) + ) + assert p.returncode == 0 + + +@pytest.mark.asyncio +async def test_git_is_ancestor_false(git_repo): + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_IS_ANCESTOR, + path=git_repo.folder_path, + maybe_ancestor_rev="fb99eb7d2d5bed514efc98fe6686b368f8425745", + descendant_rev="c950fc05a540dd76b944719c2a3302da2e2f3090", + ) + ) + assert p.returncode == 1 + + +@pytest.mark.asyncio +async def test_git_is_ancestor_invalid_ref(git_repo): + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_IS_ANCESTOR, + path=git_repo.folder_path, + maybe_ancestor_rev="invalid1", + descendant_rev="invalid2", + ) + ) + assert p.returncode == 128 + assert p.stderr.decode().strip() == "fatal: Not a valid object name invalid1" + + +@pytest.mark.asyncio +async def test_git_check_if_module_exists_true(git_repo): + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_CHECK_IF_MODULE_EXISTS, + path=git_repo.folder_path, + rev="fb99eb7d2d5bed514efc98fe6686b368f8425745", + module_name="mycog", + ) + ) + assert p.returncode == 0 + + +@pytest.mark.asyncio +async def test_git_check_if_module_exists_false(git_repo): + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_CHECK_IF_MODULE_EXISTS, + path=git_repo.folder_path, + rev="a7120330cc179396914e0d6af80cfa282adc124b", + module_name="mycog", + ) + ) + assert p.returncode == 128 + assert p.stderr.decode().strip() == ( + "fatal: Not a valid object name a7120330cc179396914e0d6af80cfa282adc124b:mycog/__init__.py" + ) + + +@pytest.mark.asyncio +async def test_git_find_last_occurrence_existent(git_repo): + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_GET_LAST_MODULE_OCCURRENCE_COMMIT, + path=git_repo.folder_path, + descendant_rev="2db662c1d341b1db7d225ccc1af4019ba5228c70", + module_name="mycog", + ) + ) + assert p.returncode == 0 + # the command gives a commit after last occurrence + assert 
p.stdout.decode().strip() == "a7120330cc179396914e0d6af80cfa282adc124b" + + +@pytest.mark.asyncio +async def test_git_find_last_occurrence_non_existent(git_repo): + p = await git_repo._run( + ProcessFormatter().format( + git_repo.GIT_GET_LAST_MODULE_OCCURRENCE_COMMIT, + path=git_repo.folder_path, + descendant_rev="c950fc05a540dd76b944719c2a3302da2e2f3090", + module_name="mycog", + ) + ) + assert p.returncode == 0 + assert p.stdout.decode().strip() == "" diff --git a/tests/cogs/downloader/test_installable.py b/tests/cogs/downloader/test_installable.py index 730ae9bcb..825945baf 100644 --- a/tests/cogs/downloader/test_installable.py +++ b/tests/cogs/downloader/test_installable.py @@ -51,8 +51,8 @@ def test_repo_name(installable): assert installable.repo_name == "test_repo" -def test_serialization(installable): - data = installable.to_json() - cog_name = data["cog_name"] +def test_serialization(installed_cog): + data = installed_cog.to_json() + cog_name = data["module_name"] - assert cog_name == "test_cog" + assert cog_name == "test_installed_cog" diff --git a/tests/conftest.py b/tests/conftest.py index 11d03fb88..7810e142f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,8 +3,11 @@ import os import pytest +from redbot import _update_event_loop_policy from redbot.core import drivers, data_manager +_update_event_loop_policy() + @pytest.fixture(scope="session") def event_loop(request): diff --git a/tools/edit_testrepo.py b/tools/edit_testrepo.py new file mode 100644 index 000000000..1befef09f --- /dev/null +++ b/tools/edit_testrepo.py @@ -0,0 +1,172 @@ +#!/usr/bin/env python3.7 +"""Script to edit test repo used by Downloader git integration tests. + +This script aims to help update the human-readable version of repo +used for git integration tests in ``redbot/tests/downloader_testrepo.export`` +by exporting/importing it in/from provided directory. + +What this script does +--------------------- +edit_testrepo.py import: + It inits test repo in provided directory, sets up committer data in git config, + imports the repo from ``redbot/tests/downloader_testrepo.export`` using + git's fast-import command and updates repo's working tree. +edit_testrepo.py export: + It exports repo from provided directory into ``redbot/tests/downloader_testrepo.export`` + using git's fast-export. To make the file more useful for developers, + it's called with option that adds extra directive ``original-oid ``, + which while ignored by import, might ease up creating tests without importing the repo. + +Note +---- +Editing `downloader_git_test_repo.export` file manually is strongly discouraged, +especially editing any part of commit directives as that causes a change in the commit's hash. +Another problem devs could encounter when trying to manually edit that file +are editors that will use CRLF instead of LF for new line character(s) and therefore break it. + +Also, if Git ever changes currently used SHA-1 to SHA-256 we will have to +update old hashes with new ones. But it's a small drawback, +when we can have human-readable version of repo. + +Known limitations +----------------- +``git fast-export`` exports commits without GPG signs so this script disables it in repo's config. +This also means devs shouldn't use ``--gpg-sign`` flag in ``git commit`` within the test repo. 
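+
+Typical workflow
+----------------
+A rough sketch of the intended usage; the directory below is just a
+placeholder for any empty working directory::
+
+    python tools/edit_testrepo.py import path/to/workdir
+    # edit the repo in path/to/workdir with regular git commands
+    python tools/edit_testrepo.py export path/to/workdir
+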
+""" +import shlex +import subprocess as sp +from pathlib import Path +from typing import Tuple + +import click + + +MAIN_DIRECTORY = Path(__file__).absolute().parent.parent +TEST_REPO_EXPORT_PTH: Path = MAIN_DIRECTORY / "redbot" / "pytest" / "downloader_testrepo.export" + + +class ClickCustomPath(click.Path): + """Similar to `click.Path` but returns `Path` object instead.""" + + def convert(self, value, param, ctx): + path_string = super().convert(value, param, ctx) + return Path(path_string) + + +class EmptyDirectory(ClickCustomPath): + """Similar to `ClickCustomPath`, but only allows empty or non-existent directories. + Unlike `ClickCustomPath`, this type doesn't accept + 'file_okay', 'dir_okay' and 'readable' keyword arguments. + """ + + def __init__(self, **kwargs): + super().__init__(readable=True, dir_okay=True, file_okay=False, **kwargs) + + def convert(self, value, param, ctx): + path = super().convert(value, param, ctx) + if path.exists() and next(path.glob("*"), None) is not None: + self.fail(f'Directory "{str(path)}" is not empty!') + return path + + +class GitRepoDirectory(ClickCustomPath): + """Similar to `ClickCustomPath`, but only allows git repo directories. + Unlike `ClickCustomPath`, this type doesn't accept + 'file_okay', 'dir_okay' and 'readable' keyword arguments. + """ + + def __init__(self, **kwargs): + super().__init__(readable=True, dir_okay=True, file_okay=False, **kwargs) + + def convert(self, value, param, ctx): + path = super().convert(value, param, ctx) + git_path = path / ".git" + if not git_path.exists(): + self.fail(f"A git repo does not exist at path: {str(path)}") + return path + + +@click.group() +def cli(): + """Downloader test repo commands.""" + + +@cli.command(name="init", short_help="Init a new test repo in chosen directory.") +@click.argument("destination", type=EmptyDirectory(writable=True, resolve_path=True)) +def git_init(destination: Path): + """Init a new test repo in chosen directory. This might be useful + if someone will ever want to make a completely new test repo without importing it.""" + init_test_repo(destination) + click.echo(f'New test repo successfully initialized at "{str(destination)}".') + + +@cli.command(name="import", short_help="Import test repo into chosen directory.") +@click.argument("destination", type=EmptyDirectory(writable=True, resolve_path=True)) +def git_import(destination: Path): + """Import test repo into chosen directory.""" + if not TEST_REPO_EXPORT_PTH.is_file(): + raise click.ClickException(f'File "{str(TEST_REPO_EXPORT_PTH)}" can\'t be found.') + git_dirparams = init_test_repo(destination) + + fast_import = sp.Popen((*git_dirparams, "fast-import", "--quiet"), stdin=sp.PIPE) + with TEST_REPO_EXPORT_PTH.open(mode="rb") as f: + fast_import.communicate(f.read()) + return_code = fast_import.wait() + if return_code: + raise click.ClickException(f"git fast-import failed with code {return_code}") + + _run((*git_dirparams, "reset", "--hard")) + click.echo( + f'Test repo successfully imported at "{str(destination)}"\n' + 'When you\'ll update it, use "edit_testrepo.py export" to update test repo file.' 
+ ) + + +@cli.command(name="export", short_help="Export repo to test repo file.") +@click.argument("source", type=GitRepoDirectory(resolve_path=True)) +@click.option("--yes", is_flag=True) +def git_export(source: Path, yes: bool): + if not yes and TEST_REPO_EXPORT_PTH.is_file(): + click.confirm( + f"Test repo file ({str(TEST_REPO_EXPORT_PTH)}) already exists, " + "are you sure you want to replace it?", + abort=True, + ) + p = _run( + ("git", "-C", str(source), "fast-export", "--all", "--show-original-ids"), stdout=sp.PIPE + ) + with TEST_REPO_EXPORT_PTH.open(mode="wb") as f: + f.write( + b"# THIS FILE SHOULDN'T BE EDITED MANUALLY. " + b"USE `edit_testrepo.py` TOOL TO UPDATE THE REPO.\n" + p.stdout + ) + click.echo("Test repo successfully exported.") + + +def init_test_repo(destination: Path): + destination.mkdir(exist_ok=True) + git_dirparams = ("git", "-C", str(destination)) + init_commands: Tuple[Tuple[str, ...], ...] = ( + (*git_dirparams, "init"), + (*git_dirparams, "config", "--local", "user.name", "Cog-Creators"), + (*git_dirparams, "config", "--local", "user.email", "cog-creators@example.org"), + (*git_dirparams, "config", "--local", "commit.gpgSign", "false"), + ) + + for args in init_commands: + _run(args) + return git_dirparams + + +def _run(args, stderr=None, stdout=sp.DEVNULL) -> sp.CompletedProcess: + try: + return sp.run(args, stderr=stderr, stdout=stdout, check=True) + except sp.CalledProcessError as exc: + cmd = " ".join(map(lambda c: shlex.quote(str(c)), exc.cmd)) + raise click.ClickException( + f"The following command failed with code {exc.returncode}:\n {cmd}" + ) + + +if __name__ == "__main__": + cli() diff --git a/tools/primary_deps.ini b/tools/primary_deps.ini index a91aa2188..0405d74c3 100644 --- a/tools/primary_deps.ini +++ b/tools/primary_deps.ini @@ -45,3 +45,4 @@ test = pylint pytest pytest-asyncio + pytest-mock From 078210b54c27a0598acfaed5f6ea6eb19b97f8b5 Mon Sep 17 00:00:00 2001 From: Bakersbakebread <29239704+Bakersbakebread@users.noreply.github.com> Date: Fri, 8 Nov 2019 14:43:21 +0000 Subject: [PATCH 09/41] change to_check.guild to getattr() (#3101) * change to_check.guild to getattr() * add webhook check * changelog * Update changelog.d/3100.bugfix.rst Co-Authored-By: Michael H --- changelog.d/3100.bugfix.rst | 1 + redbot/core/bot.py | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) create mode 100644 changelog.d/3100.bugfix.rst diff --git a/changelog.d/3100.bugfix.rst b/changelog.d/3100.bugfix.rst new file mode 100644 index 000000000..53e3797b4 --- /dev/null +++ b/changelog.d/3100.bugfix.rst @@ -0,0 +1 @@ +fix ``is_automod_immune`` handling of guild check and support for checking webhooks diff --git a/redbot/core/bot.py b/redbot/core/bot.py index 796ed7a27..a7becdf73 100644 --- a/redbot/core/bot.py +++ b/redbot/core/bot.py @@ -653,7 +653,7 @@ class RedBase(commands.GroupMixin, commands.bot.BotBase, RPCMixin): # pylint: d ``True`` if immune """ - guild = to_check.guild + guild = getattr(to_check, "guild", None) if not guild: return False @@ -666,7 +666,8 @@ class RedBase(commands.GroupMixin, commands.bot.BotBase, RPCMixin): # pylint: d except AttributeError: # webhook messages are a user not member, # cheaper than isinstance - return True # webhooks require significant permissions to enable. + if author.bot and author.discriminator == "0000": + return True # webhooks require significant permissions to enable. 
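+                # Webhook authors surface as bot users with the "0000"
+                # discriminator, so only genuine webhook messages get the
+                # automatic immunity above.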
else: ids_to_check.append(author.id) From 1651de13051ee838325262e7ce6cafa808644bc0 Mon Sep 17 00:00:00 2001 From: jack1142 <6032823+jack1142@users.noreply.github.com> Date: Fri, 8 Nov 2019 18:07:33 +0100 Subject: [PATCH 10/41] [Core] Add `redbot --edit` cli flag (replacement for `[p]set owner&token`) (#3060) * feat(core): add `redbot --edit` cli flag * chore(changelog): add towncrier entries * refactor(core): clean up `redbot --edit`, few fixes * fix(core): prepare for review * chore(changelog): update towncrier entry to use double ticks :p * style(black): ugh, Sinbad's git hook isn't perfect (using worktrees) * fix: Address Flame's first review --- changelog.d/3060.enhance.rst | 1 + changelog.d/3060.feature.rst | 1 + changelog.d/3060.fix.rst | 1 + redbot/__main__.py | 173 +++++++++++++++++++++++++++++++++-- redbot/core/cli.py | 71 ++++++++++++-- redbot/launcher.py | 2 +- redbot/setup.py | 83 ++++------------- 7 files changed, 252 insertions(+), 80 deletions(-) create mode 100644 changelog.d/3060.enhance.rst create mode 100644 changelog.d/3060.feature.rst create mode 100644 changelog.d/3060.fix.rst diff --git a/changelog.d/3060.enhance.rst b/changelog.d/3060.enhance.rst new file mode 100644 index 000000000..c3716ebed --- /dev/null +++ b/changelog.d/3060.enhance.rst @@ -0,0 +1 @@ +All ``y/n`` confirmations in cli commands are now unified. \ No newline at end of file diff --git a/changelog.d/3060.feature.rst b/changelog.d/3060.feature.rst new file mode 100644 index 000000000..1bee12c0d --- /dev/null +++ b/changelog.d/3060.feature.rst @@ -0,0 +1 @@ +Added ``redbot --edit`` cli flag that can be used to edit instance name, token, owner and datapath. \ No newline at end of file diff --git a/changelog.d/3060.fix.rst b/changelog.d/3060.fix.rst new file mode 100644 index 000000000..8a1858999 --- /dev/null +++ b/changelog.d/3060.fix.rst @@ -0,0 +1 @@ +Arguments ``--co-owner`` and ``--load-cogs`` now properly require at least one argument to be passed. 
\ No newline at end of file diff --git a/redbot/__main__.py b/redbot/__main__.py index 2947a9edc..8f61504bb 100644 --- a/redbot/__main__.py +++ b/redbot/__main__.py @@ -6,7 +6,10 @@ import asyncio import json import logging import os +import shutil import sys +from copy import deepcopy +from pathlib import Path import discord @@ -23,6 +26,7 @@ from redbot.core.cog_manager import CogManagerUI from redbot.core.global_checks import init_global_checks from redbot.core.events import init_events from redbot.core.cli import interactive_config, confirm, parse_cli_flags +from redbot.setup import get_data_dir, get_name, save_config from redbot.core.core_commands import Core from redbot.core.dev_commands import Dev from redbot.core import __version__, modlog, bank, data_manager, drivers @@ -48,6 +52,12 @@ async def _get_prefix_and_token(red, indict): indict["prefix"] = await red._config.prefix() +def _get_instance_names(): + with data_manager.config_file.open(encoding="utf-8") as fs: + data = json.load(fs) + return sorted(data.keys()) + + def list_instances(): if not data_manager.config_file.exists(): print( @@ -56,15 +66,157 @@ def list_instances(): ) sys.exit(1) else: - with data_manager.config_file.open(encoding="utf-8") as fs: - data = json.load(fs) text = "Configured Instances:\n\n" - for instance_name in sorted(data.keys()): + for instance_name in _get_instance_names(): text += "{}\n".format(instance_name) print(text) sys.exit(0) +def edit_instance(red, cli_flags): + no_prompt = cli_flags.no_prompt + token = cli_flags.token + owner = cli_flags.owner + old_name = cli_flags.instance_name + new_name = cli_flags.edit_instance_name + data_path = cli_flags.edit_data_path + copy_data = cli_flags.copy_data + confirm_overwrite = cli_flags.overwrite_existing_instance + + if data_path is None and copy_data: + print("--copy-data can't be used without --edit-data-path argument") + sys.exit(1) + if new_name is None and confirm_overwrite: + print("--overwrite-existing-instance can't be used without --edit-instance-name argument") + sys.exit(1) + if no_prompt and all(to_change is None for to_change in (token, owner, new_name, data_path)): + print( + "No arguments to edit were provided. Available arguments (check help for more " + "information): --edit-instance-name, --edit-data-path, --copy-data, --owner, --token" + ) + sys.exit(1) + + _edit_token(red, token, no_prompt) + _edit_owner(red, owner, no_prompt) + + data = deepcopy(data_manager.basic_config) + name = _edit_instance_name(old_name, new_name, confirm_overwrite, no_prompt) + _edit_data_path(data, data_path, copy_data, no_prompt) + + save_config(name, data) + if old_name != name: + save_config(old_name, {}, remove=True) + + +def _edit_token(red, token, no_prompt): + if token: + if not len(token) >= 50: + print( + "The provided token doesn't look a valid Discord bot token." + " Instance's token will remain unchanged.\n" + ) + return + red.loop.run_until_complete(red._config.token.set(token)) + elif not no_prompt and confirm("Would you like to change instance's token?", default=False): + interactive_config(red, False, True, print_header=False) + print("Token updated.\n") + + +def _edit_owner(red, owner, no_prompt): + if owner: + if not (15 <= len(str(owner)) <= 21): + print( + "The provided owner id doesn't look like a valid Discord user id." + " Instance's owner will remain unchanged." 
+ ) + return + red.loop.run_until_complete(red._config.owner.set(owner)) + elif not no_prompt and confirm("Would you like to change instance's owner?", default=False): + print( + "Remember:\n" + "ONLY the person who is hosting Red should be owner." + " This has SERIOUS security implications." + " The owner can access any data that is present on the host system.\n" + ) + if confirm("Are you sure you want to change instance's owner?", default=False): + print("Please enter a Discord user id for new owner:") + while True: + owner_id = input("> ").strip() + if not (15 <= len(owner_id) <= 21 and owner_id.isdecimal()): + print("That doesn't look like a valid Discord user id.") + continue + owner_id = int(owner_id) + red.loop.run_until_complete(red._config.owner.set(owner_id)) + print("Owner updated.") + break + else: + print("Instance's owner will remain unchanged.") + print() + + +def _edit_instance_name(old_name, new_name, confirm_overwrite, no_prompt): + if new_name: + name = new_name + if name in _get_instance_names() and not confirm_overwrite: + name = old_name + print( + "An instance with this name already exists.\n" + "If you want to remove the existing instance and replace it with this one," + " run this command with --overwrite-existing-instance flag." + ) + elif not no_prompt and confirm("Would you like to change the instance name?", default=False): + name = get_name() + if name in _get_instance_names(): + print( + "WARNING: An instance already exists with this name. " + "Continuing will overwrite the existing instance config." + ) + if not confirm( + "Are you absolutely certain you want to continue with this instance name?", + default=False, + ): + print("Instance name will remain unchanged.") + name = old_name + else: + print("Instance name updated.") + print() + else: + name = old_name + return name + + +def _edit_data_path(data, data_path, copy_data, no_prompt): + # This modifies the passed dict. + if data_path: + data["DATA_PATH"] = data_path + if copy_data and not _copy_data(data): + print("Can't copy data to non-empty location. Data location will remain unchanged.") + data["DATA_PATH"] = data_manager.basic_config["DATA_PATH"] + elif not no_prompt and confirm("Would you like to change the data location?", default=False): + data["DATA_PATH"] = get_data_dir() + if confirm( + "Do you want to copy the data from old location?", default=True + ) and not _copy_data(data): + print("Can't copy the data to non-empty location.") + if not confirm("Do you still want to use the new data location?"): + data["DATA_PATH"] = data_manager.basic_config["DATA_PATH"] + print("Data location will remain unchanged.") + else: + print("Data location updated.") + + +def _copy_data(data): + if Path(data["DATA_PATH"]).exists(): + if any(os.scandir(data["DATA_PATH"])): + return False + else: + # this is needed because copytree doesn't work when destination folder exists + # Python 3.8 has `dirs_exist_ok` option for that + os.rmdir(data["DATA_PATH"]) + shutil.copytree(data_manager.basic_config["DATA_PATH"], data["DATA_PATH"]) + return True + + async def sigterm_handler(red, log): log.info("SIGTERM received. 
Quitting...") await red.shutdown(restart=False) @@ -79,7 +231,7 @@ def main(): print(description) print("Current Version: {}".format(__version__)) sys.exit(0) - elif not cli_flags.instance_name and not cli_flags.no_instance: + elif not cli_flags.instance_name and (not cli_flags.no_instance or cli_flags.edit): print("Error: No instance name was provided!") sys.exit(1) if cli_flags.no_instance: @@ -108,6 +260,16 @@ def main(): cli_flags=cli_flags, description=description, dm_help=None, fetch_offline_members=True ) loop.run_until_complete(red._maybe_update_config()) + + if cli_flags.edit: + try: + edit_instance(red, cli_flags) + except (KeyboardInterrupt, EOFError): + print("Aborted!") + finally: + loop.run_until_complete(driver_cls.teardown()) + sys.exit(0) + init_global_checks(red) init_events(red, cli_flags) @@ -154,8 +316,7 @@ def main(): log.critical("This token doesn't seem to be valid.") db_token = loop.run_until_complete(red._config.token()) if db_token and not cli_flags.no_prompt: - print("\nDo you want to reset the token? (y/n)") - if confirm("> "): + if confirm("\nDo you want to reset the token?"): loop.run_until_complete(red._config.token.set("")) print("Token has been reset.") except KeyboardInterrupt: diff --git a/redbot/core/cli.py b/redbot/core/cli.py index 539c5b9d3..b8831f6a8 100644 --- a/redbot/core/cli.py +++ b/redbot/core/cli.py @@ -1,17 +1,42 @@ import argparse import asyncio import logging +import sys +from typing import Optional -def confirm(m=""): - return input(m).lower().strip() in ("y", "yes") +def confirm(text: str, default: Optional[bool] = None) -> bool: + if default is None: + options = "y/n" + elif default is True: + options = "Y/n" + elif default is False: + options = "y/N" + else: + raise TypeError(f"expected bool, not {type(default)}") + + while True: + try: + value = input(f"{text}: [{options}] ").lower().strip() + except (KeyboardInterrupt, EOFError): + print("\nAborted!") + sys.exit(1) + if value in ("y", "yes"): + return True + if value in ("n", "no"): + return False + if value == "": + if default is not None: + return default + print("Error: invalid input") -def interactive_config(red, token_set, prefix_set): +def interactive_config(red, token_set, prefix_set, *, print_header=True): loop = asyncio.get_event_loop() token = "" - print("Red - Discord Bot | Configuration process\n") + if print_header: + print("Red - Discord Bot | Configuration process\n") if not token_set: print("Please enter a valid token:") @@ -35,8 +60,7 @@ def interactive_config(red, token_set, prefix_set): while not prefix: prefix = input("Prefix> ") if len(prefix) > 10: - print("Your prefix seems overly long. Are you sure that it's correct? (y/n)") - if not confirm("> "): + if not confirm("Your prefix seems overly long. Are you sure that it's correct?"): prefix = "" if prefix: loop.run_until_complete(red._config.prefix.set([prefix])) @@ -54,6 +78,37 @@ def parse_cli_flags(args): action="store_true", help="List all instance names setup with 'redbot-setup'", ) + parser.add_argument( + "--edit", + action="store_true", + help="Edit the instance. This can be done without console interaction " + "by passing --no-prompt and arguments that you want to change (available arguments: " + "--edit-instance-name, --edit-data-path, --copy-data, --owner, --token).", + ) + parser.add_argument( + "--edit-instance-name", + type=str, + help="New name for the instance. 
This argument only works with --edit argument passed.", + ) + parser.add_argument( + "--overwrite-existing-instance", + action="store_true", + help="Confirm overwriting of existing instance when changing name." + " This argument only works with --edit argument passed.", + ) + parser.add_argument( + "--edit-data-path", + type=str, + help=( + "New data path for the instance. This argument only works with --edit argument passed." + ), + ) + parser.add_argument( + "--copy-data", + action="store_true", + help="Copy data from old location. This argument only works " + "with --edit and --edit-data-path arguments passed.", + ) parser.add_argument( "--owner", type=int, @@ -65,7 +120,7 @@ def parse_cli_flags(args): "--co-owner", type=int, default=[], - nargs="*", + nargs="+", help="ID of a co-owner. Only people who have access " "to the system that is hosting Red should be " "co-owners, as this gives them complete access " @@ -87,7 +142,7 @@ def parse_cli_flags(args): parser.add_argument( "--load-cogs", type=str, - nargs="*", + nargs="+", help="Force loading specified cogs from the installed packages. " "Can be used with the --no-cogs flag to load these cogs exclusively.", ) diff --git a/redbot/launcher.py b/redbot/launcher.py index 3c2628168..7528c9e3a 100644 --- a/redbot/launcher.py +++ b/redbot/launcher.py @@ -264,7 +264,7 @@ async def reset_red(): print("Cancelling...") return - if confirm("\nDo you want to create a backup for an instance? (y/n) "): + if confirm("\nDo you want to create a backup for an instance?"): for index, instance in instances.items(): print("\nRemoving {}...".format(index)) await create_backup(index) diff --git a/redbot/setup.py b/redbot/setup.py index 2655b0961..4bfb06a4b 100644 --- a/redbot/setup.py +++ b/redbot/setup.py @@ -53,16 +53,6 @@ def save_config(name, data, remove=False): if remove and name in _config: _config.pop(name) else: - if name in _config: - print( - "WARNING: An instance already exists with this name. " - "Continuing will overwrite the existing instance config." - ) - if not click.confirm( - "Are you absolutely certain you want to continue?", default=False - ): - print("Not continuing") - sys.exit(0) _config[name] = data with config_file.open("w", encoding="utf-8") as fs: @@ -73,12 +63,9 @@ def get_data_dir(): default_data_dir = Path(appdir.user_data_dir) print( - "Hello! Before we begin the full configuration process we need to" - " gather some initial information about where you'd like us" - " to store your bot's data. We've attempted to figure out a" - " sane default data location which is printed below. If you don't" - " want to change this default please press [ENTER], otherwise" - " input your desired data location." + "We've attempted to figure out a sane default data location which is printed below." + " If you don't want to change this default please press [ENTER]," + " otherwise input your desired data location." ) print() print("Default: {}".format(default_data_dir)) @@ -104,7 +91,7 @@ def get_data_dir(): if not click.confirm("Please confirm", default=True): print("Please start the process over.") sys.exit(0) - return default_data_dir + return str(default_data_dir.resolve()) def get_storage_type(): @@ -147,10 +134,15 @@ def basic_setup(): :return: """ + print( + "Hello! Before we begin the full configuration process we need to" + " gather some initial information about where you'd like us" + " to store your bot's data." 
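The new ``confirm`` helper in ``redbot/core/cli.py`` above is what unifies the ``y/n`` prompts mentioned in the changelog. A minimal usage sketch — the prompt text mirrors the token-reset call in ``__main__.py``, and the ``default=False`` value is illustrative rather than taken from the patch:

.. code-block:: python

    from redbot.core.cli import confirm

    # Renders as "Do you want to reset the token?: [y/N] ".
    # Enter accepts the default; anything other than y/yes/n/no re-prompts,
    # and Ctrl+C / EOF prints "Aborted!" and exits.
    if confirm("Do you want to reset the token?", default=False):
        print("Token has been reset.")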
+ ) default_data_dir = get_data_dir() default_dirs = deepcopy(data_manager.basic_config_default) - default_dirs["DATA_PATH"] = str(default_data_dir.resolve()) + default_dirs["DATA_PATH"] = default_data_dir storage = get_storage_type() @@ -161,6 +153,14 @@ def basic_setup(): default_dirs["STORAGE_DETAILS"] = driver_cls.get_config_details() name = get_name() + if name in instance_data: + print( + "WARNING: An instance already exists with this name. " + "Continuing will overwrite the existing instance config." + ) + if not click.confirm("Are you absolutely certain you want to continue?", default=False): + print("Not continuing") + sys.exit(0) save_config(name, default_dirs) print() @@ -236,53 +236,6 @@ async def mongov1_to_json() -> Dict[str, Any]: return {} -async def edit_instance(): - _instance_list = load_existing_config() - if not _instance_list: - print("No instances have been set up!") - return - - print( - "You have chosen to edit an instance. The following " - "is a list of instances that currently exist:\n" - ) - for instance in _instance_list.keys(): - print("{}\n".format(instance)) - print("Please select one of the above by entering its name") - selected = input("> ") - - if selected not in _instance_list.keys(): - print("That isn't a valid instance!") - return - _instance_data = _instance_list[selected] - default_dirs = deepcopy(data_manager.basic_config_default) - - current_data_dir = Path(_instance_data["DATA_PATH"]) - print("You have selected '{}' as the instance to modify.".format(selected)) - if not click.confirm("Please confirm", default=True): - print("Ok, we will not continue then.") - return - - print("Ok, we will continue on.") - print() - if click.confirm("Would you like to change the instance name?", default=False): - name = get_name() - else: - name = selected - - if click.confirm("Would you like to change the data location?", default=False): - default_data_dir = get_data_dir() - default_dirs["DATA_PATH"] = str(default_data_dir.resolve()) - else: - default_dirs["DATA_PATH"] = str(current_data_dir.resolve()) - - if name != selected: - save_config(selected, {}, remove=True) - save_config(name, default_dirs) - - print("Your basic configuration has been edited") - - async def create_backup(instance: str) -> None: data_manager.load_basic_configuration(instance) backend_type = get_current_backend(instance) From dd899c804acfc9941ac8cc2285ed3bfb31f8490b Mon Sep 17 00:00:00 2001 From: Michael H Date: Fri, 8 Nov 2019 14:48:04 -0500 Subject: [PATCH 11/41] Remove the mongo driver (#3099) * kills mongo * changelog * more refeences to mongo needed to go --- .readthedocs.yml | 1 - .travis.yml | 4 - changelog.d/3099.breaking.rst | 1 + docs/framework_config.rst | 4 - docs/install_linux_mac.rst | 8 +- docs/install_windows.rst | 6 - redbot/core/drivers/__init__.py | 21 +- redbot/core/drivers/json.py | 2 +- redbot/core/drivers/mongo.py | 448 -------------------------------- redbot/core/events.py | 2 - redbot/launcher.py | 15 +- redbot/setup.py | 63 +---- setup.cfg | 4 - tests/conftest.py | 14 +- tools/dev-requirements.txt | 2 +- tools/primary_deps.ini | 3 - tox.ini | 19 +- 17 files changed, 25 insertions(+), 592 deletions(-) create mode 100644 changelog.d/3099.breaking.rst delete mode 100644 redbot/core/drivers/mongo.py diff --git a/.readthedocs.yml b/.readthedocs.yml index 7634080ca..a8be59ce1 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -14,4 +14,3 @@ python: path: . 
extra_requirements: - docs - - mongo diff --git a/.travis.yml b/.travis.yml index a1748288b..282bef8df 100644 --- a/.travis.yml +++ b/.travis.yml @@ -27,10 +27,6 @@ jobs: postgresql: "10" before_script: - psql -c 'create database red_db;' -U postgres - - env: TOXENV=mongo - services: mongodb - before_script: - - mongo red_db --eval 'db.createUser({user:"red",pwd:"red",roles:["readWrite"]});' # These jobs only occur on tag creation if the prior ones succeed - stage: PyPi Deployment if: tag IS present diff --git a/changelog.d/3099.breaking.rst b/changelog.d/3099.breaking.rst new file mode 100644 index 000000000..80a92bf70 --- /dev/null +++ b/changelog.d/3099.breaking.rst @@ -0,0 +1 @@ +Removes the mongo driver. \ No newline at end of file diff --git a/docs/framework_config.rst b/docs/framework_config.rst index d18aed63b..b561b793c 100644 --- a/docs/framework_config.rst +++ b/docs/framework_config.rst @@ -429,7 +429,3 @@ JSON Driver .. autoclass:: redbot.core.drivers.JsonDriver :members: -Mongo Driver -^^^^^^^^^^^^ -.. autoclass:: redbot.core.drivers.MongoDriver - :members: diff --git a/docs/install_linux_mac.rst b/docs/install_linux_mac.rst index fac383b88..8377e84bc 100644 --- a/docs/install_linux_mac.rst +++ b/docs/install_linux_mac.rst @@ -265,18 +265,12 @@ Choose one of the following commands to install Red. python3.7 -m pip install --user -U Red-DiscordBot -To install without MongoDB support: +To install without additional config backend support: .. code-block:: none python3.7 -m pip install -U Red-DiscordBot -Or, to install with MongoDB support: - -.. code-block:: none - - python3.7 -m pip install -U Red-DiscordBot[mongo] - Or, to install with PostgreSQL support: .. code-block:: none diff --git a/docs/install_windows.rst b/docs/install_windows.rst index 5c4862958..f3bcd156a 100644 --- a/docs/install_windows.rst +++ b/docs/install_windows.rst @@ -76,12 +76,6 @@ Installing Red python -m pip install -U Red-DiscordBot - * With MongoDB support: - - .. code-block:: none - - python -m pip install -U Red-DiscordBot[mongo] - * With PostgreSQL support: .. code-block:: none diff --git a/redbot/core/drivers/__init__.py b/redbot/core/drivers/__init__.py index f34727c86..125714b74 100644 --- a/redbot/core/drivers/__init__.py +++ b/redbot/core/drivers/__init__.py @@ -4,7 +4,6 @@ from typing import Optional, Type from .. import data_manager from .base import IdentifierData, BaseDriver, ConfigCategory from .json import JsonDriver -from .mongo import MongoDriver from .postgres import PostgresDriver __all__ = [ @@ -13,7 +12,6 @@ __all__ = [ "IdentifierData", "BaseDriver", "JsonDriver", - "MongoDriver", "PostgresDriver", "BackendType", ] @@ -21,16 +19,13 @@ __all__ = [ class BackendType(enum.Enum): JSON = "JSON" - MONGO = "MongoDBV2" - MONGOV1 = "MongoDB" POSTGRES = "Postgres" + # Dead drivrs below retained for error handling. + MONGOV1 = "MongoDB" + MONGO = "MongoDBV2" -_DRIVER_CLASSES = { - BackendType.JSON: JsonDriver, - BackendType.MONGO: MongoDriver, - BackendType.POSTGRES: PostgresDriver, -} +_DRIVER_CLASSES = {BackendType.JSON: JsonDriver, BackendType.POSTGRES: PostgresDriver} def get_driver_class(storage_type: Optional[BackendType] = None) -> Type[BaseDriver]: @@ -86,7 +81,7 @@ def get_driver( Raises ------ RuntimeError - If the storage type is MongoV1 or invalid. + If the storage type is MongoV1, Mongo, or invalid. 
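The Mongo members are kept in ``BackendType`` only so that an instance whose config still names a Mongo backend fails with a clear message rather than an unrecognised-value error. A sketch of that path, assuming ``get_driver_class`` raises ``ValueError`` for backends missing from ``_DRIVER_CLASSES`` (as the ``except ValueError`` in ``get_driver`` implies):

.. code-block:: python

    from redbot.core import drivers

    try:
        drivers.get_driver_class(drivers.BackendType.MONGO)
    except ValueError:
        # get_driver() re-raises this as a RuntimeError telling the user to
        # convert the instance to JSON, since Mongo support was removed in 3.2.
        pass

Instances still on Mongo are expected to convert before upgrading, e.g. with ``redbot-setup convert <instance> json`` run while still on a 3.1 install (see the ``convert`` command change later in this patch).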
""" if storage_type is None: @@ -98,12 +93,10 @@ def get_driver( try: driver_cls: Type[BaseDriver] = get_driver_class(storage_type) except ValueError: - if storage_type == BackendType.MONGOV1: + if storage_type in (BackendType.MONGOV1, BackendType.MONGO): raise RuntimeError( "Please convert to JSON first to continue using the bot." - " This is a required conversion prior to using the new Mongo driver." - " This message will be updated with a link to the update docs once those" - " docs have been created." + "Mongo support was removed in 3.2." ) from None else: raise RuntimeError(f"Invalid driver type: '{storage_type}'") from None diff --git a/redbot/core/drivers/json.py b/redbot/core/drivers/json.py index bd66c84f1..60ee86ae5 100644 --- a/redbot/core/drivers/json.py +++ b/redbot/core/drivers/json.py @@ -221,7 +221,7 @@ def _save_json(path: Path, data: Dict[str, Any]) -> None: On windows, it is not available in entirety. If a windows user ends up with tons of temp files, they should consider hosting on - something POSIX compatible, or using the mongo backend instead. + something POSIX compatible, or using a different backend instead. Most users wont encounter this issue, but with high write volumes, without the fsync on both the temp file, and after the replace on the directory, diff --git a/redbot/core/drivers/mongo.py b/redbot/core/drivers/mongo.py deleted file mode 100644 index dadb23996..000000000 --- a/redbot/core/drivers/mongo.py +++ /dev/null @@ -1,448 +0,0 @@ -import contextlib -import itertools -import re -from getpass import getpass -from typing import Match, Pattern, Tuple, Optional, AsyncIterator, Any, Dict, Iterator, List -from urllib.parse import quote_plus - -try: - # pylint: disable=import-error - import pymongo.errors - import motor.core - import motor.motor_asyncio -except ModuleNotFoundError: - motor = None - pymongo = None - -from .. import errors -from .base import BaseDriver, IdentifierData - -__all__ = ["MongoDriver"] - - -class MongoDriver(BaseDriver): - """ - Subclass of :py:class:`.BaseDriver`. 
- """ - - _conn: Optional["motor.motor_asyncio.AsyncIOMotorClient"] = None - - @classmethod - async def initialize(cls, **storage_details) -> None: - if motor is None: - raise errors.MissingExtraRequirements( - "Red must be installed with the [mongo] extra to use the MongoDB driver" - ) - uri = storage_details.get("URI", "mongodb") - host = storage_details["HOST"] - port = storage_details["PORT"] - user = storage_details["USERNAME"] - password = storage_details["PASSWORD"] - database = storage_details.get("DB_NAME", "default_db") - - if port is 0: - ports = "" - else: - ports = ":{}".format(port) - - if user is not None and password is not None: - url = "{}://{}:{}@{}{}/{}".format( - uri, quote_plus(user), quote_plus(password), host, ports, database - ) - else: - url = "{}://{}{}/{}".format(uri, host, ports, database) - - cls._conn = motor.motor_asyncio.AsyncIOMotorClient(url, retryWrites=True) - - @classmethod - async def teardown(cls) -> None: - if cls._conn is not None: - cls._conn.close() - - @staticmethod - def get_config_details(): - while True: - uri = input("Enter URI scheme (mongodb or mongodb+srv): ") - if uri is "": - uri = "mongodb" - - if uri in ["mongodb", "mongodb+srv"]: - break - else: - print("Invalid URI scheme") - - host = input("Enter host address: ") - if uri is "mongodb": - port = int(input("Enter host port: ")) - else: - port = 0 - - admin_uname = input("Enter login username: ") - admin_password = getpass("Enter login password: ") - - db_name = input("Enter mongodb database name: ") - - if admin_uname == "": - admin_uname = admin_password = None - - ret = { - "HOST": host, - "PORT": port, - "USERNAME": admin_uname, - "PASSWORD": admin_password, - "DB_NAME": db_name, - "URI": uri, - } - return ret - - @property - def db(self) -> "motor.core.Database": - """ - Gets the mongo database for this cog's name. - - :return: - PyMongo Database object. - """ - return self._conn.get_database() - - def get_collection(self, category: str) -> "motor.core.Collection": - """ - Gets a specified collection within the PyMongo database for this cog. - - Unless you are doing custom stuff ``category`` should be one of the class - attributes of :py:class:`core.config.Config`. - - :param str category: - The group identifier of a category. - :return: - PyMongo collection object. 
- """ - return self.db[self.cog_name][category] - - @staticmethod - def get_primary_key(identifier_data: IdentifierData) -> Tuple[str, ...]: - # noinspection PyTypeChecker - return identifier_data.primary_key - - async def rebuild_dataset( - self, identifier_data: IdentifierData, cursor: "motor.motor_asyncio.AsyncIOMotorCursor" - ): - ret = {} - async for doc in cursor: - pkeys = doc["_id"]["RED_primary_key"] - del doc["_id"] - doc = self._unescape_dict_keys(doc) - if len(pkeys) == 0: - # Global data - ret.update(**doc) - elif len(pkeys) > 0: - # All other data - partial = ret - for key in pkeys[:-1]: - if key in identifier_data.primary_key: - continue - if key not in partial: - partial[key] = {} - partial = partial[key] - if pkeys[-1] in identifier_data.primary_key: - partial.update(**doc) - else: - partial[pkeys[-1]] = doc - return ret - - async def get(self, identifier_data: IdentifierData): - mongo_collection = self.get_collection(identifier_data.category) - - pkey_filter = self.generate_primary_key_filter(identifier_data) - escaped_identifiers = list(map(self._escape_key, identifier_data.identifiers)) - if len(identifier_data.identifiers) > 0: - proj = {"_id": False, ".".join(escaped_identifiers): True} - - partial = await mongo_collection.find_one(filter=pkey_filter, projection=proj) - else: - # The case here is for partial primary keys like all_members() - cursor = mongo_collection.find(filter=pkey_filter) - partial = await self.rebuild_dataset(identifier_data, cursor) - - if partial is None: - raise KeyError("No matching document was found and Config expects a KeyError.") - - for i in escaped_identifiers: - partial = partial[i] - if isinstance(partial, dict): - return self._unescape_dict_keys(partial) - return partial - - async def set(self, identifier_data: IdentifierData, value=None): - uuid = self._escape_key(identifier_data.uuid) - primary_key = list(map(self._escape_key, self.get_primary_key(identifier_data))) - dot_identifiers = ".".join(map(self._escape_key, identifier_data.identifiers)) - if isinstance(value, dict): - if len(value) == 0: - await self.clear(identifier_data) - return - value = self._escape_dict_keys(value) - mongo_collection = self.get_collection(identifier_data.category) - num_pkeys = len(primary_key) - - if num_pkeys >= identifier_data.primary_key_len: - # We're setting at the document level or below. - dot_identifiers = ".".join(map(self._escape_key, identifier_data.identifiers)) - if dot_identifiers: - update_stmt = {"$set": {dot_identifiers: value}} - else: - update_stmt = {"$set": value} - - try: - await mongo_collection.update_one( - {"_id": {"RED_uuid": uuid, "RED_primary_key": primary_key}}, - update=update_stmt, - upsert=True, - ) - except pymongo.errors.WriteError as exc: - if exc.args and exc.args[0].startswith("Cannot create field"): - # There's a bit of a failing edge case here... - # If we accidentally set the sub-field of an array, and the key happens to be a - # digit, it will successfully set the value in the array, and not raise an - # error. This is different to how other drivers would behave, and could lead to - # unexpected behaviour. - raise errors.CannotSetSubfield - else: - # Unhandled driver exception, should expose. - raise - - else: - # We're setting above the document level. - # Easiest and most efficient thing to do is delete all documents that we're potentially - # replacing, then insert_many(). - # We'll do it in a transaction so we can roll-back in case something goes horribly - # wrong. 
- pkey_filter = self.generate_primary_key_filter(identifier_data) - async with await self._conn.start_session() as session: - with contextlib.suppress(pymongo.errors.CollectionInvalid): - # Collections must already exist when inserting documents within a transaction - await self.db.create_collection(mongo_collection.full_name) - try: - async with session.start_transaction(): - await mongo_collection.delete_many(pkey_filter, session=session) - await mongo_collection.insert_many( - self.generate_documents_to_insert( - uuid, primary_key, value, identifier_data.primary_key_len - ), - session=session, - ) - except pymongo.errors.OperationFailure: - # This DB version / setup doesn't support transactions, so we'll have to use - # a shittier method. - - # The strategy here is to separate the existing documents and the new documents - # into ones to be deleted, ones to be replaced, and new ones to be inserted. - # Then we can do a bulk_write(). - - # This is our list of (filter, new_document) tuples for replacing existing - # documents. The `new_document` should be taken and removed from `value`, so - # `value` only ends up containing documents which need to be inserted. - to_replace: List[Tuple[Dict, Dict]] = [] - - # This is our list of primary key filters which need deleting. They should - # simply be all the primary keys which were part of existing documents but are - # not included in the new documents. - to_delete: List[Dict] = [] - async for document in mongo_collection.find(pkey_filter, session=session): - pkey = document["_id"]["RED_primary_key"] - new_document = value - try: - for pkey_part in pkey[num_pkeys:-1]: - new_document = new_document[pkey_part] - # This document is being replaced - remove it from `value`. - new_document = new_document.pop(pkey[-1]) - except KeyError: - # We've found the primary key of an old document which isn't in the - # updated set of documents - it should be deleted. - to_delete.append({"_id": {"RED_uuid": uuid, "RED_primary_key": pkey}}) - else: - _filter = {"_id": {"RED_uuid": uuid, "RED_primary_key": pkey}} - new_document.update(_filter) - to_replace.append((_filter, new_document)) - - # What's left of `value` should be the new documents needing to be inserted. - to_insert = self.generate_documents_to_insert( - uuid, primary_key, value, identifier_data.primary_key_len - ) - requests = list( - itertools.chain( - (pymongo.DeleteOne(f) for f in to_delete), - (pymongo.ReplaceOne(f, d) for f, d in to_replace), - (pymongo.InsertOne(d) for d in to_insert if d), - ) - ) - # This will pipeline the operations so they all complete quickly. However if - # any of them fail, the rest of them will complete - i.e. this operation is not - # atomic. 
- await mongo_collection.bulk_write(requests, ordered=False) - - def generate_primary_key_filter(self, identifier_data: IdentifierData): - uuid = self._escape_key(identifier_data.uuid) - primary_key = list(map(self._escape_key, self.get_primary_key(identifier_data))) - ret = {"_id.RED_uuid": uuid} - if len(identifier_data.identifiers) > 0: - ret["_id.RED_primary_key"] = primary_key - elif len(identifier_data.primary_key) > 0: - for i, key in enumerate(primary_key): - keyname = f"_id.RED_primary_key.{i}" - ret[keyname] = key - else: - ret["_id.RED_primary_key"] = {"$exists": True} - return ret - - @classmethod - def generate_documents_to_insert( - cls, uuid: str, primary_keys: List[str], data: Dict[str, Dict[str, Any]], pkey_len: int - ) -> Iterator[Dict[str, Any]]: - num_missing_pkeys = pkey_len - len(primary_keys) - if num_missing_pkeys == 1: - for pkey, document in data.items(): - document["_id"] = {"RED_uuid": uuid, "RED_primary_key": primary_keys + [pkey]} - yield document - else: - for pkey, inner_data in data.items(): - for document in cls.generate_documents_to_insert( - uuid, primary_keys + [pkey], inner_data, pkey_len - ): - yield document - - async def clear(self, identifier_data: IdentifierData): - # There are five cases here: - # 1) We're clearing out a subset of identifiers (aka identifiers is NOT empty) - # 2) We're clearing out full primary key and no identifiers - # 3) We're clearing out partial primary key and no identifiers - # 4) Primary key is empty, should wipe all documents in the collection - # 5) Category is empty, all of this cog's data should be deleted - pkey_filter = self.generate_primary_key_filter(identifier_data) - if identifier_data.identifiers: - # This covers case 1 - mongo_collection = self.get_collection(identifier_data.category) - dot_identifiers = ".".join(map(self._escape_key, identifier_data.identifiers)) - await mongo_collection.update_one(pkey_filter, update={"$unset": {dot_identifiers: 1}}) - elif identifier_data.category: - # This covers cases 2-4 - mongo_collection = self.get_collection(identifier_data.category) - await mongo_collection.delete_many(pkey_filter) - else: - # This covers case 5 - db = self.db - super_collection = db[self.cog_name] - results = await db.list_collections( - filter={"name": {"$regex": rf"^{super_collection.name}\."}} - ) - for result in results: - await db[result["name"]].delete_many(pkey_filter) - - @classmethod - async def aiter_cogs(cls) -> AsyncIterator[Tuple[str, str]]: - db = cls._conn.get_database() - for collection_name in await db.list_collection_names(): - parts = collection_name.split(".") - if not len(parts) == 2: - continue - cog_name = parts[0] - for cog_id in await db[collection_name].distinct("_id.RED_uuid"): - yield cog_name, cog_id - - @classmethod - async def delete_all_data( - cls, *, interactive: bool = False, drop_db: Optional[bool] = None, **kwargs - ) -> None: - """Delete all data being stored by this driver. - - Parameters - ---------- - interactive : bool - Set to ``True`` to allow the method to ask the user for - input from the console, regarding the other unset parameters - for this method. - drop_db : Optional[bool] - Set to ``True`` to drop the entire database for the current - bot's instance. Otherwise, collections which appear to be - storing bot data will be dropped. - - """ - if interactive is True and drop_db is None: - print( - "Please choose from one of the following options:\n" - " 1. Drop the entire MongoDB database for this instance, or\n" - " 2. 
Delete all of Red's data within this database, without dropping the database " - "itself." - ) - options = ("1", "2") - while True: - resp = input("> ") - try: - drop_db = bool(options.index(resp)) - except ValueError: - print("Please type a number corresponding to one of the options.") - else: - break - db = cls._conn.get_database() - if drop_db is True: - await cls._conn.drop_database(db) - else: - async with await cls._conn.start_session() as session: - async for cog_name, cog_id in cls.aiter_cogs(): - await db.drop_collection(db[cog_name], session=session) - - @staticmethod - def _escape_key(key: str) -> str: - return _SPECIAL_CHAR_PATTERN.sub(_replace_with_escaped, key) - - @staticmethod - def _unescape_key(key: str) -> str: - return _CHAR_ESCAPE_PATTERN.sub(_replace_with_unescaped, key) - - @classmethod - def _escape_dict_keys(cls, data: dict) -> dict: - """Recursively escape all keys in a dict.""" - ret = {} - for key, value in data.items(): - key = cls._escape_key(key) - if isinstance(value, dict): - value = cls._escape_dict_keys(value) - ret[key] = value - return ret - - @classmethod - def _unescape_dict_keys(cls, data: dict) -> dict: - """Recursively unescape all keys in a dict.""" - ret = {} - for key, value in data.items(): - key = cls._unescape_key(key) - if isinstance(value, dict): - value = cls._unescape_dict_keys(value) - ret[key] = value - return ret - - -_SPECIAL_CHAR_PATTERN: Pattern[str] = re.compile(r"([.$]|\\U0000002E|\\U00000024)") -_SPECIAL_CHARS = { - ".": "\\U0000002E", - "$": "\\U00000024", - "\\U0000002E": "\\U&0000002E", - "\\U00000024": "\\U&00000024", -} - - -def _replace_with_escaped(match: Match[str]) -> str: - return _SPECIAL_CHARS[match[0]] - - -_CHAR_ESCAPE_PATTERN: Pattern[str] = re.compile(r"(\\U0000002E|\\U00000024)") -_CHAR_ESCAPES = { - "\\U0000002E": ".", - "\\U00000024": "$", - "\\U&0000002E": "\\U0000002E", - "\\U&00000024": "\\U00000024", -} - - -def _replace_with_unescaped(match: Match[str]) -> str: - return _CHAR_ESCAPES[match[0]] diff --git a/redbot/core/events.py b/redbot/core/events.py index 050ae4aa9..20970e670 100644 --- a/redbot/core/events.py +++ b/redbot/core/events.py @@ -129,7 +129,6 @@ def init_events(bot, cli_flags): ) INFO2 = [] - mongo_enabled = storage_type() != "JSON" reqs_installed = {"docs": None, "test": None} for key in reqs_installed.keys(): reqs = [x.name for x in red_pkg._dep_map[key]] @@ -141,7 +140,6 @@ def init_events(bot, cli_flags): reqs_installed[key] = True options = ( - ("MongoDB", mongo_enabled), ("Voice", True), ("Docs", reqs_installed["docs"]), ("Tests", reqs_installed["test"]), diff --git a/redbot/launcher.py b/redbot/launcher.py index 7528c9e3a..481d2428b 100644 --- a/redbot/launcher.py +++ b/redbot/launcher.py @@ -66,9 +66,6 @@ def parse_cli_args(): parser.add_argument( "--style", help="Installs extra 'style' when updating", action="store_true" ) - parser.add_argument( - "--mongo", help="Installs extra 'mongo' when updating", action="store_true" - ) parser.add_argument( "--debuginfo", help="Prints basic debug info that would be useful for support", @@ -77,7 +74,7 @@ def parse_cli_args(): return parser.parse_known_args() -def update_red(dev=False, style=False, mongo=False, docs=False, test=False): +def update_red(dev=False, style=False, docs=False, test=False): interpreter = sys.executable print("Updating Red...") # If the user ran redbot-launcher.exe, updating with pip will fail @@ -96,8 +93,6 @@ def update_red(dev=False, style=False, mongo=False, docs=False, test=False): egg_l = [] if style: 
egg_l.append("style") - if mongo: - egg_l.append("mongo") if docs: egg_l.append("docs") if test: @@ -293,7 +288,7 @@ def user_choice(): def extras_selector(): print("Enter any extra requirements you want installed\n") - print("Options are: style, docs, test, mongo\n") + print("Options are: style, docs, test\n") selected = user_choice() selected = selected.split() return selected @@ -318,7 +313,6 @@ def development_choice(can_go_back=True): style=True if "style" in selected else False, docs=True if "docs" in selected else False, test=True if "test" in selected else False, - mongo=True if "mongo" in selected else False, ) break elif choice == "2": @@ -328,7 +322,6 @@ def development_choice(can_go_back=True): style=True if "style" in selected else False, docs=True if "docs" in selected else False, test=True if "test" in selected else False, - mongo=True if "mongo" in selected else False, ) break elif choice == "0" and can_go_back: @@ -464,9 +457,9 @@ def main(): "Please try again using only one of --update or --update-dev" ) if args.update: - update_red(style=args.style, docs=args.docs, test=args.test, mongo=args.mongo) + update_red(style=args.style, docs=args.docs, test=args.test) elif args.update_dev: - update_red(dev=True, style=args.style, docs=args.docs, test=args.test, mongo=args.mongo) + update_red(dev=True, style=args.style, docs=args.docs, test=args.test) if INTERACTIVE_MODE: main_menu() diff --git a/redbot/setup.py b/redbot/setup.py index 4bfb06a4b..9259340a9 100644 --- a/redbot/setup.py +++ b/redbot/setup.py @@ -95,14 +95,13 @@ def get_data_dir(): def get_storage_type(): - storage_dict = {1: "JSON", 2: "MongoDB", 3: "PostgreSQL"} + storage_dict = {1: "JSON", 2: "PostgreSQL"} storage = None while storage is None: print() print("Please choose your storage backend (if you're unsure, choose 1).") print("1. JSON (file storage, requires no database).") - print("2. MongoDB") - print("3. PostgreSQL") + print("2. PostgreSQL") storage = input("> ") try: storage = int(storage) @@ -146,7 +145,7 @@ def basic_setup(): storage = get_storage_type() - storage_dict = {1: BackendType.JSON, 2: BackendType.MONGO, 3: BackendType.POSTGRES} + storage_dict = {1: BackendType.JSON, 2: BackendType.POSTGRES} storage_type: BackendType = storage_dict.get(storage, BackendType.JSON) default_dirs["STORAGE_TYPE"] = storage_type.value driver_cls = drivers.get_driver_class(storage_type) @@ -177,8 +176,6 @@ def get_current_backend(instance) -> BackendType: def get_target_backend(backend) -> BackendType: if backend == "json": return BackendType.JSON - elif backend == "mongo": - return BackendType.MONGO elif backend == "postgres": return BackendType.POSTGRES @@ -202,46 +199,10 @@ async def do_migration( return new_storage_details -async def mongov1_to_json() -> Dict[str, Any]: - await drivers.MongoDriver.initialize(**data_manager.storage_details()) - m = drivers.MongoDriver("Core", "0") - db = m.db - collection_names = await db.list_collection_names() - for collection_name in collection_names: - if "." in collection_name: - # Fix for one of Zeph's problems - continue - # Every cog name has its own collection - collection = db[collection_name] - async for document in collection.find(): - # Every cog has its own document. 
- # This means if two cogs have the same name but different identifiers, they will - # be two separate documents in the same collection - cog_id = document.pop("_id") - if not isinstance(cog_id, str): - # Another garbage data check - continue - elif not str(cog_id).isdigit(): - continue - driver = drivers.JsonDriver(collection_name, cog_id) - for category, value in document.items(): - ident_data = IdentifierData( - str(collection_name), str(cog_id), category, tuple(), tuple(), 0 - ) - await driver.set(ident_data, value=value) - - conversion_log.info("Cog conversion complete.") - await drivers.MongoDriver.teardown() - - return {} - - async def create_backup(instance: str) -> None: data_manager.load_basic_configuration(instance) backend_type = get_current_backend(instance) - if backend_type == BackendType.MONGOV1: - await mongov1_to_json() - elif backend_type != BackendType.JSON: + if backend_type != BackendType.JSON: await do_migration(backend_type, BackendType.JSON) print("Backing up the instance's data...") backup_fpath = await red_create_backup() @@ -275,10 +236,7 @@ async def remove_instance( await create_backup(instance) backend = get_current_backend(instance) - if backend == BackendType.MONGOV1: - driver_cls = drivers.MongoDriver - else: - driver_cls = drivers.get_driver_class(backend) + driver_cls = drivers.get_driver_class(backend) if delete_data is True: await driver_cls.delete_all_data(interactive=interactive, drop_db=drop_db) @@ -394,7 +352,7 @@ def delete( @cli.command() @click.argument("instance", type=click.Choice(instance_list)) -@click.argument("backend", type=click.Choice(["json", "mongo", "postgres"])) +@click.argument("backend", type=click.Choice(["json", "postgres"])) def convert(instance, backend): current_backend = get_current_backend(instance) target = get_target_backend(backend) @@ -405,13 +363,8 @@ def convert(instance, backend): loop = asyncio.get_event_loop() - if current_backend == BackendType.MONGOV1: - if target == BackendType.JSON: - new_storage_details = loop.run_until_complete(mongov1_to_json()) - else: - raise RuntimeError( - "Please see conversion docs for updating to the latest mongo version." 
- ) + if current_backend in (BackendType.MONGOV1, BackendType.MONGO): + raise RuntimeError("Please see the 3.2 release notes for upgrading a bot using mongo.") else: new_storage_details = loop.run_until_complete(do_migration(current_backend, target)) diff --git a/setup.cfg b/setup.cfg index cec4be2b5..65b4a2635 100644 --- a/setup.cfg +++ b/setup.cfg @@ -82,10 +82,6 @@ docs = toml==0.10.0 towncrier==19.2.0 urllib3==1.25.3 -mongo = - dnspython==1.16.0 - motor==2.0.0 - pymongo==3.9.0 postgres = asyncpg==0.18.3 style = diff --git a/tests/conftest.py b/tests/conftest.py index 7810e142f..cb420c900 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -20,8 +20,6 @@ def event_loop(request): def _get_backend_type(): if os.getenv("RED_STORAGE_TYPE") == "postgres": return drivers.BackendType.POSTGRES - elif os.getenv("RED_STORAGE_TYPE") == "mongo": - return drivers.BackendType.MONGO else: return drivers.BackendType.JSON @@ -29,17 +27,7 @@ def _get_backend_type(): @pytest.fixture(scope="session", autouse=True) async def _setup_driver(): backend_type = _get_backend_type() - if backend_type == drivers.BackendType.MONGO: - storage_details = { - "URI": os.getenv("RED_MONGO_URI", "mongodb"), - "HOST": os.getenv("RED_MONGO_HOST", "localhost"), - "PORT": int(os.getenv("RED_MONGO_PORT", "27017")), - "USERNAME": os.getenv("RED_MONGO_USER", "red"), - "PASSWORD": os.getenv("RED_MONGO_PASSWORD", "red"), - "DB_NAME": os.getenv("RED_MONGO_DATABASE", "red_db"), - } - else: - storage_details = {} + storage_details = {} data_manager.storage_type = lambda: backend_type.value data_manager.storage_details = lambda: storage_details driver_cls = drivers.get_driver_class(backend_type) diff --git a/tools/dev-requirements.txt b/tools/dev-requirements.txt index ab52d3e8f..c2422e23d 100644 --- a/tools/dev-requirements.txt +++ b/tools/dev-requirements.txt @@ -1,3 +1,3 @@ packaging tox --e .[docs,mongo,postgres,style,test] +-e .[docs,postgres,style,test] diff --git a/tools/primary_deps.ini b/tools/primary_deps.ini index 0405d74c3..6cb5afd6d 100644 --- a/tools/primary_deps.ini +++ b/tools/primary_deps.ini @@ -34,9 +34,6 @@ docs = sphinx_rtd_theme sphinxcontrib-trio towncrier -mongo = - dnspython - motor postgres = asyncpg style = diff --git a/tox.ini b/tox.ini index 599d53eb1..1c079ae58 100644 --- a/tox.ini +++ b/tox.ini @@ -39,23 +39,6 @@ passenv = commands = pytest -[testenv:mongo] -description = Run pytest with MongoDB backend -whitelist_externals = - pytest -extras = voice, test, mongo -setenv = - RED_STORAGE_TYPE=mongo -passenv = - RED_MONGO_URI - RED_MONGO_HOST - RED_MONGO_PORT - RED_MONGO_USER - RED_MONGO_PASSWORD - RED_MONGO_DATABASE -commands = - pytest - [testenv:docs] description = Attempt to build docs with sphinx-build whitelist_externals = @@ -66,7 +49,7 @@ setenv = # Prioritise make.bat over any make.exe which might be on PATH PATHEXT=.BAT;.EXE basepython = python3.7 -extras = docs, mongo +extras = docs commands = sphinx-build -d "{toxworkdir}/docs_doctree" docs "{toxworkdir}/docs_out/html" -W -bhtml sphinx-build -d "{toxworkdir}/docs_doctree" docs "{toxworkdir}/docs_out/linkcheck" -W -blinkcheck From a05508a9f2c8ba84ccdad4be6ce5dddaacff74ad Mon Sep 17 00:00:00 2001 From: Vexed <51716387+Vexed01@users.noreply.github.com> Date: Sat, 9 Nov 2019 14:50:40 +0000 Subject: [PATCH 12/41] [Docs] It's 2019. Let's not say it's 2018. (#3105) * guys... 
it's still not 2018 * changelog i miss git * review --- changelog.d/3105.docs.rst | 1 + docs/conf.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/3105.docs.rst diff --git a/changelog.d/3105.docs.rst b/changelog.d/3105.docs.rst new file mode 100644 index 000000000..b0882fbca --- /dev/null +++ b/changelog.d/3105.docs.rst @@ -0,0 +1 @@ +Update docs footer copyright to 2019. diff --git a/docs/conf.py b/docs/conf.py index f1f8b34bc..8aea2c5b9 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -58,7 +58,7 @@ master_doc = "index" # General information about the project. project = "Red - Discord Bot" -copyright = "2018, Cog Creators" +copyright = "2018-2019, Cog Creators" author = "Cog Creators" # The version info for the project you're documenting, acts as replacement for From 418f957332b0b65a5c7c388f90677efb6a886e2e Mon Sep 17 00:00:00 2001 From: Kowlin Date: Sat, 9 Nov 2019 19:54:41 +0100 Subject: [PATCH 13/41] Update the issue templates (#3109) This update will automatically attach the relevant labels to the dedicated issue templates. --- .github/ISSUE_TEMPLATE/command_bug.md | 3 +++ .github/ISSUE_TEMPLATE/feature_req.md | 3 +++ .github/ISSUE_TEMPLATE/other_bug.md | 3 +++ 3 files changed, 9 insertions(+) diff --git a/.github/ISSUE_TEMPLATE/command_bug.md b/.github/ISSUE_TEMPLATE/command_bug.md index 4f68179c9..4701397ec 100644 --- a/.github/ISSUE_TEMPLATE/command_bug.md +++ b/.github/ISSUE_TEMPLATE/command_bug.md @@ -1,6 +1,9 @@ --- name: Bug reports for commands about: For bugs that involve commands found within Red +title: '' +labels: 'Type: Bug' +assignees: '' --- diff --git a/.github/ISSUE_TEMPLATE/feature_req.md b/.github/ISSUE_TEMPLATE/feature_req.md index f21474574..6bb6dc98f 100644 --- a/.github/ISSUE_TEMPLATE/feature_req.md +++ b/.github/ISSUE_TEMPLATE/feature_req.md @@ -1,6 +1,9 @@ --- name: Feature request about: For feature requests regarding Red itself. +title: '' +labels: 'Type: Feature' +assignees: '' --- diff --git a/.github/ISSUE_TEMPLATE/other_bug.md b/.github/ISSUE_TEMPLATE/other_bug.md index 20ce41318..46378506d 100644 --- a/.github/ISSUE_TEMPLATE/other_bug.md +++ b/.github/ISSUE_TEMPLATE/other_bug.md @@ -1,6 +1,9 @@ --- name: Bug report about: For bugs that don't involve a command. 
+title: '' +labels: 'Type: Bug' +assignees: '' --- From 6852b7a1d126d4032c659e445be25c683da944ef Mon Sep 17 00:00:00 2001 From: Michael H Date: Sat, 9 Nov 2019 14:06:07 -0500 Subject: [PATCH 14/41] License info command (#3090) * Adds a licenseinfo command * good enough for now * changelog * *sigh* Fine, have it your way Draper * thanks Flame --- changelog.d/3090.feature.rst | 1 + redbot/__main__.py | 5 ++-- redbot/cogs/permissions/permissions.py | 16 ++++++++++++ redbot/core/commands/commands.py | 26 ++++++++++++++++++++ redbot/core/core_commands.py | 34 ++++++++++++++++++++++++-- 5 files changed, 78 insertions(+), 4 deletions(-) create mode 100644 changelog.d/3090.feature.rst diff --git a/changelog.d/3090.feature.rst b/changelog.d/3090.feature.rst new file mode 100644 index 000000000..ca19a6c6d --- /dev/null +++ b/changelog.d/3090.feature.rst @@ -0,0 +1 @@ +adds a licenseinfo command \ No newline at end of file diff --git a/redbot/__main__.py b/redbot/__main__.py index 8f61504bb..b9a87cecc 100644 --- a/redbot/__main__.py +++ b/redbot/__main__.py @@ -26,8 +26,8 @@ from redbot.core.cog_manager import CogManagerUI from redbot.core.global_checks import init_global_checks from redbot.core.events import init_events from redbot.core.cli import interactive_config, confirm, parse_cli_flags +from redbot.core.core_commands import Core, license_info_command from redbot.setup import get_data_dir, get_name, save_config -from redbot.core.core_commands import Core from redbot.core.dev_commands import Dev from redbot.core import __version__, modlog, bank, data_manager, drivers from signal import SIGTERM @@ -223,7 +223,7 @@ async def sigterm_handler(red, log): def main(): - description = "Red V3 (c) Cog Creators" + description = "Red V3" cli_flags = parse_cli_flags(sys.argv[1:]) if cli_flags.list_instances: list_instances() @@ -282,6 +282,7 @@ def main(): red.add_cog(Core(red)) red.add_cog(CogManagerUI()) + red.add_command(license_info_command) if cli_flags.dev: red.add_cog(Dev()) # noinspection PyProtectedMember diff --git a/redbot/cogs/permissions/permissions.py b/redbot/cogs/permissions/permissions.py index e3cd995b0..7910ac986 100644 --- a/redbot/cogs/permissions/permissions.py +++ b/redbot/cogs/permissions/permissions.py @@ -299,6 +299,14 @@ class Permissions(commands.Cog): if not who_or_what: await ctx.send_help() return + if isinstance(cog_or_command.obj, commands.commands._AlwaysAvailableCommand): + await ctx.send( + _( + "This command is designated as being always available and " + "cannot be modified by permission rules." + ) + ) + return for w in who_or_what: await self._add_rule( rule=cast(bool, allow_or_deny), @@ -334,6 +342,14 @@ class Permissions(commands.Cog): if not who_or_what: await ctx.send_help() return + if isinstance(cog_or_command.obj, commands.commands._AlwaysAvailableCommand): + await ctx.send( + _( + "This command is designated as being always available and " + "cannot be modified by permission rules." 
+ ) + ) + return for w in who_or_what: await self._add_rule( rule=cast(bool, allow_or_deny), diff --git a/redbot/core/commands/commands.py b/redbot/core/commands/commands.py index 7d120b14f..c2adf2ee1 100644 --- a/redbot/core/commands/commands.py +++ b/redbot/core/commands/commands.py @@ -699,3 +699,29 @@ def get_command_disabler(guild: discord.Guild) -> Callable[["Context"], Awaitabl __command_disablers[guild] = disabler return disabler + + +# This is intentionally left out of `__all__` as it is not intended for general use +class _AlwaysAvailableCommand(Command): + """ + This should be used only for informational commands + which should not be disabled or removed + + These commands cannot belong to a cog. + + These commands do not respect most forms of checks, and + should only be used with that in mind. + + This particular class is not supported for 3rd party use + """ + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + if self.cog is not None: + raise TypeError("This command may not be added to a cog") + + async def can_run(self, ctx, *args, **kwargs) -> bool: + return not ctx.author.bot + + async def _verify_checks(self, ctx) -> bool: + return not ctx.author.bot diff --git a/redbot/core/core_commands.py b/redbot/core/core_commands.py index 0d304de9a..98344c772 100644 --- a/redbot/core/core_commands.py +++ b/redbot/core/core_commands.py @@ -1136,7 +1136,7 @@ class Core(commands.Cog, CoreLogic): @checks.is_owner() async def api(self, ctx: commands.Context, service: str, *, tokens: TokenConverter): """Set various external API tokens. - + This setting will be asked for by some 3rd party cogs and some core cogs. To add the keys provide the service name and the tokens as a comma separated @@ -1162,7 +1162,7 @@ class Core(commands.Cog, CoreLogic): Allows the help command to be sent as a paginated menu instead of seperate messages. - This defaults to False. + This defaults to False. Using this without a setting will toggle. """ if use_menus is None: @@ -1907,6 +1907,12 @@ class Core(commands.Cog, CoreLogic): ) return + if isinstance(command_obj, commands.commands._AlwaysAvailableCommand): + await ctx.send( + _("This command is designated as being always available and cannot be disabled.") + ) + return + async with ctx.bot._config.disabled_commands() as disabled_commands: if command not in disabled_commands: disabled_commands.append(command_obj.qualified_name) @@ -1935,6 +1941,12 @@ class Core(commands.Cog, CoreLogic): ) return + if isinstance(command_obj, commands.commands._AlwaysAvailableCommand): + await ctx.send( + _("This command is designated as being always available and cannot be disabled.") + ) + return + if command_obj.requires.privilege_level > await PrivilegeLevel.from_ctx(ctx): await ctx.send(_("You are not allowed to disable that command.")) return @@ -2215,3 +2227,21 @@ class Core(commands.Cog, CoreLogic): async def rpc_reload(self, request): await self.rpc_unload(request) await self.rpc_load(request) + + +# Removing this command from forks is a violation of the GPLv3 under which it is licensed. +# Otherwise interfering with the ability for this command to be accessible is also a violation. 
+@commands.command(cls=commands.commands._AlwaysAvailableCommand, name="licenseinfo", i18n=_) +async def license_info_command(ctx): + """ + Get info about Red's licenses + """ + + message = ( + "This bot is an instance of Red-DiscordBot (hereafter refered to as Red)\n" + "Red is a free and open source application made available to the public and " + "licensed under the GNU GPLv3. The full text of this license is available to you at " + "" + ) + await ctx.send(message) + # We need a link which contains a thank you to other projects which we use at some point. From b3363acf77e834a46e2948ea24feb38603a61ee2 Mon Sep 17 00:00:00 2001 From: Michael H Date: Sat, 9 Nov 2019 14:19:57 -0500 Subject: [PATCH 15/41] reorder some startup to prevent heartbeat issues (#3073) * reorder some startup to prevent heartbeat issues * changelog * handle startup cleanup in audio * style * rebased to handle conflict * be a little smarter to prevent (some) infinite hangs * Fix a pre-existing NoneType Error * Migrate config before things are using it... * another place we should ensure we're ready * rename-toavoid-issues * fix cache ordering and mis-use of ensure_future * remove incorrect typehints * style --- changelog.d/3045.enhance.rst | 1 + changelog.d/3073.breaking.rst | 1 + redbot/__main__.py | 2 +- redbot/cogs/audio/__init__.py | 3 +- redbot/cogs/audio/apis.py | 48 ++--- redbot/cogs/audio/audio.py | 193 ++++++++++-------- .../{dataclasses.py => audio_dataclasses.py} | 0 redbot/cogs/audio/utils.py | 13 +- redbot/core/bot.py | 53 ++++- redbot/core/events.py | 34 --- 10 files changed, 192 insertions(+), 156 deletions(-) create mode 100644 changelog.d/3045.enhance.rst create mode 100644 changelog.d/3073.breaking.rst rename redbot/cogs/audio/{dataclasses.py => audio_dataclasses.py} (100%) diff --git a/changelog.d/3045.enhance.rst b/changelog.d/3045.enhance.rst new file mode 100644 index 000000000..65bc0182f --- /dev/null +++ b/changelog.d/3045.enhance.rst @@ -0,0 +1 @@ +Bot now handles more things prior to connecting to discord to reduce issues with initial load \ No newline at end of file diff --git a/changelog.d/3073.breaking.rst b/changelog.d/3073.breaking.rst new file mode 100644 index 000000000..d12ae4859 --- /dev/null +++ b/changelog.d/3073.breaking.rst @@ -0,0 +1 @@ +``bot.wait_until_ready`` should no longer be used during extension setup \ No newline at end of file diff --git a/redbot/__main__.py b/redbot/__main__.py index b9a87cecc..e947758d0 100644 --- a/redbot/__main__.py +++ b/redbot/__main__.py @@ -312,7 +312,7 @@ def main(): loop.run_until_complete(red.http.close()) sys.exit(0) try: - loop.run_until_complete(red.start(token, bot=True)) + loop.run_until_complete(red.start(token, bot=True, cli_flags=cli_flags)) except discord.LoginFailure: log.critical("This token doesn't seem to be valid.") db_token = loop.run_until_complete(red._config.token()) diff --git a/redbot/cogs/audio/__init__.py b/redbot/cogs/audio/__init__.py index d36ffc7e3..e69258734 100644 --- a/redbot/cogs/audio/__init__.py +++ b/redbot/cogs/audio/__init__.py @@ -3,7 +3,6 @@ from redbot.core import commands from .audio import Audio -async def setup(bot: commands.Bot): +def setup(bot: commands.Bot): cog = Audio(bot) - await cog.initialize() bot.add_cog(cog) diff --git a/redbot/cogs/audio/apis.py b/redbot/cogs/audio/apis.py index 31f208da2..fa254ffc2 100644 --- a/redbot/cogs/audio/apis.py +++ b/redbot/cogs/audio/apis.py @@ -9,7 +9,7 @@ import random import time import traceback from collections import namedtuple -from typing import Callable, 
Dict, List, Mapping, NoReturn, Optional, Tuple, Union +from typing import Callable, Dict, List, Mapping, Optional, Tuple, Union try: from sqlite3 import Error as SQLError @@ -32,7 +32,7 @@ from lavalink.rest_api import LoadResult from redbot.core import Config, commands from redbot.core.bot import Red from redbot.core.i18n import Translator, cog_i18n -from . import dataclasses +from . import audio_dataclasses from .errors import InvalidTableError, SpotifyFetchError, YouTubeApiError from .playlists import get_playlist from .utils import CacheLevel, Notifier, is_allowed, queue_duration, track_limit @@ -193,7 +193,7 @@ class SpotifyAPI: ) return await r.json() - async def _get_auth(self) -> NoReturn: + async def _get_auth(self): if self.client_id is None or self.client_secret is None: tokens = await self.bot.get_shared_api_tokens("spotify") self.client_id = tokens.get("client_id", "") @@ -331,7 +331,7 @@ class MusicCache: self._lock: asyncio.Lock = asyncio.Lock() self.config: Optional[Config] = None - async def initialize(self, config: Config) -> NoReturn: + async def initialize(self, config: Config): if HAS_SQL: await self.database.connect() @@ -348,12 +348,12 @@ class MusicCache: await self.database.execute(query=_CREATE_UNIQUE_INDEX_SPOTIFY_TABLE) self.config = config - async def close(self) -> NoReturn: + async def close(self): if HAS_SQL: await self.database.execute(query="PRAGMA optimize;") await self.database.disconnect() - async def insert(self, table: str, values: List[dict]) -> NoReturn: + async def insert(self, table: str, values: List[dict]): # if table == "spotify": # return if HAS_SQL: @@ -363,7 +363,7 @@ class MusicCache: await self.database.execute_many(query=query, values=values) - async def update(self, table: str, values: Dict[str, str]) -> NoReturn: + async def update(self, table: str, values: Dict[str, str]): # if table == "spotify": # return if HAS_SQL: @@ -746,7 +746,7 @@ class MusicCache: if val: try: result, called_api = await self.lavalink_query( - ctx, player, dataclasses.Query.process_input(val) + ctx, player, audio_dataclasses.Query.process_input(val) ) except (RuntimeError, aiohttp.ServerDisconnectedError): lock(ctx, False) @@ -805,7 +805,7 @@ class MusicCache: ctx.guild, ( f"{single_track.title} {single_track.author} {single_track.uri} " - f"{str(dataclasses.Query.process_input(single_track))}" + f"{str(audio_dataclasses.Query.process_input(single_track))}" ), ): has_not_allowed = True @@ -911,7 +911,7 @@ class MusicCache: self, ctx: commands.Context, player: lavalink.Player, - query: dataclasses.Query, + query: audio_dataclasses.Query, forced: bool = False, ) -> Tuple[LoadResult, bool]: """ @@ -925,7 +925,7 @@ class MusicCache: The context this method is being called under. player : lavalink.Player The player who's requesting the query. - query: dataclasses.Query + query: audio_dataclasses.Query The Query object for the query in question. forced:bool Whether or not to skip cache and call API first.. 
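The ``asyncio.gather`` changes above drop the redundant ``asyncio.ensure_future`` wrapping and the explicit ``loop=`` argument, which is the "mis-use of ensure_future" the commit message refers to. A standalone sketch of the same idea (``insert`` here is a stand-in, not the real cache method):

.. code-block:: python

    import asyncio

    async def insert(value):
        await asyncio.sleep(0)  # pretend to write to the cache
        return value

    async def run_tasks(pending):
        # gather() schedules coroutines itself, so there is no need to wrap
        # each one in ensure_future() or to pin the call to a specific loop.
        return await asyncio.gather(*(insert(v) for v in pending), return_exceptions=True)

    print(asyncio.run(run_tasks([1, 2, 3])))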
@@ -939,7 +939,7 @@ class MusicCache: ) cache_enabled = CacheLevel.set_lavalink().is_subset(current_cache_level) val = None - _raw_query = dataclasses.Query.process_input(query) + _raw_query = audio_dataclasses.Query.process_input(query) query = str(_raw_query) if cache_enabled and not forced and not _raw_query.is_local: update = True @@ -1003,14 +1003,10 @@ class MusicCache: tasks = self._tasks[ctx.message.id] del self._tasks[ctx.message.id] await asyncio.gather( - *[asyncio.ensure_future(self.insert(*a)) for a in tasks["insert"]], - loop=self.bot.loop, - return_exceptions=True, + *[self.insert(*a) for a in tasks["insert"]], return_exceptions=True ) await asyncio.gather( - *[asyncio.ensure_future(self.update(*a)) for a in tasks["update"]], - loop=self.bot.loop, - return_exceptions=True, + *[self.update(*a) for a in tasks["update"]], return_exceptions=True ) log.debug(f"Completed database writes for {lock_id} " f"({lock_author})") @@ -1025,14 +1021,10 @@ class MusicCache: self._tasks = {} await asyncio.gather( - *[asyncio.ensure_future(self.insert(*a)) for a in tasks["insert"]], - loop=self.bot.loop, - return_exceptions=True, + *[self.insert(*a) for a in tasks["insert"]], return_exceptions=True ) await asyncio.gather( - *[asyncio.ensure_future(self.update(*a)) for a in tasks["update"]], - loop=self.bot.loop, - return_exceptions=True, + *[self.update(*a) for a in tasks["update"]], return_exceptions=True ) log.debug("Completed pending writes to database have finished") @@ -1096,7 +1088,9 @@ class MusicCache: if not tracks: ctx = namedtuple("Context", "message") results, called_api = await self.lavalink_query( - ctx(player.channel.guild), player, dataclasses.Query.process_input(_TOP_100_US) + ctx(player.channel.guild), + player, + audio_dataclasses.Query.process_input(_TOP_100_US), ) tracks = list(results.tracks) if tracks: @@ -1107,7 +1101,7 @@ class MusicCache: while valid is False and multiple: track = random.choice(tracks) - query = dataclasses.Query.process_input(track) + query = audio_dataclasses.Query.process_input(track) if not query.valid: continue if query.is_local and not query.track.exists(): @@ -1116,7 +1110,7 @@ class MusicCache: player.channel.guild, ( f"{track.title} {track.author} {track.uri} " - f"{str(dataclasses.Query.process_input(track))}" + f"{str(audio_dataclasses.Query.process_input(track))}" ), ): log.debug( diff --git a/redbot/cogs/audio/audio.py b/redbot/cogs/audio/audio.py index e442b80af..970de7653 100644 --- a/redbot/cogs/audio/audio.py +++ b/redbot/cogs/audio/audio.py @@ -34,7 +34,7 @@ from redbot.core.utils.menus import ( start_adding_reactions, ) from redbot.core.utils.predicates import MessagePredicate, ReactionPredicate -from . import dataclasses +from . import audio_dataclasses from .apis import MusicCache, HAS_SQL, _ERROR from .checks import can_have_caching from .converters import ComplexScopeParser, ScopeParser, get_lazy_converter, get_playlist_converter @@ -142,7 +142,11 @@ class Audio(commands.Cog): self.play_lock = {} self._manager: Optional[ServerManager] = None - self.bot.dispatch("red_audio_initialized", self) + # These has to be a task since this requires the bot to be ready + # If it waits for ready in startup, we cause a deadlock during initial load + # as initial load happens before the bot can ever be ready. 
+ self._init_task = self.bot.loop.create_task(self.initialize()) + self._ready_event = asyncio.Event() @property def owns_autoplay(self): @@ -166,9 +170,14 @@ class Audio(commands.Cog): self._cog_id = None async def cog_before_invoke(self, ctx: commands.Context): + await self._ready_event.wait() + # check for unsupported arch + # Check on this needs refactoring at a later date + # so that we have a better way to handle the tasks if self.llsetup in [ctx.command, ctx.command.root_parent]: pass - elif self._connect_task.cancelled(): + + elif self._connect_task and self._connect_task.cancelled(): await ctx.send( "You have attempted to run Audio's Lavalink server on an unsupported" " architecture. Only settings related commands will be available." @@ -176,6 +185,7 @@ class Audio(commands.Cog): raise RuntimeError( "Not running audio command due to invalid machine architecture for Lavalink." ) + dj_enabled = await self.config.guild(ctx.guild).dj_enabled() if dj_enabled: dj_role_obj = ctx.guild.get_role(await self.config.guild(ctx.guild).dj_role()) @@ -185,13 +195,13 @@ class Audio(commands.Cog): await self._embed_msg(ctx, _("No DJ role found. Disabling DJ mode.")) async def initialize(self): - pass_config_to_dependencies(self.config, self.bot, await self.config.localpath()) + await self.bot.wait_until_ready() + # Unlike most cases, we want the cache to exit before migration. await self.music_cache.initialize(self.config) - asyncio.ensure_future( - self._migrate_config( - from_version=await self.config.schema_version(), to_version=_SCHEMA_VERSION - ) + await self._migrate_config( + from_version=await self.config.schema_version(), to_version=_SCHEMA_VERSION ) + pass_config_to_dependencies(self.config, self.bot, await self.config.localpath()) self._restart_connect() self._disconnect_task = self.bot.loop.create_task(self.disconnect_timer()) lavalink.register_event_listener(self.event_handler) @@ -209,6 +219,9 @@ class Audio(commands.Cog): await self.bot.send_to_owners(page) log.critical(error_message) + self._ready_event.set() + self.bot.dispatch("red_audio_initialized", self) + async def _migrate_config(self, from_version: int, to_version: int): database_entries = [] time_now = str(datetime.datetime.now(datetime.timezone.utc)) @@ -253,7 +266,7 @@ class Audio(commands.Cog): cast(discord.Guild, discord.Object(id=guild_id)) ).clear_raw("playlists") if database_entries and HAS_SQL: - asyncio.ensure_future(self.music_cache.insert("lavalink", database_entries)) + await self.music_cache.insert("lavalink", database_entries) def _restart_connect(self): if self._connect_task: @@ -366,7 +379,9 @@ class Audio(commands.Cog): async def _players_check(): try: get_single_title = lavalink.active_players()[0].current.title - query = dataclasses.Query.process_input(lavalink.active_players()[0].current.uri) + query = audio_dataclasses.Query.process_input( + lavalink.active_players()[0].current.uri + ) if get_single_title == "Unknown title": get_single_title = lavalink.active_players()[0].current.uri if not get_single_title.startswith("http"): @@ -463,18 +478,18 @@ class Audio(commands.Cog): ) await notify_channel.send(embed=embed) - query = dataclasses.Query.process_input(player.current.uri) + query = audio_dataclasses.Query.process_input(player.current.uri) if query.is_local if player.current else False: if player.current.title != "Unknown title": description = "**{} - {}**\n{}".format( player.current.author, player.current.title, - dataclasses.LocalPath(player.current.uri).to_string_hidden(), + 
audio_dataclasses.LocalPath(player.current.uri).to_string_hidden(), ) else: description = "{}".format( - dataclasses.LocalPath(player.current.uri).to_string_hidden() + audio_dataclasses.LocalPath(player.current.uri).to_string_hidden() ) else: description = "**[{}]({})**".format(player.current.title, player.current.uri) @@ -532,9 +547,9 @@ class Audio(commands.Cog): message_channel = player.fetch("channel") if message_channel: message_channel = self.bot.get_channel(message_channel) - query = dataclasses.Query.process_input(player.current.uri) + query = audio_dataclasses.Query.process_input(player.current.uri) if player.current and query.is_local: - query = dataclasses.Query.process_input(player.current.uri) + query = audio_dataclasses.Query.process_input(player.current.uri) if player.current.title == "Unknown title": description = "{}".format(query.track.to_string_hidden()) else: @@ -590,7 +605,7 @@ class Audio(commands.Cog): player.store("channel", channel.id) player.store("guild", guild.id) await self._data_check(guild.me) - query = dataclasses.Query.process_input(query) + query = audio_dataclasses.Query.process_input(query) ctx = namedtuple("Context", "message") results, called_api = await self.music_cache.lavalink_query(ctx(guild), player, query) @@ -1094,7 +1109,7 @@ class Audio(commands.Cog): with contextlib.suppress(discord.HTTPException): await info.delete() return - temp = dataclasses.LocalPath(local_path, forced=True) + temp = audio_dataclasses.LocalPath(local_path, forced=True) if not temp.exists() or not temp.is_dir(): return await self._embed_msg( ctx, @@ -1536,7 +1551,7 @@ class Audio(commands.Cog): int((datetime.datetime.utcnow() - connect_start).total_seconds()) ) try: - query = dataclasses.Query.process_input(p.current.uri) + query = audio_dataclasses.Query.process_input(p.current.uri) if query.is_local: if p.current.title == "Unknown title": current_title = localtracks.LocalPath(p.current.uri).to_string_hidden() @@ -1606,9 +1621,9 @@ class Audio(commands.Cog): bump_song = player.queue[bump_index] player.queue.insert(0, bump_song) removed = player.queue.pop(index) - query = dataclasses.Query.process_input(removed.uri) + query = audio_dataclasses.Query.process_input(removed.uri) if query.is_local: - localtrack = dataclasses.LocalPath(removed.uri) + localtrack = audio_dataclasses.LocalPath(removed.uri) if removed.title != "Unknown title": description = "**{} - {}**\n{}".format( removed.author, removed.title, localtrack.to_string_hidden() @@ -1997,12 +2012,12 @@ class Audio(commands.Cog): await ctx.invoke(self.local_play, play_subfolders=play_subfolders) else: folder = folder.strip() - _dir = dataclasses.LocalPath.joinpath(folder) + _dir = audio_dataclasses.LocalPath.joinpath(folder) if not _dir.exists(): return await self._embed_msg( ctx, _("No localtracks folder named {name}.").format(name=folder) ) - query = dataclasses.Query.process_input(_dir, search_subfolders=play_subfolders) + query = audio_dataclasses.Query.process_input(_dir, search_subfolders=play_subfolders) await self._local_play_all(ctx, query, from_search=False if not folder else True) @local.command(name="play") @@ -2064,8 +2079,8 @@ class Audio(commands.Cog): all_tracks = await self._folder_list( ctx, ( - dataclasses.Query.process_input( - dataclasses.LocalPath( + audio_dataclasses.Query.process_input( + audio_dataclasses.LocalPath( await self.config.localpath() ).localtrack_folder.absolute(), search_subfolders=play_subfolders, @@ -2081,18 +2096,18 @@ class Audio(commands.Cog): return await 
ctx.invoke(self.search, query=search_list) async def _localtracks_folders(self, ctx: commands.Context, search_subfolders=False): - audio_data = dataclasses.LocalPath( - dataclasses.LocalPath(None).localtrack_folder.absolute() + audio_data = audio_dataclasses.LocalPath( + audio_dataclasses.LocalPath(None).localtrack_folder.absolute() ) if not await self._localtracks_check(ctx): return return audio_data.subfolders_in_tree() if search_subfolders else audio_data.subfolders() - async def _folder_list(self, ctx: commands.Context, query: dataclasses.Query): + async def _folder_list(self, ctx: commands.Context, query: audio_dataclasses.Query): if not await self._localtracks_check(ctx): return - query = dataclasses.Query.process_input(query) + query = audio_dataclasses.Query.process_input(query) if not query.track.exists(): return return ( @@ -2102,12 +2117,12 @@ class Audio(commands.Cog): ) async def _folder_tracks( - self, ctx, player: lavalink.player_manager.Player, query: dataclasses.Query + self, ctx, player: lavalink.player_manager.Player, query: audio_dataclasses.Query ): if not await self._localtracks_check(ctx): return - audio_data = dataclasses.LocalPath(None) + audio_data = audio_dataclasses.LocalPath(None) try: query.track.path.relative_to(audio_data.to_string()) except ValueError: @@ -2120,17 +2135,17 @@ class Audio(commands.Cog): return local_tracks async def _local_play_all( - self, ctx: commands.Context, query: dataclasses.Query, from_search=False + self, ctx: commands.Context, query: audio_dataclasses.Query, from_search=False ): if not await self._localtracks_check(ctx): return if from_search: - query = dataclasses.Query.process_input( + query = audio_dataclasses.Query.process_input( query.track.to_string(), invoked_from="local folder" ) await ctx.invoke(self.search, query=query) - async def _all_folder_tracks(self, ctx: commands.Context, query: dataclasses.Query): + async def _all_folder_tracks(self, ctx: commands.Context, query: audio_dataclasses.Query): if not await self._localtracks_check(ctx): return @@ -2141,7 +2156,7 @@ class Audio(commands.Cog): ) async def _localtracks_check(self, ctx: commands.Context): - folder = dataclasses.LocalPath(None) + folder = audio_dataclasses.LocalPath(None) if folder.localtrack_folder.exists(): return True if ctx.invoked_with != "start": @@ -2177,7 +2192,7 @@ class Audio(commands.Cog): dur = "LIVE" else: dur = lavalink.utils.format_time(player.current.length) - query = dataclasses.Query.process_input(player.current.uri) + query = audio_dataclasses.Query.process_input(player.current.uri) if query.is_local: if not player.current.title == "Unknown title": song = "**{track.author} - {track.title}**\n{uri}\n" @@ -2189,8 +2204,8 @@ class Audio(commands.Cog): song += "\n\n{arrow}`{pos}`/`{dur}`" song = song.format( track=player.current, - uri=dataclasses.LocalPath(player.current.uri).to_string_hidden() - if dataclasses.Query.process_input(player.current.uri).is_local + uri=audio_dataclasses.LocalPath(player.current.uri).to_string_hidden() + if audio_dataclasses.Query.process_input(player.current.uri).is_local else player.current.uri, arrow=arrow, pos=pos, @@ -2301,9 +2316,9 @@ class Audio(commands.Cog): if not player.current: return await self._embed_msg(ctx, _("Nothing playing.")) - query = dataclasses.Query.process_input(player.current.uri) + query = audio_dataclasses.Query.process_input(player.current.uri) if query.is_local: - query = dataclasses.Query.process_input(player.current.uri) + query = 
audio_dataclasses.Query.process_input(player.current.uri) if player.current.title == "Unknown title": description = "{}".format(query.track.to_string_hidden()) else: @@ -2436,7 +2451,7 @@ class Audio(commands.Cog): ) if not await self._currency_check(ctx, guild_data["jukebox_price"]): return - query = dataclasses.Query.process_input(query) + query = audio_dataclasses.Query.process_input(query) if not query.valid: return await self._embed_msg(ctx, _("No tracks to play.")) if query.is_spotify: @@ -2593,7 +2608,7 @@ class Audio(commands.Cog): ) playlists_search_page_list.append(embed) playlists_pick = await menu(ctx, playlists_search_page_list, playlist_search_controls) - query = dataclasses.Query.process_input(playlists_pick) + query = audio_dataclasses.Query.process_input(playlists_pick) if not query.valid: return await self._embed_msg(ctx, _("No tracks to play.")) if not await self._currency_check(ctx, guild_data["jukebox_price"]): @@ -2728,7 +2743,7 @@ class Audio(commands.Cog): elif player.current: await self._embed_msg(ctx, _("Adding a track to queue.")) - async def _get_spotify_tracks(self, ctx: commands.Context, query: dataclasses.Query): + async def _get_spotify_tracks(self, ctx: commands.Context, query: audio_dataclasses.Query): if ctx.invoked_with in ["play", "genre"]: enqueue_tracks = True else: @@ -2771,12 +2786,12 @@ class Audio(commands.Cog): self._play_lock(ctx, False) try: if enqueue_tracks: - new_query = dataclasses.Query.process_input(res[0]) + new_query = audio_dataclasses.Query.process_input(res[0]) new_query.start_time = query.start_time return await self._enqueue_tracks(ctx, new_query) else: result, called_api = await self.music_cache.lavalink_query( - ctx, player, dataclasses.Query.process_input(res[0]) + ctx, player, audio_dataclasses.Query.process_input(res[0]) ) tracks = result.tracks if not tracks: @@ -2808,7 +2823,9 @@ class Audio(commands.Cog): ctx, _("This doesn't seem to be a supported Spotify URL or code.") ) - async def _enqueue_tracks(self, ctx: commands.Context, query: Union[dataclasses.Query, list]): + async def _enqueue_tracks( + self, ctx: commands.Context, query: Union[audio_dataclasses.Query, list] + ): player = lavalink.get_player(ctx.guild.id) try: if self.play_lock[ctx.message.guild.id]: @@ -2863,7 +2880,7 @@ class Audio(commands.Cog): ctx.guild, ( f"{track.title} {track.author} {track.uri} " - f"{str(dataclasses.Query.process_input(track))}" + f"{str(audio_dataclasses.Query.process_input(track))}" ), ): log.debug(f"Query is not allowed in {ctx.guild} ({ctx.guild.id})") @@ -2923,7 +2940,7 @@ class Audio(commands.Cog): ctx.guild, ( f"{single_track.title} {single_track.author} {single_track.uri} " - f"{str(dataclasses.Query.process_input(single_track))}" + f"{str(audio_dataclasses.Query.process_input(single_track))}" ), ): log.debug(f"Query is not allowed in {ctx.guild} ({ctx.guild.id})") @@ -2956,17 +2973,17 @@ class Audio(commands.Cog): return await self._embed_msg( ctx, _("Nothing found. 
Check your Lavalink logs for details.") ) - query = dataclasses.Query.process_input(single_track.uri) + query = audio_dataclasses.Query.process_input(single_track.uri) if query.is_local: if single_track.title != "Unknown title": description = "**{} - {}**\n{}".format( single_track.author, single_track.title, - dataclasses.LocalPath(single_track.uri).to_string_hidden(), + audio_dataclasses.LocalPath(single_track.uri).to_string_hidden(), ) else: description = "{}".format( - dataclasses.LocalPath(single_track.uri).to_string_hidden() + audio_dataclasses.LocalPath(single_track.uri).to_string_hidden() ) else: description = "**[{}]({})**".format(single_track.title, single_track.uri) @@ -2987,7 +3004,11 @@ class Audio(commands.Cog): self._play_lock(ctx, False) async def _spotify_playlist( - self, ctx: commands.Context, stype: str, query: dataclasses.Query, enqueue: bool = False + self, + ctx: commands.Context, + stype: str, + query: audio_dataclasses.Query, + enqueue: bool = False, ): player = lavalink.get_player(ctx.guild.id) @@ -3340,7 +3361,7 @@ class Audio(commands.Cog): return player = lavalink.get_player(ctx.guild.id) to_append = await self._playlist_tracks( - ctx, player, dataclasses.Query.process_input(query) + ctx, player, audio_dataclasses.Query.process_input(query) ) if not to_append: return await self._embed_msg(ctx, _("Could not find a track matching your query.")) @@ -3993,7 +4014,7 @@ class Audio(commands.Cog): spaces = "\N{EN SPACE}" * (len(str(len(playlist.tracks))) + 2) for track in playlist.tracks: track_idx = track_idx + 1 - query = dataclasses.Query.process_input(track["info"]["uri"]) + query = audio_dataclasses.Query.process_input(track["info"]["uri"]) if query.is_local: if track["info"]["title"] != "Unknown title": msg += "`{}.` **{} - {}**\n{}{}\n".format( @@ -4398,7 +4419,7 @@ class Audio(commands.Cog): return player = lavalink.get_player(ctx.guild.id) tracklist = await self._playlist_tracks( - ctx, player, dataclasses.Query.process_input(playlist_url) + ctx, player, audio_dataclasses.Query.process_input(playlist_url) ) if tracklist is not None: playlist = await create_playlist( @@ -4488,14 +4509,14 @@ class Audio(commands.Cog): ctx.guild, ( f"{track.title} {track.author} {track.uri} " - f"{str(dataclasses.Query.process_input(track))}" + f"{str(audio_dataclasses.Query.process_input(track))}" ), ): log.debug(f"Query is not allowed in {ctx.guild} ({ctx.guild.id})") continue - query = dataclasses.Query.process_input(track.uri) + query = audio_dataclasses.Query.process_input(track.uri) if query.is_local: - local_path = dataclasses.LocalPath(track.uri) + local_path = audio_dataclasses.LocalPath(track.uri) if not await self._localtracks_check(ctx): pass if not local_path.exists() and not local_path.is_file(): @@ -4781,7 +4802,7 @@ class Audio(commands.Cog): or not match_yt_playlist(uploaded_playlist_url) or not ( await self.music_cache.lavalink_query( - ctx, player, dataclasses.Query.process_input(uploaded_playlist_url) + ctx, player, audio_dataclasses.Query.process_input(uploaded_playlist_url) ) )[0].tracks ): @@ -4966,7 +4987,7 @@ class Audio(commands.Cog): } ) if database_entries and HAS_SQL: - asyncio.ensure_future(self.music_cache.insert("lavalink", database_entries)) + await self.music_cache.insert("lavalink", database_entries) async def _load_v2_playlist( self, @@ -4993,7 +5014,7 @@ class Audio(commands.Cog): track_count += 1 try: result, called_api = await self.music_cache.lavalink_query( - ctx, player, dataclasses.Query.process_input(song_url) + ctx, player, 
audio_dataclasses.Query.process_input(song_url) ) track = result.tracks except Exception: @@ -5041,7 +5062,7 @@ class Audio(commands.Cog): return [], [], playlist results = {} updated_tracks = await self._playlist_tracks( - ctx, player, dataclasses.Query.process_input(playlist.url) + ctx, player, audio_dataclasses.Query.process_input(playlist.url) ) if not updated_tracks: # No Tracks available on url Lets set it to none to avoid repeated calls here @@ -5106,7 +5127,7 @@ class Audio(commands.Cog): self, ctx: commands.Context, player: lavalink.player_manager.Player, - query: dataclasses.Query, + query: audio_dataclasses.Query, ): search = query.is_search tracklist = [] @@ -5175,7 +5196,7 @@ class Audio(commands.Cog): player.queue.insert(0, bump_song) player.queue.pop(queue_len) await player.skip() - query = dataclasses.Query.process_input(player.current.uri) + query = audio_dataclasses.Query.process_input(player.current.uri) if query.is_local: if player.current.title == "Unknown title": @@ -5227,7 +5248,7 @@ class Audio(commands.Cog): else: dur = lavalink.utils.format_time(player.current.length) - query = dataclasses.Query.process_input(player.current) + query = audio_dataclasses.Query.process_input(player.current) if query.is_local: if player.current.title != "Unknown title": @@ -5240,8 +5261,8 @@ class Audio(commands.Cog): song += "\n\n{arrow}`{pos}`/`{dur}`" song = song.format( track=player.current, - uri=dataclasses.LocalPath(player.current.uri).to_string_hidden() - if dataclasses.Query.process_input(player.current.uri).is_local + uri=audio_dataclasses.LocalPath(player.current.uri).to_string_hidden() + if audio_dataclasses.Query.process_input(player.current.uri).is_local else player.current.uri, arrow=arrow, pos=pos, @@ -5313,7 +5334,7 @@ class Audio(commands.Cog): else: dur = lavalink.utils.format_time(player.current.length) - query = dataclasses.Query.process_input(player.current) + query = audio_dataclasses.Query.process_input(player.current) if query.is_stream: queue_list += _("**Currently livestreaming:**\n") @@ -5327,7 +5348,7 @@ class Audio(commands.Cog): ( _("Playing: ") + "**{current.author} - {current.title}**".format(current=player.current), - dataclasses.LocalPath(player.current.uri).to_string_hidden(), + audio_dataclasses.LocalPath(player.current.uri).to_string_hidden(), _("Requested by: **{user}**\n").format(user=player.current.requester), f"{arrow}`{pos}`/`{dur}`\n\n", ) @@ -5336,7 +5357,7 @@ class Audio(commands.Cog): queue_list += "\n".join( ( _("Playing: ") - + dataclasses.LocalPath(player.current.uri).to_string_hidden(), + + audio_dataclasses.LocalPath(player.current.uri).to_string_hidden(), _("Requested by: **{user}**\n").format(user=player.current.requester), f"{arrow}`{pos}`/`{dur}`\n\n", ) @@ -5357,13 +5378,13 @@ class Audio(commands.Cog): track_title = track.title req_user = track.requester track_idx = i + 1 - query = dataclasses.Query.process_input(track) + query = audio_dataclasses.Query.process_input(track) if query.is_local: if track.title == "Unknown title": queue_list += f"`{track_idx}.` " + ", ".join( ( - bold(dataclasses.LocalPath(track.uri).to_string_hidden()), + bold(audio_dataclasses.LocalPath(track.uri).to_string_hidden()), _("requested by **{user}**\n").format(user=req_user), ) ) @@ -5420,7 +5441,7 @@ class Audio(commands.Cog): for track in queue_list: queue_idx = queue_idx + 1 if not match_url(track.uri): - query = dataclasses.Query.process_input(track) + query = audio_dataclasses.Query.process_input(track) if track.title == "Unknown title": 
track_title = query.track.to_string_hidden() else: @@ -5449,7 +5470,7 @@ class Audio(commands.Cog): ): track_idx = i + 1 if type(track) is str: - track_location = dataclasses.LocalPath(track).to_string_hidden() + track_location = audio_dataclasses.LocalPath(track).to_string_hidden() track_match += "`{}.` **{}**\n".format(track_idx, track_location) else: track_match += "`{}.` **{}**\n".format(track[0], track[1]) @@ -5674,9 +5695,9 @@ class Audio(commands.Cog): ) index -= 1 removed = player.queue.pop(index) - query = dataclasses.Query.process_input(removed.uri) + query = audio_dataclasses.Query.process_input(removed.uri) if query.is_local: - local_path = dataclasses.LocalPath(removed.uri).to_string_hidden() + local_path = audio_dataclasses.LocalPath(removed.uri).to_string_hidden() if removed.title == "Unknown title": removed_title = local_path else: @@ -5762,7 +5783,7 @@ class Audio(commands.Cog): await self._data_check(ctx) if not isinstance(query, list): - query = dataclasses.Query.process_input(query) + query = audio_dataclasses.Query.process_input(query) if query.invoked_from == "search list" or query.invoked_from == "local folder": if query.invoked_from == "search list": result, called_api = await self.music_cache.lavalink_query(ctx, player, query) @@ -5791,7 +5812,7 @@ class Audio(commands.Cog): ctx.guild, ( f"{track.title} {track.author} {track.uri} " - f"{str(dataclasses.Query.process_input(track))}" + f"{str(audio_dataclasses.Query.process_input(track))}" ), ): log.debug(f"Query is not allowed in {ctx.guild} ({ctx.guild.id})") @@ -5905,10 +5926,10 @@ class Audio(commands.Cog): except IndexError: search_choice = tracks[-1] try: - query = dataclasses.Query.process_input(search_choice.uri) + query = audio_dataclasses.Query.process_input(search_choice.uri) if query.is_local: - localtrack = dataclasses.LocalPath(search_choice.uri) + localtrack = audio_dataclasses.LocalPath(search_choice.uri) if search_choice.title != "Unknown title": description = "**{} - {}**\n{}".format( search_choice.author, search_choice.title, localtrack.to_string_hidden() @@ -5919,7 +5940,7 @@ class Audio(commands.Cog): description = "**[{}]({})**".format(search_choice.title, search_choice.uri) except AttributeError: - search_choice = dataclasses.Query.process_input(search_choice) + search_choice = audio_dataclasses.Query.process_input(search_choice) if search_choice.track.exists() and search_choice.track.is_dir(): return await ctx.invoke(self.search, query=search_choice) elif search_choice.track.exists() and search_choice.track.is_file(): @@ -5935,7 +5956,7 @@ class Audio(commands.Cog): ctx.guild, ( f"{search_choice.title} {search_choice.author} {search_choice.uri} " - f"{str(dataclasses.Query.process_input(search_choice))}" + f"{str(audio_dataclasses.Query.process_input(search_choice))}" ), ): log.debug(f"Query is not allowed in {ctx.guild} ({ctx.guild.id})") @@ -5984,12 +6005,12 @@ class Audio(commands.Cog): if search_track_num == 0: search_track_num = 5 try: - query = dataclasses.Query.process_input(track.uri) + query = audio_dataclasses.Query.process_input(track.uri) if query.is_local: search_list += "`{0}.` **{1}**\n[{2}]\n".format( search_track_num, track.title, - dataclasses.LocalPath(track.uri).to_string_hidden(), + audio_dataclasses.LocalPath(track.uri).to_string_hidden(), ) else: search_list += "`{0}.` **[{1}]({2})**\n".format( @@ -5997,7 +6018,7 @@ class Audio(commands.Cog): ) except AttributeError: # query = Query.process_input(track) - track = dataclasses.Query.process_input(track) + track = 
audio_dataclasses.Query.process_input(track) if track.is_local and command != "search": search_list += "`{}.` **{}**\n".format( search_track_num, track.to_string_user() @@ -6890,6 +6911,7 @@ class Audio(commands.Cog): async def on_voice_state_update( self, member: discord.Member, before: discord.VoiceState, after: discord.VoiceState ): + await self._ready_event.wait() if after.channel != before.channel: try: self.skip_votes[before.channel.guild].remove(member.id) @@ -6907,6 +6929,9 @@ class Audio(commands.Cog): if self._connect_task: self._connect_task.cancel() + if self._init_task: + self._init_task.cancel() + lavalink.unregister_event_listener(self.event_handler) self.bot.loop.create_task(lavalink.close()) if self._manager is not None: diff --git a/redbot/cogs/audio/dataclasses.py b/redbot/cogs/audio/audio_dataclasses.py similarity index 100% rename from redbot/cogs/audio/dataclasses.py rename to redbot/cogs/audio/audio_dataclasses.py diff --git a/redbot/cogs/audio/utils.py b/redbot/cogs/audio/utils.py index 3f0d9972a..682d36d50 100644 --- a/redbot/cogs/audio/utils.py +++ b/redbot/cogs/audio/utils.py @@ -3,7 +3,6 @@ import contextlib import os import re import time -from typing import NoReturn from urllib.parse import urlparse import discord @@ -11,7 +10,7 @@ import lavalink from redbot.core import Config, commands from redbot.core.bot import Red -from . import dataclasses +from . import audio_dataclasses from .converters import _pass_config_to_converters @@ -51,7 +50,7 @@ def pass_config_to_dependencies(config: Config, bot: Red, localtracks_folder: st _config = config _pass_config_to_playlist(config, bot) _pass_config_to_converters(config, bot) - dataclasses._pass_config_to_dataclasses(config, bot, localtracks_folder) + audio_dataclasses._pass_config_to_dataclasses(config, bot, localtracks_folder) def track_limit(track, maxlength): @@ -168,7 +167,7 @@ async def clear_react(bot: Red, message: discord.Message, emoji: dict = None): async def get_description(track): if any(x in track.uri for x in [f"{os.sep}localtracks", f"localtracks{os.sep}"]): - local_track = dataclasses.LocalPath(track.uri) + local_track = audio_dataclasses.LocalPath(track.uri) if track.title != "Unknown title": return "**{} - {}**\n{}".format( track.author, track.title, local_track.to_string_hidden() @@ -389,7 +388,7 @@ class Notifier: key: str = None, seconds_key: str = None, seconds: str = None, - ) -> NoReturn: + ): """ This updates an existing message. Based on the message found in :variable:`Notifier.updates` as per the `key` param @@ -410,14 +409,14 @@ class Notifier: except discord.errors.NotFound: pass - async def update_text(self, text: str) -> NoReturn: + async def update_text(self, text: str): embed2 = discord.Embed(colour=self.color, title=text) try: await self.message.edit(embed=embed2) except discord.errors.NotFound: pass - async def update_embed(self, embed: discord.Embed) -> NoReturn: + async def update_embed(self, embed: discord.Embed): try: await self.message.edit(embed=embed) self.last_msg_time = time.time() diff --git a/redbot/core/bot.py b/redbot/core/bot.py index a7becdf73..d2d26cd70 100644 --- a/redbot/core/bot.py +++ b/redbot/core/bot.py @@ -132,7 +132,6 @@ class RedBase(commands.GroupMixin, commands.bot.BotBase, RPCMixin): # pylint: d self._main_dir = bot_dir self._cog_mgr = CogManager() - super().__init__(*args, help_command=None, **kwargs) # Do not manually use the help formatter attribute here, see `send_help_for`, # for a documented API. 
The internals of this object are still subject to change. @@ -325,6 +324,7 @@ class RedBase(commands.GroupMixin, commands.bot.BotBase, RPCMixin): # pylint: d get_embed_colour = get_embed_color + # start config migrations async def _maybe_update_config(self): """ This should be run prior to loading cogs or connecting to discord. @@ -375,6 +375,57 @@ class RedBase(commands.GroupMixin, commands.bot.BotBase, RPCMixin): # pylint: d await self._config.guild(guild_obj).admin_role.set(admin_roles) log.info("Done updating guild configs to support multiple mod/admin roles") + # end Config migrations + + async def pre_flight(self, cli_flags): + """ + This should only be run once, prior to connecting to discord. + """ + await self._maybe_update_config() + + packages = [] + + if cli_flags.no_cogs is False: + packages.extend(await self._config.packages()) + + if cli_flags.load_cogs: + packages.extend(cli_flags.load_cogs) + + if packages: + # Load permissions first, for security reasons + try: + packages.remove("permissions") + except ValueError: + pass + else: + packages.insert(0, "permissions") + + to_remove = [] + print("Loading packages...") + for package in packages: + try: + spec = await self._cog_mgr.find_cog(package) + await asyncio.wait_for(self.load_extension(spec), 30) + except asyncio.TimeoutError: + log.exception("Failed to load package %s (timeout)", package) + to_remove.append(package) + except Exception as e: + log.exception("Failed to load package {}".format(package), exc_info=e) + await self.remove_loaded_package(package) + to_remove.append(package) + for package in to_remove: + packages.remove(package) + if packages: + print("Loaded packages: " + ", ".join(packages)) + + if self.rpc_enabled: + await self.rpc.initialize(self.rpc_port) + + async def start(self, *args, **kwargs): + cli_flags = kwargs.pop("cli_flags") + await self.pre_flight(cli_flags=cli_flags) + return await super().start(*args, **kwargs) + async def send_help_for( self, ctx: commands.Context, help_for: Union[commands.Command, commands.GroupMixin, str] ): diff --git a/redbot/core/events.py b/redbot/core/events.py index 20970e670..5fd418d36 100644 --- a/redbot/core/events.py +++ b/redbot/core/events.py @@ -46,40 +46,6 @@ def init_events(bot, cli_flags): return bot._uptime = datetime.datetime.utcnow() - packages = [] - - if cli_flags.no_cogs is False: - packages.extend(await bot._config.packages()) - - if cli_flags.load_cogs: - packages.extend(cli_flags.load_cogs) - - if packages: - # Load permissions first, for security reasons - try: - packages.remove("permissions") - except ValueError: - pass - else: - packages.insert(0, "permissions") - - to_remove = [] - print("Loading packages...") - for package in packages: - try: - spec = await bot._cog_mgr.find_cog(package) - await bot.load_extension(spec) - except Exception as e: - log.exception("Failed to load package {}".format(package), exc_info=e) - await bot.remove_loaded_package(package) - to_remove.append(package) - for package in to_remove: - packages.remove(package) - if packages: - print("Loaded packages: " + ", ".join(packages)) - - if bot.rpc_enabled: - await bot.rpc.initialize(bot.rpc_port) guilds = len(bot.guilds) users = len(set([m for m in bot.get_all_members()])) From 710b520da9e5b94c07b06de0bb5a2f9f92bbfd65 Mon Sep 17 00:00:00 2001 From: flaree <31554168+flaree@users.noreply.github.com> Date: Sat, 9 Nov 2019 19:58:10 +0000 Subject: [PATCH 16/41] [Docs] get_shared_api_keys -> get_shared_api_tokens (#3110) * keys -> tokens * Changelog --- changelog.d/3110.docs.rst 
| 1 + docs/framework_apikeys.rst | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 changelog.d/3110.docs.rst diff --git a/changelog.d/3110.docs.rst b/changelog.d/3110.docs.rst new file mode 100644 index 000000000..676ce6461 --- /dev/null +++ b/changelog.d/3110.docs.rst @@ -0,0 +1 @@ +Update apikey framework documentation. Change bot.get_shared_api_keys() to bot.get_shared_api_tokens(). \ No newline at end of file diff --git a/docs/framework_apikeys.rst b/docs/framework_apikeys.rst index bfaadac40..3d68854cd 100644 --- a/docs/framework_apikeys.rst +++ b/docs/framework_apikeys.rst @@ -18,7 +18,7 @@ and when accessed in the code it should be done by .. code-block:: python - await self.bot.get_shared_api_keys("twitch") + await self.bot.get_shared_api_tokens("twitch") Each service has its own dict of key, value pairs for each required key type. If there's only one key required then a name for the key is still required for storing and accessing. @@ -30,7 +30,7 @@ and when accessed in the code it should be done by .. code-block:: python - await self.bot.get_shared_api_keys("youtube") + await self.bot.get_shared_api_tokens("youtube") *********** @@ -42,7 +42,7 @@ Basic Usage class MyCog: @commands.command() async def youtube(self, ctx, user: str): - youtube_keys = await self.bot.get_shared_api_keys("youtube") + youtube_keys = await self.bot.get_shared_api_tokens("youtube") if youtube_keys.get("api_key") is None: return await ctx.send("The YouTube API key has not been set.") # Use the API key to access content as you normally would From 33178ef0347f59ff37b331694c233d9d7f828546 Mon Sep 17 00:00:00 2001 From: Draper <27962761+Drapersniper@users.noreply.github.com> Date: Mon, 11 Nov 2019 16:59:51 +0000 Subject: [PATCH 17/41] [Audio-3.2] Fix an issue with mixplaylist being recognised as single tracks (#3104) * Fix an issue with mixplaylist being recognised as single tracks Signed-off-by: Drapersniper <27962761+drapersniper@users.noreply.github.com> * Add changelogs Signed-off-by: Drapersniper <27962761+drapersniper@users.noreply.github.com> --- changelog.d/audio/3104.misc.1.rst | 1 + redbot/cogs/audio/audio_dataclasses.py | 9 ++++++--- 2 files changed, 7 insertions(+), 3 deletions(-) create mode 100644 changelog.d/audio/3104.misc.1.rst diff --git a/changelog.d/audio/3104.misc.1.rst b/changelog.d/audio/3104.misc.1.rst new file mode 100644 index 000000000..82855a786 --- /dev/null +++ b/changelog.d/audio/3104.misc.1.rst @@ -0,0 +1 @@ +Fix an issue where some YouTube playlists were being recognised as single tracks. 
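The hunk below changes how ``audio_dataclasses.Query`` classifies YouTube links: a URL carrying only a ``list=`` parameter (for example a mix link) is now treated as a playlist rather than a single track. A rough sketch of the intended outcomes — the URLs are made-up examples, and this helper only mimics the ``&list=`` / ``watch?`` / ``playlist?`` checks from the hunk that follows, not the full parser:

.. code-block:: python

    def classify(track: str, has_index: bool = False) -> str:
        # Simplified stand-in for the updated checks; the real parser also
        # handles track indexes, Spotify links, local tracks, etc.
        if all(k in track for k in ["&list=", "watch?"]):
            return "playlist"
        elif "playlist?" in track:
            return "single track" if has_index else "playlist"
        elif "list=" in track:
            # New branch: a bare list= parameter (e.g. a YouTube mix) is a playlist.
            return "playlist"
        return "single track"

    print(classify("https://www.youtube.com/watch?v=abc123xyz_0&list=RDabc123xyz_0"))  # playlist
    print(classify("https://www.youtube.com/playlist?list=PL0123456789"))              # playlist
    print(classify("https://www.youtube.com/watch?v=abc123xyz_0"))                     # single track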
\ No newline at end of file diff --git a/redbot/cogs/audio/audio_dataclasses.py b/redbot/cogs/audio/audio_dataclasses.py index eee695aa5..16dbc27fb 100644 --- a/redbot/cogs/audio/audio_dataclasses.py +++ b/redbot/cogs/audio/audio_dataclasses.py @@ -381,14 +381,17 @@ class Query: match = re.search(_re_youtube_index, track) if match: returning["track_index"] = int(match.group(1)) - 1 - if all(k in track for k in ["&list=", "watch?"]): returning["track_index"] = 0 returning["playlist"] = True returning["single"] = False elif all(x in track for x in ["playlist?"]): - returning["playlist"] = True if not _has_index else False - returning["single"] = True if _has_index else False + returning["playlist"] = not _has_index + returning["single"] = _has_index + elif any(k in track for k in ["list="]): + returning["track_index"] = 0 + returning["playlist"] = True + returning["single"] = False else: returning["single"] = True elif url_domain == "spotify.com": From 6aeca83c63959f85e30fe512acfaaad777465794 Mon Sep 17 00:00:00 2001 From: Vexed <51716387+Vexed01@users.noreply.github.com> Date: Thu, 14 Nov 2019 18:04:00 +0000 Subject: [PATCH 18/41] Increased clarity of wording in info command (#3121) * make branch + preliminary code * correction * towncrier * sinbad's changes --- changelog.d/3121.enhance.rst | 1 + redbot/core/core_commands.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/3121.enhance.rst diff --git a/changelog.d/3121.enhance.rst b/changelog.d/3121.enhance.rst new file mode 100644 index 000000000..ee92455a5 --- /dev/null +++ b/changelog.d/3121.enhance.rst @@ -0,0 +1 @@ +Change ``[p]info`` to say "This bot is an..." instead of "This is an..." for clarity. diff --git a/redbot/core/core_commands.py b/redbot/core/core_commands.py index 98344c772..218db18b1 100644 --- a/redbot/core/core_commands.py +++ b/redbot/core/core_commands.py @@ -293,7 +293,7 @@ class Core(commands.Cog, CoreLogic): data = await r.json() outdated = VersionInfo.from_str(data["info"]["version"]) > red_version_info about = _( - "This is an instance of [Red, an open source Discord bot]({}) " + "This bot is an instance of [Red, an open source Discord bot]({}) " "created by [Twentysix]({}) and [improved by many]({}).\n\n" "Red is backed by a passionate community who contributes and " "creates content for everyone to enjoy. [Join us today]({}) " From 19e8e60a4dcb0c1676939123ce712478a5b87a60 Mon Sep 17 00:00:00 2001 From: jack1142 <6032823+jack1142@users.noreply.github.com> Date: Thu, 14 Nov 2019 19:05:48 +0100 Subject: [PATCH 19/41] [Audio] Stop player before destroying on emptydisconnect (#3119) * fix(audio): stop player before disconnect in emptydisconnect * chore(changelog): add towncrier entry --- changelog.d/audio/3050.bugfix.rst | 1 + redbot/cogs/audio/audio.py | 4 +++- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 changelog.d/audio/3050.bugfix.rst diff --git a/changelog.d/audio/3050.bugfix.rst b/changelog.d/audio/3050.bugfix.rst new file mode 100644 index 000000000..79a5b8e67 --- /dev/null +++ b/changelog.d/audio/3050.bugfix.rst @@ -0,0 +1 @@ +Bot's status is now properly cleared on emptydisconnect. 
\ No newline at end of file diff --git a/redbot/cogs/audio/audio.py b/redbot/cogs/audio/audio.py index 970de7653..bdeb6eb59 100644 --- a/redbot/cogs/audio/audio.py +++ b/redbot/cogs/audio/audio.py @@ -6740,7 +6740,9 @@ class Audio(commands.Cog): if (time.time() - stop_times[sid]) >= emptydc_timer: stop_times.pop(sid) try: - await lavalink.get_player(sid).disconnect() + player = lavalink.get_player(sid) + await player.stop() + await player.disconnect() except Exception: log.error("Exception raised in Audio's emptydc_timer.", exc_info=True) pass From 548a50b9844053e224bc6d93ab392c7530ef4be5 Mon Sep 17 00:00:00 2001 From: jack1142 <6032823+jack1142@users.noreply.github.com> Date: Fri, 15 Nov 2019 22:28:17 +0100 Subject: [PATCH 20/41] [Docs] Add information about ``info.json``'s ``min_python_version`` key in Downloader Framework page. (#3125) * docs(downloader): add missing `min_python_version` key * chore(changelog): add towncrier entry --- changelog.d/3124.docs.rst | 1 + docs/framework_downloader.rst | 3 +++ 2 files changed, 4 insertions(+) create mode 100644 changelog.d/3124.docs.rst diff --git a/changelog.d/3124.docs.rst b/changelog.d/3124.docs.rst new file mode 100644 index 000000000..3eb9e27a9 --- /dev/null +++ b/changelog.d/3124.docs.rst @@ -0,0 +1 @@ +Add information about ``info.json``'s ``min_python_version`` key in Downloader Framework docs. \ No newline at end of file diff --git a/docs/framework_downloader.rst b/docs/framework_downloader.rst index e7b3de700..c1feb9ed9 100644 --- a/docs/framework_downloader.rst +++ b/docs/framework_downloader.rst @@ -35,6 +35,9 @@ Keys specific to the cog info.json (case sensitive) - ``max_bot_version`` (string) - Max version number of Red in the format ``MAJOR.MINOR.MICRO``, if ``min_bot_version`` is newer than ``max_bot_version``, ``max_bot_version`` will be ignored +- ``min_python_version`` (list of integers) - Min version number of Python + in the format ``[MAJOR, MINOR, PATCH]`` + - ``hidden`` (bool) - Determines if a cog is visible in the cog list for a repo. - ``disabled`` (bool) - Determines if a cog is available for install. From 8a90996b36f264f98a52671beda2bdd42487080d Mon Sep 17 00:00:00 2001 From: jack1142 <6032823+jack1142@users.noreply.github.com> Date: Sun, 17 Nov 2019 16:25:15 +0100 Subject: [PATCH 21/41] [Downloader] Add `Repo.clean_url` and use it in `[p]findcog` (#3129) * enhance(downloader): add `Repo.clean_url` and use it in `[p]findcog` * chore(changelog): add towncrier entries --- changelog.d/downloader/3129.enhance.rst | 1 + changelog.d/downloader/3129.misc.rst | 1 + redbot/cogs/downloader/downloader.py | 2 +- redbot/cogs/downloader/repo_manager.py | 8 ++++++++ 4 files changed, 11 insertions(+), 1 deletion(-) create mode 100644 changelog.d/downloader/3129.enhance.rst create mode 100644 changelog.d/downloader/3129.misc.rst diff --git a/changelog.d/downloader/3129.enhance.rst b/changelog.d/downloader/3129.enhance.rst new file mode 100644 index 000000000..21b5b8394 --- /dev/null +++ b/changelog.d/downloader/3129.enhance.rst @@ -0,0 +1 @@ +Use sanitized url (without HTTP Basic Auth fragments) in `[p]findcog` command. \ No newline at end of file diff --git a/changelog.d/downloader/3129.misc.rst b/changelog.d/downloader/3129.misc.rst new file mode 100644 index 000000000..685ee9daa --- /dev/null +++ b/changelog.d/downloader/3129.misc.rst @@ -0,0 +1 @@ +Add `clean_url` property to :class:`redbot.cogs.downloader.repo_manager.Repo` which contains sanitized repo URL (without HTTP Basic Auth). 
\ No newline at end of file diff --git a/redbot/cogs/downloader/downloader.py b/redbot/cogs/downloader/downloader.py index be682431a..b12ac7ae2 100644 --- a/redbot/cogs/downloader/downloader.py +++ b/redbot/cogs/downloader/downloader.py @@ -1150,7 +1150,7 @@ class Downloader(commands.Cog): repo_url = ( _("Missing from installed repos") if cog_installable.repo is None - else cog_installable.repo.url + else cog_installable.repo.clean_url ) cog_name = cog_installable.name else: diff --git a/redbot/cogs/downloader/repo_manager.py b/redbot/cogs/downloader/repo_manager.py index 39ee18dce..276e59114 100644 --- a/redbot/cogs/downloader/repo_manager.py +++ b/redbot/cogs/downloader/repo_manager.py @@ -7,6 +7,7 @@ import pkgutil import shlex import shutil import re +import yarl from concurrent.futures import ThreadPoolExecutor from pathlib import Path from subprocess import run as sp_run, PIPE, CompletedProcess @@ -147,6 +148,13 @@ class Repo(RepoJSONMixin): self._loop = loop if loop is not None else asyncio.get_event_loop() + @property + def clean_url(self): + """Sanitized repo URL (with removed HTTP Basic Auth)""" + url = yarl.URL(self.url) + clean_url = url.with_user(None) + return clean_url + @classmethod async def convert(cls, ctx: commands.Context, argument: str) -> Repo: downloader_cog = ctx.bot.get_cog("Downloader") From 5a7c36c5813221f682e1f70575505398ee88132a Mon Sep 17 00:00:00 2001 From: jack1142 <6032823+jack1142@users.noreply.github.com> Date: Sun, 17 Nov 2019 16:35:48 +0100 Subject: [PATCH 22/41] chore(changelog): fix wrong references in changelog entries for Downloader (#3130) --- changelog.d/downloader/2527.docs.rst | 2 +- changelog.d/downloader/2527.misc.1.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/changelog.d/downloader/2527.docs.rst b/changelog.d/downloader/2527.docs.rst index 3b4b47f61..21e5bc6b9 100644 --- a/changelog.d/downloader/2527.docs.rst +++ b/changelog.d/downloader/2527.docs.rst @@ -1 +1 @@ -Added :func:`redbot.cogs.downloader.repo_manager.InstalledModule` to Downloader's framework docs. \ No newline at end of file +Added :func:`redbot.cogs.downloader.installable.InstalledModule` to Downloader's framework docs. \ No newline at end of file diff --git a/changelog.d/downloader/2527.misc.1.rst b/changelog.d/downloader/2527.misc.1.rst index 8b1d6db9e..f7c3143f9 100644 --- a/changelog.d/downloader/2527.misc.1.rst +++ b/changelog.d/downloader/2527.misc.1.rst @@ -1,4 +1,4 @@ Added :func:`redbot.cogs.downloader.installable.InstalledModule` which is used instead of :func:`redbot.cogs.downloader.installable.Installable` when we refer to installed cog or shared library. Therefore: - ``to_json`` and ``from_json`` methods were moved from :func:`redbot.cogs.downloader.installable.Installable` to :func:`redbot.cogs.downloader.installable.InstalledModule` - - return types changed for :func:`redbot.cogs.converter.InstalledCog.convert`, :func:`redbot.cogs.downloader.Downloader.installed_cogs`, :func:`redbot.cogs.downloader.Repo.install_cog` to use :func:`redbot.cogs.downloader.installable.InstalledModule`. \ No newline at end of file + - return types changed for :func:`redbot.cogs.downloader.converters.InstalledCog.convert`, :func:`redbot.cogs.downloader.downloader.Downloader.installed_cogs`, :func:`redbot.cogs.downloader.repo_manager.Repo.install_cog` to use :func:`redbot.cogs.downloader.installable.InstalledModule`. 
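For reference, the sanitisation that ``Repo.clean_url`` performs comes straight from yarl: replacing the user component drops any HTTP Basic Auth credentials embedded in the URL. A minimal sketch, not part of the patch itself — the repository URL below is just an example:

.. code-block:: python

    import yarl

    url = yarl.URL("https://someuser:hunter2@example.com/owner/some-repo")
    # Dropping the user also drops the password, leaving a credential-free URL.
    print(url.with_user(None))  # https://example.com/owner/some-repo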
\ No newline at end of file From 141b48d3cf1419decdf2e2e2ea58cc76a5356e04 Mon Sep 17 00:00:00 2001 From: Michael H Date: Sun, 17 Nov 2019 11:00:26 -0500 Subject: [PATCH 23/41] Add .codeclimate.yml (#3131) - This is still not ready to be used as a PR check - Can be used to get an idea of where we can look to clean up code --- .codeclimate.yml | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) create mode 100644 .codeclimate.yml diff --git a/.codeclimate.yml b/.codeclimate.yml new file mode 100644 index 000000000..af8faac04 --- /dev/null +++ b/.codeclimate.yml @@ -0,0 +1,43 @@ +version: "2" # required to adjust maintainability checks +checks: + argument-count: + config: + threshold: 6 + complex-logic: + enabled: false # Disabled in favor of using Radon for this + config: + threshold: 4 + file-lines: + config: + threshold: 1000 # I would set this lower if not for cogs as command containers. + method-complexity: + enabled: false # Disabled in favor of using Radon for this + config: + threshold: 5 + method-count: + enabled: false # I would set this lower if not for cogs as command containers. + threshold: 20 + method-lines: + enabled: false + config: + threshold: 25 # I'm fine with long methods, cautious about the complexity of a single method. + nested-control-flow: + config: + threshold: 4 + return-statements: + config: + threshold: 6 + similar-code: + enabled: false + config: + threshold: # language-specific defaults. an override will affect all languages. + identical-code: + config: + threshold: # language-specific defaults. an override will affect all languages. +plugins: + bandit: + enabled: true + radon: + enabled: true + config: + threshold: "D" \ No newline at end of file From 51298f156bcdb745c6146b75ae9e08dc824340e9 Mon Sep 17 00:00:00 2001 From: Michael H Date: Sun, 17 Nov 2019 11:08:30 -0500 Subject: [PATCH 24/41] pt2 (#3132) --- .codeclimate.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.codeclimate.yml b/.codeclimate.yml index af8faac04..fba462070 100644 --- a/.codeclimate.yml +++ b/.codeclimate.yml @@ -16,6 +16,7 @@ checks: threshold: 5 method-count: enabled: false # I would set this lower if not for cogs as command containers. 
+ config: threshold: 20 method-lines: enabled: false @@ -40,4 +41,4 @@ plugins: radon: enabled: true config: - threshold: "D" \ No newline at end of file + threshold: "D" From ddfabb0c0e70210e6cfc2fd6d6c607f0bacb602a Mon Sep 17 00:00:00 2001 From: Michael H Date: Mon, 18 Nov 2019 23:45:32 -0500 Subject: [PATCH 25/41] Changes from 3.1.8 (#3139) --- redbot/__init__.py | 2 +- setup.cfg | 6 ++---- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/redbot/__init__.py b/redbot/__init__.py index a7982024c..2a5c0a7e7 100644 --- a/redbot/__init__.py +++ b/redbot/__init__.py @@ -192,7 +192,7 @@ def _update_event_loop_policy(): _asyncio.set_event_loop_policy(_uvloop.EventLoopPolicy()) -__version__ = "3.1.7" +__version__ = "3.1.8" version_info = VersionInfo.from_str(__version__) # Filter fuzzywuzzy slow sequence matcher warning diff --git a/setup.cfg b/setup.cfg index 65b4a2635..2e5c828d5 100644 --- a/setup.cfg +++ b/setup.cfg @@ -38,7 +38,7 @@ install_requires = colorama==0.4.1 contextlib2==0.5.5 databases[sqlite]==0.2.5 - discord.py==1.2.4 + discord.py==1.2.5 distro==1.4.0; sys_platform == "linux" fuzzywuzzy==0.17.0 idna==2.8 @@ -49,9 +49,7 @@ install_requires = Red-Lavalink==0.4.0 schema==0.7.0 tqdm==4.35.0 - # Below is due to an issue with uvloop < 0.14 with python 3.8, move both to 0.14 at full release - uvloop==0.13.0; sys_platform != "win32" and platform_python_implementation == "CPython" and python_version<"3.8" - uvloop==0.14.0rc2; sys_platform != "win32" and platform_python_implementation == "CPython" and python_version>="3.8" + uvloop==0.14.0; sys_platform != "win32" and platform_python_implementation == "CPython" websockets==6.0 yarl==1.3.0 From 4b62598a3d56185ef0fe5119e58d2d5611f6e432 Mon Sep 17 00:00:00 2001 From: jack1142 <6032823+jack1142@users.noreply.github.com> Date: Tue, 19 Nov 2019 19:14:22 +0100 Subject: [PATCH 26/41] [Downloader] Make `Repo.clean_url` work with relative urls. (#3142) * fix(downloader): return string, catch ValueError for relative urls * chore(changelog): add towncrier entry --- changelog.d/downloader/3141.bugfix.rst | 1 + redbot/cogs/downloader/repo_manager.py | 8 +++++--- 2 files changed, 6 insertions(+), 3 deletions(-) create mode 100644 changelog.d/downloader/3141.bugfix.rst diff --git a/changelog.d/downloader/3141.bugfix.rst b/changelog.d/downloader/3141.bugfix.rst new file mode 100644 index 000000000..c329da021 --- /dev/null +++ b/changelog.d/downloader/3141.bugfix.rst @@ -0,0 +1 @@ +Make :attr:`redbot.cogs.downloader.repo_manager.Repo.clean_url` work with relative urls. This property uses `str` type now. 
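The ``ValueError`` handling added in the hunk below is needed because yarl refuses to modify the userinfo of a relative URL (such as a repo added from a local path), so ``clean_url`` falls back to the raw URL string in that case. A minimal sketch with an example path, assuming yarl's documented behaviour:

.. code-block:: python

    import yarl

    relative = yarl.URL("../local/clones/some-repo")
    try:
        print(relative.with_user(None).human_repr())
    except ValueError:
        # No scheme/host part to strip credentials from; fall back to the raw string.
        print("../local/clones/some-repo")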
\ No newline at end of file diff --git a/redbot/cogs/downloader/repo_manager.py b/redbot/cogs/downloader/repo_manager.py index 276e59114..255ca8d67 100644 --- a/redbot/cogs/downloader/repo_manager.py +++ b/redbot/cogs/downloader/repo_manager.py @@ -149,11 +149,13 @@ class Repo(RepoJSONMixin): self._loop = loop if loop is not None else asyncio.get_event_loop() @property - def clean_url(self): + def clean_url(self) -> str: """Sanitized repo URL (with removed HTTP Basic Auth)""" url = yarl.URL(self.url) - clean_url = url.with_user(None) - return clean_url + try: + return url.with_user(None).human_repr() + except ValueError: + return self.url @classmethod async def convert(cls, ctx: commands.Context, argument: str) -> Repo: From a3140b665960b5847595973788f76dc4b170d4fa Mon Sep 17 00:00:00 2001 From: kennnyshiwa <44236678+kennnyshiwa@users.noreply.github.com> Date: Fri, 22 Nov 2019 18:53:42 -0500 Subject: [PATCH 27/41] [audio] adds typing indicator to playlist dedupe (#3058) * [audio] adds typing indicator to playlist dedupe * [audio] not sure what happened here lol * [audio] forgot the return * add changelog * [audio] fix for black --- changelog.d/audio/3058.enhancement.rst | 1 + redbot/cogs/audio/audio.py | 155 +++++++++++++------------ 2 files changed, 80 insertions(+), 76 deletions(-) create mode 100644 changelog.d/audio/3058.enhancement.rst diff --git a/changelog.d/audio/3058.enhancement.rst b/changelog.d/audio/3058.enhancement.rst new file mode 100644 index 000000000..37981047b --- /dev/null +++ b/changelog.d/audio/3058.enhancement.rst @@ -0,0 +1 @@ +Add typing indicator to playlist dedupe \ No newline at end of file diff --git a/redbot/cogs/audio/audio.py b/redbot/cogs/audio/audio.py index bdeb6eb59..202edd3ef 100644 --- a/redbot/cogs/audio/audio.py +++ b/redbot/cogs/audio/audio.py @@ -3737,89 +3737,92 @@ class Audio(commands.Cog): ​ ​ ​ ​ [p]playlist dedupe MyGlobalPlaylist --scope Global ​ ​ ​ ​ [p]playlist dedupe MyPersonalPlaylist --scope User """ - if scope_data is None: - scope_data = [PlaylistScope.GUILD.value, ctx.author, ctx.guild, False] - scope, author, guild, specified_user = scope_data - scope_name = humanize_scope( - scope, ctx=guild if scope == PlaylistScope.GUILD.value else author - ) - - try: - playlist_id, playlist_arg = await self._get_correct_playlist_id( - ctx, playlist_matches, scope, author, guild, specified_user - ) - except TooManyMatches as e: - return await self._embed_msg(ctx, str(e)) - if playlist_id is None: - return await self._embed_msg( - ctx, _("Could not match '{arg}' to a playlist.").format(arg=playlist_arg) + async with ctx.typing(): + if scope_data is None: + scope_data = [PlaylistScope.GUILD.value, ctx.author, ctx.guild, False] + scope, author, guild, specified_user = scope_data + scope_name = humanize_scope( + scope, ctx=guild if scope == PlaylistScope.GUILD.value else author ) - try: - playlist = await get_playlist(playlist_id, scope, self.bot, guild, author) - except RuntimeError: - return await self._embed_msg( - ctx, - _("Playlist {id} does not exist in {scope} scope.").format( - id=playlist_id, scope=humanize_scope(scope, the=True) - ), - ) - except MissingGuild: - return await self._embed_msg( - ctx, _("You need to specify the Guild ID for the guild to lookup.") - ) + try: + playlist_id, playlist_arg = await self._get_correct_playlist_id( + ctx, playlist_matches, scope, author, guild, specified_user + ) + except TooManyMatches as e: + return await self._embed_msg(ctx, str(e)) + if playlist_id is None: + return await self._embed_msg( + ctx, 
_("Could not match '{arg}' to a playlist.").format(arg=playlist_arg) + ) - if not await self.can_manage_playlist(scope, playlist, ctx, author, guild): - return + try: + playlist = await get_playlist(playlist_id, scope, self.bot, guild, author) + except RuntimeError: + return await self._embed_msg( + ctx, + _("Playlist {id} does not exist in {scope} scope.").format( + id=playlist_id, scope=humanize_scope(scope, the=True) + ), + ) + except MissingGuild: + return await self._embed_msg( + ctx, _("You need to specify the Guild ID for the guild to lookup.") + ) - track_objects = playlist.tracks_obj - original_count = len(track_objects) - unique_tracks = set() - unique_tracks_add = unique_tracks.add - track_objects = [ - x for x in track_objects if not (x in unique_tracks or unique_tracks_add(x)) - ] + if not await self.can_manage_playlist(scope, playlist, ctx, author, guild): + return - tracklist = [] - for track in track_objects: - track_keys = track._info.keys() - track_values = track._info.values() - track_id = track.track_identifier - track_info = {} - for k, v in zip(track_keys, track_values): - track_info[k] = v - keys = ["track", "info"] - values = [track_id, track_info] - track_obj = {} - for key, value in zip(keys, values): - track_obj[key] = value - tracklist.append(track_obj) + track_objects = playlist.tracks_obj + original_count = len(track_objects) + unique_tracks = set() + unique_tracks_add = unique_tracks.add + track_objects = [ + x for x in track_objects if not (x in unique_tracks or unique_tracks_add(x)) + ] - final_count = len(tracklist) - if original_count - final_count != 0: - update = {"tracks": tracklist, "url": None} - await playlist.edit(update) + tracklist = [] + for track in track_objects: + track_keys = track._info.keys() + track_values = track._info.values() + track_id = track.track_identifier + track_info = {} + for k, v in zip(track_keys, track_values): + track_info[k] = v + keys = ["track", "info"] + values = [track_id, track_info] + track_obj = {} + for key, value in zip(keys, values): + track_obj[key] = value + tracklist.append(track_obj) - if original_count - final_count != 0: - await self._embed_msg( - ctx, - _( - "Removed {track_diff} duplicated " - "tracks from {name} (`{id}`) [**{scope}**] playlist." - ).format( - name=playlist.name, - id=playlist.id, - track_diff=original_count - final_count, - scope=scope_name, - ), - ) - else: - await self._embed_msg( - ctx, - _("{name} (`{id}`) [**{scope}**] playlist has no duplicate tracks.").format( - name=playlist.name, id=playlist.id, scope=scope_name - ), - ) + final_count = len(tracklist) + if original_count - final_count != 0: + update = {"tracks": tracklist, "url": None} + await playlist.edit(update) + + if original_count - final_count != 0: + await self._embed_msg( + ctx, + _( + "Removed {track_diff} duplicated " + "tracks from {name} (`{id}`) [**{scope}**] playlist." 
+ ).format( + name=playlist.name, + id=playlist.id, + track_diff=original_count - final_count, + scope=scope_name, + ), + ) + return + else: + await self._embed_msg( + ctx, + _("{name} (`{id}`) [**{scope}**] playlist has no duplicate tracks.").format( + name=playlist.name, id=playlist.id, scope=scope_name + ), + ) + return @checks.is_owner() @playlist.command(name="download", usage=" [v2=False] [args]") From 77742179c01bd820b6488ee3f7ce103c77c87614 Mon Sep 17 00:00:00 2001 From: PredaaA <46051820+PredaaA@users.noreply.github.com> Date: Sat, 23 Nov 2019 00:54:01 +0100 Subject: [PATCH 28/41] [Core] Add [p]listdisabled command (#3118) * Update core_commands.py * Create 3115.feature.rst * Rename 3115.feature.rst to 3118.feature.rst * Add a message if there's any disabled commands. * Use the same format as [p]command disable/enable * Make strings more i18n friendly. * Flame's requested changes. --- changelog.d/3118.feature.rst | 1 + redbot/core/core_commands.py | 56 +++++++++++++++++++++++++++++++++++- 2 files changed, 56 insertions(+), 1 deletion(-) create mode 100644 changelog.d/3118.feature.rst diff --git a/changelog.d/3118.feature.rst b/changelog.d/3118.feature.rst new file mode 100644 index 000000000..dab83632a --- /dev/null +++ b/changelog.d/3118.feature.rst @@ -0,0 +1 @@ +Adds a command to list disabled commands globally or per guild. diff --git a/redbot/core/core_commands.py b/redbot/core/core_commands.py index 218db18b1..f0362b230 100644 --- a/redbot/core/core_commands.py +++ b/redbot/core/core_commands.py @@ -33,7 +33,14 @@ from . import ( ) from .utils import create_backup from .utils.predicates import MessagePredicate -from .utils.chat_formatting import humanize_timedelta, pagify, box, inline, humanize_list +from .utils.chat_formatting import ( + box, + humanize_list, + humanize_number, + humanize_timedelta, + inline, + pagify, +) from .commands.requires import PrivilegeLevel @@ -1877,6 +1884,53 @@ class Core(commands.Cog, CoreLogic): """Manage the bot's commands.""" pass + @command_manager.group(name="listdisabled", invoke_without_command=True) + async def list_disabled(self, ctx: commands.Context): + """ + List disabled commands. + + If you're the bot owner, this will show global disabled commands by default. 
+ """ + # Select the scope based on the author's privileges + if await ctx.bot.is_owner(ctx.author): + await ctx.invoke(self.list_disabled_global) + else: + await ctx.invoke(self.list_disabled_guild) + + @list_disabled.command(name="global") + async def list_disabled_global(self, ctx: commands.Context): + """List disabled commands globally.""" + disabled_list = await self.bot._config.disabled_commands() + if not disabled_list: + return await ctx.send(_("There aren't any globally disabled commands.")) + + if len(disabled_list) > 1: + header = _("{} commands are disabled globally.\n").format( + humanize_number(len(disabled_list)) + ) + else: + header = _("1 command is disabled globally.\n") + paged = [box(x) for x in pagify(humanize_list(disabled_list), page_length=1000)] + paged[0] = header + paged[0] + await ctx.send_interactive(paged) + + @list_disabled.command(name="guild") + async def list_disabled_guild(self, ctx: commands.Context): + """List disabled commands in this server.""" + disabled_list = await self.bot._config.guild(ctx.guild).disabled_commands() + if not disabled_list: + return await ctx.send(_("There aren't any disabled commands in {}.").format(ctx.guild)) + + if len(disabled_list) > 1: + header = _("{} commands are disabled in {}.\n").format( + humanize_number(len(disabled_list)), ctx.guild + ) + else: + header = _("1 command is disabled in {}.\n").format(ctx.guild) + paged = [box(x) for x in pagify(humanize_list(disabled_list), page_length=1000)] + paged[0] = header + paged[0] + await ctx.send_interactive(paged) + @command_manager.group(name="disable", invoke_without_command=True) async def command_disable(self, ctx: commands.Context, *, command: str): """Disable a command. From ec834a06662ea8a2e939bc357dd6e60b81f553df Mon Sep 17 00:00:00 2001 From: Vexed <51716387+Vexed01@users.noreply.github.com> Date: Fri, 22 Nov 2019 23:54:39 +0000 Subject: [PATCH 29/41] Audio playlist capitalisation changes (#3048) * commit une * changelog * help me whi can i mot thnik * i can make changelogs correctly --- changelog.d/audio/3048.bugfix.rst | 1 + redbot/cogs/audio/audio.py | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) create mode 100644 changelog.d/audio/3048.bugfix.rst diff --git a/changelog.d/audio/3048.bugfix.rst b/changelog.d/audio/3048.bugfix.rst new file mode 100644 index 000000000..6a5353584 --- /dev/null +++ b/changelog.d/audio/3048.bugfix.rst @@ -0,0 +1 @@ +Unify capitalisation in ``[p]help playlist``. diff --git a/redbot/cogs/audio/audio.py b/redbot/cogs/audio/audio.py index 202edd3ef..0f148c3ae 100644 --- a/redbot/cogs/audio/audio.py +++ b/redbot/cogs/audio/audio.py @@ -3276,8 +3276,8 @@ class Audio(commands.Cog): ​ ​ ​ ​ ​ ​ ​ ​ Only editable by bot owner. ​ ​ ​ ​ **Guild**: ​ ​ ​ ​ ​ ​ ​ ​ Visible to all users in this guild. - ​ ​ ​ ​ ​ ​ ​ ​ Editable By Bot Owner, Guild Owner, Guild Admins, - ​ ​ ​ ​ ​ ​ ​ ​ Guild Mods, DJ Role and playlist creator. + ​ ​ ​ ​ ​ ​ ​ ​ Editable by bot owner, guild owner, guild admins, + ​ ​ ​ ​ ​ ​ ​ ​ guild mods, DJ role and playlist creator. ​ ​ ​ ​ **User**: ​ ​ ​ ​ ​ ​ ​ ​ Visible to all bot users, if --author is passed. ​ ​ ​ ​ ​ ​ ​ ​ Editable by bot owner and creator. 
From bc5c2513f6054a28479230521dd7d5d2350d5877 Mon Sep 17 00:00:00 2001 From: jack1142 <6032823+jack1142@users.noreply.github.com> Date: Sat, 23 Nov 2019 00:59:38 +0100 Subject: [PATCH 30/41] [Audio] Improve help string for `[p]audioset emptydisconnect` (#3051) * enhance(audio): improve help string for [p]audioset emptydisconnect * chore(changelog): add towncrier entry --- changelog.d/audio/3051.enhance.rst | 1 + redbot/cogs/audio/audio.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/audio/3051.enhance.rst diff --git a/changelog.d/audio/3051.enhance.rst b/changelog.d/audio/3051.enhance.rst new file mode 100644 index 000000000..9d41828f1 --- /dev/null +++ b/changelog.d/audio/3051.enhance.rst @@ -0,0 +1 @@ +Improved explanation in help string for ``[p]audioset emptydisconnect``. \ No newline at end of file diff --git a/redbot/cogs/audio/audio.py b/redbot/cogs/audio/audio.py index 0f148c3ae..07cbeb91a 100644 --- a/redbot/cogs/audio/audio.py +++ b/redbot/cogs/audio/audio.py @@ -1000,7 +1000,7 @@ class Audio(commands.Cog): @audioset.command() @checks.mod_or_permissions(administrator=True) async def emptydisconnect(self, ctx: commands.Context, seconds: int): - """Auto-disconnection after x seconds while stopped. 0 to disable.""" + """Auto-disconnect from channel when bot is alone in it for x seconds. 0 to disable.""" if seconds < 0: return await self._embed_msg(ctx, _("Can't be less than zero.")) if 10 > seconds > 0: From f0836d7182d99239d1fde24cf2231c6ebf206f72 Mon Sep 17 00:00:00 2001 From: jack1142 <6032823+jack1142@users.noreply.github.com> Date: Sat, 23 Nov 2019 22:58:35 +0100 Subject: [PATCH 31/41] [Core] Dispatch `on_red_api_tokens_update` event on api keys update (#3146) * feat: dispatch `on_red_api_tokens_update` event on api keys update * docs: add event reference in Shared API Keys docs * chore(changelog): add tonwcrier entries * fix: wrap dispatched api tokens in MappingProxyType * docs: reflect change of type change to read-only Mapping --- changelog.d/3134.docs.rst | 1 + changelog.d/3134.feature.rst | 1 + docs/framework_apikeys.rst | 14 ++++++++++++++ redbot/core/bot.py | 2 ++ 4 files changed, 18 insertions(+) create mode 100644 changelog.d/3134.docs.rst create mode 100644 changelog.d/3134.feature.rst diff --git a/changelog.d/3134.docs.rst b/changelog.d/3134.docs.rst new file mode 100644 index 000000000..2b294cb65 --- /dev/null +++ b/changelog.d/3134.docs.rst @@ -0,0 +1 @@ +Add event reference for ``on_red_api_tokens_update`` event in Shared API Keys docs. \ No newline at end of file diff --git a/changelog.d/3134.feature.rst b/changelog.d/3134.feature.rst new file mode 100644 index 000000000..272a2c0b3 --- /dev/null +++ b/changelog.d/3134.feature.rst @@ -0,0 +1 @@ +New event ``on_red_api_tokens_update`` is now dispatched when shared api keys for the service are updated. \ No newline at end of file diff --git a/docs/framework_apikeys.rst b/docs/framework_apikeys.rst index 3d68854cd..b27d2e713 100644 --- a/docs/framework_apikeys.rst +++ b/docs/framework_apikeys.rst @@ -46,3 +46,17 @@ Basic Usage if youtube_keys.get("api_key") is None: return await ctx.send("The YouTube API key has not been set.") # Use the API key to access content as you normally would + + +*************** +Event Reference +*************** + +.. function:: on_red_api_tokens_update(service_name, api_tokens) + + Dispatched when service's api keys are updated. + + :param service_name: Name of the service. 
+ :type service_name: :class:`str` + :param api_tokens: New Mapping of token names to tokens. This contains api tokens that weren't changed too. + :type api_tokens: Mapping[:class:`str`, :class:`str`] diff --git a/redbot/core/bot.py b/redbot/core/bot.py index d2d26cd70..18e57055d 100644 --- a/redbot/core/bot.py +++ b/redbot/core/bot.py @@ -8,6 +8,7 @@ from enum import Enum from importlib.machinery import ModuleSpec from pathlib import Path from typing import Optional, Union, List, Dict, NoReturn +from types import MappingProxyType import discord from discord.ext.commands import when_mentioned_or @@ -582,6 +583,7 @@ class RedBase(commands.GroupMixin, commands.bot.BotBase, RPCMixin): # pylint: d async with self._config.custom(SHARED_API_TOKENS, service_name).all() as group: group.update(tokens) + self.dispatch("red_api_tokens_update", service_name, MappingProxyType(group)) async def remove_shared_api_tokens(self, service_name: str, *token_names: str): """ From d07e718ab8ab67e991c4a820e7ad32cf0b05fd4f Mon Sep 17 00:00:00 2001 From: Flame442 <34169552+Flame442@users.noreply.github.com> Date: Mon, 2 Dec 2019 11:41:28 -0500 Subject: [PATCH 32/41] Fixes errors on repo add from empty string values for `install_msg` (#3153) * Fixes errors on repo add from empty string values for `install_msg` * Create 3153.bugfix.rst --- changelog.d/downloader/3153.bugfix.rst | 1 + redbot/cogs/downloader/downloader.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/downloader/3153.bugfix.rst diff --git a/changelog.d/downloader/3153.bugfix.rst b/changelog.d/downloader/3153.bugfix.rst new file mode 100644 index 000000000..fd8d74945 --- /dev/null +++ b/changelog.d/downloader/3153.bugfix.rst @@ -0,0 +1 @@ +Fixed an error on repo add from empty string values for the `install_msg` info.json field. diff --git a/redbot/cogs/downloader/downloader.py b/redbot/cogs/downloader/downloader.py index b12ac7ae2..094f33f3e 100644 --- a/redbot/cogs/downloader/downloader.py +++ b/redbot/cogs/downloader/downloader.py @@ -462,7 +462,7 @@ class Downloader(commands.Cog): ) else: await ctx.send(_("Repo `{name}` successfully added.").format(name=name)) - if repo.install_msg is not None: + if repo.install_msg: await ctx.send(repo.install_msg.replace("[p]", ctx.prefix)) @repo.command(name="delete", aliases=["remove", "del"], usage="") From 02d6b7d6588adc5fcc852c2383646a64bf221ada Mon Sep 17 00:00:00 2001 From: jack1142 <6032823+jack1142@users.noreply.github.com> Date: Sun, 8 Dec 2019 00:56:23 +0100 Subject: [PATCH 33/41] [Launcher] To make experience with launcher amazing /s (#3175) * Update launcher.py * Create 3174.bugfix.rst * Revert launcher.py * Update launcher.py * Update launcher.py --- changelog.d/3174.bugfix.rst | 1 + redbot/launcher.py | 7 ++++++- 2 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 changelog.d/3174.bugfix.rst diff --git a/changelog.d/3174.bugfix.rst b/changelog.d/3174.bugfix.rst new file mode 100644 index 000000000..cd743eefe --- /dev/null +++ b/changelog.d/3174.bugfix.rst @@ -0,0 +1 @@ +``--owner`` and ``-p`` cli flags now work when added from launcher. 
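
PATCH 31/41 above documents and dispatches the new ``on_red_api_tokens_update`` event with a read-only mapping of tokens. A possible consumer is sketched below; it assumes the standard ``commands.Cog.listener`` decorator is available, and the cog class, service name, and token name are purely hypothetical.

.. code-block:: python

    from redbot.core import commands


    class TokenWatcher(commands.Cog):
        """Hypothetical cog that refreshes a cached token when owners update it."""

        def __init__(self, bot):
            self.bot = bot
            self.cached_token = None

        @commands.Cog.listener()
        async def on_red_api_tokens_update(self, service_name, api_tokens):
            # api_tokens maps token names to tokens and also contains
            # tokens that were not changed by this particular update.
            if service_name == "example_service":
                self.cached_token = api_tokens.get("token")

Because the mapping is wrapped in ``MappingProxyType`` before being dispatched, listeners receive a read-only view and cannot mutate the stored tokens through it.
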
diff --git a/redbot/launcher.py b/redbot/launcher.py index 481d2428b..0188d9da0 100644 --- a/redbot/launcher.py +++ b/redbot/launcher.py @@ -201,7 +201,12 @@ def cli_flag_getter(): choice = user_choice() if choice == "y": print("Done selecting cli flags") - return flags + # this is safe only because arguments like prefix and owner id can't have spaces + cli_flags = [] + for flag_with_spaces in flags: + for flag in flag_with_spaces.split(): + cli_flags.append(flag) + return cli_flags else: print("Starting over") return cli_flag_getter() From b4186d6724b34414715edf6893c53f035f048bd9 Mon Sep 17 00:00:00 2001 From: Flame442 <34169552+Flame442@users.noreply.github.com> Date: Sat, 7 Dec 2019 19:24:07 -0500 Subject: [PATCH 34/41] Clarifies what `[p]backup` does (#3172) * Clarify `[p]backup` * Create 3172.enhance.rst --- changelog.d/3172.enhance.rst | 1 + redbot/core/core_commands.py | 3 ++- 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 changelog.d/3172.enhance.rst diff --git a/changelog.d/3172.enhance.rst b/changelog.d/3172.enhance.rst new file mode 100644 index 000000000..8838abbed --- /dev/null +++ b/changelog.d/3172.enhance.rst @@ -0,0 +1 @@ +Clarified that ``[p]backup`` saves the **bot's** data in the help text. diff --git a/redbot/core/core_commands.py b/redbot/core/core_commands.py index f0362b230..763a9391a 100644 --- a/redbot/core/core_commands.py +++ b/redbot/core/core_commands.py @@ -1323,8 +1323,9 @@ class Core(commands.Cog, CoreLogic): @commands.command() @checks.is_owner() async def backup(self, ctx: commands.Context, *, backup_dir: str = None): - """Creates a backup of all data for the instance. + """Creates a backup of all data for this bot instance. + This backs up the bot's data and settings. You may provide a path to a directory for the backup archive to be placed in. If the directory does not exist, the bot will attempt to create it. From d136d594f48804c05e21da3a671c9acc7e37b60e Mon Sep 17 00:00:00 2001 From: jack1142 <6032823+jack1142@users.noreply.github.com> Date: Sun, 8 Dec 2019 01:29:41 +0100 Subject: [PATCH 35/41] [Changelogs] Remove info about branch support in revision argument (#3158) * chore(changelog): remove info about branch support * chore(changelog): fix grammar issue --- changelog.d/downloader/2527.feature.2.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/changelog.d/downloader/2527.feature.2.rst b/changelog.d/downloader/2527.feature.2.rst index 116d40435..72a7b7d13 100644 --- a/changelog.d/downloader/2527.feature.2.rst +++ b/changelog.d/downloader/2527.feature.2.rst @@ -1 +1 @@ -Added ``[p]cog installversion `` command that allows you to install cogs from specified revision (commit, tag, branch) of given repo. When using this command, cog will automatically be pinned. \ No newline at end of file +Added ``[p]cog installversion `` command that allows you to install cogs from specified revision (commit, tag) of given repo. When using this command, the cog will automatically be pinned. 
\ No newline at end of file From 672050727f836374f05f6fa3aa12f0150407ad8d Mon Sep 17 00:00:00 2001 From: jack1142 <6032823+jack1142@users.noreply.github.com> Date: Sun, 8 Dec 2019 01:31:43 +0100 Subject: [PATCH 36/41] [Downloader] Ensure consistent output from git commands (#3160) * fix(downloader): ensure consistent output from git commands * chore(changelog): add towncrier entry --- changelog.d/downloader/3160.misc.rst | 1 + redbot/cogs/downloader/repo_manager.py | 6 ++++++ 2 files changed, 7 insertions(+) create mode 100644 changelog.d/downloader/3160.misc.rst diff --git a/changelog.d/downloader/3160.misc.rst b/changelog.d/downloader/3160.misc.rst new file mode 100644 index 000000000..1093dc0ee --- /dev/null +++ b/changelog.d/downloader/3160.misc.rst @@ -0,0 +1 @@ +Ensure consistent output from git commands for purpose of parsing. \ No newline at end of file diff --git a/redbot/cogs/downloader/repo_manager.py b/redbot/cogs/downloader/repo_manager.py index 255ca8d67..51a01ae37 100644 --- a/redbot/cogs/downloader/repo_manager.py +++ b/redbot/cogs/downloader/repo_manager.py @@ -513,6 +513,12 @@ class Repo(RepoJSONMixin): """ env = os.environ.copy() env["GIT_TERMINAL_PROMPT"] = "0" + # attempt to force all output to plain ascii english + # some methods that parse output may expect it + # according to gettext manual both variables have to be set: + # https://www.gnu.org/software/gettext/manual/gettext.html#Locale-Environment-Variables + env["LC_ALL"] = "C" + env["LANGUAGE"] = "C" kwargs["env"] = env async with self._repo_lock: p: CompletedProcess = await self._loop.run_in_executor( From 0f626140553469fef111c03739d9b6a138fa785c Mon Sep 17 00:00:00 2001 From: jack1142 <6032823+jack1142@users.noreply.github.com> Date: Sun, 8 Dec 2019 01:54:49 +0100 Subject: [PATCH 37/41] [Downloader] Disable all git auth prompts on clone/pull (#3159) * fix(downloader): disable all git auth prompts on clone/pull * chore(changelog): add towncrier entry --- changelog.d/downloader/3159.bugfix.rst | 1 + redbot/cogs/downloader/repo_manager.py | 15 ++++++++++++--- 2 files changed, 13 insertions(+), 3 deletions(-) create mode 100644 changelog.d/downloader/3159.bugfix.rst diff --git a/changelog.d/downloader/3159.bugfix.rst b/changelog.d/downloader/3159.bugfix.rst new file mode 100644 index 000000000..2177da2ce --- /dev/null +++ b/changelog.d/downloader/3159.bugfix.rst @@ -0,0 +1 @@ +Disable all git auth prompts when adding/updating repo with Downloader. 
\ No newline at end of file diff --git a/redbot/cogs/downloader/repo_manager.py b/redbot/cogs/downloader/repo_manager.py index 51a01ae37..8e8ba7f33 100644 --- a/redbot/cogs/downloader/repo_manager.py +++ b/redbot/cogs/downloader/repo_manager.py @@ -87,13 +87,21 @@ class ProcessFormatter(Formatter): class Repo(RepoJSONMixin): - GIT_CLONE = "git clone --recurse-submodules -b {branch} {url} {folder}" - GIT_CLONE_NO_BRANCH = "git clone --recurse-submodules {url} {folder}" + GIT_CLONE = ( + "git clone -c credential.helper= -c core.askpass=" + " --recurse-submodules -b {branch} {url} {folder}" + ) + GIT_CLONE_NO_BRANCH = ( + "git -c credential.helper= -c core.askpass= clone --recurse-submodules {url} {folder}" + ) GIT_CURRENT_BRANCH = "git -C {path} symbolic-ref --short HEAD" GIT_CURRENT_COMMIT = "git -C {path} rev-parse HEAD" GIT_LATEST_COMMIT = "git -C {path} rev-parse {branch}" GIT_HARD_RESET = "git -C {path} reset --hard origin/{branch} -q" - GIT_PULL = "git -C {path} pull --recurse-submodules -q --ff-only" + GIT_PULL = ( + "git -c credential.helper= -c core.askpass= -C {path}" + " pull --recurse-submodules -q --ff-only" + ) GIT_DIFF_FILE_STATUS = ( "git -C {path} diff-tree --no-commit-id --name-status" " -r -z --line-prefix='\t' {old_rev} {new_rev}" @@ -513,6 +521,7 @@ class Repo(RepoJSONMixin): """ env = os.environ.copy() env["GIT_TERMINAL_PROMPT"] = "0" + env.pop("GIT_ASKPASS", None) # attempt to force all output to plain ascii english # some methods that parse output may expect it # according to gettext manual both variables have to be set: From 9a051ef2c66c4d254f2379befc01d877b03bae5a Mon Sep 17 00:00:00 2001 From: jack1142 <6032823+jack1142@users.noreply.github.com> Date: Sun, 8 Dec 2019 23:58:43 +0100 Subject: [PATCH 38/41] [Downloader] Fix `[p]findcog` not working with different levels of imports (#3178) * Update downloader.py * Create 3177.bugfix.rst --- changelog.d/downloader/3177.bugfix.rst | 1 + redbot/cogs/downloader/downloader.py | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) create mode 100644 changelog.d/downloader/3177.bugfix.rst diff --git a/changelog.d/downloader/3177.bugfix.rst b/changelog.d/downloader/3177.bugfix.rst new file mode 100644 index 000000000..462e8d4ca --- /dev/null +++ b/changelog.d/downloader/3177.bugfix.rst @@ -0,0 +1 @@ +``[p]findcog`` now properly works for cogs with less typical folder structure. diff --git a/redbot/cogs/downloader/downloader.py b/redbot/cogs/downloader/downloader.py index 094f33f3e..7438e5f4f 100644 --- a/redbot/cogs/downloader/downloader.py +++ b/redbot/cogs/downloader/downloader.py @@ -1179,7 +1179,7 @@ class Downloader(commands.Cog): """ splitted = instance.__module__.split(".") - return splitted[-2] + return splitted[0] @commands.command() async def findcog(self, ctx: commands.Context, command_name: str) -> None: From 064d97f87bced45215ed0444dd7c0aa8a4822fed Mon Sep 17 00:00:00 2001 From: Tomas S Date: Sun, 8 Dec 2019 23:59:53 +0100 Subject: [PATCH 39/41] [Downloader] Catch and handle erorr in update_all when target repository/branch is missing (#3080) * [Downloader] Catch and handle erorr in update_all when target repository/branch is removed from remote * Rewrite fix, remove ctx from repo_manager, edit docstring, add annotations * Text formatting * Group failed repo messages into padded table, catch single updated repo fails * Error catching v2; repo_manager design change * Docstrings, typos and changelog * Add Optional to update_repos annotatition * Wrong logic * Clear-er log message. 
* add format_failed_repos, change _repo_update for failed messages * Merge cog updating with fail repo logic; Filter out failed repos * Merge cog updating with fail repo logic; Cog updating logic shuffled to support sending fails at the end * Docstring typo * format_failed_repos - proper docstring * repo_manager.update_repos argument name fix * downloader._cog_checkforupdates added missed failed message * downloader._cog_update_logic place back return on some errors * Purge unused stuff from downloader._repo_update * downloader._cog_update_logic Change exception catching * _cog_update_logic purging obsolete * Remove obsolete 'message' from _cog_checkforupdates * Fix forgotten ctx.send * Wording * Removed obsolete 'message' * Fix wrong type hint in , update docstring * repo update logic fix * format_failed_repos type hint and docstring repair * Extend _get_cogs_to_check with 'update_repos' * Fix type mangling in _get_cogs_to_check * fix: typo Co-Authored-By: jack1142 <6032823+jack1142@users.noreply.github.com> * _repo_update; Added single repo up-to-date message --- changelog.d/downloader/2936.bugfix.rst | 1 + changelog.d/downloader/3080.misc.1.rst | 1 + changelog.d/downloader/3080.misc.2.rst | 1 + redbot/cogs/downloader/downloader.py | 202 ++++++++++++++++--------- redbot/cogs/downloader/repo_manager.py | 53 +++++-- 5 files changed, 178 insertions(+), 80 deletions(-) create mode 100644 changelog.d/downloader/2936.bugfix.rst create mode 100644 changelog.d/downloader/3080.misc.1.rst create mode 100644 changelog.d/downloader/3080.misc.2.rst diff --git a/changelog.d/downloader/2936.bugfix.rst b/changelog.d/downloader/2936.bugfix.rst new file mode 100644 index 000000000..3e1e5c98f --- /dev/null +++ b/changelog.d/downloader/2936.bugfix.rst @@ -0,0 +1 @@ +Catch errors if remote repository or branch is deleted, notify user which repository failed and continue updating others. \ No newline at end of file diff --git a/changelog.d/downloader/3080.misc.1.rst b/changelog.d/downloader/3080.misc.1.rst new file mode 100644 index 000000000..4ce910b55 --- /dev/null +++ b/changelog.d/downloader/3080.misc.1.rst @@ -0,0 +1 @@ +`RepoManager.update_all_repos` replaced by new method `update_repos` which additionally handles failing repositories. \ No newline at end of file diff --git a/changelog.d/downloader/3080.misc.2.rst b/changelog.d/downloader/3080.misc.2.rst new file mode 100644 index 000000000..a01f183b6 --- /dev/null +++ b/changelog.d/downloader/3080.misc.2.rst @@ -0,0 +1 @@ +Added `Downloader.format_failed_repos` for formatting error message of repos failing to update. 
\ No newline at end of file diff --git a/redbot/cogs/downloader/downloader.py b/redbot/cogs/downloader/downloader.py index 7438e5f4f..26262bcce 100644 --- a/redbot/cogs/downloader/downloader.py +++ b/redbot/cogs/downloader/downloader.py @@ -5,7 +5,7 @@ import re import shutil import sys from pathlib import Path -from typing import Tuple, Union, Iterable, Optional, Dict, Set, List, cast +from typing import Tuple, Union, Iterable, Collection, Optional, Dict, Set, List, cast from collections import defaultdict import discord @@ -499,24 +499,24 @@ class Downloader(commands.Cog): """Update all repos, or ones of your choosing.""" async with ctx.typing(): updated: Set[str] - if not repos: - updated = {repo.name for repo in await self._repo_manager.update_all_repos()} - else: - updated = set() - for repo in repos: - old, new = await repo.update() - if old != new: - updated.add(repo.name) + + updated_repos, failed = await self._repo_manager.update_repos(repos) + updated = {repo.name for repo in updated_repos} if updated: message = _("Repo update completed successfully.") message += _("\nUpdated: ") + humanize_list(tuple(map(inline, updated))) - elif repos is None: - await ctx.send(_("All installed repos are already up to date.")) - return + elif not repos: + message = _("All installed repos are already up to date.") else: - await ctx.send(_("These repos are already up to date.")) - return + if len(updated_repos) > 1: + message = _("These repos are already up to date.") + else: + message = _("This repo is already up to date.") + + if failed: + message += "\n" + self.format_failed_repos(failed) + await ctx.send(message) @commands.group() @@ -721,8 +721,9 @@ class Downloader(commands.Cog): This command doesn't update cogs, it only checks for updates. Use `[p]cog update` to update cogs. 
""" + async with ctx.typing(): - cogs_to_check = await self._get_cogs_to_check() + cogs_to_check, failed = await self._get_cogs_to_check() cogs_to_update, libs_to_update = await self._available_updates(cogs_to_check) message = "" if cogs_to_update: @@ -735,10 +736,14 @@ class Downloader(commands.Cog): message += _("\nThese shared libraries can be updated: ") + humanize_list( tuple(map(inline, libnames)) ) - if message: - await ctx.send(message) - else: - await ctx.send(_("All installed cogs are up to date.")) + + if not message: + message = _("All installed cogs are up to date.") + + if failed: + message += "\n" + self.format_failed_repos(failed) + + await ctx.send(message) @cog.command(name="update") async def _cog_update(self, ctx: commands.Context, *cogs: InstalledCog) -> None: @@ -774,11 +779,22 @@ class Downloader(commands.Cog): rev: Optional[str] = None, cogs: Optional[List[InstalledModule]] = None, ) -> None: + message = "" + failed_repos = set() + updates_available = set() + async with ctx.typing(): # this is enough to be sure that `rev` is not None (based on calls to this method) if repo is not None: rev = cast(str, rev) - await repo.update() + + try: + await repo.update() + except errors.UpdateError: + message = self.format_failed_repos([repo.name]) + await ctx.send(message) + return + try: commit = await repo.get_full_sha1(rev) except errors.AmbiguousRevision as e: @@ -794,61 +810,71 @@ class Downloader(commands.Cog): await ctx.send(msg) return except errors.UnknownRevision: - await ctx.send( - _("Error: there is no revision `{rev}` in repo `{repo.name}`").format( - rev=rev, repo=repo - ) - ) + message += _( + "Error: there is no revision `{rev}` in repo `{repo.name}`" + ).format(rev=rev, repo=repo) + await ctx.send(message) return + await repo.checkout(commit) - cogs_to_check = await self._get_cogs_to_check(repos=[repo], cogs=cogs) + cogs_to_check, __ = await self._get_cogs_to_check( + repos=[repo], cogs=cogs, update_repos=False + ) + else: - cogs_to_check = await self._get_cogs_to_check(repos=repos, cogs=cogs) + cogs_to_check, check_failed = await self._get_cogs_to_check(repos=repos, cogs=cogs) + failed_repos.update(check_failed) pinned_cogs = {cog for cog in cogs_to_check if cog.pinned} cogs_to_check -= pinned_cogs if not cogs_to_check: - message = _("There were no cogs to check.") + message += _("There were no cogs to check.") if pinned_cogs: cognames = [cog.name for cog in pinned_cogs] message += _( "\nThese cogs are pinned and therefore weren't checked: " ) + humanize_list(tuple(map(inline, cognames))) - await ctx.send(message) - return - cogs_to_update, libs_to_update = await self._available_updates(cogs_to_check) - - updates_available = cogs_to_update or libs_to_update - cogs_to_update, filter_message = self._filter_incorrect_cogs(cogs_to_update) - message = "" - if updates_available: - updated_cognames, message = await self._update_cogs_and_libs( - cogs_to_update, libs_to_update - ) else: - if repos: - message = _("Cogs from provided repos are already up to date.") - elif repo: - if cogs: - message = _("Provided cogs are already up to date with this revision.") - else: - message = _( - "Cogs from provided repo are already up to date with this revision." 
- ) + cogs_to_update, libs_to_update = await self._available_updates(cogs_to_check) + + updates_available = cogs_to_update or libs_to_update + cogs_to_update, filter_message = self._filter_incorrect_cogs(cogs_to_update) + + if updates_available: + updated_cognames, message = await self._update_cogs_and_libs( + cogs_to_update, libs_to_update + ) else: - if cogs: - message = _("Provided cogs are already up to date.") + if repos: + message += _("Cogs from provided repos are already up to date.") + elif repo: + if cogs: + message += _( + "Provided cogs are already up to date with this revision." + ) + else: + message += _( + "Cogs from provided repo are already up to date with this revision." + ) else: - message = _("All installed cogs are already up to date.") - if repo is not None: - await repo.checkout(repo.branch) - if pinned_cogs: - cognames = [cog.name for cog in pinned_cogs] - message += _( - "\nThese cogs are pinned and therefore weren't checked: " - ) + humanize_list(tuple(map(inline, cognames))) - message += filter_message + if cogs: + message += _("Provided cogs are already up to date.") + else: + message += _("All installed cogs are already up to date.") + if repo is not None: + await repo.checkout(repo.branch) + if pinned_cogs: + cognames = [cog.name for cog in pinned_cogs] + message += _( + "\nThese cogs are pinned and therefore weren't checked: " + ) + humanize_list(tuple(map(inline, cognames))) + message += filter_message + + if failed_repos: + message += "\n" + self.format_failed_repos(failed_repos) + await ctx.send(message) + if updates_available and updated_cognames: await self._ask_for_cog_reload(ctx, updated_cognames) @@ -1024,23 +1050,31 @@ class Downloader(commands.Cog): *, repos: Optional[Iterable[Repo]] = None, cogs: Optional[Iterable[InstalledModule]] = None, - ) -> Set[InstalledModule]: + update_repos: bool = True, + ) -> Tuple[Set[InstalledModule], List[str]]: + failed = [] if not (cogs or repos): - await self._repo_manager.update_all_repos() - cogs_to_check = {cog for cog in await self.installed_cogs() if cog.repo is not None} + if update_repos: + __, failed = await self._repo_manager.update_repos() + + cogs_to_check = { + cog + for cog in await self.installed_cogs() + if cog.repo is not None and cog.repo.name not in failed + } else: # this is enough to be sure that `cogs` is not None (based on if above) if not repos: cogs = cast(Iterable[InstalledModule], cogs) repos = {cog.repo for cog in cogs if cog.repo is not None} - for repo in repos: - if await repo.is_on_branch(): - exit_to_commit = None - else: - exit_to_commit = repo.commit - await repo.update() - await repo.checkout(exit_to_commit) + if update_repos: + __, failed = await self._repo_manager.update_repos(repos) + + if failed: + # remove failed repos + repos = {repo for repo in repos if repo.name not in failed} + if cogs: cogs_to_check = {cog for cog in cogs if cog.repo is not None and cog.repo in repos} else: @@ -1050,7 +1084,7 @@ class Downloader(commands.Cog): if cog.repo is not None and cog.repo in repos } - return cogs_to_check + return (cogs_to_check, failed) async def _update_cogs_and_libs( self, cogs_to_update: Iterable[Installable], libs_to_update: Iterable[Installable] @@ -1207,3 +1241,31 @@ class Downloader(commands.Cog): msg = _("This command is not provided by a cog.") await ctx.send(box(msg)) + + @staticmethod + def format_failed_repos(failed: Collection[str]) -> str: + """Format collection of ``Repo.name``'s into failed message. 
+ + Parameters + ---------- + failed : Collection + Collection of ``Repo.name`` + + Returns + ------- + str + formatted message + """ + + message = ( + _("Failed to update the following repositories:") + if len(failed) > 1 + else _("Failed to update the following repository:") + ) + message += " " + humanize_list(tuple(map(inline, failed))) + "\n" + message += _( + "The repository's branch might have been removed or" + " the repository is no longer accessible at set url." + " See logs for more information." + ) + return message diff --git a/redbot/cogs/downloader/repo_manager.py b/redbot/cogs/downloader/repo_manager.py index 8e8ba7f33..0edf29fa6 100644 --- a/redbot/cogs/downloader/repo_manager.py +++ b/redbot/cogs/downloader/repo_manager.py @@ -795,7 +795,10 @@ class Repo(RepoJSONMixin): ------- `tuple` of `str` :py:code`(old commit hash, new commit hash)` - + + Raises + ------- + `UpdateError` - if git pull results with non-zero exit code """ old_commit = await self.latest_commit() @@ -1134,28 +1137,58 @@ class RepoManager: Tuple[Repo, Tuple[str, str]] A 2-`tuple` with Repo object and a 2-`tuple` of `str` containing old and new commit hashes. - """ repo = self._repos[repo_name] old, new = await repo.update() return (repo, (old, new)) - async def update_all_repos(self) -> Dict[Repo, Tuple[str, str]]: - """Call `Repo.update` on all repositories. + async def update_repos( + self, repos: Optional[Iterable[Repo]] = None + ) -> Tuple[Dict[Repo, Tuple[str, str]], List[str]]: + """Calls `Repo.update` on passed repositories and + catches failing ones. + + Calling without params updates all currently installed repos. + + Parameters + ---------- + repos: Iterable + Iterable of Repos, None to update all Returns ------- - Dict[Repo, Tuple[str, str]] + tuple of Dict and list A mapping of `Repo` objects that received new commits to a 2-`tuple` of `str` containing old and new commit hashes. - + + `list` of failed `Repo` names """ + failed = [] ret = {} - for repo_name, __ in self._repos.items(): - repo, (old, new) = await self.update_repo(repo_name) + + # select all repos if not specified + if not repos: + repos = self.repos + + for repo in repos: + try: + updated_repo, (old, new) = await self.update_repo(repo.name) + except errors.UpdateError as err: + log.error( + "Repository '%s' failed to update. URL: '%s' on branch '%s'", + repo.name, + repo.url, + repo.branch, + exc_info=err, + ) + + failed.append(repo.name) + continue + if old != new: - ret[repo] = (old, new) - return ret + ret[updated_repo] = (old, new) + + return ret, failed async def _load_repos(self, set_repos: bool = False) -> Dict[str, Repo]: ret = {} From 203cde9805b9a113043503a21a1622e4d072f8ea Mon Sep 17 00:00:00 2001 From: Vexed <51716387+Vexed01@users.noreply.github.com> Date: Sun, 8 Dec 2019 23:02:40 +0000 Subject: [PATCH 40/41] Slightly reword installation guide to attempt to stop people using dev (#3079) * create * add the code * change de logs * lets remember linux exists * fix formtting and change wording * change the wording a bit more... 
after running `sphinx-build` it def looks like i used bold a lot god that was a long commit mesage * review changes * draper's review (see description) i have put or worse partially to scare users and partially as there are other possible effects of a downgrade * review * clarity --- changelog.d/3079.docs.rst | 1 + docs/install_linux_mac.rst | 6 +++++- docs/install_windows.rst | 6 +++++- 3 files changed, 11 insertions(+), 2 deletions(-) create mode 100644 changelog.d/3079.docs.rst diff --git a/changelog.d/3079.docs.rst b/changelog.d/3079.docs.rst new file mode 100644 index 000000000..36c4166e7 --- /dev/null +++ b/changelog.d/3079.docs.rst @@ -0,0 +1 @@ +Word using dev during install more strongly, to try to avoid end users using dev. diff --git a/docs/install_linux_mac.rst b/docs/install_linux_mac.rst index 8377e84bc..af99cf783 100644 --- a/docs/install_linux_mac.rst +++ b/docs/install_linux_mac.rst @@ -280,7 +280,11 @@ Or, to install with PostgreSQL support: .. note:: To install the development version, replace ``Red-DiscordBot`` in the above commands with the - following link: + link below. **The development version of the bot contains experimental changes. It is not + intended for normal users.** We will not support anyone using the development version in any + support channels. Using the development version may break third party cogs and not all core + commands may work. Downgrading to stable after installing the development version may cause + data loss, crashes or worse. .. code-block:: none diff --git a/docs/install_windows.rst b/docs/install_windows.rst index f3bcd156a..54bc87d6f 100644 --- a/docs/install_windows.rst +++ b/docs/install_windows.rst @@ -85,7 +85,11 @@ Installing Red .. note:: To install the development version, replace ``Red-DiscordBot`` in the above commands with the - following link: + link below. **The development version of the bot contains experimental changes. It is not + intended for normal users.** We will not support anyone using the development version in any + support channels. Using the development version may break third party cogs and not all core + commands may work. Downgrading to stable after installing the development version may cause + data loss, crashes or worse. .. code-block:: none From 8cba47f382d086330bb225231206f3bdb83cacb7 Mon Sep 17 00:00:00 2001 From: Flame442 <34169552+Flame442@users.noreply.github.com> Date: Sun, 8 Dec 2019 18:04:57 -0500 Subject: [PATCH 41/41] Fixed [p]announce failing due to errors messaging the owner. (#3166) * Fixed owner message behavior * Create 3166.bugfix.rst * Reduce messages * Fix plurality --- changelog.d/admin/3166.bugfix.rst | 1 + redbot/cogs/admin/announcer.py | 14 ++++++++++---- 2 files changed, 11 insertions(+), 4 deletions(-) create mode 100644 changelog.d/admin/3166.bugfix.rst diff --git a/changelog.d/admin/3166.bugfix.rst b/changelog.d/admin/3166.bugfix.rst new file mode 100644 index 000000000..532d4d6d0 --- /dev/null +++ b/changelog.d/admin/3166.bugfix.rst @@ -0,0 +1 @@ +Fixed ``[p]announce`` failing after encountering an error attempting to message the bot owner. 
diff --git a/redbot/cogs/admin/announcer.py b/redbot/cogs/admin/announcer.py index 154eacb48..0a9dac2a2 100644 --- a/redbot/cogs/admin/announcer.py +++ b/redbot/cogs/admin/announcer.py @@ -3,6 +3,7 @@ import asyncio import discord from redbot.core import commands from redbot.core.i18n import Translator +from redbot.core.utils.chat_formatting import humanize_list, inline _ = Translator("Announcer", __file__) @@ -53,7 +54,7 @@ class Announcer: async def announcer(self): guild_list = self.ctx.bot.guilds - bot_owner = (await self.ctx.bot.application_info()).owner + failed = [] for g in guild_list: if not self.active: return @@ -66,9 +67,14 @@ class Announcer: try: await channel.send(self.message) except discord.Forbidden: - await bot_owner.send( - _("I could not announce to server: {server.id}").format(server=g) - ) + failed.append(str(g.id)) await asyncio.sleep(0.5) + msg = ( + _("I could not announce to the following server: ") + if len(failed) == 1 + else _("I could not announce to the following servers: ") + ) + msg += humanize_list(tuple(map(inline, failed))) + await self.ctx.bot.send_to_owners(msg) self.active = False
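
For reference, the reworked ``RepoManager.update_repos`` from PATCH 39/41 reports failures instead of aborting part-way through: it returns both the repositories that received new commits and the names of repositories that could not be updated. A rough sketch of consuming that return value, assuming an existing ``repo_manager`` instance:

.. code-block:: python

    async def report_repo_updates(repo_manager):
        # Calling update_repos() without arguments updates every installed repository.
        updated, failed = await repo_manager.update_repos()
        for repo, (old_commit, new_commit) in updated.items():
            print(f"{repo.name}: {old_commit[:7]} -> {new_commit[:7]}")
        if failed:
            # `failed` holds the names of repositories whose update raised UpdateError.
            print("Failed to update: " + ", ".join(failed))
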