mirror of
https://github.com/Cog-Creators/Red-DiscordBot.git
synced 2026-05-14 03:51:46 -04:00
Compare commits
33 Commits
3.5.23
..
V3/develop
| Author | SHA1 | Date | |
|---|---|---|---|
| 7e2a74b276 | |||
| 899f24ceca | |||
| 13f45f69ac | |||
| a234fc1e02 | |||
| edce32364f | |||
| 7305f44f68 | |||
| cbd4643bd3 | |||
| b02fa38423 | |||
| 99babf9ad3 | |||
| 169d0eed49 | |||
| 70faa8cd52 | |||
| 2ea4c766ad | |||
| 6ceb45b35c | |||
| 4032648dcc | |||
| f70c48ec30 | |||
| fcb8bc0265 | |||
| ee1db01a2f | |||
| e2acec0862 | |||
| b83b882921 | |||
| 99d7b0e3b7 | |||
| 9270373c56 | |||
| e8f0ea0510 | |||
| b42bab4de9 | |||
| e868872214 | |||
| bee0ddbffc | |||
| 2de3d03cc9 | |||
| 056f2de557 | |||
| 34cbd15ba9 | |||
| 9a458fdd83 | |||
| 0e78051c5d | |||
| 53766173d0 | |||
| 36a5f752a2 | |||
| b2007a718d |
+8
-5
@@ -50,10 +50,6 @@
|
||||
- redbot/cogs/downloader/*
|
||||
# Docs
|
||||
- docs/cog_guides/downloader.rst
|
||||
# Tests
|
||||
- redbot/pytest/downloader.py
|
||||
- redbot/pytest/downloader_testrepo.*
|
||||
- tests/cogs/downloader/**/*
|
||||
"Category: Cogs - Economy":
|
||||
# Source
|
||||
- redbot/cogs/economy/*
|
||||
@@ -208,10 +204,18 @@
|
||||
- docs/cog_guides/core.rst
|
||||
"Category: Core - Command-line Interfaces":
|
||||
- redbot/__main__.py
|
||||
- redbot/_update/**/*
|
||||
- redbot/logging.py
|
||||
- redbot/core/_cli.py
|
||||
- redbot/core/_debuginfo.py
|
||||
- redbot/setup.py
|
||||
"Category: Core - Downloader":
|
||||
# Source
|
||||
- redbot/core/_downloader/**/*
|
||||
# Tests
|
||||
- redbot/pytest/downloader.py
|
||||
- redbot/pytest/downloader_testrepo.*
|
||||
- tests/core/_downloader/**/*
|
||||
"Category: Core - Help":
|
||||
- redbot/core/commands/help.py
|
||||
"Category: Core - i18n":
|
||||
@@ -263,7 +267,6 @@
|
||||
- docs/framework_events.rst
|
||||
- docs/guide_cog_creation.rst
|
||||
- docs/guide_cog_creators.rst
|
||||
- docs/guide_migration.rst
|
||||
- docs/guide_publish_cogs.rst
|
||||
- docs/guide_slash_and_interactions.rst
|
||||
"Category: Docs - Install Guides":
|
||||
|
||||
@@ -7,18 +7,24 @@ on:
|
||||
required: false
|
||||
default: 'auto'
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
crowdin_download_translations:
|
||||
environment: Prepare Release
|
||||
needs: pr_stable_bump
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/create-github-app-token@v2
|
||||
id: app-token
|
||||
with:
|
||||
app-id: ${{ secrets.RED_RELEASER_CLIENT_ID }}
|
||||
private-key: ${{ secrets.RED_RELEASER_PRIVATE_KEY }}
|
||||
|
||||
# Checkout repository and install Python
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
token: ${{ steps.app-token.outputs.token }}
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: '3.8'
|
||||
- name: Install dependencies
|
||||
@@ -43,7 +49,7 @@ jobs:
|
||||
id: cpr_crowdin
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
token: ${{ steps.app-token.outputs.token }}
|
||||
commit-message: Automated Crowdin downstream
|
||||
title: "Automated Crowdin downstream"
|
||||
body: |
|
||||
@@ -51,31 +57,32 @@ jobs:
|
||||
Please ensure that there are no errors or invalid files are in the PR.
|
||||
labels: "Automated PR, Changelog Entry: Skipped"
|
||||
branch: "automated/i18n"
|
||||
author: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
|
||||
committer: >-
|
||||
${{ steps.app-token.outputs.app-slug }}[bot]
|
||||
<263745220+${{ steps.app-token.outputs.app-slug }}[bot]@users.noreply.github.com>
|
||||
author: >-
|
||||
${{ steps.app-token.outputs.app-slug }}[bot]
|
||||
<263745220+${{ steps.app-token.outputs.app-slug }}[bot]@users.noreply.github.com>
|
||||
milestone: ${{ needs.pr_stable_bump.outputs.milestone_number }}
|
||||
|
||||
- name: Close and reopen the PR with different token to trigger CI
|
||||
uses: actions/github-script@v6
|
||||
env:
|
||||
PR_NUMBER: ${{ steps.cpr_crowdin.outputs.pull-request-number }}
|
||||
PR_OPERATION: ${{ steps.cpr_crowdin.outputs.pull-request-operation }}
|
||||
with:
|
||||
github-token: ${{ secrets.cogcreators_bot_repo_scoped }}
|
||||
script: |
|
||||
const script = require(
|
||||
`${process.env.GITHUB_WORKSPACE}/.github/workflows/scripts/close_and_reopen_pr.js`
|
||||
);
|
||||
console.log(script({github, context}));
|
||||
|
||||
pr_stable_bump:
|
||||
environment: Prepare Release
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
milestone_number: ${{ steps.get_milestone_number.outputs.result }}
|
||||
steps:
|
||||
- uses: actions/create-github-app-token@v2
|
||||
id: app-token
|
||||
with:
|
||||
app-id: ${{ secrets.RED_RELEASER_CLIENT_ID }}
|
||||
private-key: ${{ secrets.RED_RELEASER_PRIVATE_KEY }}
|
||||
|
||||
# Checkout repository and install Python
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
token: ${{ steps.app-token.outputs.token }}
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: '3.8'
|
||||
|
||||
@@ -105,7 +112,7 @@ jobs:
|
||||
id: cpr_bump_stable
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
token: ${{ steps.app-token.outputs.token }}
|
||||
commit-message: Version bump to ${{ steps.bump_version_stable.outputs.new_version }}
|
||||
title: Version bump to ${{ steps.bump_version_stable.outputs.new_version }}
|
||||
body: |
|
||||
@@ -113,18 +120,10 @@ jobs:
|
||||
Please ensure that there are no errors or invalid files are in the PR.
|
||||
labels: "Automated PR, Changelog Entry: Skipped"
|
||||
branch: "automated/pr_bumps/${{ steps.bump_version_stable.outputs.new_version }}"
|
||||
author: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
|
||||
committer: >-
|
||||
${{ steps.app-token.outputs.app-slug }}[bot]
|
||||
<263745220+${{ steps.app-token.outputs.app-slug }}[bot]@users.noreply.github.com>
|
||||
author: >-
|
||||
${{ steps.app-token.outputs.app-slug }}[bot]
|
||||
<263745220+${{ steps.app-token.outputs.app-slug }}[bot]@users.noreply.github.com>
|
||||
milestone: ${{ steps.get_milestone_number.outputs.result }}
|
||||
|
||||
- name: Close and reopen the PR with different token to trigger CI
|
||||
uses: actions/github-script@v6
|
||||
env:
|
||||
PR_NUMBER: ${{ steps.cpr_bump_stable.outputs.pull-request-number }}
|
||||
PR_OPERATION: ${{ steps.cpr_bump_stable.outputs.pull-request-operation }}
|
||||
with:
|
||||
github-token: ${{ secrets.cogcreators_bot_repo_scoped }}
|
||||
script: |
|
||||
const script = require(
|
||||
`${process.env.GITHUB_WORKSPACE}/.github/workflows/scripts/close_and_reopen_pr.js`
|
||||
);
|
||||
console.log(await script({github, context}));
|
||||
|
||||
@@ -147,9 +147,7 @@ jobs:
|
||||
print-hash: true
|
||||
|
||||
pr_dev_bump:
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
environment: Prepare Release
|
||||
needs: release_to_pypi
|
||||
name: Update Red version number to dev
|
||||
runs-on: ubuntu-latest
|
||||
@@ -160,11 +158,18 @@ jobs:
|
||||
run: |
|
||||
echo "BASE_BRANCH=${TAG_BASE_BRANCH#'refs/heads/'}" >> $GITHUB_ENV
|
||||
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/create-github-app-token@v2
|
||||
id: app-token
|
||||
with:
|
||||
app-id: ${{ secrets.RED_RELEASER_CLIENT_ID }}
|
||||
private-key: ${{ secrets.RED_RELEASER_PRIVATE_KEY }}
|
||||
|
||||
- uses: actions/checkout@v6
|
||||
with:
|
||||
ref: ${{ env.BASE_BRANCH }}
|
||||
token: ${{ steps.app-token.outputs.token }}
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v6
|
||||
with:
|
||||
python-version: '3.8'
|
||||
|
||||
@@ -194,7 +199,7 @@ jobs:
|
||||
id: cpr_bump_dev
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
token: ${{ steps.app-token.outputs.token }}
|
||||
commit-message: Version bump to ${{ steps.bump_version_dev.outputs.new_version }}
|
||||
title: Version bump to ${{ steps.bump_version_dev.outputs.new_version }}
|
||||
body: |
|
||||
@@ -202,19 +207,11 @@ jobs:
|
||||
Please ensure that there are no errors or invalid files are in the PR.
|
||||
labels: "Automated PR, Changelog Entry: Skipped"
|
||||
branch: "automated/pr_bumps/${{ steps.bump_version_dev.outputs.new_version }}"
|
||||
author: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
|
||||
committer: >-
|
||||
${{ steps.app-token.outputs.app-slug }}[bot]
|
||||
<263745220+${{ steps.app-token.outputs.app-slug }}[bot]@users.noreply.github.com>
|
||||
author: >-
|
||||
${{ steps.app-token.outputs.app-slug }}[bot]
|
||||
<263745220+${{ steps.app-token.outputs.app-slug }}[bot]@users.noreply.github.com>
|
||||
milestone: ${{ steps.get_milestone_number.outputs.result }}
|
||||
base: ${{ env.BASE_BRANCH }}
|
||||
|
||||
- name: Close and reopen the PR with different token to trigger CI
|
||||
uses: actions/github-script@v6
|
||||
env:
|
||||
PR_NUMBER: ${{ steps.cpr_bump_dev.outputs.pull-request-number }}
|
||||
PR_OPERATION: ${{ steps.cpr_bump_dev.outputs.pull-request-operation }}
|
||||
with:
|
||||
github-token: ${{ secrets.cogcreators_bot_repo_scoped }}
|
||||
script: |
|
||||
const script = require(
|
||||
`${process.env.GITHUB_WORKSPACE}/.github/workflows/scripts/close_and_reopen_pr.js`
|
||||
);
|
||||
console.log(await script({github, context}));
|
||||
|
||||
@@ -7,6 +7,10 @@ build:
|
||||
jobs:
|
||||
install:
|
||||
- pip install .[doc]
|
||||
post_build:
|
||||
- mkdir -p docs/_build/doctrees docs/_build/markdown "$READTHEDOCS_OUTPUT/html/_markdown"
|
||||
- python -m sphinx -T -b markdown -d docs/_build/doctrees -D "language=$READTHEDOCS_LANGUAGE" docs docs/_build/markdown
|
||||
- cp docs/_build/markdown/changelog.md "$READTHEDOCS_OUTPUT/html/_markdown/changelog.md"
|
||||
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
|
||||
+957
-130
File diff suppressed because it is too large
Load Diff
@@ -64,6 +64,12 @@ liking, making it completely customizable. This is a *self-hosted bot* – meani
|
||||
to host and maintain your own instance. You can turn Red into an admin bot, music bot, trivia bot,
|
||||
new best friend or all of these together!
|
||||
|
||||
Red is built for [Discord](https://discord.com/), a popular VOIP and instant messaging platform.
|
||||
It's best suited for use in guilds (also known as servers), where it utilizes Discord's
|
||||
well-documented API to communicate and deliver its many features. Discord offers its API to
|
||||
encourage developers to explore their creativity by building programs, tools, and services that
|
||||
enhance the Discord experience.
|
||||
|
||||
[Installation](#installation) is easy, and you do **NOT** need to know anything about coding! Aside
|
||||
from installing and updating, every part of the bot can be controlled from within Discord.
|
||||
|
||||
|
||||
@@ -0,0 +1,43 @@
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from docutils import nodes
|
||||
from sphinx.application import Sphinx
|
||||
from sphinx.util.docutils import SphinxDirective
|
||||
|
||||
|
||||
class ChangelogContributors(SphinxDirective):
|
||||
has_content = True
|
||||
|
||||
def run(self) -> List[nodes.Node]:
|
||||
contributors = [contributor for line in self.content for contributor in line.split()]
|
||||
|
||||
comment_value = " ".join(contributors)
|
||||
line_nodes = []
|
||||
for contributor in contributors:
|
||||
if line_nodes:
|
||||
line_nodes.append(nodes.Text(", "))
|
||||
line_nodes.append(
|
||||
nodes.reference(
|
||||
contributor,
|
||||
f"@{contributor}",
|
||||
internal=False,
|
||||
refuri=f"https://github.com/sponsors/{contributor}",
|
||||
)
|
||||
)
|
||||
|
||||
node = nodes.line_block(
|
||||
"",
|
||||
nodes.comment("", f"RED-CHANGELOG-CONTRIBUTORS: {comment_value}"),
|
||||
nodes.line("", "Thanks to all these amazing people who contributed to this release:"),
|
||||
nodes.line("", "", *line_nodes),
|
||||
)
|
||||
return [node]
|
||||
|
||||
|
||||
def setup(app: Sphinx) -> Dict[str, Any]:
|
||||
app.add_directive("changelog-contributors", ChangelogContributors)
|
||||
return {
|
||||
"version": "1.0",
|
||||
"parallel_read_safe": True,
|
||||
"parallel_write_safe": True,
|
||||
}
|
||||
+17
-17
@@ -3,16 +3,14 @@
|
||||
==========================
|
||||
About Virtual Environments
|
||||
==========================
|
||||
Creating a virtual environment is really easy and usually prevents many common installation
|
||||
problems.
|
||||
Creating a virtual environment is simple and helps prevent installation problems.
|
||||
|
||||
**What Are Virtual Environments For?**
|
||||
|
||||
Virtual environments allow you to isolate Red's library dependencies, cog dependencies and python
|
||||
binaries from the rest of your system. There is no performance overhead to using virtual environment
|
||||
and it saves you from a lot of troubles during setup. It also makes sure Red and its dependencies
|
||||
are installed to a predictable location which makes uninstalling Red as simple as removing a single folder,
|
||||
without worrying about losing your data or other things on your system becoming broken.
|
||||
Virtual environments allow you to isolate Red's library dependencies, cog dependencies, and Python
|
||||
binaries from the rest of your system with no performance overhead, ensuring those dependencies
|
||||
and Red are installed to a predictable location. This makes uninstalling Red as simple as removing
|
||||
a single folder, preventing any data loss or breaking other things on your system.
|
||||
|
||||
|
||||
--------------------------------------------
|
||||
@@ -21,19 +19,21 @@ Virtual Environments with Multiple Instances
|
||||
If you are running multiple instances of Red on the same machine, you have the option of either
|
||||
using the same virtual environment for all of them, or creating separate ones.
|
||||
|
||||
.. note::
|
||||
Using a *single* virtual environment for all of your instances means you:
|
||||
|
||||
This only applies for multiple instances of V3. If you are running a V2 instance as well,
|
||||
you **must** use separate virtual environments.
|
||||
- Only need to update Red once for all instances.
|
||||
- Must shut down all instances prior to updating.
|
||||
- Will save space on your hard drive.
|
||||
- Want all instances to share the same version/dependencies.
|
||||
|
||||
The advantages of using a *single* virtual environment for all of your V3 instances are:
|
||||
Using *multiple* virtual environments for each individual or select groups of instances means you:
|
||||
|
||||
- When updating Red, you will only need to update it once for all instances (however you will still need to restart all instances for the changes to take effect)
|
||||
- It will save space on your hard drive
|
||||
|
||||
On the other hand, you may wish to update each of your instances individually.
|
||||
- Need to update Red within each virtual environment separately.
|
||||
- Can update Red without needing to update all instances.
|
||||
- Only need to shut down the instance(s) being updated.
|
||||
- Want different Red/dependency versions on different instances.
|
||||
|
||||
.. important::
|
||||
|
||||
Windows users with multiple instances should create *separate* virtual environments, as
|
||||
updating multiple running instances at once is likely to cause errors.
|
||||
Regardless of which option you choose, do not update while any instances within that virtual
|
||||
environment are running. This is especially true for Windows, as files are locked by the system while in use.
|
||||
+128
-20
@@ -4,32 +4,140 @@
|
||||
Backing Up and Restoring Red
|
||||
============================
|
||||
|
||||
Red can be backed up and restored to any device as long as it is supported operating system. See page: :ref:`end-user-guarantees`.
|
||||
|
||||
Backup steps are to be done in order and carefully to avoid any issues.
|
||||
|
||||
#. Take note of the installed cogs with ``[p]cogs``; and cog repositories with ``[p]load downloader``, then ``[p]repo list`` (``[p]`` is your bot's prefix).
|
||||
#. Stop the bot, ideally with ``[p]shutdown``.
|
||||
#. Activate your venv, and run ``redbot-setup backup <instancename>``, replacing ``<instancename>`` with the name of your instance.
|
||||
#. Copy your backup file to the new machine/location.
|
||||
#. Extract the file to a location of your choice (remember the full path and make sure that the user you are going to install/run Red under can access this path).
|
||||
#. :ref:`Install Red <install-guides>` as normal on the new machine/location.
|
||||
#. Run ``redbot-setup`` in your venv to create a new instance, using the path you remembered above as your data path.
|
||||
#. Start your new instance.
|
||||
#. Re-add the cog repositories using the same names as before.
|
||||
#. Do ``[p]cog update``.
|
||||
#. Re-add any cogs that were not re-installed (you may have to uninstall them first as Downloader may think they are still installed).
|
||||
Red can be backed up and restored to any system as long as it is a supported per our `end-user-guarantees`.
|
||||
The system it's restored to can be different from the system that was backed up.
|
||||
|
||||
.. note::
|
||||
|
||||
The config (data) from cogs has been saved, but not the code itself.
|
||||
Some 3rd-party cogs may not support all systems that Core Red supports and such cogs may therefore not work,
|
||||
if restored to an unsupported system. This does not affect cogs that do not impose additional restrictions.
|
||||
|
||||
.. contents::
|
||||
:local:
|
||||
:depth: 2
|
||||
|
||||
Creating backups
|
||||
****************
|
||||
|
||||
Windows
|
||||
-------
|
||||
|
||||
To make a backup, perform the following steps:
|
||||
|
||||
#. Stop the bot, ideally with ``[p]shutdown``.
|
||||
#. Activate your venv.
|
||||
|
||||
.. prompt:: batch
|
||||
|
||||
"%userprofile%\redenv\Scripts\activate.bat"
|
||||
#. Backup your Red instance with the following command:
|
||||
|
||||
.. prompt:: batch
|
||||
:prompts: (redenv) C:\\>
|
||||
|
||||
redbot-setup backup <your instance name>
|
||||
|
||||
.. attention::
|
||||
|
||||
Replace ``<your instance name>`` with the name of the instance you want to backup.
|
||||
#. The command will create a backup file for you and show you the path to it.
|
||||
|
||||
.. tip::
|
||||
|
||||
You can fix permissions (if needed) on your directory using:
|
||||
If you want to backup your instance to a custom folder,
|
||||
you can run the ``redbot-setup backup`` command as shown below,
|
||||
replacing ``C:\path\to\backup\folder`` with the path to the folder that
|
||||
you want to backup your instance to:
|
||||
|
||||
.. code-block:: bash
|
||||
.. prompt:: batch
|
||||
:prompts: (redenv) C:\\>
|
||||
|
||||
sudo chown -R <user>:<user> ~/.local
|
||||
redbot-setup backup <your instance name> C:\path\to\backup\folder
|
||||
|
||||
Replace ``<user>`` with your actual username.
|
||||
Linux & Mac
|
||||
-----------
|
||||
|
||||
To make a backup, perform the following steps:
|
||||
|
||||
#. Stop the bot, ideally with ``[p]shutdown``.
|
||||
#. Activate your venv.
|
||||
|
||||
.. prompt:: bash
|
||||
|
||||
source ~/redenv/bin/activate
|
||||
#. Backup your Red instance with the following command:
|
||||
|
||||
.. prompt:: bash
|
||||
:prompts: (redenv) $
|
||||
|
||||
redbot-setup backup <your instance name>
|
||||
|
||||
.. attention::
|
||||
|
||||
Replace ``<your instance name>`` with the name of the instance you want to backup.
|
||||
#. The command will create a backup file for you and show you the path to it.
|
||||
|
||||
.. tip::
|
||||
|
||||
If you want to backup your instance to a custom folder,
|
||||
you can run the ``redbot-setup backup`` command as shown below,
|
||||
replacing ``/path/to/backup/folder`` with the path to the folder that
|
||||
you want to backup your instance to:
|
||||
|
||||
.. prompt:: bash
|
||||
:prompts: (redenv) $
|
||||
|
||||
redbot-setup backup <your instance name> /path/to/backup/folder
|
||||
|
||||
Restoring backups
|
||||
*****************
|
||||
|
||||
Windows
|
||||
-------
|
||||
|
||||
To restore a backup, perform the following steps:
|
||||
|
||||
#. `Install Red <windows-install-guide>` on the new machine/location, skipping the ``redbot-setup`` step.
|
||||
#. Activate your venv.
|
||||
|
||||
.. prompt:: batch
|
||||
|
||||
"%userprofile%\redenv\Scripts\activate.bat"
|
||||
#. Restore your Red instance with the following command:
|
||||
|
||||
.. prompt:: batch
|
||||
:prompts: (redenv) C:\\>
|
||||
|
||||
redbot-setup restore C:\path\to\backup\file.tar.gz
|
||||
|
||||
.. attention::
|
||||
|
||||
Replace ``C:\path\to\backup\file.tar.gz`` with the path to the backup file
|
||||
that you want to restore from.
|
||||
|
||||
#. The command will guide you through the restore process.
|
||||
|
||||
Linux & Mac
|
||||
-----------
|
||||
|
||||
To restore a backup, perform the following steps:
|
||||
|
||||
#. `Install Red <install-guides>` on the new machine/location, skipping the ``redbot-setup`` step.
|
||||
#. Activate your venv.
|
||||
|
||||
.. prompt:: bash
|
||||
|
||||
source ~/redenv/bin/activate
|
||||
#. Restore your Red instance with the following command:
|
||||
|
||||
.. prompt:: bash
|
||||
:prompts: (redenv) $
|
||||
|
||||
redbot-setup restore /path/to/backup/file.tar.gz
|
||||
|
||||
.. attention::
|
||||
|
||||
Replace ``/path/to/backup/file.tar.gz`` with the path to the backup file
|
||||
that you want to restore from.
|
||||
|
||||
#. The command will guide you through the restore process.
|
||||
|
||||
@@ -116,18 +116,18 @@ How can I use this playlist link with playlist commands in audio?**
|
||||
:ref:`setting up Audio for multiple bots<multibots>`. Otherwise, another process is using the
|
||||
port, so you need to figure out what is using port 2333 and terminate/disconnect it yourself.
|
||||
|
||||
**Q: My terminal is saying that I "must install Java 17 or 11 for Lavalink to run". How can I fix this?**
|
||||
**Q: My terminal is saying that I "must install Java 21 or 17 for Lavalink to run". How can I fix this?**
|
||||
|
||||
You are getting this error because you have a different version of Java installed, or you don't have
|
||||
Java installed at all. As the error states, Java 17 or 11 is required, and can be installed from
|
||||
`here <https://adoptium.net/temurin/releases/?version=17>`__.
|
||||
Java installed at all. As the error states, Java 21 or 17 is required, and can be installed from
|
||||
`here <https://adoptium.net/temurin/releases/?version=21>`__.
|
||||
|
||||
If you have Java 17 or 11 installed, and are still getting this error, you will have to manually tell Audio where your Java install is located.
|
||||
Use ``[p]llset java <path_to_java_17_or_11_executable>``, to make Audio launch Lavalink with a
|
||||
If you have Java 21 or 17 installed, and are still getting this error, you will have to manually tell Audio where your Java install is located.
|
||||
Use ``[p]llset java <path_to_java_21_or_17_executable>``, to make Audio launch Lavalink with a
|
||||
specific Java binary. To do this, you will need to locate your ``java.exe``/``java`` file
|
||||
in your **Java 17 or 11 install**.
|
||||
in your **Java 21 or 17 install**.
|
||||
|
||||
Alternatively, update your PATH settings so that Java 17 or 11 is the one used by ``java``. However,
|
||||
Alternatively, update your PATH settings so that Java 21 or 17 is the one used by ``java``. However,
|
||||
you should confirm that nothing other than Red is running on the machine that requires Java.
|
||||
|
||||
.. _queue_commands:
|
||||
@@ -550,7 +550,7 @@ uses OpenJDK 17 in the managed Lavalink configuration. It can be installed by ru
|
||||
|
||||
sudo apt install openjdk-17-jre-headless -y
|
||||
|
||||
Otherwise, Lavalink works well with most versions of Java 11, 13, 15, 16, 17, and 18. Azul
|
||||
Otherwise, Lavalink works well with most versions of Java 17 and higher. Azul
|
||||
Zulu builds are suggested, see `here <https://github.com/lavalink-devs/Lavalink/#requirements>`__ for more information.
|
||||
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
@@ -3651,7 +3651,7 @@ This command shouldn't need to be used most of the time,
|
||||
and is only useful if the host machine has conflicting Java versions.
|
||||
|
||||
If changing this make sure that the Java executable you set is supported by Audio.
|
||||
The current supported versions are Java 17 and 11.
|
||||
The current supported versions are Java 21 or 17.
|
||||
|
||||
**Arguments**
|
||||
|
||||
|
||||
@@ -324,7 +324,7 @@ Explains how to set the Twitch token.
|
||||
|
||||
To set the Twitch API tokens, follow these steps:
|
||||
|
||||
1. Go to this page: https://dev.twitch.tv/dashboard/apps.
|
||||
1. Go to this page: https://dev.twitch.tv/console/apps.
|
||||
|
||||
2. Click Register Your Application.
|
||||
|
||||
|
||||
@@ -44,7 +44,9 @@ extensions = [
|
||||
"sphinx.ext.napoleon",
|
||||
"sphinx.ext.doctest",
|
||||
"sphinxcontrib_trio",
|
||||
"sphinx_markdown_builder",
|
||||
"sphinx-prompt",
|
||||
"changelog_contributors",
|
||||
"deprecated_removed",
|
||||
"prompt_builder",
|
||||
]
|
||||
@@ -230,6 +232,14 @@ linkcheck_ignore = [r"https://java.com*", r"https://chocolatey.org*"]
|
||||
linkcheck_retries = 3
|
||||
|
||||
|
||||
# -- Options for markdown builder ----------------------------------------
|
||||
|
||||
markdown_http_base = os.environ.get(
|
||||
"READTHEDOCS_CANONICAL_URL", "https://docs.discord.red/en/stable"
|
||||
)
|
||||
markdown_uri_doc_suffix = ".html"
|
||||
|
||||
|
||||
# -- Options for extensions -----------------------------------------------
|
||||
|
||||
if dpy_version_info.releaselevel == "final":
|
||||
|
||||
@@ -7,9 +7,6 @@
|
||||
Bank
|
||||
====
|
||||
|
||||
Bank has now been separated from Economy for V3. New to bank is support for
|
||||
having a global bank.
|
||||
|
||||
***********
|
||||
Basic Usage
|
||||
***********
|
||||
|
||||
@@ -9,7 +9,7 @@ Bot
|
||||
Red
|
||||
^^^
|
||||
|
||||
.. autoclass:: Red
|
||||
.. autoclass:: Red()
|
||||
:members:
|
||||
:exclude-members: get_context, get_embed_color
|
||||
|
||||
|
||||
@@ -446,49 +446,6 @@ Of course, if we're less than responsible pet owners, there are consequences::
|
||||
"how poorly it was taken care of."
|
||||
)
|
||||
|
||||
|
||||
*************
|
||||
V2 Data Usage
|
||||
*************
|
||||
There has been much conversation on how to bring V2 data into V3 and, officially, we recommend that cog developers
|
||||
make use of the public interface in Config (using the categories as described in these docs) rather than simply
|
||||
copying and pasting your V2 data into V3. Using Config as recommended will result in a much better experience for
|
||||
you in the long run and will simplify cog creation and maintenance.
|
||||
|
||||
However.
|
||||
|
||||
We realize that many of our cog creators have expressed disinterest in writing converters for V2 to V3 style data.
|
||||
As a result we have opened up config to take standard V2 data and allow cog developers to manipulate it in V3 in
|
||||
much the same way they would in V2. The following examples will demonstrate how to accomplish this.
|
||||
|
||||
.. warning::
|
||||
|
||||
By following this method to use V2 data in V3 you may be at risk of data corruption if your cog is used on a bot
|
||||
with multiple shards. USE AT YOUR OWN RISK.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from redbot.core import Config, commands
|
||||
|
||||
|
||||
class ExampleCog(commands.Cog):
|
||||
def __init__(self):
|
||||
self.config = Config.get_conf(self, 1234567890)
|
||||
self.config.init_custom("V2", 1)
|
||||
self.data = {}
|
||||
|
||||
async def load_data(self):
|
||||
self.data = await self.config.custom("V2", "V2").all()
|
||||
|
||||
async def save_data(self):
|
||||
await self.config.custom("V2", "V2").set(self.data)
|
||||
|
||||
|
||||
async def setup(bot):
|
||||
cog = ExampleCog()
|
||||
await cog.load_data()
|
||||
await bot.add_cog(cog)
|
||||
|
||||
************************************
|
||||
Best practices and performance notes
|
||||
************************************
|
||||
|
||||
@@ -8,8 +8,6 @@
|
||||
Mod log
|
||||
=======
|
||||
|
||||
Mod log has now been separated from Mod for V3.
|
||||
|
||||
***********
|
||||
Basic Usage
|
||||
***********
|
||||
|
||||
@@ -9,7 +9,7 @@ RPC
|
||||
RPC support is included in Red on a `provisional <developer-guarantees-exclusions>` basis.
|
||||
Backwards incompatible changes (up to and including removal of the RPC) may occur if deemed necessary.
|
||||
|
||||
V3 comes default with an internal RPC server that may be used to remotely control the bot in various ways.
|
||||
Red comes default with an internal RPC server that may be used to remotely control the bot in various ways.
|
||||
Cogs must register functions to be exposed to RPC clients.
|
||||
Each of those functions must only take JSON serializable parameters and must return JSON serializable objects.
|
||||
|
||||
|
||||
@@ -3,11 +3,11 @@
|
||||
.. role:: python(code)
|
||||
:language: python
|
||||
|
||||
========================
|
||||
Creating cogs for Red V3
|
||||
========================
|
||||
=====================
|
||||
Creating cogs for Red
|
||||
=====================
|
||||
|
||||
This guide serves as a tutorial on creating cogs for Red V3.
|
||||
This guide serves as a tutorial on creating cogs for Red.
|
||||
It will cover the basics of setting up a package for your
|
||||
cog and the basics of setting up the file structure. We will
|
||||
also point you towards some further resources that may assist
|
||||
@@ -111,8 +111,8 @@ Make sure that both files are saved.
|
||||
Testing your cog
|
||||
----------------
|
||||
|
||||
To test your cog, you will need a running instance of V3.
|
||||
Assuming you installed V3 as outlined above, run :code:`redbot-setup`
|
||||
To test your cog, you will need a running instance of Red.
|
||||
Assuming you installed Red as outlined above, run :code:`redbot-setup`
|
||||
and provide the requested information. Once that's done, run Red
|
||||
by doing :code:`redbot <instance name> --dev` to start Red.
|
||||
Complete the initial setup by providing a valid token and setting a
|
||||
@@ -169,6 +169,4 @@ Becoming an Approved Cog Creator
|
||||
Additional resources
|
||||
--------------------
|
||||
|
||||
Be sure to check out the :doc:`/guide_migration` for some resources
|
||||
on developing cogs for V3. This will also cover differences between V2 and V3 for
|
||||
those who developed cogs for V2.
|
||||
If you've developed cogs for V2, you might find `incompatible_changes/v2_migration` document helpful.
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
.. Publishing cogs for V3
|
||||
|
||||
Publishing cogs for Red V3
|
||||
==========================
|
||||
Publishing cogs for Red
|
||||
=======================
|
||||
|
||||
Users of Red install 3rd-party cogs using Downloader cog. To make your cog available
|
||||
to install for others, you will have to create a git repository
|
||||
|
||||
+2
-2
@@ -86,7 +86,7 @@ Average Providers
|
||||
| `OVH <https://us.ovhcloud.com/vps/>`_ is a company focused on providing hosting
|
||||
and cloud services with locations in Europe, North America and Asia Pacific.
|
||||
|
||||
| `Time4VPS <https://www.time4vps.eu/>`_ is a Lithuanian VPS provider mainly focused
|
||||
| `Time4VPS <https://www.time4vps.com/>`_ is a Lithuanian VPS provider mainly focused
|
||||
on lower cost.
|
||||
|
||||
| `GalaxyGate <https://galaxygate.net/>`_ is a VPS and dedicated server provider
|
||||
@@ -113,7 +113,7 @@ Average Providers
|
||||
| `LowEndBox <http://lowendbox.com/>`_ is a website where hosting providers are
|
||||
discussed and curated, often with lower costs and less known providers.
|
||||
|
||||
| `AlphaVps <https://alphavps.com>`_ is a Bulgaria VPS and dedicated server provider
|
||||
| `AlphaVps <https://alphavps.com>`_ is a Bulgarian VPS and dedicated server provider
|
||||
with locations in Los Angeles, New York, England, Germany and Bulgaria.
|
||||
|
||||
--------------------
|
||||
|
||||
@@ -10,3 +10,4 @@ Backward incompatible changes
|
||||
|
||||
future
|
||||
3.5
|
||||
v2_migration
|
||||
|
||||
@@ -38,6 +38,49 @@ per-server/member/user/role/channel or global basis. Be sure to check
|
||||
out :doc:`/framework_config` for the API docs for Config as well as a
|
||||
tutorial on using Config.
|
||||
|
||||
*************
|
||||
V2 Data Usage
|
||||
*************
|
||||
|
||||
There has been much conversation on how to bring V2 data into V3 and, officially, we recommend that cog developers
|
||||
make use of the public interface in Config (using the categories as described in these docs) rather than simply
|
||||
copying and pasting your V2 data into V3. Using Config as recommended will result in a much better experience for
|
||||
you in the long run and will simplify cog creation and maintenance.
|
||||
|
||||
However.
|
||||
|
||||
We realize that many of our cog creators have expressed disinterest in writing converters for V2 to V3 style data.
|
||||
As a result we have opened up config to take standard V2 data and allow cog developers to manipulate it in V3 in
|
||||
much the same way they would in V2. The following examples will demonstrate how to accomplish this.
|
||||
|
||||
.. warning::
|
||||
|
||||
By following this method to use V2 data in V3 you may be at risk of data corruption if your cog is used on a bot
|
||||
with multiple shards. USE AT YOUR OWN RISK.
|
||||
|
||||
.. code-block:: python
|
||||
|
||||
from redbot.core import Config, commands
|
||||
|
||||
|
||||
class ExampleCog(commands.Cog):
|
||||
def __init__(self):
|
||||
self.config = Config.get_conf(self, 1234567890)
|
||||
self.config.init_custom("V2", 1)
|
||||
self.data = {}
|
||||
|
||||
async def load_data(self):
|
||||
self.data = await self.config.custom("V2", "V2").all()
|
||||
|
||||
async def save_data(self):
|
||||
await self.config.custom("V2", "V2").set(self.data)
|
||||
|
||||
|
||||
async def setup(bot):
|
||||
cog = ExampleCog()
|
||||
await cog.load_data()
|
||||
await bot.add_cog(cog)
|
||||
|
||||
----
|
||||
Bank
|
||||
----
|
||||
@@ -62,7 +62,6 @@ Welcome to Red - Discord Bot's documentation!
|
||||
:maxdepth: 2
|
||||
:caption: Red Development Framework Reference:
|
||||
|
||||
guide_migration
|
||||
guide_cog_creation
|
||||
guide_slash_and_interactions
|
||||
guide_publish_cogs
|
||||
|
||||
+48
-4
@@ -25,13 +25,57 @@ Updating differs depending on the version you currently have. Next sections will
|
||||
:depth: 1
|
||||
|
||||
|
||||
Red 3.5.0 or newer
|
||||
******************
|
||||
Red 3.5.25 or newer
|
||||
*******************
|
||||
|
||||
Windows
|
||||
-------
|
||||
|
||||
If you have Red 3.5.0 or newer, you can upgrade by following these steps:
|
||||
If you have Red 3.5.25 or newer, you can upgrade by following these steps:
|
||||
|
||||
#. Shut your bot down.
|
||||
#. Activate your venv with the following command:
|
||||
|
||||
.. prompt:: batch
|
||||
|
||||
"%userprofile%\redenv\Scripts\activate.bat"
|
||||
#. Update Red with this command:
|
||||
|
||||
.. prompt:: batch
|
||||
:prompts: (redenv) C:\\>
|
||||
|
||||
redbot-update
|
||||
#. Start your bot.
|
||||
|
||||
Linux & Mac
|
||||
-----------
|
||||
|
||||
If you have Red 3.5.25 or newer, you can upgrade by following these steps:
|
||||
|
||||
#. Shut your bot down.
|
||||
#. Activate your virtual environment.
|
||||
|
||||
If you used ``venv`` for your virtual environment, use:
|
||||
|
||||
.. prompt:: bash
|
||||
|
||||
source ~/redenv/bin/activate
|
||||
|
||||
#. Update Red with this command:
|
||||
|
||||
.. prompt:: bash
|
||||
:prompts: (redenv) $
|
||||
|
||||
redbot-update
|
||||
#. Start your bot.
|
||||
|
||||
Red 3.5.0-3.5.24
|
||||
****************
|
||||
|
||||
Windows
|
||||
-------
|
||||
|
||||
If you have a Red version between 3.5.0 and 3.5.24, you can upgrade by following these steps:
|
||||
|
||||
#. Shut your bot down.
|
||||
#. Activate your venv with the following command:
|
||||
@@ -55,7 +99,7 @@ If you have Red 3.5.0 or newer, you can upgrade by following these steps:
|
||||
Linux & Mac
|
||||
-----------
|
||||
|
||||
If you have Red 3.5.0 or newer, you can upgrade by following these steps:
|
||||
If you have a Red version between 3.5.0 and 3.5.24, you can upgrade by following these steps:
|
||||
|
||||
#. Shut your bot down.
|
||||
#. Activate your virtual environment.
|
||||
|
||||
@@ -59,7 +59,7 @@ Alma Linux 8 x86-64, aarch64 2029-05-31 (`securi
|
||||
Alma Linux 9 x86-64, aarch64 2032-05-31 (`security support <https://wiki.almalinux.org/release-notes/>`__)
|
||||
Amazon Linux 2023 x86-64, aarch64 2028-03-15 (`end-of-life <https://docs.aws.amazon.com/linux/al2023/release-notes/support-info-by-support-statement.html#support-info-by-support-statement-eol>`__)
|
||||
Arch Linux x86-64 forever (support is only provided for an up-to-date system)
|
||||
CentOS Stream 9 x86-64, aarch64 2027-05-31 (`expected EOL <https://centos.org/stream9/#timeline>`__)
|
||||
CentOS Stream 9 x86-64, aarch64 2027-05-31 (`Expected EOL <https://centos.org/stream9/#timeline>`__)
|
||||
Debian 12 Bookworm x86-64, aarch64, armv7l 2026-06-10 (`End of life <https://wiki.debian.org/DebianReleases#Production_Releases>`__)
|
||||
Fedora Linux 42 x86-64, aarch64 2026-05-13 (`End of Life <https://fedorapeople.org/groups/schedule/f-42/f-42-key-tasks.html>`__)
|
||||
Fedora Linux 43 x86-64, aarch64 2026-12-09 (`End of Life <https://fedorapeople.org/groups/schedule/f-43/f-43-key-tasks.html>`__)
|
||||
@@ -73,8 +73,8 @@ RHEL 8.10 x86-64, aarch64 2029-05-31 (`End of
|
||||
RHEL 9 (latest) x86-64, aarch64 2032-05-31 (`End of Maintenance Support <https://access.redhat.com/support/policy/updates/errata#Life_Cycle_Dates>`__)
|
||||
RHEL 9.4 x86-64, aarch64 2026-04-30 (`End of Extended Update Support <https://access.redhat.com/support/policy/updates/errata#Extended_Update_Support>`__)
|
||||
RHEL 9.6 x86-64, aarch64 2027-05-31 (`End of Extended Update Support <https://access.redhat.com/support/policy/updates/errata#Extended_Update_Support>`__)
|
||||
Rocky Linux 8 x86-64, aarch64 2029-05-31 (`(i) Planned EOL <https://rockylinux.org/download>`__)
|
||||
Rocky Linux 9 x86-64, aarch64 2032-05-31 (`(i) Planned EOL <https://rockylinux.org/download>`__)
|
||||
Rocky Linux 8 x86-64, aarch64 2029-05-31 (`End of Life <https://wiki.rockylinux.org/rocky/version/>`__)
|
||||
Rocky Linux 9 x86-64, aarch64 2032-05-31 (`End of Life <https://wiki.rockylinux.org/rocky/version/>`__)
|
||||
Ubuntu 22.04 LTS x86-64, aarch64 2027-06-30 (`End of Standard Support <https://wiki.ubuntu.com/Releases#Current>`__)
|
||||
Ubuntu 24.04 LTS x86-64, aarch64 2029-06-30 (`End of Standard Support <https://wiki.ubuntu.com/Releases#Current>`__)
|
||||
================================ ======================= ============================================================
|
||||
|
||||
+1
-15
@@ -289,19 +289,6 @@ class VersionInfo:
|
||||
return version("Red-DiscordBot")
|
||||
|
||||
|
||||
def _update_event_loop_policy():
|
||||
if _sys.implementation.name == "cpython":
|
||||
# Let's not force this dependency, uvloop is much faster on cpython
|
||||
try:
|
||||
import uvloop
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
import asyncio
|
||||
|
||||
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
|
||||
|
||||
|
||||
def _ensure_no_colorama():
|
||||
# a hacky way to ensure that nothing initialises colorama
|
||||
# if we're not running with legacy Windows command line mode
|
||||
@@ -334,12 +321,11 @@ def _early_init():
|
||||
# This function replaces logger so we preferably (though not necessarily) want that to happen
|
||||
# before importing anything that calls `logging.getLogger()`, i.e. `asyncio`.
|
||||
_update_logger_class()
|
||||
_update_event_loop_policy()
|
||||
_ensure_no_colorama()
|
||||
|
||||
|
||||
# This is bumped automatically by release workflow (`.github/workflows/scripts/bump_version.py`)
|
||||
_VERSION = "3.5.23"
|
||||
_VERSION = "3.5.25.dev1"
|
||||
|
||||
__version__, version_info = VersionInfo._get_version()
|
||||
|
||||
|
||||
+19
-37
@@ -25,9 +25,9 @@ import rich
|
||||
import redbot.logging
|
||||
from redbot import __version__
|
||||
from redbot.core.bot import Red, ExitCodes, _NoOwnerSet
|
||||
from redbot.core._cli import interactive_config, confirm, parse_cli_flags
|
||||
from redbot.core._cli import interactive_config, confirm, parse_cli_flags, new_event_loop
|
||||
from redbot.setup import get_data_dir, get_name, save_config
|
||||
from redbot.core import data_manager, _drivers
|
||||
from redbot.core import data_manager, _drivers, _downloader
|
||||
from redbot.core._debuginfo import DebugInfo
|
||||
from redbot.core._sharedlibdeprecation import SharedLibImportWarner
|
||||
|
||||
@@ -182,32 +182,10 @@ async def _edit_owner(red, owner, no_prompt):
|
||||
|
||||
def _edit_instance_name(old_name, new_name, confirm_overwrite, no_prompt):
|
||||
if new_name:
|
||||
name = new_name
|
||||
if name in _get_instance_names() and not confirm_overwrite:
|
||||
name = old_name
|
||||
print(
|
||||
"An instance with this name already exists.\n"
|
||||
"If you want to remove the existing instance and replace it with this one,"
|
||||
" run this command with --overwrite-existing-instance flag."
|
||||
)
|
||||
name = get_name(new_name, confirm_overwrite=confirm_overwrite)
|
||||
elif not no_prompt and confirm("Would you like to change the instance name?", default=False):
|
||||
name = get_name("")
|
||||
if name in _get_instance_names():
|
||||
print(
|
||||
"WARNING: An instance already exists with this name. "
|
||||
"Continuing will overwrite the existing instance config."
|
||||
)
|
||||
if not confirm(
|
||||
"Are you absolutely certain you want to continue with this instance name?",
|
||||
default=False,
|
||||
):
|
||||
print("Instance name will remain unchanged.")
|
||||
name = old_name
|
||||
else:
|
||||
print("Instance name updated.")
|
||||
else:
|
||||
print("Instance name updated.")
|
||||
print()
|
||||
name = get_name(confirm_overwrite=confirm_overwrite)
|
||||
print("Instance name updated.\n")
|
||||
else:
|
||||
name = old_name
|
||||
return name
|
||||
@@ -272,7 +250,7 @@ def early_exit_runner(
|
||||
"""
|
||||
This one exists to not log all the things like it's a full run of the bot.
|
||||
"""
|
||||
loop = asyncio.new_event_loop()
|
||||
loop = new_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
try:
|
||||
if not cli_flags.instance_name:
|
||||
@@ -281,7 +259,7 @@ def early_exit_runner(
|
||||
return
|
||||
|
||||
data_manager.load_basic_configuration(cli_flags.instance_name)
|
||||
red = Red(cli_flags=cli_flags, description="Red V3", dm_help=None)
|
||||
red = Red(cli_flags=cli_flags)
|
||||
driver_cls = _drivers.get_driver_class()
|
||||
loop.run_until_complete(driver_cls.initialize(**data_manager.storage_details()))
|
||||
loop.run_until_complete(func(red, cli_flags))
|
||||
@@ -317,19 +295,23 @@ async def run_bot(red: Red, cli_flags: Namespace) -> None:
|
||||
redbot.logging.init_logging(
|
||||
level=cli_flags.logging_level,
|
||||
location=data_manager.core_data_path() / "logs",
|
||||
cli_flags=cli_flags,
|
||||
rich_logging=cli_flags.rich_logging,
|
||||
rich_tracebacks=cli_flags.rich_tracebacks,
|
||||
rich_traceback_extra_lines=cli_flags.rich_traceback_extra_lines,
|
||||
rich_traceback_show_locals=cli_flags.rich_traceback_show_locals,
|
||||
)
|
||||
|
||||
log.debug("====Basic Config====")
|
||||
log.debug("Data Path: %s", data_manager._base_data_path())
|
||||
log.debug("Storage Type: %s", data_manager.storage_type())
|
||||
|
||||
await _downloader._init(red)
|
||||
|
||||
# lib folder has to be in sys.path before trying to load any 3rd-party cog (GH-3061)
|
||||
# We might want to change handling of requirements in Downloader at later date
|
||||
LIB_PATH = data_manager.cog_data_path(raw_name="Downloader") / "lib"
|
||||
LIB_PATH.mkdir(parents=True, exist_ok=True)
|
||||
if str(LIB_PATH) not in sys.path:
|
||||
sys.path.append(str(LIB_PATH))
|
||||
lib_path = str(_downloader.LIB_PATH)
|
||||
if lib_path not in sys.path:
|
||||
sys.path.append(lib_path)
|
||||
|
||||
# "It's important to note that the global `working_set` object is initialized from
|
||||
# `sys.path` when `pkg_resources` is first imported, but is only updated if you do
|
||||
@@ -339,7 +321,7 @@ async def run_bot(red: Red, cli_flags: Namespace) -> None:
|
||||
# Source: https://setuptools.readthedocs.io/en/latest/pkg_resources.html#workingset-objects
|
||||
pkg_resources = sys.modules.get("pkg_resources")
|
||||
if pkg_resources is not None:
|
||||
pkg_resources.working_set.add_entry(str(LIB_PATH))
|
||||
pkg_resources.working_set.add_entry(lib_path)
|
||||
sys.meta_path.insert(0, SharedLibImportWarner())
|
||||
|
||||
if cli_flags.token:
|
||||
@@ -478,7 +460,7 @@ def main():
|
||||
early_exit_runner(cli_flags, edit_instance)
|
||||
return
|
||||
try:
|
||||
loop = asyncio.new_event_loop()
|
||||
loop = new_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
|
||||
if cli_flags.no_instance:
|
||||
@@ -493,7 +475,7 @@ def main():
|
||||
|
||||
data_manager.load_basic_configuration(cli_flags.instance_name)
|
||||
|
||||
red = Red(cli_flags=cli_flags, description="Red V3", dm_help=None)
|
||||
red = Red(cli_flags=cli_flags)
|
||||
|
||||
if os.name != "nt":
|
||||
# None of this works on windows.
|
||||
|
||||
@@ -0,0 +1,237 @@
|
||||
import asyncio
|
||||
from pathlib import Path
|
||||
from typing import Any, Final, Optional, Tuple
|
||||
|
||||
import click
|
||||
from packaging.version import Version
|
||||
from python_discovery import PythonInfo
|
||||
|
||||
from redbot.core._cli import asyncio_run
|
||||
|
||||
from . import cmd, common, updater
|
||||
|
||||
|
||||
_CHECK_OTHER_PYTHON_INSTALLS_CMD_ARG_NAME: Final = "--check-other-python-installs"
|
||||
|
||||
|
||||
def _help_major_update_example() -> str:
|
||||
version = common.get_current_red_version().__replace__(dev=None, local=None)
|
||||
release = (version.major, version.minor + 1) + (0,) * (len(version.release) - 2)
|
||||
next_major_version = version.__replace__(release=release)
|
||||
return f"updating from Red {version} to Red {next_major_version}"
|
||||
|
||||
|
||||
def _help_minor_update_example() -> str:
|
||||
version = common.get_current_red_version().__replace__(dev=None, local=None)
|
||||
release = (version.major, version.minor, version.micro + 1) + (0,) * (len(version.release) - 3)
|
||||
next_minor_version = version.__replace__(release=release)
|
||||
return f"updating from Red {version} to Red {next_minor_version}"
|
||||
|
||||
|
||||
class _PythonInfoParamType(click.ParamType):
|
||||
name = "Python interpreter"
|
||||
|
||||
def convert(
|
||||
self, value: Any, param: Optional[click.Parameter], ctx: Optional[click.Context]
|
||||
) -> PythonInfo:
|
||||
if isinstance(value, PythonInfo):
|
||||
return value
|
||||
|
||||
try:
|
||||
return PythonInfo.from_exe(value)
|
||||
except RuntimeError:
|
||||
self.fail(f"{value!r} is not a valid Python executable.", param, ctx)
|
||||
|
||||
|
||||
@click.group(invoke_without_command=True)
|
||||
# command-specific options
|
||||
@click.option(
|
||||
"--include-instance",
|
||||
"included_instances",
|
||||
multiple=True,
|
||||
type=click.Choice(common.INSTANCE_LIST),
|
||||
help="The list of instances to backup and check cog compatibility for. If not specified,"
|
||||
" all instances that use the current virtual environment will be backed up and checked.",
|
||||
)
|
||||
@click.option(
|
||||
"--exclude-instance",
|
||||
"excluded_instances",
|
||||
multiple=True,
|
||||
type=click.Choice(common.INSTANCE_LIST),
|
||||
help="Exclude an instance from the list of instances to backup"
|
||||
" and check cog compatibility for.",
|
||||
)
|
||||
@click.option(
|
||||
"--backup-dir",
|
||||
default=None,
|
||||
type=click.Path(
|
||||
dir_okay=True, file_okay=False, resolve_path=True, writable=True, path_type=Path
|
||||
),
|
||||
help="The directory to place the backups of the virtual environment and instances.",
|
||||
)
|
||||
@click.option(
|
||||
"--no-backup",
|
||||
help="Do not make backups of the virtual environment and instances before update.",
|
||||
is_flag=True,
|
||||
)
|
||||
@click.option(
|
||||
"--red-version",
|
||||
"--version",
|
||||
type=common.VersionParamType(),
|
||||
default=None,
|
||||
help="Version of Red to update to instead of the latest.",
|
||||
)
|
||||
@click.option(
|
||||
"--no-major-updates",
|
||||
help=f"Skip major updates. For example: {_help_major_update_example()} is a major update"
|
||||
f" but {_help_minor_update_example()} isn't.",
|
||||
is_flag=True,
|
||||
)
|
||||
@click.option(
|
||||
"--no-full-changelog",
|
||||
help='Skip showing full changelog in a terminal user interface. The "Read before updating"'
|
||||
" sections will still be printed.",
|
||||
is_flag=True,
|
||||
)
|
||||
@click.option(
|
||||
"--no-cog-compatibility-check",
|
||||
help="Skip performing cog compatibility check before the update.",
|
||||
is_flag=True,
|
||||
)
|
||||
@click.option(
|
||||
"--new-python-interpreter",
|
||||
type=_PythonInfoParamType(),
|
||||
help="The new Python interpreter that should be used when creating a virtual environment"
|
||||
" for Red. This can either be a path to a Python executable or a name of a Python executable"
|
||||
" on the PATH.",
|
||||
)
|
||||
@click.option(
|
||||
"--update-cogs/--no-update-cogs",
|
||||
default=None,
|
||||
help="When this option is used, it determines whether the cogs should be updated after Red"
|
||||
" is updated. By default, you'll be asked, if you want to update.\n"
|
||||
"In non-interactive mode, cogs will be updated unless this option is used to override"
|
||||
" the default behavior.",
|
||||
)
|
||||
@click.option(
|
||||
# `pip install` having an option with the same name is coincidental,
|
||||
# this does not call `pip install` with the `--force-reinstall` option.
|
||||
# Not that there would be any point in doing so - we create a fresh virtual environment.
|
||||
"--force-reinstall",
|
||||
type=bool,
|
||||
is_flag=True,
|
||||
help="Force the update process to proceed even, if there is no new version detected."
|
||||
" This will essentially reinstall latest Red version into a fresh virtual environment. You can"
|
||||
" combine it with the --new-python-interpreter option to change Red's Python interpreter.",
|
||||
)
|
||||
@click.option(
|
||||
"--no-prompt",
|
||||
"interactive",
|
||||
type=bool,
|
||||
is_flag=True,
|
||||
default=True,
|
||||
help="Don't ask for user input during the process (non-interactive mode).\n"
|
||||
"NOTE: If you want to use this to automate Red updates, consider specifying --no-major-update"
|
||||
" to avoid performing major updates without making an explicit decision to.\n"
|
||||
"When performing a major update where the current Python interpreter is no longer compatible,"
|
||||
" the --new-python-interpreter option has to be specified or the command will fail.",
|
||||
)
|
||||
# global options
|
||||
@click.option(
|
||||
cmd.arg_names.DEBUG,
|
||||
"--verbose",
|
||||
"-v",
|
||||
"logging_level",
|
||||
count=True,
|
||||
help=(
|
||||
"Increase the verbosity of the logs, each usage of this flag increases the verbosity"
|
||||
" level by 1."
|
||||
),
|
||||
)
|
||||
@click.option(
|
||||
"--check-other-venvs",
|
||||
_CHECK_OTHER_PYTHON_INSTALLS_CMD_ARG_NAME,
|
||||
"ignore_prefix",
|
||||
help="Check the compatibility of cogs for instances that are normally ran with"
|
||||
" a different Python installation and/or virtual environment than the current one.",
|
||||
is_flag=True,
|
||||
)
|
||||
@click.pass_context
|
||||
def cli(
|
||||
ctx: click.Context,
|
||||
included_instances: Tuple[str, ...],
|
||||
excluded_instances: Tuple[str, ...],
|
||||
backup_dir: Optional[Path],
|
||||
no_backup: bool,
|
||||
red_version: Optional[Version],
|
||||
no_major_updates: bool,
|
||||
no_full_changelog: bool,
|
||||
no_cog_compatibility_check: bool,
|
||||
new_python_interpreter: Optional[PythonInfo],
|
||||
update_cogs: Optional[bool],
|
||||
force_reinstall: bool,
|
||||
interactive: bool,
|
||||
logging_level: int,
|
||||
ignore_prefix: bool,
|
||||
) -> None:
|
||||
common.ensure_supported_env()
|
||||
common.configure_logging(logging_level)
|
||||
|
||||
ctx.ensure_object(dict)
|
||||
ctx.obj["IGNORE_PREFIX"] = ignore_prefix
|
||||
|
||||
if ctx.invoked_subcommand is None:
|
||||
if included_instances:
|
||||
# de-duplicate with order intact
|
||||
instances = list(dict.fromkeys(included_instances))
|
||||
else:
|
||||
instances = list(common.INSTANCE_LIST)
|
||||
options = updater.UpdaterOptions(
|
||||
instances=instances,
|
||||
excluded_instances=set(excluded_instances),
|
||||
ignore_prefix=ignore_prefix,
|
||||
backup_dir=backup_dir,
|
||||
no_backup=no_backup,
|
||||
red_version=red_version,
|
||||
no_major_updates=no_major_updates,
|
||||
no_full_changelog=no_full_changelog,
|
||||
no_cog_compatibility_check=no_cog_compatibility_check,
|
||||
new_python_interpreter=new_python_interpreter,
|
||||
update_cogs=update_cogs,
|
||||
force_reinstall=force_reinstall,
|
||||
interactive=interactive,
|
||||
)
|
||||
app = updater.Updater(options)
|
||||
asyncio_run(app.run())
|
||||
# these should not be available to subcommands
|
||||
elif included_instances:
|
||||
raise click.NoSuchOption("--include-instance", ctx=ctx)
|
||||
elif excluded_instances:
|
||||
raise click.NoSuchOption("--exclude-instance", ctx=ctx)
|
||||
elif backup_dir is not None:
|
||||
raise click.NoSuchOption("--backup-dir", ctx=ctx)
|
||||
elif no_backup:
|
||||
raise click.NoSuchOption("--no-backup", ctx=ctx)
|
||||
elif red_version:
|
||||
raise click.NoSuchOption("--red-version", ctx=ctx)
|
||||
elif no_major_updates:
|
||||
raise click.NoSuchOption("--no-major-updates", ctx=ctx)
|
||||
elif no_cog_compatibility_check:
|
||||
raise click.NoSuchOption("--no-cog-compatibility-check", ctx=ctx)
|
||||
elif new_python_interpreter:
|
||||
raise click.NoSuchOption("--new-python-interpreter", ctx=ctx)
|
||||
elif update_cogs is True:
|
||||
raise click.NoSuchOption("--update-cogs", ctx=ctx)
|
||||
elif update_cogs is False:
|
||||
raise click.NoSuchOption("--no-update-cogs", ctx=ctx)
|
||||
elif not interactive:
|
||||
raise click.NoSuchOption("--no-prompt", ctx=ctx)
|
||||
elif force_reinstall:
|
||||
raise click.NoSuchOption("--force-reinstall", ctx=ctx)
|
||||
|
||||
|
||||
cli.add_command(cmd.cog_compatibility.check_cog_compatibility)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
cli()
|
||||
@@ -0,0 +1,144 @@
|
||||
import dataclasses
|
||||
import datetime
|
||||
import functools
|
||||
import os
|
||||
import re
|
||||
from typing import Any, Dict, List
|
||||
|
||||
import aiohttp
|
||||
import yarl
|
||||
from packaging.version import Version
|
||||
from typing_extensions import Self
|
||||
|
||||
|
||||
_CHANGELOG_PATTERN = re.compile(
|
||||
r"\n<!--+ +RED-CHANGELOG-BEGIN: (?P<version>.+) +--+>\n"
|
||||
r"(?P<content>[\s\S]+?)"
|
||||
r"\n<!--+ +RED-CHANGELOG-END +--+>"
|
||||
)
|
||||
_RTD_CANONICAL_URL = os.getenv("_RED_RTD_CANONICAL_URL") or "https://docs.discord.red/en/stable/"
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class VersionChangelog:
|
||||
version: Version
|
||||
content: str
|
||||
_RELEASE_DATE_PATTERN = re.compile(
|
||||
r"^<!--+ +RED-CHANGELOG-RELEASE-DATE: (\d{4})-(\d{2})-(\d{2}) +--+>$",
|
||||
re.MULTILINE,
|
||||
)
|
||||
_CONTRIBUTORS_PATTERN = re.compile(
|
||||
r"^<!--+ +RED-CHANGELOG-CONTRIBUTORS: (?P<contributors>.+) +--+>$",
|
||||
re.MULTILINE,
|
||||
)
|
||||
_READ_BEFORE_UPDATING_SECTION_PATTERN = re.compile(
|
||||
r"\n<!--+ +RED-CHANGELOG-READ-BEFORE-UPDATE-BEGIN +--+>\n"
|
||||
r"(?P<content>[\s\S]+?)"
|
||||
r"\n<!--+ +RED-CHANGELOG-READ-BEFORE-UPDATE-END +--+>"
|
||||
)
|
||||
_USER_CHANGELOG_SECTION_PATTERN = re.compile(
|
||||
r"\n<!--+ +RED-CHANGELOG-USER-CHANGELOG-BEGIN +--+>\n"
|
||||
r"(?P<content>[\s\S]+?)"
|
||||
r"\n<!--+ +RED-CHANGELOG-USER-CHANGELOG-END +--+>"
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_json_dict(cls, data: Dict[str, Any]) -> Self:
|
||||
return cls(version=Version(data["version"]), content=data["content"])
|
||||
|
||||
def to_json_dict(self) -> Dict[str, Any]:
|
||||
return {"version": str(self.version), "content": self.content}
|
||||
|
||||
@functools.cached_property
|
||||
def release_date(self) -> datetime.date:
|
||||
return datetime.date(*map(int, self._RELEASE_DATE_PATTERN.search(self.content).groups()))
|
||||
|
||||
@functools.cached_property
|
||||
def contributors(self) -> List[str]:
|
||||
match = self._CONTRIBUTORS_PATTERN.search(self.content)
|
||||
if match is None:
|
||||
return []
|
||||
return match["contributors"].split()
|
||||
|
||||
@functools.cached_property
|
||||
def read_before_updating_section(self) -> str:
|
||||
return "\n".join(
|
||||
match["content"].strip()
|
||||
for match in self._READ_BEFORE_UPDATING_SECTION_PATTERN.finditer(self.content)
|
||||
)
|
||||
|
||||
@functools.cached_property
|
||||
def user_changelog_section(self) -> str:
|
||||
return "\n".join(
|
||||
match["content"].strip()
|
||||
for match in self._USER_CHANGELOG_SECTION_PATTERN.finditer(self.content)
|
||||
)
|
||||
|
||||
|
||||
Changelogs = Dict[Version, VersionChangelog]
|
||||
|
||||
|
||||
def parse_changelogs(content: str) -> Changelogs:
|
||||
changelogs = {}
|
||||
for match in _CHANGELOG_PATTERN.finditer(content):
|
||||
changelog = VersionChangelog(Version(match["version"]), match["content"])
|
||||
changelogs[changelog.version] = changelog
|
||||
|
||||
return changelogs
|
||||
|
||||
|
||||
def render_markdown(changelogs: Changelogs, *, minimal: bool = False) -> str:
|
||||
if not changelogs:
|
||||
return ""
|
||||
|
||||
parts = ["# Read before updating"]
|
||||
for changelog in reversed(changelogs.values()):
|
||||
parts.append(f"## {changelog.version}")
|
||||
parts.append(changelog.read_before_updating_section)
|
||||
|
||||
contributors = sorted(
|
||||
{
|
||||
contributor
|
||||
for changelog in changelogs.values()
|
||||
for contributor in changelog.contributors
|
||||
}
|
||||
)
|
||||
if contributors:
|
||||
contributor_thanks = (
|
||||
" \n**The releases below were made with help from the following people:** \n"
|
||||
)
|
||||
contributor_thanks += ", ".join(
|
||||
f"[@{contributor}](https://github.com/sponsors/{contributor})"
|
||||
for contributor in contributors
|
||||
)
|
||||
contributor_thanks += " \n**Thank you** \N{HEAVY BLACK HEART}\N{VARIATION SELECTOR-16}"
|
||||
parts.append(contributor_thanks)
|
||||
|
||||
# show the header both at the top and the bottom
|
||||
parts.append(parts[0])
|
||||
|
||||
return "\n".join(parts)
|
||||
|
||||
|
||||
def get_changelogs_between(
|
||||
changelogs: Changelogs, newer_than: Version, not_newer_than: Version
|
||||
) -> Changelogs:
|
||||
return {
|
||||
changelog_version: changelog
|
||||
for changelog_version, changelog in changelogs.items()
|
||||
if newer_than < changelog_version <= not_newer_than
|
||||
}
|
||||
|
||||
|
||||
async def fetch_changelogs() -> Changelogs:
|
||||
"""
|
||||
Fetch the Markdown-formatted changelog from Red's docs site.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Dict[Version, VersionChangelog]
|
||||
A dict mapping versions to their changelogs. Sorted by version, newest first.
|
||||
"""
|
||||
async with aiohttp.ClientSession(raise_for_status=True) as session:
|
||||
async with session.get(yarl.URL(_RTD_CANONICAL_URL) / "_markdown/changelog.md") as resp:
|
||||
return parse_changelogs(await resp.text())
|
||||
@@ -0,0 +1,6 @@
|
||||
from . import arg_names, cog_compatibility
|
||||
|
||||
__all__ = (
|
||||
"arg_names",
|
||||
"cog_compatibility",
|
||||
)
|
||||
@@ -0,0 +1,6 @@
|
||||
from typing import Final
|
||||
|
||||
DEBUG: Final = "--debug"
|
||||
RED_VERSION: Final = "--red-version"
|
||||
PYTHON_VERSION: Final = "--python-version"
|
||||
CHECK_OTHER_PYTHON_INSTALLS: Final = "--check-other-python-installs"
|
||||
@@ -0,0 +1,215 @@
|
||||
import asyncio
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import tempfile
|
||||
from typing import Final, Optional, Tuple
|
||||
|
||||
import click
|
||||
from packaging.version import Version
|
||||
from rich.text import Text
|
||||
|
||||
from redbot._update import cog_compatibility_checker, common
|
||||
from redbot._update.cog_compatibility_checker import CompatibilitySummary
|
||||
from redbot.core import _drivers
|
||||
from redbot.core._cli import asyncio_run
|
||||
from redbot.core.utils._internal_utils import fetch_latest_red_version
|
||||
|
||||
from . import arg_names
|
||||
|
||||
|
||||
EXIT_INSTANCE_SITE_PREFIX_MISMATCH: Final = 4
|
||||
EXIT_INSTANCE_BACKEND_UNSUPPORTED: Final = 5
|
||||
CMD_NAME: Final = "check-cog-compatibility"
|
||||
_COMPATIBILITY_RESULTS_ENV_VAR = "_RED_UPDATE_COMPATIBILITY_RESULTS_FILE"
|
||||
|
||||
|
||||
@click.command(CMD_NAME)
|
||||
@click.argument(
|
||||
"instances",
|
||||
nargs=-1,
|
||||
type=click.Choice(common.INSTANCE_LIST),
|
||||
default=None,
|
||||
metavar="[INSTANCE_NAME]",
|
||||
)
|
||||
@click.option(
|
||||
arg_names.RED_VERSION,
|
||||
type=common.VersionParamType(),
|
||||
default=None,
|
||||
help="The Red version to check cog compatibility for."
|
||||
" If not provided, the information about latest available version will be fetched"
|
||||
" and the command will check whether installed cogs support that version.\n"
|
||||
"If this option is provided, --python-version also has to be provided.",
|
||||
)
|
||||
@click.option(
|
||||
arg_names.PYTHON_VERSION,
|
||||
type=common.VersionParamType(),
|
||||
default=None,
|
||||
help="The Python version to check cog compatibility for."
|
||||
" If not provided, the command will either use the current interpreter's version or,"
|
||||
" if that version is not compatible with the latest Red version, it will try to"
|
||||
" find the latest available CPython interpreter on the system and will check whether"
|
||||
" installed cogs support it.\n"
|
||||
"If this option is provided, --red-version also has to be provided.",
|
||||
)
|
||||
@click.pass_context
|
||||
def check_cog_compatibility(
|
||||
ctx: click.Context,
|
||||
instances: Tuple[str, ...],
|
||||
red_version: Optional[Version],
|
||||
python_version: Optional[Version],
|
||||
) -> None:
|
||||
"""
|
||||
Check if the installed cogs are compatible with the given version.
|
||||
"""
|
||||
if (red_version, python_version).count(None) == 1:
|
||||
raise click.BadParameter(
|
||||
"Either both --red-version and --python-version options"
|
||||
" have to be specified or neither.",
|
||||
param_hint=[arg_names.RED_VERSION, arg_names.PYTHON_VERSION],
|
||||
)
|
||||
|
||||
asyncio_run(
|
||||
_check_cog_compatibility_command_impl(
|
||||
red_version=red_version,
|
||||
python_version=python_version,
|
||||
instances=instances,
|
||||
ignore_prefix=ctx.obj["IGNORE_PREFIX"],
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
async def _check_cog_compatibility_command_impl(
|
||||
*,
|
||||
red_version: Optional[Version],
|
||||
python_version: Optional[Version],
|
||||
instances: Tuple[str, ...] = (),
|
||||
ignore_prefix: bool = False,
|
||||
) -> None:
|
||||
console = common.get_console()
|
||||
if red_version is None or python_version is None:
|
||||
with console.status("Checking latest version..."):
|
||||
latest = await fetch_latest_red_version()
|
||||
red_version = latest.version
|
||||
|
||||
python_version = Version(".".join(map(str, sys.version_info[:3])))
|
||||
if python_version not in latest.requires_python:
|
||||
interpreters = common.search_for_interpreters(latest.requires_python)
|
||||
_, python_version, _ = interpreters[0]
|
||||
|
||||
if len(instances) == 1:
|
||||
results_file = os.getenv(_COMPATIBILITY_RESULTS_ENV_VAR, "")
|
||||
try:
|
||||
results = await cog_compatibility_checker.check_instance(
|
||||
instances[0],
|
||||
latest_version=red_version,
|
||||
interpreter_version=python_version,
|
||||
ignore_prefix=ignore_prefix,
|
||||
)
|
||||
except _drivers.MissingExtraRequirements:
|
||||
if not results_file:
|
||||
common.print_with_prefix_column(
|
||||
common.ICON_ERROR,
|
||||
Text(instances[0], style="bold"),
|
||||
" instance could not be checked as it uses a storage backend"
|
||||
" that is not supported by the current Red installation"
|
||||
" (some requirements are missing).",
|
||||
)
|
||||
raise SystemExit(EXIT_INSTANCE_BACKEND_UNSUPPORTED)
|
||||
except cog_compatibility_checker.InstanceSitePrefixMismatchError as exc:
|
||||
if not results_file:
|
||||
common.print_with_prefix_column(
|
||||
common.ICON_ERROR,
|
||||
Text(exc.instance_name, style="bold"),
|
||||
" instance could not be checked as it is a part of"
|
||||
" a different Python installation and/or virtual environment.",
|
||||
)
|
||||
raise SystemExit(EXIT_INSTANCE_SITE_PREFIX_MISMATCH)
|
||||
if results_file:
|
||||
with open(results_file, "w", encoding="utf-8") as fp:
|
||||
json.dump(results.to_json_dict(), fp)
|
||||
return
|
||||
|
||||
if not instances:
|
||||
instances = tuple(common.INSTANCE_LIST)
|
||||
checked_instances = []
|
||||
for instance_name in instances:
|
||||
exit_code, _, _ = await call(
|
||||
instance_name,
|
||||
red_version=red_version,
|
||||
python_version=python_version,
|
||||
ignore_prefix=ignore_prefix,
|
||||
)
|
||||
if exit_code != EXIT_INSTANCE_SITE_PREFIX_MISMATCH:
|
||||
if exit_code:
|
||||
raise SystemExit(exit_code)
|
||||
checked_instances.append(instance_name)
|
||||
|
||||
if not checked_instances:
|
||||
common.print_with_prefix_column(
|
||||
common.ICON_ERROR, "There were no instances to check cog compatibility for."
|
||||
)
|
||||
raise SystemExit(1)
|
||||
|
||||
|
||||
async def call(
|
||||
instance_name: str,
|
||||
*,
|
||||
red_version: Version,
|
||||
python_version: Version,
|
||||
ignore_prefix: bool = False,
|
||||
return_results: bool = False,
|
||||
stdout: Optional[int] = None,
|
||||
) -> Tuple[int, Optional[str], Optional[CompatibilitySummary]]:
|
||||
debug_args = (arg_names.DEBUG,) * common.get_log_cli_level()
|
||||
args = [
|
||||
"-m",
|
||||
"redbot._update",
|
||||
*debug_args,
|
||||
CMD_NAME,
|
||||
instance_name,
|
||||
arg_names.RED_VERSION,
|
||||
str(red_version),
|
||||
arg_names.PYTHON_VERSION,
|
||||
str(python_version),
|
||||
]
|
||||
if ignore_prefix:
|
||||
args.append(arg_names.CHECK_OTHER_PYTHON_INSTALLS)
|
||||
env = os.environ.copy()
|
||||
|
||||
# terminal woes
|
||||
console = common.get_console()
|
||||
if console.is_terminal:
|
||||
env["TTY_COMPATIBLE"] = "1"
|
||||
# Rich only checks stdout for Windows console features:
|
||||
# https://github.com/Textualize/rich/blob/fc41075a3206d2a5fd846c6f41c4d2becab814fa/rich/_windows.py#L46
|
||||
env[common.INTERNAL_LEGACY_WINDOWS_ENV_VAR] = "1" if console.legacy_windows else "0"
|
||||
else:
|
||||
# Rich does not set legacy_windows correctly when is_terminal is False
|
||||
# https://github.com/Textualize/rich/issues/3647
|
||||
env[common.INTERNAL_LEGACY_WINDOWS_ENV_VAR] = "0"
|
||||
env["PYTHONIOENCODING"] = sys.stdout.encoding
|
||||
|
||||
results = None
|
||||
results_file = None
|
||||
if return_results:
|
||||
results_file = tempfile.NamedTemporaryFile(delete=False)
|
||||
try:
|
||||
if results_file is not None:
|
||||
results_file.close()
|
||||
env[_COMPATIBILITY_RESULTS_ENV_VAR] = str(results_file.name)
|
||||
|
||||
proc = await asyncio.create_subprocess_exec(sys.executable, *args, env=env, stdout=stdout)
|
||||
stdout_data, _ = await proc.communicate()
|
||||
decoded_stdout = None
|
||||
if stdout_data is not None:
|
||||
decoded_stdout = stdout_data.decode()
|
||||
exit_code = await proc.wait()
|
||||
if not exit_code and results_file is not None:
|
||||
with open(results_file.name, encoding="utf-8") as fp:
|
||||
results = CompatibilitySummary.from_json_dict(json.load(fp))
|
||||
finally:
|
||||
if results_file is not None:
|
||||
os.remove(results_file.name)
|
||||
|
||||
return exit_code, decoded_stdout, results
|
||||
@@ -0,0 +1,551 @@
|
||||
import dataclasses
|
||||
import enum
|
||||
import functools
|
||||
import itertools
|
||||
import os
|
||||
import sys
|
||||
from typing import Any, Dict, Iterable, Iterator, List, Mapping, Optional, Set, Tuple
|
||||
|
||||
import rich
|
||||
from packaging.version import Version
|
||||
from rich.text import Text
|
||||
from typing_extensions import Self
|
||||
|
||||
from redbot.core import _downloader, _drivers, data_manager
|
||||
from redbot.core._cli import parse_cli_flags
|
||||
from redbot.core.bot import Red
|
||||
from redbot.core.utils._internal_utils import detailed_progress
|
||||
|
||||
from . import common
|
||||
|
||||
|
||||
class InstanceSitePrefixMismatchError(Exception):
|
||||
"""The instance's last known sys.prefix is different from the current one."""
|
||||
|
||||
def __init__(self, instance_name: str, last_known_prefix: Optional[str]) -> None:
|
||||
self.instance_name = instance_name
|
||||
self.last_known_prefix = last_known_prefix
|
||||
super().__init__(
|
||||
f"The last known sys.prefix of {instance_name!r} is different from"
|
||||
" current process's sys.prefix.",
|
||||
)
|
||||
|
||||
|
||||
class SimpleCompatibilityStatus(common.OrderedEnum):
|
||||
UNSUPPORTED = enum.auto()
|
||||
POTENTIALLY_SUPPORTED = enum.auto()
|
||||
EXPLICITLY_SUPPORTED = enum.auto()
|
||||
|
||||
|
||||
class CompatibilityStatus(enum.Enum):
|
||||
# unsupported is <100, 200)
|
||||
UNSUPPORTED_PYTHON_VERSION = 100
|
||||
UNSUPPORTED_BOT_VERSION = 101
|
||||
# potentially supported is <200, 300)
|
||||
POTENTIALLY_SUPPORTED = 200
|
||||
# explicitly supported is <300, 400)
|
||||
EXPLICITLY_SUPPORTED_NON_BREAKING = 300
|
||||
EXPLICITLY_SUPPORTED_MIN_BOT_VERSION = 301
|
||||
EXPLICITLY_SUPPORTED_MAX_BOT_VERSION = 302
|
||||
EXPLICITLY_SUPPORTED_READY_TAG = 303
|
||||
|
||||
@property
|
||||
def simple_status(self) -> SimpleCompatibilityStatus:
|
||||
if self.unsupported:
|
||||
return SimpleCompatibilityStatus.UNSUPPORTED
|
||||
if self.potentially_supported:
|
||||
return SimpleCompatibilityStatus.POTENTIALLY_SUPPORTED
|
||||
if self.explicitly_supported:
|
||||
return SimpleCompatibilityStatus.EXPLICITLY_SUPPORTED
|
||||
raise RuntimeError("unreachable")
|
||||
|
||||
@property
|
||||
def unsupported(self) -> bool:
|
||||
return 100 <= self.value < 200
|
||||
|
||||
@property
|
||||
def potentially_supported(self) -> bool:
|
||||
return 200 <= self.value < 300
|
||||
|
||||
@property
|
||||
def explicitly_supported(self) -> bool:
|
||||
return 300 <= self.value < 400
|
||||
|
||||
def __ge__(self, other: Any) -> bool:
|
||||
if self.__class__ is other.__class__:
|
||||
return self.simple_status >= other.simple_status
|
||||
return NotImplemented
|
||||
|
||||
def __gt__(self, other: Any) -> bool:
|
||||
if self.__class__ is other.__class__:
|
||||
return self.simple_status > other.simple_status
|
||||
return NotImplemented
|
||||
|
||||
def __le__(self, other: Any) -> bool:
|
||||
if self.__class__ is other.__class__:
|
||||
return self.simple_status <= other.simple_status
|
||||
return NotImplemented
|
||||
|
||||
def __lt__(self, other: Any) -> bool:
|
||||
if self.__class__ is other.__class__:
|
||||
return self.simple_status < other.simple_status
|
||||
return NotImplemented
|
||||
|
||||
|
||||
@dataclasses.dataclass
|
||||
class CogCompatibilityInfo:
|
||||
name: str
|
||||
repo_name: str
|
||||
min_bot_version: Version
|
||||
max_bot_version: Version
|
||||
min_python_version: Version
|
||||
tags: Tuple[str, ...]
|
||||
compatibility_status: CompatibilityStatus = CompatibilityStatus.POTENTIALLY_SUPPORTED
|
||||
|
||||
@classmethod
|
||||
def from_installable(cls, installable: _downloader.Installable) -> Self:
|
||||
return cls(
|
||||
name=installable.name,
|
||||
repo_name=installable.repo_name,
|
||||
min_bot_version=installable.min_bot_version,
|
||||
max_bot_version=installable.max_bot_version,
|
||||
min_python_version=installable.min_python_version,
|
||||
tags=installable.tags,
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def from_json_dict(cls, data: Dict[str, Any]) -> Self:
|
||||
return cls(
|
||||
name=data["name"],
|
||||
repo_name=data["repo_name"],
|
||||
min_bot_version=Version(data["min_bot_version"]),
|
||||
max_bot_version=Version(data["max_bot_version"]),
|
||||
min_python_version=Version(data["min_python_version"]),
|
||||
tags=tuple(data["tags"]),
|
||||
compatibility_status=CompatibilityStatus(data["compatibility_status"]),
|
||||
)
|
||||
|
||||
def to_json_dict(self) -> Dict[str, Any]:
|
||||
return {
|
||||
"name": self.name,
|
||||
"repo_name": self.repo_name,
|
||||
"min_bot_version": str(self.min_bot_version),
|
||||
"max_bot_version": str(self.max_bot_version),
|
||||
"min_python_version": str(self.min_python_version),
|
||||
"tags": self.tags,
|
||||
"compatibility_status": self.compatibility_status.value,
|
||||
}
|
||||
|
||||
|
||||
CogSupportDict = Dict[str, CogCompatibilityInfo]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class CompatibilityResults(Mapping[str, CogCompatibilityInfo]):
|
||||
latest_version: Version
|
||||
interpreter_version: Version
|
||||
|
||||
explicitly_supported: CogSupportDict = dataclasses.field(default_factory=dict)
|
||||
potentially_supported: CogSupportDict = dataclasses.field(default_factory=dict)
|
||||
incompatible_python_version: CogSupportDict = dataclasses.field(default_factory=dict)
|
||||
incompatible_bot_version: CogSupportDict = dataclasses.field(default_factory=dict)
|
||||
|
||||
@classmethod
|
||||
def from_json_dict(cls, data: Dict[str, Any]) -> Self:
|
||||
return cls(
|
||||
latest_version=Version(data["latest_version"]),
|
||||
interpreter_version=Version(data["interpreter_version"]),
|
||||
explicitly_supported={
|
||||
cog_name: CogCompatibilityInfo.from_json_dict(info_data)
|
||||
for cog_name, info_data in data["explicitly_supported"].items()
|
||||
},
|
||||
potentially_supported={
|
||||
cog_name: CogCompatibilityInfo.from_json_dict(info_data)
|
||||
for cog_name, info_data in data["potentially_supported"].items()
|
||||
},
|
||||
incompatible_python_version={
|
||||
cog_name: CogCompatibilityInfo.from_json_dict(info_data)
|
||||
for cog_name, info_data in data["incompatible_python_version"].items()
|
||||
},
|
||||
incompatible_bot_version={
|
||||
cog_name: CogCompatibilityInfo.from_json_dict(info_data)
|
||||
for cog_name, info_data in data["incompatible_bot_version"].items()
|
||||
},
|
||||
)
|
||||
|
||||
def to_json_dict(self) -> Dict[str, Any]:
|
||||
return {
|
||||
"latest_version": str(self.latest_version),
|
||||
"interpreter_version": str(self.interpreter_version),
|
||||
"explicitly_supported": {
|
||||
cog_name: info.to_json_dict()
|
||||
for cog_name, info in self.explicitly_supported.items()
|
||||
},
|
||||
"potentially_supported": {
|
||||
cog_name: info.to_json_dict()
|
||||
for cog_name, info in self.potentially_supported.items()
|
||||
},
|
||||
"incompatible_python_version": {
|
||||
cog_name: info.to_json_dict()
|
||||
for cog_name, info in self.incompatible_python_version.items()
|
||||
},
|
||||
"incompatible_bot_version": {
|
||||
cog_name: info.to_json_dict()
|
||||
for cog_name, info in self.incompatible_bot_version.items()
|
||||
},
|
||||
}
|
||||
|
||||
def __getitem__(self, key: str) -> CogCompatibilityInfo:
|
||||
for data in (
|
||||
self.explicitly_supported,
|
||||
self.potentially_supported,
|
||||
self.incompatible_python_version,
|
||||
self.incompatible_bot_version,
|
||||
):
|
||||
try:
|
||||
return data[key]
|
||||
except KeyError:
|
||||
pass
|
||||
raise KeyError(key)
|
||||
|
||||
def __iter__(self) -> Iterator[str]:
|
||||
return itertools.chain(
|
||||
self.explicitly_supported.keys(),
|
||||
self.potentially_supported.keys(),
|
||||
self.incompatible_python_version.keys(),
|
||||
self.incompatible_bot_version.keys(),
|
||||
)
|
||||
|
||||
def __len__(self) -> int:
|
||||
count = 0
|
||||
for data in (
|
||||
self.explicitly_supported,
|
||||
self.potentially_supported,
|
||||
self.incompatible_python_version,
|
||||
self.incompatible_bot_version,
|
||||
):
|
||||
count += len(data)
|
||||
return count
|
||||
|
||||
def __bool__(self) -> bool:
|
||||
return any(
|
||||
(
|
||||
self.explicitly_supported,
|
||||
self.potentially_supported,
|
||||
self.incompatible_python_version,
|
||||
self.incompatible_bot_version,
|
||||
)
|
||||
)
|
||||
|
||||
def print(self) -> None:
|
||||
major_version = Text(f"{self.latest_version.major}.{self.latest_version.minor}")
|
||||
if self.explicitly_supported:
|
||||
common.print_with_prefix_column(
|
||||
common.ICON_SUCCESS,
|
||||
"The following cogs are explicitly marked as supporting Red ",
|
||||
major_version,
|
||||
":\n",
|
||||
Text(", ").join(Text(cog, style="bold") for cog in self.explicitly_supported),
|
||||
)
|
||||
if self.potentially_supported:
|
||||
common.print_with_prefix_column(
|
||||
common.ICON_WARN,
|
||||
"The following cogs may support Red ",
|
||||
major_version,
|
||||
" but they haven't been explicitly marked as such:\n",
|
||||
Text(", ").join(Text(cog, style="bold") for cog in self.potentially_supported),
|
||||
)
|
||||
if self.incompatible_bot_version:
|
||||
common.print_with_prefix_column(
|
||||
common.ICON_ERROR,
|
||||
"The following cogs do not support Red ",
|
||||
Text(str(self.latest_version)),
|
||||
":\n",
|
||||
Text(", ").join(Text(cog, style="bold") for cog in self.incompatible_bot_version),
|
||||
)
|
||||
if self.incompatible_python_version:
|
||||
common.print_with_prefix_column(
|
||||
common.ICON_ERROR,
|
||||
"The following cogs do not support Python ",
|
||||
Text(str(self.interpreter_version)),
|
||||
":\n",
|
||||
Text(", ").join(
|
||||
Text(cog, style="bold") for cog in self.incompatible_python_version
|
||||
),
|
||||
)
|
||||
if not self.explicitly_supported and (
|
||||
self.potentially_supported
|
||||
or self.incompatible_bot_version
|
||||
or self.incompatible_python_version
|
||||
):
|
||||
common.print_with_prefix_column(
|
||||
common.ICON_INFO,
|
||||
"None of the checked cogs were explicitly marked as supporting Red ",
|
||||
major_version,
|
||||
".",
|
||||
)
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
|
||||
class CompatibilitySummary:
|
||||
instance_name: str
|
||||
before_update: CompatibilityResults
|
||||
after_update: CompatibilityResults
|
||||
|
||||
@classmethod
|
||||
def from_json_dict(cls, data: Dict[str, Any]) -> Self:
|
||||
return cls(
|
||||
instance_name=data["instance_name"],
|
||||
before_update=CompatibilityResults.from_json_dict(data["before_update"]),
|
||||
after_update=CompatibilityResults.from_json_dict(data["after_update"]),
|
||||
)
|
||||
|
||||
def to_json_dict(self) -> Dict[str, Any]:
|
||||
return {
|
||||
"instance_name": self.instance_name,
|
||||
"before_update": self.before_update.to_json_dict(),
|
||||
"after_update": self.after_update.to_json_dict(),
|
||||
}
|
||||
|
||||
|
||||
class CogCompatibilityChecker:
|
||||
def __init__(
|
||||
self,
|
||||
bot: Red,
|
||||
*,
|
||||
latest_version: Version,
|
||||
interpreter_version: Version,
|
||||
ignore_prefix: bool = False,
|
||||
) -> None:
|
||||
self.bot = bot
|
||||
self.latest_version = latest_version
|
||||
self.interpreter_version = interpreter_version
|
||||
self.ignore_prefix = ignore_prefix
|
||||
self._console = common.get_console(stderr=True)
|
||||
self._stdout_console = common.get_console()
|
||||
|
||||
@functools.cached_property
|
||||
def current_version(self) -> Version:
|
||||
return common.get_current_red_version()
|
||||
|
||||
async def check(self) -> CompatibilitySummary:
|
||||
instance_name = data_manager.instance_name()
|
||||
if not self.ignore_prefix:
|
||||
last_known_prefix = await self.bot._config.last_system_info.python_prefix()
|
||||
same_install = False
|
||||
if last_known_prefix is not None:
|
||||
try:
|
||||
same_install = os.path.samefile(last_known_prefix, sys.prefix)
|
||||
except OSError:
|
||||
pass
|
||||
if not same_install:
|
||||
raise InstanceSitePrefixMismatchError(instance_name, last_known_prefix)
|
||||
|
||||
common.print_with_prefix_column(
|
||||
common.ICON_INFO,
|
||||
"Started checking cog compatibility for the ",
|
||||
Text(instance_name, style="bold"),
|
||||
" instance.",
|
||||
console=self._console,
|
||||
)
|
||||
status = Text.assemble(
|
||||
"Checking compatibility of cogs installed on the ",
|
||||
(instance_name, "bold"),
|
||||
" instance...",
|
||||
)
|
||||
with self._console.status(status):
|
||||
await _downloader._init_without_bot(self.bot._cog_mgr)
|
||||
|
||||
await self._update_repos()
|
||||
|
||||
installed_cogs = await _downloader.installed_cogs()
|
||||
repo_unknown = []
|
||||
to_check = set()
|
||||
|
||||
for cog in installed_cogs:
|
||||
if cog.repo is None:
|
||||
repo_unknown.append(cog)
|
||||
else:
|
||||
to_check.add(cog)
|
||||
|
||||
with self._console.status("Checking available cog updates..."):
|
||||
update_check_result = await _downloader.check_cog_updates(
|
||||
cogs=to_check,
|
||||
update_repos=False,
|
||||
env=_downloader.Environment(
|
||||
red_version=self.latest_version, python_version=self.interpreter_version
|
||||
),
|
||||
)
|
||||
self._console.print("Available cog updates checked.")
|
||||
|
||||
summary = CompatibilitySummary(
|
||||
instance_name=instance_name,
|
||||
before_update=self._evaluate_before_update_compatibility(to_check),
|
||||
after_update=self._evaluate_after_update_compatibility(
|
||||
to_check, update_check_result
|
||||
),
|
||||
)
|
||||
|
||||
common.print_with_prefix_column(
|
||||
common.ICON_INFO,
|
||||
"Finished checking cog compatibility for the ",
|
||||
Text(instance_name, style="bold"),
|
||||
" instance.",
|
||||
console=self._console,
|
||||
)
|
||||
|
||||
self._stdout_console.print()
|
||||
|
||||
# Note that when a cog can be updated
|
||||
# and its up-to-date version does not support the Red version we're updating to,
|
||||
# we don't check whether currently installed version of the cog supports that Red version.
|
||||
# This is intentional - we want to allow cog creators to mark something incompatible
|
||||
# after the fact.
|
||||
summary.after_update.print()
|
||||
|
||||
return summary
|
||||
|
||||
async def _update_repos(self) -> None:
|
||||
with detailed_progress(unit="repos", console=self._console) as progress:
|
||||
task_id = progress.add_task(
|
||||
"Updating repos", total=len(_downloader._repo_manager.repos)
|
||||
)
|
||||
updated_count = 0
|
||||
already_up_to_date_count = 0
|
||||
failed_count = 0
|
||||
for repo in _downloader._repo_manager.repos:
|
||||
progress.update(task_id, description=f"Updating {repo.name!r} repo")
|
||||
try:
|
||||
old, new = await repo.update()
|
||||
except _downloader.errors.UpdateError:
|
||||
common.print_with_prefix_column(
|
||||
common.ICON_WARN,
|
||||
"Could not update repo ",
|
||||
Text(repo.name, style="bold"),
|
||||
", the results for cogs from it may be inaccurate.",
|
||||
console=self._console,
|
||||
)
|
||||
failed_count += 1
|
||||
else:
|
||||
if old != new:
|
||||
updated_count += 1
|
||||
self._console.print("Updated repo", Text(repo.name, style="bold"))
|
||||
else:
|
||||
already_up_to_date_count += 1
|
||||
self._console.print(
|
||||
"Repo", Text(repo.name, style="bold"), "is already up-to-date."
|
||||
)
|
||||
progress.advance(task_id)
|
||||
|
||||
self._stdout_console.print(
|
||||
f"Successfully updated {updated_count} repos, failed to update {failed_count} repos.\n"
|
||||
f"{already_up_to_date_count} repos were already up-to-date.",
|
||||
highlight=True,
|
||||
)
|
||||
|
||||
def _fill_compatibility_results(
|
||||
self, results: CompatibilityResults, cogs: Iterable[_downloader.Installable]
|
||||
) -> None:
|
||||
latest_version = self.latest_version
|
||||
interpreter_version = self.interpreter_version
|
||||
breaking_update = self.current_version.release[:2] != self.latest_version.release[:2]
|
||||
|
||||
for cog in cogs:
|
||||
info = CogCompatibilityInfo.from_installable(cog)
|
||||
if cog.min_python_version > interpreter_version:
|
||||
info.compatibility_status = CompatibilityStatus.UNSUPPORTED_PYTHON_VERSION
|
||||
results.incompatible_python_version[cog.name] = info
|
||||
elif cog.min_bot_version > latest_version or (
|
||||
# max version should be ignored when it's lower than min version
|
||||
cog.min_bot_version <= cog.max_bot_version
|
||||
and cog.max_bot_version < latest_version
|
||||
):
|
||||
info.compatibility_status = CompatibilityStatus.UNSUPPORTED_BOT_VERSION
|
||||
results.incompatible_bot_version[cog.name] = info
|
||||
elif not breaking_update:
|
||||
info.compatibility_status = CompatibilityStatus.EXPLICITLY_SUPPORTED_NON_BREAKING
|
||||
results.explicitly_supported[cog.name] = info
|
||||
elif latest_version.release[:2] == cog.min_bot_version.release[:2]:
|
||||
# If cog creator explicitly set min_bot_version to 3.x.y,
|
||||
# then 3.x is explicitly supported.
|
||||
info.compatibility_status = (
|
||||
CompatibilityStatus.EXPLICITLY_SUPPORTED_MIN_BOT_VERSION
|
||||
)
|
||||
results.explicitly_supported[cog.name] = info
|
||||
elif latest_version.release[:2] == cog.max_bot_version.release[:2]:
|
||||
# If cog creator explicitly set max_bot_version to 3.x.y,
|
||||
# then 3.x is explicitly supported.
|
||||
info.compatibility_status = (
|
||||
CompatibilityStatus.EXPLICITLY_SUPPORTED_MAX_BOT_VERSION
|
||||
)
|
||||
results.explicitly_supported[cog.name] = info
|
||||
elif f"red-{latest_version.major}-{latest_version.minor}-ready" in cog.tags:
|
||||
# If cog creator explicitly added a "red-3.x-ready" tag,
|
||||
# then 3.x is explicitly supported.
|
||||
# This is similar to the meaning of "Programming Language :: Python :: 3.x"
|
||||
# classifiers in Python packaging.
|
||||
info.compatibility_status = CompatibilityStatus.EXPLICITLY_SUPPORTED_READY_TAG
|
||||
results.explicitly_supported[cog.name] = info
|
||||
else:
|
||||
# If we don't have any explicit signals from the cog's metadata that
|
||||
# Red 3.x is supported, the cog is only *potentially* supported by that version.
|
||||
info.compatibility_status = CompatibilityStatus.POTENTIALLY_SUPPORTED
|
||||
results.potentially_supported[cog.name] = info
|
||||
|
||||
def _evaluate_before_update_compatibility(
|
||||
self, to_check: Iterable[_downloader.Installable]
|
||||
) -> CompatibilityResults:
|
||||
results = CompatibilityResults(
|
||||
latest_version=self.latest_version, interpreter_version=self.interpreter_version
|
||||
)
|
||||
|
||||
self._fill_compatibility_results(results, to_check)
|
||||
|
||||
return results
|
||||
|
||||
def _evaluate_after_update_compatibility(
|
||||
self,
|
||||
to_check: Iterable[_downloader.Installable],
|
||||
update_check_result: _downloader.CogUpdateCheckResult,
|
||||
) -> CompatibilityResults:
|
||||
not_updatable = set(to_check)
|
||||
results = CompatibilityResults(
|
||||
latest_version=self.latest_version, interpreter_version=self.interpreter_version
|
||||
)
|
||||
|
||||
not_updatable.difference_update(update_check_result.incompatible_python_version)
|
||||
not_updatable.difference_update(update_check_result.incompatible_bot_version)
|
||||
not_updatable.difference_update(update_check_result.updatable_cogs)
|
||||
|
||||
self._fill_compatibility_results(results, update_check_result.incompatible_python_version)
|
||||
self._fill_compatibility_results(results, update_check_result.incompatible_bot_version)
|
||||
self._fill_compatibility_results(results, update_check_result.updatable_cogs)
|
||||
|
||||
# not_updatable should now only have cogs that were not updateable. Those cogs
|
||||
# are filled based on metadata of the currently installed ("before update") version.
|
||||
self._fill_compatibility_results(results, not_updatable)
|
||||
|
||||
return results
|
||||
|
||||
|
||||
async def check_instance(
|
||||
instance: str,
|
||||
*,
|
||||
latest_version: Version,
|
||||
interpreter_version: Version,
|
||||
ignore_prefix: bool = False,
|
||||
) -> CompatibilitySummary:
|
||||
data_manager.load_basic_configuration(instance)
|
||||
red = Red(cli_flags=parse_cli_flags([instance]))
|
||||
driver_cls = _drivers.get_driver_class()
|
||||
await driver_cls.initialize(**data_manager.storage_details())
|
||||
try:
|
||||
checker = CogCompatibilityChecker(
|
||||
red,
|
||||
latest_version=latest_version,
|
||||
interpreter_version=interpreter_version,
|
||||
ignore_prefix=ignore_prefix,
|
||||
)
|
||||
return await checker.check()
|
||||
finally:
|
||||
await driver_cls.teardown()
|
||||
@@ -0,0 +1,221 @@
|
||||
import enum
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from operator import itemgetter
|
||||
from typing import Any, Final, Iterable, List, Literal, Optional, Tuple, Union
|
||||
|
||||
import click
|
||||
import rich
|
||||
from packaging.specifiers import SpecifierSet
|
||||
from packaging.version import Version
|
||||
from python_discovery import PythonInfo, get_interpreter
|
||||
from rich.console import Console, RenderableType
|
||||
from rich.logging import RichHandler
|
||||
from rich.table import Table
|
||||
from rich.text import Text
|
||||
|
||||
from redbot import __version__
|
||||
from redbot.core.utils._internal_utils import (
|
||||
cli_level_to_log_level,
|
||||
get_installed_extras,
|
||||
log_level_to_cli_level,
|
||||
)
|
||||
from redbot.core import data_manager
|
||||
|
||||
_instance_data = data_manager.load_existing_config()
|
||||
INSTANCE_LIST: Final = () if _instance_data is None else tuple(_instance_data.keys())
|
||||
|
||||
|
||||
ICON_SUCCESS = "[green]:white_heavy_check_mark-emoji:[/]"
|
||||
ICON_INFO = "[blue]:information-emoji:[/]"
|
||||
ICON_WARN = "[yellow]:warning-emoji:[/]"
|
||||
ICON_ERROR = "[red]:cross_mark-emoji:[/]"
|
||||
|
||||
INTERNAL_LEGACY_WINDOWS_ENV_VAR = "_RED_UPDATE_INTERNAL_LEGACY_WINDOWS"
|
||||
INTERNAL_UPDATER_METADATA_ENV_VAR = "_RED_UPDATE_INTERNAL_UPDATER_METADATA"
|
||||
_STDERR_CONSOLE: Optional[Console] = None
|
||||
|
||||
RUNNER_DIR_ENV_VAR: Final = "REDBOT_UPDATE_RUNNER_DIR"
|
||||
RUNNER_WRAPPER_EXE_ENV_VAR: Final = "REDBOT_UPDATE_RUNNER_WRAPPER_EXE"
|
||||
|
||||
OLD_VENV_BACKUP_DIR_NAME: Final = "redbot-update-old-venv-backup"
|
||||
|
||||
|
||||
def get_red_dependency_specifier(version: Version, extras: Iterable[str]) -> str:
|
||||
specifier_template = (
|
||||
os.getenv("_RED_UPDATE_PRETEND_SPECIFIER_TEMPLATE")
|
||||
or "Red-DiscordBot {extras} {versionspec}"
|
||||
)
|
||||
joined_extras = ",".join(extras)
|
||||
return specifier_template.format(
|
||||
extras=f"[{joined_extras}]" if joined_extras else "",
|
||||
versionspec=f"=={version}",
|
||||
)
|
||||
|
||||
|
||||
def get_current_red_version() -> Version:
|
||||
return Version(os.getenv("_RED_UPDATE_PRETEND_VERSION") or __version__)
|
||||
|
||||
|
||||
def get_current_python_version() -> Version:
|
||||
return Version(".".join(map(str, sys.version_info[:3])))
|
||||
|
||||
|
||||
def prefix_column(prefix: RenderableType, *parts: Union[str, Text]) -> Table:
|
||||
output = Table.grid(padding=(0, 2))
|
||||
output.add_column()
|
||||
output.add_column()
|
||||
text = Text()
|
||||
for renderable in parts:
|
||||
if isinstance(renderable, str):
|
||||
text.append_text(Text.from_markup(renderable))
|
||||
else:
|
||||
text.append_text(renderable)
|
||||
output.add_row(prefix, text)
|
||||
return output
|
||||
|
||||
|
||||
def print_with_prefix_column(
|
||||
prefix: RenderableType, *parts: Union[str, Text], console: Optional[Console] = None
|
||||
) -> None:
|
||||
if console is None:
|
||||
console = rich.get_console()
|
||||
console.print(prefix_column(prefix, *parts))
|
||||
|
||||
|
||||
def _apply_legacy_windows_workaround() -> None:
|
||||
# Rich does not properly support printing to stderr, when stdout is redirected...
|
||||
# This monkeypatch should be enough to workaround this for our purposes.
|
||||
# https://github.com/Textualize/rich/issues/4071
|
||||
if sys.platform == "win32" and not sys.stdout.isatty():
|
||||
import rich._win32_console
|
||||
|
||||
rich._win32_console.STDOUT = -12
|
||||
|
||||
|
||||
def configure_rich() -> None:
|
||||
_apply_legacy_windows_workaround()
|
||||
value = os.getenv(INTERNAL_LEGACY_WINDOWS_ENV_VAR, "")
|
||||
legacy_windows = int(value) if value else None
|
||||
rich.reconfigure(highlight=False, legacy_windows=legacy_windows)
|
||||
global _STDERR_CONSOLE
|
||||
_STDERR_CONSOLE = Console(highlight=False, stderr=True, legacy_windows=legacy_windows)
|
||||
|
||||
|
||||
def get_console(stderr: bool = False) -> Console:
|
||||
global _STDERR_CONSOLE
|
||||
if _STDERR_CONSOLE is None:
|
||||
raise RuntimeError("_STDERR_CONSOLE is not set")
|
||||
return _STDERR_CONSOLE if stderr else rich.get_console()
|
||||
|
||||
|
||||
def configure_logging(logging_level: int) -> None:
|
||||
configure_rich()
|
||||
level = cli_level_to_log_level(logging_level)
|
||||
base_logger = logging.getLogger("red")
|
||||
base_logger.setLevel(level)
|
||||
base_logger.addHandler(RichHandler(console=get_console(stderr=True), show_path=False))
|
||||
|
||||
|
||||
def get_logging_level() -> int:
|
||||
return logging.getLogger("red").level
|
||||
|
||||
|
||||
def get_log_cli_level() -> int:
|
||||
return log_level_to_cli_level(logging.getLogger("red").level)
|
||||
|
||||
|
||||
def ensure_supported_env() -> None:
|
||||
if sys.prefix == sys.base_prefix:
|
||||
print("redbot-update cannot be used when Red is installed outside a virtual environment.")
|
||||
raise SystemExit(1)
|
||||
if not (
|
||||
os.environ.get(RUNNER_DIR_ENV_VAR, "") and os.environ.get(RUNNER_WRAPPER_EXE_ENV_VAR, "")
|
||||
):
|
||||
print("redbot-update was called incorrectly.")
|
||||
raise SystemExit(1)
|
||||
|
||||
|
||||
def _get_system_interpreters(
|
||||
requires_python: SpecifierSet,
|
||||
) -> List[Tuple[str, Version, PythonInfo]]:
|
||||
interpreters = {}
|
||||
|
||||
def _append_interpreter(info: PythonInfo) -> Literal[False]:
|
||||
version = Version(info.version_str)
|
||||
if version in requires_python:
|
||||
# realpath call is needed because get_interpreter lists
|
||||
# /usr/bin and /bin as separate even though they're the same path
|
||||
interpreters[os.path.realpath(info.executable)] = (version, info)
|
||||
return False
|
||||
|
||||
get_interpreter("cpython", predicate=_append_interpreter)
|
||||
|
||||
ret = [(key, *value) for key, value in interpreters.items()]
|
||||
ret.sort(key=itemgetter(1), reverse=True)
|
||||
return ret
|
||||
|
||||
|
||||
def search_for_interpreters(
|
||||
requires_python: SpecifierSet,
|
||||
) -> List[Tuple[str, Version, PythonInfo]]:
|
||||
console = get_console()
|
||||
with console.status("Searching for compatible Python interpreters on your system..."):
|
||||
interpreters = _get_system_interpreters(requires_python)
|
||||
|
||||
if not interpreters:
|
||||
url = "https://docs.discord.red/en/stable/install_guides/"
|
||||
console.print(
|
||||
f"{ICON_ERROR} Could not find a compatible Python interpreter!\n"
|
||||
'Please follow the steps from the "Installing the pre-requirements" section'
|
||||
" of the install guide for your system:"
|
||||
)
|
||||
console.print(Text(url, style=f"link {url}"))
|
||||
console.print("Once you finish installing the pre-requirements, run this command again.")
|
||||
raise SystemExit(1)
|
||||
|
||||
return interpreters
|
||||
|
||||
|
||||
class OrderedEnum(enum.Enum):
    """An ``Enum`` base class whose members order by their assigned values.

    Comparisons against members of a different enum class return
    ``NotImplemented``, preserving regular ``Enum`` cross-type semantics.
    """

    def __ge__(self, other: Any) -> bool:
        if self.__class__ is not other.__class__:
            return NotImplemented
        return self.value >= other.value

    def __gt__(self, other: Any) -> bool:
        if self.__class__ is not other.__class__:
            return NotImplemented
        return self.value > other.value

    def __le__(self, other: Any) -> bool:
        if self.__class__ is not other.__class__:
            return NotImplemented
        return self.value <= other.value

    def __lt__(self, other: Any) -> bool:
        if self.__class__ is not other.__class__:
            return NotImplemented
        return self.value < other.value
|
||||
|
||||
|
||||
class VersionParamType(click.ParamType):
    """Click parameter type producing ``packaging`` ``Version`` objects.

    Accepts strings as well as pre-built ``Version`` instances, and requires
    at least two release components (major and minor).
    """

    name = "version"

    def convert(
        self, value: Any, param: Optional[click.Parameter], ctx: Optional[click.Context]
    ) -> Version:
        if not isinstance(value, Version):
            try:
                parsed = Version(value)
            except ValueError:
                self.fail(f"{value!r} is not a valid version number", param, ctx)
            # Re-enter with the parsed value to run the release-component check.
            return self.convert(parsed, param, ctx)

        if len(value.release) < 2:
            self.fail(
                f"{value!r} needs to have at least 2 release components (major and minor).",
                param,
                ctx,
            )
        return value
|
||||
@@ -0,0 +1,453 @@
|
||||
import asyncio
|
||||
import os
|
||||
import subprocess
|
||||
import shutil
|
||||
import sys
|
||||
import sysconfig
|
||||
from pathlib import Path
|
||||
from typing import Tuple
|
||||
|
||||
import click
|
||||
from rich.markdown import Markdown
|
||||
from rich.panel import Panel
|
||||
from rich.prompt import Confirm
|
||||
from rich.text import Text
|
||||
|
||||
from redbot import __version__
|
||||
from redbot.core import _downloader, _drivers, data_manager
|
||||
from redbot.core._cli import asyncio_run, parse_cli_flags
|
||||
from redbot.core.bot import Red
|
||||
|
||||
from . import changelog, cmd, common, runner
|
||||
from .updater import UpdaterMetadata, get_updater_metadata
|
||||
|
||||
|
||||
# CLI subcommand that completes the update while running the NEW Red version.
FINISH_UPDATE_CMD_NAME = "finish-update"
# Internal subcommand that updates cogs for a single instance in a subprocess.
_UPDATE_COGS_CMD_NAME = "update-cogs"
# Flag for the update-cogs subcommand requesting a repo fetch as well.
_UPDATE_REPOS_OPTION_NAME = "--update-repos"
# Exit codes the per-instance subprocess uses to signal skip reasons to the parent.
_EXIT_INSTANCE_SITE_PREFIX_MISMATCH = 4
_EXIT_INSTANCE_BACKEND_UNSUPPORTED = 5
|
||||
|
||||
|
||||
@click.group(invoke_without_command=True)
@click.option(cmd.arg_names.DEBUG, "logging_level", count=True)
def cli(logging_level: int) -> None:
    """Root command group: validate the environment and configure logging.

    ``logging_level`` counts repeated debug flags to pick the verbosity.
    """
    common.ensure_supported_env()
    common.configure_logging(logging_level)
|
||||
|
||||
|
||||
@cli.command(_UPDATE_COGS_CMD_NAME)
@click.argument("instance_name")
@click.option(_UPDATE_REPOS_OPTION_NAME, default=False, is_flag=True)
def update_cogs(instance_name: str, update_repos: bool) -> None:
    """Update cogs for a single instance; invoked as a subprocess by the updater."""
    # NOTE(review): uses plain asyncio.run while finish_update uses the
    # asyncio_run helper - confirm the difference is intentional.
    asyncio.run(_update_cogs(instance_name, update_repos))
|
||||
|
||||
|
||||
async def _update_cogs(instance: str, update_repos: bool) -> None:
    """Load the instance's config/storage driver and run its cog update.

    Exits with ``_EXIT_INSTANCE_BACKEND_UNSUPPORTED`` when the storage
    backend's extra requirements are not installed in this environment.
    """
    data_manager.load_basic_configuration(instance)
    red = Red(cli_flags=parse_cli_flags([instance]))
    driver_cls = _drivers.get_driver_class()
    await driver_cls.initialize(**data_manager.storage_details())
    try:
        await _run_cog_update(red, update_repos=update_repos)
    except _drivers.MissingExtraRequirements:
        # Translated into a "skipped" status by the parent updater process.
        raise SystemExit(_EXIT_INSTANCE_BACKEND_UNSUPPORTED)
    finally:
        # Always release the storage driver, even on failure.
        await driver_cls.teardown()
|
||||
|
||||
|
||||
async def _run_cog_update(bot: Red, *, update_repos: bool) -> None:
    """Run Downloader's cog update for the loaded instance and report results.

    Progress messages go to stderr; the final summary - consumed by the parent
    updater process - is printed to stdout. Exits with
    ``_EXIT_INSTANCE_SITE_PREFIX_MISMATCH`` when the instance was last run from
    a different Python installation (venv prefix mismatch).
    """
    stdout_console = common.get_console()
    console = common.get_console(stderr=True)

    instance_name = data_manager.instance_name()
    last_known_prefix = await bot._config.last_system_info.python_prefix()
    same_install = False
    if last_known_prefix is not None:
        try:
            same_install = os.path.samefile(last_known_prefix, sys.prefix)
        except OSError:
            # e.g. the previously recorded prefix no longer exists on disk
            pass
    if not same_install:
        raise SystemExit(_EXIT_INSTANCE_SITE_PREFIX_MISMATCH)

    common.print_with_prefix_column(
        common.ICON_INFO,
        "Started updating cogs for the ",
        Text(instance_name, style="bold"),
        " instance.",
        console=console,
    )
    status = Text.assemble(
        "Update cogs installed on the ", (instance_name, "bold"), " instance..."
    )
    with console.status(status):
        # Downloader normally needs a running bot; initialize it standalone here.
        await _downloader._init_without_bot(bot._cog_mgr)
        result = await _downloader.update_cogs(update_repos=update_repos)

    common.print_with_prefix_column(
        common.ICON_INFO,
        "Finished updating cogs for the ",
        Text(instance_name, style="bold"),
        " instance.",
        console=console,
    )

    if not result.checked_cogs:
        stdout_console.print("There were no cogs to check.")
        return
    if not result.updates_available:
        stdout_console.print("All installed cogs are already up to date.")
        return

    # Pre-update snapshot, used below to detect changed end user data statements.
    current_cog_versions_map = {cog.name: cog for cog in result.checked_cogs}
    if result.failed_reqs:
        console.print(
            "Failed to install requirements:",
            Text(", ").join(Text(req, style="bold") for req in result.failed_reqs),
        )
        return

    message = Text("Cog update completed successfully.")

    if result.updated_cogs:
        # Cogs whose end user data statement changed with this update.
        cogs_with_changed_eud_statement = set()
        for cog in result.updated_cogs:
            current_eud_statement = current_cog_versions_map[cog.name].end_user_data_statement
            if current_eud_statement != cog.end_user_data_statement:
                cogs_with_changed_eud_statement.add(cog.name)
        message.append("\nUpdated: ")
        message.append_text(
            Text(", ").join(Text(cog.name, style="bold") for cog in result.updated_cogs)
        )
        if cogs_with_changed_eud_statement:
            message.append("\nEnd user data statements of these cogs have changed: ")
            message.append_text(
                Text(", ").join(
                    Text(cog_name, style="bold") for cog_name in cogs_with_changed_eud_statement
                )
            )
            message.append("\nYou can use ")
            message.append("[p]cog info <repo> <cog>", style="bold")
            message.append(" to see the updated statements.\n")
        # If the bot has any slash commands enabled, warn them to sync
        enabled_slash = await bot.list_enabled_app_commands()
        if any(enabled_slash.values()):
            message.append("\nYou may need to resync your slash commands with ")
            message.append("[p]slash sync")
            message.append(".")
    if result.failed_cogs:
        message.append("\nFailed to update cogs: ")
        message.append_text(
            Text(", ").join(Text(cog.name, style="bold") for cog in result.failed_cogs)
        )
    if not result.outdated_cogs:
        # NOTE(review): replaces the message built above - presumably only
        # shared libraries had updates available in this case; verify.
        message = Text("No cogs were updated.")
    if result.failed_libs:
        message.append("\nFailed to install shared libraries: ")
        message.append_text(
            Text(", ").join(Text(lib.name, style="bold") for lib in result.failed_libs)
        )

    stdout_console.print(message)
|
||||
|
||||
|
||||
@cli.command(FINISH_UPDATE_CMD_NAME)
def finish_update() -> None:
    """
    Entrypoint for finishing up the update that runs with the new version of Red.
    """
    asyncio_run(_finish_update())
|
||||
|
||||
|
||||
async def _finish_update() -> None:
    """Finish the update while running under the newly installed Red version.

    Summarizes cog compatibility changes, optionally runs cog updates, removes
    the old-venv backup, renders the changelog, and prints final status info.
    """
    # We only get here after the runner re-executed us post-reinstall.
    assert runner.get_request_output().request_type is runner.RequestType.exec
    updater_metadata = get_updater_metadata()
    console = common.get_console()
    console.print()

    if updater_metadata.options.interactive and not updater_metadata.options.update_cogs:
        msg = Text("It is highly recommended to update 3rd-party cogs after updating Red")
        if updater_metadata.breaking_update:
            msg.append(", especially after a major update")
        msg.append(".")
        console.print(msg)

    cog_compatibility = updater_metadata.cog_compatibility
    if cog_compatibility is not None:
        # Classify every checked cog by how its support status changed.
        unsupported_cogs = set()
        cogs_with_improved_compatibility = set()
        # NOTE(review): unaffected_cogs is collected but never reported here.
        unaffected_cogs = set()
        for summary in cog_compatibility.checked.values():
            for before in summary.before_update.values():
                cog_name = before.name
                after = summary.after_update[cog_name]
                if after.compatibility_status.unsupported:
                    unsupported_cogs.add(cog_name)
                elif after.compatibility_status.explicitly_supported:
                    if before.compatibility_status.explicitly_supported:
                        unaffected_cogs.add(cog_name)
                    else:
                        cogs_with_improved_compatibility.add(cog_name)
                elif before.compatibility_status.unsupported:
                    cogs_with_improved_compatibility.add(cog_name)
                else:
                    unaffected_cogs.add(cog_name)

        if cogs_with_improved_compatibility:
            common.print_with_prefix_column(
                common.ICON_INFO,
                "Updating will improve compatibility of ",
                Text(str(len(cogs_with_improved_compatibility)), style="bold"),
                " cogs.",
            )
        if unsupported_cogs:
            common.print_with_prefix_column(
                common.ICON_WARN,
                Text(str(len(unsupported_cogs)), style="bold"),
                " cogs will remain unsupported after updating:\n",
                Text(", ").join(
                    Text(cog_name, style="bold") for cog_name in sorted(unsupported_cogs)
                ),
            )

    update_cogs = updater_metadata.options.update_cogs
    if update_cogs is None:
        # No explicit choice - ask in interactive mode, otherwise default to yes.
        if updater_metadata.options.interactive:
            update_cogs = Confirm.ask("Do you want to update all your cogs?", default=True)
        else:
            update_cogs = True
    if update_cogs:
        await _handle_cog_updates(updater_metadata)

    with console.status("Cleaning up..."):
        # The old venv's backup inside the new prefix is no longer needed.
        backup_dir = Path(sys.prefix) / common.OLD_VENV_BACKUP_DIR_NAME
        shutil.rmtree(backup_dir)

    changelog_markdown = changelog.render_markdown(updater_metadata.changelogs)
    if changelog_markdown:
        console.print(Panel(Markdown(changelog_markdown)))

    console.print()
    common.print_with_prefix_column(
        common.ICON_SUCCESS,
        "Update to Red ",
        Text(__version__, style="bold"),
        " has been finished!",
    )

    if changelog_markdown:
        common.print_with_prefix_column(
            common.ICON_INFO,
            'Remember to follow instructions from the "Read before updating" section,'
            " if any were provided.",
        )

    if updater_metadata.backup_dir:
        additional_text = ""
        if not updater_metadata.options.backup_dir:
            # The backup went to an auto-chosen (temporary) location.
            additional_text = (
                "\nNote that this is a temporary directory and may eventually get auto-removed"
                " by your system."
            )
        common.print_with_prefix_column(
            common.ICON_INFO,
            "If needed, you can find the backups of the virtual environment"
            " and the instances at: ",
            Text(str(updater_metadata.backup_dir), style="bold"),
            additional_text,
        )
|
||||
|
||||
|
||||
async def _handle_cog_updates(updater_metadata: UpdaterMetadata) -> None:
    """Update cogs for every selected instance and print a combined summary.

    Each instance is handled by a subprocess (see ``_call_cog_update``); its
    exit code decides whether the instance counts as checked, failed, skipped
    due to an unsupported storage backend, or silently skipped because it
    belongs to a different installation.
    """
    cog_compatibility = updater_metadata.cog_compatibility
    console = common.get_console()

    # Prefer the instance list from the compatibility check when one was run;
    # otherwise fall back to the instances the user selected.
    instances = (
        list(cog_compatibility.checked)
        if cog_compatibility is not None
        else updater_metadata.options.instances
    )
    checked_instances = {}
    failed_instances = []
    unsupported_storage_instances = []
    for instance_name in instances:
        if instance_name in updater_metadata.options.excluded_instances:
            continue
        # If no compatibility check ran, the repos haven't been fetched yet,
        # so have the subprocess update them too.
        exit_code, stdout = await _call_cog_update(
            instance_name, update_repos=cog_compatibility is None
        )
        if exit_code == _EXIT_INSTANCE_BACKEND_UNSUPPORTED:
            unsupported_storage_instances.append(instance_name)
        elif exit_code == _EXIT_INSTANCE_SITE_PREFIX_MISMATCH:
            # Instance was last run from a different install - skip it quietly.
            pass
        elif exit_code:
            failed_instances.append(instance_name)
            print(stdout, end="")
            # Fixed: a discarded duplicate Text.assemble(...) expression that
            # preceded this console.rule call has been removed (its result was
            # never used).
            console.rule(
                Text.assemble(
                    "\N{UPWARDS ARROW} " * 3,
                    "Failure for ",
                    (instance_name, "bold"),
                    " instance above",
                    " \N{UPWARDS ARROW}" * 3,
                ),
                style="red",
            )
        else:
            checked_instances[instance_name] = stdout
        # Emit a separating blank line after any instance that produced output.
        if stdout:
            console.print()

    if checked_instances:
        for instance_name, stdout in checked_instances.items():
            console.rule(Text(instance_name, style="bold"))
            print(stdout, end="")
            console.rule()

    common.print_with_prefix_column(
        common.ICON_INFO,
        "Finished updating cogs.",
        "\nThe results for each instance are shown above." if checked_instances else "",
    )
    if failed_instances:
        common.print_with_prefix_column(
            common.ICON_ERROR,
            "Failure occurred while trying to perform update for following instances: ",
            Text(", ").join(
                Text(instance_name, style="bold") for instance_name in failed_instances
            ),
            "\nScroll above to find the errors.",
        )
    if unsupported_storage_instances:
        common.print_with_prefix_column(
            common.ICON_INFO,
            "The following instances were skipped as they use a storage backend that is"
            " not supported by the current Red installation (some requirements are missing): ",
            Text(", ").join(
                Text(instance_name, style="bold")
                for instance_name in unsupported_storage_instances
            ),
        )
    if not checked_instances:
        common.print_with_prefix_column(
            common.ICON_INFO,
            "There were no",
            (" other" if failed_instances or unsupported_storage_instances else ""),
            " instances to update cogs for.",
        )
|
||||
|
||||
|
||||
async def _call_cog_update(instance_name: str, *, update_repos: bool) -> Tuple[int, str]:
    """Run the cog update for one instance in a subprocess.

    Returns ``(exit_code, captured_stdout)``.
    """
    # Propagate the current debug verbosity level to the child process.
    debug_args = (cmd.arg_names.DEBUG,) * common.get_log_cli_level()
    args = [
        "-m",
        "redbot._update.internal",
        *debug_args,
        _UPDATE_COGS_CMD_NAME,
        instance_name,
    ]
    if update_repos:
        args.append(_UPDATE_REPOS_OPTION_NAME)
    env = os.environ.copy()

    # terminal woes
    console = common.get_console()
    if console.is_terminal:
        env["TTY_COMPATIBLE"] = "1"
        # Rich only checks stdout for Windows console features:
        # https://github.com/Textualize/rich/blob/fc41075a3206d2a5fd846c6f41c4d2becab814fa/rich/_windows.py#L46
        env[common.INTERNAL_LEGACY_WINDOWS_ENV_VAR] = "1" if console.legacy_windows else "0"
    else:
        # Rich does not set legacy_windows correctly when is_terminal is False
        # https://github.com/Textualize/rich/issues/3647
        env[common.INTERNAL_LEGACY_WINDOWS_ENV_VAR] = "0"
    # Ensure the child's piped stdout uses the same encoding as our stdout.
    env["PYTHONIOENCODING"] = sys.stdout.encoding

    proc = await asyncio.create_subprocess_exec(
        sys.executable, *args, env=env, stdout=asyncio.subprocess.PIPE
    )
    stdout_data, _ = await proc.communicate()
    decoded_stdout = stdout_data.decode()
    exit_code = await proc.wait()

    return exit_code, decoded_stdout
|
||||
|
||||
|
||||
@cli.command()
@click.argument("base_executable")
@click.argument("venv_dir", type=click.Path(path_type=Path))
@click.argument("scripts_path", type=click.Path(path_type=Path))
@click.argument("dependency_specifier")
def reinstall(
    base_executable: str, venv_dir: Path, scripts_path: Path, dependency_specifier: str
) -> None:
    """Create a fresh venv and install the new Red version into it.

    On installation failure the newly created venv is removed and the backed-up
    old venv is restored. On success, asks the runner to re-exec into the new
    interpreter with the finish-update subcommand.
    """
    assert runner.get_request_output().request_type is runner.RequestType.exec

    console = common.get_console()
    with console.status("Creating a new virtual environment..."):
        subprocess.check_call((base_executable, "-m", "venv", str(venv_dir)))
    console.print("Created a new virtual environment.")
    # Interpreter inside the new venv (platform-appropriate executable suffix).
    executable = str(scripts_path / f"python{sysconfig.get_config_var('EXE')}")

    common.print_with_prefix_column(common.ICON_INFO, "Starting the install process...")
    try:
        subprocess.check_call((executable, "-m", "pip", "install", "-U", "pip"))
        subprocess.check_call((executable, "-m", "pip", "install", dependency_specifier))
    except subprocess.CalledProcessError:
        console.print()
        common.print_with_prefix_column(
            common.ICON_ERROR,
            "Failed to install new version of Red.",
        )
        status = console.status("Attempting to restore old virtual environment...")
        status.start()
        try:
            _remove_new_venv(venv_dir)
        except Exception:
            status.stop()
            common.print_with_prefix_column(
                common.ICON_ERROR, "Failed to remove newly created virtual environment."
            )
            raise SystemExit(1)
        try:
            _restore_old_venv(venv_dir)
        except Exception:
            status.stop()
            common.print_with_prefix_column(
                common.ICON_ERROR, "Failed to restore old virtual environment."
            )
        else:
            # NOTE(review): status is not stopped on this success path before
            # printing - confirm rich handles the still-running status here.
            common.print_with_prefix_column(
                common.ICON_INFO, "The old virtual environment has been restored."
            )
        # Install failed either way - exit non-zero after attempting restore.
        raise SystemExit(1)

    # NOTE: this will run with the updated version of Red
    runner.make_exec_request(executable, "finish-update")
|
||||
|
||||
|
||||
def _remove_new_venv(venv_dir: Path) -> None:
    """Delete the new venv's contents, keeping the old-venv backup and wrapper exe."""
    preserved = (venv_dir / common.OLD_VENV_BACKUP_DIR_NAME, runner.get_wrapper_executable())

    for entry in venv_dir.iterdir():
        if entry in preserved:
            continue
        if entry.is_dir():
            shutil.rmtree(entry)
        else:
            entry.unlink()
|
||||
|
||||
|
||||
def _restore_old_venv(venv_dir: Path) -> None:
    """Move everything from the backup directory back into the venv root."""
    source_dir = venv_dir / common.OLD_VENV_BACKUP_DIR_NAME
    for entry in source_dir.iterdir():
        entry.rename(venv_dir / entry.name)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
cli()
|
||||
@@ -0,0 +1,119 @@
|
||||
import enum
|
||||
import dataclasses
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Any, ClassVar, Dict, Iterable, NoReturn, Optional, Tuple, Union
|
||||
|
||||
from . import cmd, common
|
||||
|
||||
# Directory used to exchange request/response JSON files with the runner process.
_RUNNER_DIR = Path(os.environ.get(common.RUNNER_DIR_ENV_VAR, ""))
|
||||
|
||||
|
||||
class RequestType(enum.Enum):
    """Kind of action requested from the runner process."""

    exec = "exec"
    spawn_command = "spawn_command"
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
class RequestInput:
    """Base payload written for the runner describing how to restart this program."""

    # Set by subclasses; ClassVar, so excluded from the serialized fields.
    request_type: ClassVar[RequestType]
    # Python executable the runner should use for the next invocation.
    request_new_python_exe: str
    # Arguments passed to that executable.
    request_new_start_args: Tuple[str, ...]
    # Environment variables to set; a None value presumably unsets - verify in runner.
    request_set_env_vars: Dict[str, Optional[str]]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
class RequestOutput:
    """Base class for the runner's response to a previously made request."""

    # Kind of request this output corresponds to.
    request_type: RequestType
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
class ExecRequestInput(RequestInput):
    """Request asking the runner to re-exec with the given interpreter and args."""

    request_type: ClassVar = RequestType.exec
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
class ExecRequestOutput(RequestOutput):
    """Response to an exec request; carries no data beyond the request type."""

    pass
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
class SpawnProcessRequestInput(RequestInput):
    """Request asking the runner to spawn a command, then restart this program."""

    request_type: ClassVar = RequestType.spawn_command
    # Command to spawn and its arguments.
    command: str
    args: Tuple[str, ...]
    # Environment for the spawned process; presumably None inherits - verify in runner.
    env: Optional[Dict[str, str]]
|
||||
|
||||
|
||||
@dataclasses.dataclass(frozen=True)
class SpawnProcessRequestOutput(RequestOutput):
    """Response describing how the spawned command ran.

    Field semantics come from the runner's JSON payload; `get_request_output()`
    fills these directly via ``**data``.
    """

    exit_code: int
    exited: bool
    pid: int
    # NOTE(review): field named `sys` shadows the stdlib module name inside
    # instances only; its exact contents are defined by the runner - verify.
    sys: Any
    sys_usage: Dict[str, Any]
    system_time: int
    user_time: int
|
||||
|
||||
|
||||
def make_request(request: RequestInput) -> NoReturn:
    """Serialize *request* for the runner and exit with the restart status code."""
    with open(_RUNNER_DIR / "request_input.json", "w", encoding="utf-8") as fp:
        payload = dataclasses.asdict(request)
        # request_type is a ClassVar, so asdict() skips it - add it explicitly.
        payload["request_type"] = request.request_type.value
        json.dump(payload, fp)
    # Exit code 3 signals the runner that a request file was written.
    raise SystemExit(3)
|
||||
|
||||
|
||||
def get_request_output() -> Union[ExecRequestOutput, SpawnProcessRequestOutput]:
    """Read and deserialize the runner's response to the previous request."""
    with open(_RUNNER_DIR / "request_output.json", encoding="utf-8") as fp:
        data = json.load(fp)
    request_type = RequestType(data.pop("request_type"))
    if request_type is RequestType.exec:
        return ExecRequestOutput(request_type=request_type)
    if request_type is RequestType.spawn_command:
        return SpawnProcessRequestOutput(request_type=request_type, **data)
    # Every RequestType member is handled above.
    raise RuntimeError("unreachable code")
|
||||
|
||||
|
||||
def make_spawn_process_request(
    command: str,
    *args: str,
    env: Optional[Dict[str, str]] = None,
    new_start_args: Iterable[str],
    new_python_exe: str = sys.executable,
    set_env_vars: Optional[Dict[str, Optional[str]]] = None,
) -> NoReturn:
    """Ask the runner to spawn *command*, then restart us with *new_start_args*."""
    # Carry the current debug verbosity into the restarted process.
    debug_args = (cmd.arg_names.DEBUG,) * common.get_log_cli_level()
    make_request(
        SpawnProcessRequestInput(
            request_new_python_exe=new_python_exe,
            request_new_start_args=(
                "-m",
                "redbot._update.internal",
                *debug_args,
                *new_start_args,
            ),
            request_set_env_vars=set_env_vars if set_env_vars is not None else {},
            command=command,
            args=args,
            env=env,
        )
    )
|
||||
|
||||
|
||||
def make_exec_request(
    new_python_exe: str,
    *new_start_args: str,
    set_env_vars: Optional[Dict[str, Optional[str]]] = None,
) -> NoReturn:
    """Ask the runner to re-exec this program via *new_python_exe*."""
    # Carry the current debug verbosity into the restarted process.
    debug_args = (cmd.arg_names.DEBUG,) * common.get_log_cli_level()
    make_request(
        ExecRequestInput(
            request_new_python_exe=new_python_exe,
            request_new_start_args=(
                "-m",
                "redbot._update.internal",
                *debug_args,
                *new_start_args,
            ),
            request_set_env_vars=set_env_vars if set_env_vars is not None else {},
        )
    )
|
||||
|
||||
|
||||
def get_wrapper_executable() -> Path:
    """Return the runner's wrapper executable path, as provided via environment."""
    return Path(os.environ[common.RUNNER_WRAPPER_EXE_ENV_VAR])
|
||||
@@ -0,0 +1,129 @@
|
||||
import enum
|
||||
|
||||
from rich.text import Text
|
||||
from textual.app import App, ComposeResult
|
||||
from textual.binding import Binding
|
||||
from textual.events import Click
|
||||
from textual.widgets import Footer, Markdown, MarkdownViewer, Static
|
||||
from typing_extensions import Self
|
||||
|
||||
from .changelog import Changelogs
|
||||
|
||||
|
||||
# See https://github.com/Textualize/textual/discussions/6449
class MarkdownLinkTooltip(Static, inherit_css=False):
    """Floating tooltip showing a clicked Markdown link's target URL.

    Hidden by default (``display: none``); `_MarkdownViewer` toggles it.
    """

    # Textual CSS parsed at runtime - keep the selectors/values intact.
    DEFAULT_CSS = """
    MarkdownLinkTooltip {
        layer: _tooltips;
        margin: 1 0;
        padding: 1 2;
        background: $panel;
        width: auto;
        height: auto;
        constrain: inside inflect;
        max-width: 40;
        display: none;
        offset-x: -50%;
    }
    """
|
||||
|
||||
|
||||
class _MarkdownViewer(MarkdownViewer):
    """MarkdownViewer that shows link targets in a tooltip instead of opening them."""

    DEFAULT_CSS = """
    _MarkdownViewer {
        layers: default _tooltips;
    }
    """

    def compose(self) -> ComposeResult:
        """Compose the regular viewer plus the (hidden) link tooltip widget."""
        yield from super().compose()
        yield MarkdownLinkTooltip()

    def on_markdown_link_clicked(self, message: Markdown.LinkClicked) -> None:
        """Show a tooltip with the clicked link's URL at the mouse position."""
        # We don't want the default behavior of opening the browser/navigating to a file on click.
        message.prevent_default()

        tooltip = self.get_child_by_type(MarkdownLinkTooltip)
        tooltip.display = True
        # You can't cycle over the links in MarkdownViewer (see Textualize/textual#3555)
        # so using mouse position is fine.
        # Textualize/textual#3555: https://github.com/Textualize/textual/discussions/3555
        tooltip.absolute_offset = self.app.mouse_position
        # For some reason, links only render correctly when Text has a span over the whole text
        # with a link but not when Text just has a style applied to it directly, i.e.:
        # Text(message.href, style=f"link {message.href}")
        # will not work.
        tooltip.update(Text().append(message.href, style=f"link {message.href}"))

    def on_click(self, message: Click) -> None:
        """Hide the tooltip again on any click within the viewer."""
        tooltip = self.get_child_by_type(MarkdownLinkTooltip)
        tooltip.display = False
|
||||
|
||||
|
||||
class ChangelogReaderResult(enum.Enum):
    """Outcome of the changelog reader TUI session."""

    # User pressed ctrl+c - abort the updater.
    QUIT = enum.auto()
    # User finished reading (q) - carry on with the update.
    CONTINUE = enum.auto()
|
||||
|
||||
|
||||
class ChangelogReaderApp(App[ChangelogReaderResult], inherit_bindings=False):
    """Full-screen TUI for reading the combined update changelog."""

    ENABLE_COMMAND_PALETTE = False
    BINDINGS = [
        Binding(key="ctrl+c", action="quit", description="Exit redbot-update"),
        Binding(key="q", action="continue", description="Finish reading the changelog"),
    ]

    def __init__(self, markdown_content: str) -> None:
        # Stored before super().__init__() so compose() can use it.
        self.markdown_content = markdown_content
        super().__init__()

    @classmethod
    def from_changelogs(cls, changelogs: Changelogs) -> Self:
        """Build the app's Markdown document from per-version changelogs."""
        if not changelogs:
            return cls("")

        parts = []
        # Unique contributors across all listed versions, alphabetized.
        contributors = sorted(
            {
                contributor
                for changelog in changelogs.values()
                for contributor in changelog.contributors
            }
        )
        if contributors:
            contributor_thanks = (
                "# Thanks to our contributors \N{HEAVY BLACK HEART}\N{VARIATION SELECTOR-16}\n"
                "**The releases below were made with help from the following people:** \n"
            )
            contributor_thanks += ", ".join(
                f"[@{contributor}](https://github.com/sponsors/{contributor})"
                for contributor in contributors
            )
            parts.append(contributor_thanks)

        # Newest version first for both sections below.
        parts.append("# Read before updating")
        for changelog in reversed(changelogs.values()):
            if changelog.read_before_updating_section:
                parts.append(f"## {changelog.version}")
                parts.append(changelog.read_before_updating_section)

        parts.append("# User changelog")
        for changelog in reversed(changelogs.values()):
            if changelog.user_changelog_section:
                parts.append(f"## {changelog.version}")
                parts.append(changelog.user_changelog_section)

        return cls("\n".join(parts))

    def compose(self) -> ComposeResult:
        """Lay out the Markdown viewer (with table of contents) and the footer."""
        markdown_viewer = _MarkdownViewer(
            self.markdown_content, show_table_of_contents=True, open_links=False
        )
        markdown_viewer.code_indent_guides = False
        yield markdown_viewer
        yield Footer()

    def action_quit(self) -> None:
        """Exit the app signaling that the update should be aborted."""
        self.exit(ChangelogReaderResult.QUIT)

    def action_continue(self) -> None:
        """Exit the app signaling that the update should continue."""
        self.exit(ChangelogReaderResult.CONTINUE)
|
||||
@@ -0,0 +1,736 @@
|
||||
import asyncio
|
||||
import dataclasses
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
import tarfile
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, NoReturn, Optional, Set
|
||||
|
||||
import click
|
||||
from packaging.version import Version
|
||||
from python_discovery import PythonInfo
|
||||
from rich.markdown import Markdown
|
||||
from rich.panel import Panel
|
||||
from rich.prompt import Confirm, IntPrompt, Prompt
|
||||
from rich.text import Text
|
||||
from typing_extensions import Self
|
||||
|
||||
from redbot.core.utils._internal_utils import (
|
||||
AvailableVersion,
|
||||
detailed_progress,
|
||||
fetch_available_red_versions,
|
||||
get_installed_extras,
|
||||
)
|
||||
|
||||
from . import changelog, cmd, common, runner
|
||||
from .cog_compatibility_checker import CompatibilitySummary
|
||||
from .tui import ChangelogReaderApp, ChangelogReaderResult
|
||||
|
||||
|
||||
@dataclasses.dataclass
class UpdaterOptions:
    """Update options specified by the user."""

    # Instance names to operate on, and instances explicitly excluded.
    instances: List[str]
    excluded_instances: Set[str]
    ignore_prefix: bool
    # Explicit backup location; None presumably means auto-chosen - verify caller.
    backup_dir: Optional[Path]
    no_backup: bool
    # Specific target Red version; None means pick automatically.
    red_version: Optional[Version]
    no_major_updates: bool
    no_full_changelog: bool
    no_cog_compatibility_check: bool
    new_python_interpreter: Optional[PythonInfo]
    # Tri-state: True/False forced by the user, None means ask/decide later.
    update_cogs: Optional[bool]
    force_reinstall: bool
    interactive: bool

    @classmethod
    def from_json_dict(cls, data: Dict[str, Any]) -> Self:
        """Rebuild options from a dict produced by `to_json_dict()`."""
        backup_dir = data["backup_dir"]
        red_version = data["red_version"]
        return cls(
            instances=data["instances"],
            excluded_instances=set(data["excluded_instances"]),
            ignore_prefix=data["ignore_prefix"],
            # `x and Type(x)` keeps falsy (None) values as-is.
            backup_dir=backup_dir and Path(data["backup_dir"]),
            no_backup=data["no_backup"],
            red_version=red_version and Version(red_version),
            no_major_updates=data["no_major_updates"],
            no_full_changelog=data["no_full_changelog"],
            no_cog_compatibility_check=data["no_cog_compatibility_check"],
            new_python_interpreter=(
                data["new_python_interpreter"]
                and PythonInfo.from_dict(data["new_python_interpreter"])
            ),
            update_cogs=data["update_cogs"],
            force_reinstall=data["force_reinstall"],
            interactive=data["interactive"],
        )

    def to_json_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-compatible dict (inverse of `from_json_dict()`)."""
        data = dataclasses.asdict(self)
        # Convert non-JSON types (set, Path, Version, PythonInfo) explicitly.
        data["excluded_instances"] = list(self.excluded_instances)
        data["backup_dir"] = self.backup_dir and str(self.backup_dir)
        data["red_version"] = self.red_version and str(self.red_version)
        data["new_python_interpreter"] = (
            self.new_python_interpreter and self.new_python_interpreter.to_dict()
        )
        return data
|
||||
|
||||
|
||||
@dataclasses.dataclass
class UpdaterCompatibilitySummary:
    """Cog compatibility check results, grouped by instance name."""

    # Instance name -> that instance's compatibility summary.
    checked: Dict[str, CompatibilitySummary]
    # Instances whose check failed or was skipped.
    failed: List[str]
    skipped: List[str]

    @classmethod
    def from_json_dict(cls, data: Dict[str, Any]) -> Self:
        """Rebuild a summary from a dict produced by `to_json_dict()`."""
        return cls(
            checked={
                instance_name: CompatibilitySummary.from_json_dict(results_data)
                for instance_name, results_data in data["checked"].items()
            },
            failed=data["failed"],
            skipped=data["skipped"],
        )

    def to_json_dict(self) -> Dict[str, Any]:
        """Serialize to a JSON-compatible dict (inverse of `from_json_dict()`)."""
        return {
            "checked": {
                instance_name: results.to_json_dict()
                for instance_name, results in self.checked.items()
            },
            "failed": self.failed,
            "skipped": self.skipped,
        }
|
||||
|
||||
|
||||
@dataclasses.dataclass
class BackupResults:
    """Per-instance results of the pre-update backup step."""

    checked: List[str]
    failed: List[str]
    skipped: List[str] = dataclasses.field(default_factory=list)

    @classmethod
    def from_json_dict(cls, data: Dict[str, List[str]]) -> "BackupResults":
        """Rebuild results from a dict produced by `to_json_dict()`."""
        return cls(**{key: data[key] for key in ("checked", "failed", "skipped")})

    def to_json_dict(self) -> Dict[str, List[str]]:
        """Serialize to a JSON-compatible dict (inverse of `from_json_dict()`)."""
        return dataclasses.asdict(self)
|
||||
|
||||
|
||||
# Sentinel (compared by identity) marking that interpreter_version wasn't passed
# explicitly and should be derived from interpreter_info in __post_init__.
_PYTHON_VERSION_PLACEHOLDER = Version("0.0.dev0")
|
||||
|
||||
|
||||
@dataclasses.dataclass
class UpdaterMetadata:
    """Metadata about the update process.

    Serialized to JSON before the environment is recreated (see
    `Updater._update_with_fresh_venv`) and read back by `get_updater_metadata()`
    in the new environment.
    """

    # options specified by the user
    options: UpdaterOptions
    # info about Red version to update to (latest available or latest non-major update)
    latest: AvailableVersion
    latest_major: AvailableVersion
    # info about Red/Python versions that we're updating from
    current_version: Version = dataclasses.field(default_factory=common.get_current_red_version)
    current_python_version: Version = dataclasses.field(
        default_factory=common.get_current_python_version
    )
    # details about the interpreter that will be used for the new venv
    interpreter_info: PythonInfo = dataclasses.field(default_factory=PythonInfo.current_system)
    interpreter_version: Version = _PYTHON_VERSION_PLACEHOLDER
    interpreter_exe: str = ""
    # changelogs for versions in the (current_version, latest] range
    # (exclusive of the current version, inclusive of the target version)
    changelogs: changelog.Changelogs = dataclasses.field(default_factory=dict)
    # cog compatibility check results; None when the check was skipped
    cog_compatibility: Optional[UpdaterCompatibilitySummary] = None
    # backup info; backup_dir/backup_results stay None when backups were skipped
    to_backup: List[str] = dataclasses.field(default_factory=list)
    backup_dir: Optional[Path] = None
    backup_results: Optional[BackupResults] = None

    def __post_init__(self) -> None:
        # Derive the interpreter version/executable from `interpreter_info`
        # unless the caller supplied them explicitly.
        if self.interpreter_version is _PYTHON_VERSION_PLACEHOLDER:
            self.interpreter_version = Version(
                ".".join(map(str, self.interpreter_info.version_info[:3]))
            )
        if not self.interpreter_exe:
            self.interpreter_exe = self.interpreter_info.system_executable

    @classmethod
    def from_json_dict(cls, data: Dict[str, Any]) -> Self:
        """
        Make an instance of this class from a dictionary,
        as returned by the `to_json_dict()` method.

        This aims to maintain backwards compatibility with data generated by
        earlier Red versions as it may be called with such data
        after the last update step.
        """
        backup_dir = data.get("backup_dir")
        # `to_json_dict()` serializes these as None when the corresponding step
        # was skipped (e.g. --no-backup / --no-cog-compatibility-check), so a
        # round-trip must tolerate both None values and missing keys.
        raw_cog_compatibility = data.get("cog_compatibility")
        raw_backup_results = data.get("backup_results")
        return cls(
            options=UpdaterOptions.from_json_dict(data["options"]),
            latest=AvailableVersion.from_json_dict(data["latest"]),
            latest_major=AvailableVersion.from_json_dict(data["latest_major"]),
            current_version=Version(data["current_version"]),
            current_python_version=Version(data["current_python_version"]),
            interpreter_version=Version(data["interpreter_version"]),
            interpreter_info=PythonInfo.from_dict(data["interpreter_info"]),
            interpreter_exe=data["interpreter_exe"],
            changelogs={
                Version(raw_version): changelog.VersionChangelog.from_json_dict(raw_changelog)
                for raw_version, raw_changelog in data["changelogs"].items()
            },
            cog_compatibility=(
                UpdaterCompatibilitySummary.from_json_dict(raw_cog_compatibility)
                if raw_cog_compatibility is not None
                else None
            ),
            to_backup=data["to_backup"],
            backup_dir=backup_dir and Path(backup_dir),
            backup_results=(
                BackupResults.from_json_dict(raw_backup_results)
                if raw_backup_results is not None
                else None
            ),
        )

    def to_json_dict(self) -> Dict[str, Any]:
        """Return a JSON-serializable dict representation of this object."""
        return {
            "options": self.options.to_json_dict(),
            "latest": self.latest.to_json_dict(),
            "latest_major": self.latest_major.to_json_dict(),
            "current_version": str(self.current_version),
            "current_python_version": str(self.current_python_version),
            "interpreter_version": str(self.interpreter_version),
            "interpreter_info": self.interpreter_info.to_dict(),
            "interpreter_exe": self.interpreter_exe,
            "changelogs": {str(v): c.to_json_dict() for v, c in self.changelogs.items()},
            "cog_compatibility": self.cog_compatibility and self.cog_compatibility.to_json_dict(),
            "to_backup": self.to_backup,
            "backup_dir": self.backup_dir and str(self.backup_dir),
            "backup_results": self.backup_results and self.backup_results.to_json_dict(),
        }

    @property
    def breaking_update(self) -> bool:
        """Whether the target version differs from the current one in the
        first two release components (i.e. a major/minor version bump)."""
        return self.current_version.release[:2] != self.latest.version.release[:2]
|
||||
|
||||
|
||||
class Updater:
    """Drives the `redbot-update` flow: version discovery, changelog display,
    compatibility checks, backups, and finally venv recreation."""

    # Populated by `_prepare_metadata()`; all other steps read from it.
    metadata: UpdaterMetadata

    def __init__(self, options: UpdaterOptions) -> None:
        self.options = options
        self.console = common.get_console()

    @property
    def latest(self) -> AvailableVersion:
        # Shorthand for the version chosen as the update target.
        return self.metadata.latest

    @property
    def current_version(self) -> Version:
        # Shorthand for the currently installed Red version.
        return self.metadata.current_version
|
||||
|
||||
async def run(self) -> None:
|
||||
await self._prepare_metadata()
|
||||
|
||||
new_version_available = self.current_version < self.latest.version
|
||||
if not self.options.force_reinstall and not new_version_available:
|
||||
if self.current_version >= self.metadata.latest_major.version:
|
||||
common.print_with_prefix_column(
|
||||
common.ICON_SUCCESS,
|
||||
"You are already running the latest available version of Red.",
|
||||
)
|
||||
else:
|
||||
common.print_with_prefix_column(
|
||||
common.ICON_INFO,
|
||||
"There are no non-major updates available.\n",
|
||||
"There is a new major version available: ",
|
||||
Text(str(self.metadata.latest_major.version), style="bold"),
|
||||
)
|
||||
return
|
||||
|
||||
if new_version_available:
|
||||
common.print_with_prefix_column(
|
||||
common.ICON_SUCCESS,
|
||||
"New version available: ",
|
||||
Text(str(self.latest.version), style="bold"),
|
||||
)
|
||||
|
||||
await self._show_changelog()
|
||||
self._check_python_requires()
|
||||
if self.options.no_cog_compatibility_check:
|
||||
self.console.print(
|
||||
"Will not make backups as --no-cog-compatibility-check option was passed."
|
||||
)
|
||||
else:
|
||||
await self._check_cog_compatibility()
|
||||
|
||||
if self.options.no_backup:
|
||||
common.print_with_prefix_column(
|
||||
common.ICON_INFO, "Will not make backups as --no-backup option was passed."
|
||||
)
|
||||
else:
|
||||
common.print_with_prefix_column(
|
||||
common.ICON_INFO,
|
||||
"The following instances will be backed up before performing the update: ",
|
||||
Text(", ").join(
|
||||
Text(instance_name, style="bold") for instance_name in self.metadata.to_backup
|
||||
),
|
||||
)
|
||||
if self.metadata.breaking_update:
|
||||
self.console.print(
|
||||
"[b]Remember that this is a major release and it may have some breaking changes"
|
||||
" that the bot or its cogs may be affected by.[/]"
|
||||
)
|
||||
if self.options.interactive and not Confirm.ask(
|
||||
f"Do you want to continue with the update to [b]Red {self.latest.version}[/]?"
|
||||
):
|
||||
return
|
||||
self.console.print()
|
||||
|
||||
if self.options.no_backup:
|
||||
self.console.print("Will not make backups as --no-backup option was passed.")
|
||||
else:
|
||||
await self._make_backups()
|
||||
|
||||
await self._update_with_fresh_venv()
|
||||
|
||||
    async def _prepare_metadata(self) -> None:
        """Fetch available Red versions and initialize ``self.metadata``.

        Selects the update target (``metadata.latest``) according to the
        ``--red-version`` and ``--no-major-updates`` options, exiting with a
        non-zero status when the requested combination is impossible.
        """
        # An explicitly requested interpreter wins over the running one.
        interpreter_info = self.options.new_python_interpreter or PythonInfo.current_system()
        with self.console.status("Checking latest version..."):
            available_versions = await fetch_available_red_versions()
        # presumably sorted newest-first — first entry is the overall latest;
        # TODO(review): confirm ordering contract of fetch_available_red_versions()
        latest_major = available_versions[0]

        self.metadata = UpdaterMetadata(
            self.options,
            latest=latest_major,
            latest_major=latest_major,
            interpreter_info=interpreter_info,
        )

        if self.options.red_version:
            # A specific target version was requested — validate it.
            if self.options.red_version <= self.current_version:
                common.print_with_prefix_column(
                    common.ICON_ERROR, "You can only update to a newer version of Red."
                )
                raise SystemExit(2)
            if (
                self.options.no_major_updates
                and self.options.red_version.release[:2] != self.current_version.release[:2]
            ):
                common.print_with_prefix_column(
                    common.ICON_ERROR,
                    "Updating to the specified version would be a major update"
                    " but --no-major-updates option was specified.",
                )
                raise SystemExit(2)
            # for-else: the else runs only when no matching version was found.
            for available_version in available_versions:
                if available_version.version == self.options.red_version:
                    break
            else:
                common.print_with_prefix_column(
                    common.ICON_ERROR, "The provided version does not seem to exist."
                )
                raise SystemExit(2)
            self.metadata.latest = available_version
        elif self.options.no_major_updates:
            # Pick the newest version sharing the current major.minor, if any.
            for available_version in available_versions:
                if available_version.version.release[:2] == self.current_version.release[:2]:
                    self.metadata.latest = available_version
                    break
            else:
                # No same-series release newer than what's installed; only an
                # error if a newer (major) release actually exists.
                if self.current_version < latest_major.version:
                    common.print_with_prefix_column(
                        common.ICON_ERROR,
                        "Could not find any version of Red that would not be a major update.",
                    )
                    raise SystemExit(1)
|
||||
|
||||
    async def _show_changelog(self) -> None:
        """Fetch and present the changelogs between the current and target versions.

        Non-interactive (or ``--no-full-changelog``) mode prints a rendered
        markdown panel; otherwise an interactive reader app is launched.
        Raises `click.Abort` when the user declines to continue.
        """
        with self.console.status("Fetching changelogs..."):
            changelogs = await changelog.fetch_changelogs()
            # Keep only the changelogs relevant to this update and store them
            # for later serialization in the metadata.
            self.metadata.changelogs = changelogs = changelog.get_changelogs_between(
                changelogs, self.current_version, self.latest.version
            )
        common.print_with_prefix_column(common.ICON_SUCCESS, "Changelogs fetched.")

        if not changelogs:
            return

        if not self.options.interactive or self.options.no_full_changelog:
            self.console.print(Panel(Markdown(changelog.render_markdown(changelogs))))
            if self.options.interactive and not Confirm.ask("Do you want to continue?"):
                raise click.Abort()
            return

        # Interactive full-changelog path: show an intro panel first.
        first_changelog_version = min(changelogs)
        last_changelog_version = max(changelogs)
        parts = []
        if first_changelog_version == last_changelog_version:
            parts.append(
                "You will now be presented with the changelog for"
                f" [b]Red {first_changelog_version}[/]."
            )
        else:
            parts.append(
                "You will now be presented with the changelogs for"
                f" [b]Red {first_changelog_version}[/]-[b]{last_changelog_version}[/]."
            )
        parts.append(
            f"\n[bold]{common.ICON_WARN}"
            ' Make sure to read through the [green]"Read before updating"[/] section'
            f" before continuing. {common.ICON_WARN}[/bold]\n"
        )
        if self.metadata.breaking_update:
            parts.append(
                f"[bold]{common.ICON_WARN}"
                " Please note that this is a major release and it may have some changes that"
                " your bot or its cogs are affected by.[/bold]\n"
            )
        parts.append(
            "After the changelog is open and you're ready to continue, hit the [b]Q[/] key"
            " to close the changelog and continue the update process.\n\n"
            "Hit the [b]Enter[/] key to view the changelog."
        )
        # password=True hides whatever the user types before hitting Enter.
        self.console.input(Panel("".join(parts)), password=True)

        viewer = ChangelogReaderApp.from_changelogs(changelogs)
        result = await viewer.run_async()
        if result is None:
            # The reader app is expected to always return a result.
            raise RuntimeError("Unexpected state")
        if result is ChangelogReaderResult.QUIT:
            raise click.Abort()

        self.console.print("Changelog has been closed.\n")
|
||||
|
||||
def _check_python_requires(self) -> None:
|
||||
if self.metadata.interpreter_version in self.latest.requires_python:
|
||||
return
|
||||
if self.options.new_python_interpreter:
|
||||
common.print_with_prefix_column(
|
||||
common.ICON_ERROR,
|
||||
"The latest version of Red requires a different Python version (",
|
||||
Text(str(self.latest.requires_python), style="bold"),
|
||||
") from the version of the interpreter passed to with the --new-python-interpreter"
|
||||
" option (",
|
||||
Text(str(self.metadata.interpreter_version), style="bold"),
|
||||
")",
|
||||
)
|
||||
raise SystemExit(1)
|
||||
common.print_with_prefix_column(
|
||||
common.ICON_WARN if self.options.interactive else common.ICON_ERROR,
|
||||
"The latest version of Red requires a different Python version (",
|
||||
Text(str(self.latest.requires_python), style="bold"),
|
||||
") from the one that you are currently using (",
|
||||
Text(str(self.metadata.interpreter_version), style="bold"),
|
||||
")",
|
||||
(
|
||||
"\nredbot-update will have to recreate the virtual environment"
|
||||
" with a compatible version of Python."
|
||||
if self.options.interactive
|
||||
else ""
|
||||
),
|
||||
)
|
||||
if not self.options.interactive:
|
||||
raise SystemExit(1)
|
||||
interpreters = common.search_for_interpreters(self.latest.requires_python)
|
||||
|
||||
def _render_interpreter(interpreter_exe: str, interpreter_version: Version) -> Text:
|
||||
return Text.assemble(
|
||||
"CPython ",
|
||||
(str(interpreter_version), "repr.number"),
|
||||
" (",
|
||||
(interpreter_exe, "log.path"),
|
||||
")",
|
||||
)
|
||||
|
||||
text = Text("Found the following compatible Python interpreters on your system:")
|
||||
for idx, (interpreter_exe, interpreter_version, python_info) in enumerate(interpreters, 1):
|
||||
text.append_text(Text(f"\n{idx}. ", style="markdown.item.number"))
|
||||
text.append_text(_render_interpreter(interpreter_exe, interpreter_version))
|
||||
self.console.print(Panel(text))
|
||||
|
||||
while True:
|
||||
result = IntPrompt.ask(
|
||||
"\nEnter the number of the Python interpreter above that you want to use"
|
||||
" or type 0 to input the path to it yourself. Generally, you should choose"
|
||||
" the interpreter with the latest version on the above list.\n"
|
||||
"Enter your selection",
|
||||
default=1,
|
||||
)
|
||||
if result < 0 or result > len(interpreters):
|
||||
self.console.print("[prompt.invalid] This is not a valid choice.")
|
||||
continue
|
||||
|
||||
if result == 0:
|
||||
response = Prompt.ask(
|
||||
"Please input the path to the Python interpreter that you want to use"
|
||||
)
|
||||
if not response:
|
||||
self.console.print("[prompt.invalid] No path was provided.")
|
||||
continue
|
||||
info = PythonInfo.from_exe(response)
|
||||
interpreter_version = Version(info.version_str)
|
||||
if (
|
||||
info.implementation != "CPython"
|
||||
or interpreter_version not in self.latest.requires_python
|
||||
):
|
||||
self.console.print(
|
||||
"[prompt.invalid] The provided path points to an incompatible Python"
|
||||
" interpreter. Latest version requires CPython"
|
||||
f" {self.latest.requires_python} but the provided interpreter is"
|
||||
f" {info.implementation} {interpreter_version}."
|
||||
)
|
||||
continue
|
||||
self.metadata.interpreter_version = interpreter_version
|
||||
self.metadata.interpreter_info = info
|
||||
self.metadata.interpreter_exe = info.executable
|
||||
else:
|
||||
(
|
||||
self.metadata.interpreter_exe,
|
||||
self.metadata.interpreter_version,
|
||||
self.metadata.interpreter_info,
|
||||
) = interpreters[result - 1]
|
||||
|
||||
self.console.print(
|
||||
"\n[b]You selected:[/]",
|
||||
_render_interpreter(
|
||||
self.metadata.interpreter_exe, self.metadata.interpreter_version
|
||||
),
|
||||
)
|
||||
if Confirm.ask("Do you want to continue with this choice?"):
|
||||
self.console.print()
|
||||
break
|
||||
|
||||
async def _check_cog_compatibility(self) -> None:
|
||||
outputs = {}
|
||||
checked_instances = {}
|
||||
skipped_instances = []
|
||||
failed_instances = []
|
||||
unsupported_storage_instances = []
|
||||
for instance_name in self.options.instances:
|
||||
if instance_name in self.options.excluded_instances:
|
||||
skipped_instances.append(instance_name)
|
||||
continue
|
||||
exit_code, stdout, results = await cmd.cog_compatibility.call(
|
||||
instance_name,
|
||||
red_version=self.latest.version,
|
||||
python_version=self.metadata.interpreter_version,
|
||||
ignore_prefix=self.options.ignore_prefix,
|
||||
return_results=True,
|
||||
stdout=asyncio.subprocess.PIPE,
|
||||
)
|
||||
if exit_code == cmd.cog_compatibility.EXIT_INSTANCE_BACKEND_UNSUPPORTED:
|
||||
skipped_instances.append(instance_name)
|
||||
unsupported_storage_instances.append(instance_name)
|
||||
elif exit_code == cmd.cog_compatibility.EXIT_INSTANCE_SITE_PREFIX_MISMATCH:
|
||||
skipped_instances.append(instance_name)
|
||||
elif exit_code:
|
||||
failed_instances.append(instance_name)
|
||||
print(stdout, end="")
|
||||
Text.assemble(
|
||||
"\N{UPWARDS ARROW} " * 3,
|
||||
"Failure for ",
|
||||
(instance_name, "bold"),
|
||||
" instance",
|
||||
)
|
||||
self.console.rule(
|
||||
Text.assemble(
|
||||
"\N{UPWARDS ARROW} " * 3,
|
||||
"Failure for ",
|
||||
(instance_name, "bold"),
|
||||
" instance above",
|
||||
" \N{UPWARDS ARROW}" * 3,
|
||||
),
|
||||
style="red",
|
||||
)
|
||||
else:
|
||||
assert results is not None
|
||||
outputs[instance_name] = stdout
|
||||
checked_instances[instance_name] = results
|
||||
if stdout:
|
||||
self.console.print()
|
||||
self.console.print()
|
||||
if not self.options.no_backup:
|
||||
self.metadata.to_backup = [*checked_instances, *failed_instances]
|
||||
|
||||
if outputs:
|
||||
for instance_name, stdout in outputs.items():
|
||||
self.console.rule(Text(instance_name, style="bold"))
|
||||
print(stdout, end="")
|
||||
self.console.rule()
|
||||
|
||||
common.print_with_prefix_column(
|
||||
common.ICON_INFO,
|
||||
"Finished checking cog compatibility.",
|
||||
(
|
||||
"\nThe results for each of the checked instances are shown above."
|
||||
if checked_instances
|
||||
else ""
|
||||
),
|
||||
)
|
||||
if failed_instances:
|
||||
common.print_with_prefix_column(
|
||||
common.ICON_ERROR,
|
||||
"Failure occurred while trying to check compatibility for following instances: ",
|
||||
Text(", ").join(
|
||||
Text(instance_name, style="bold") for instance_name in failed_instances
|
||||
),
|
||||
"\nScroll above to find the errors.",
|
||||
)
|
||||
if unsupported_storage_instances:
|
||||
common.print_with_prefix_column(
|
||||
common.ICON_INFO,
|
||||
"The following instances were skipped as they use a storage backend that is"
|
||||
" not supported by the current Red installation (some requirements are missing): ",
|
||||
Text(", ").join(
|
||||
Text(instance_name, style="bold")
|
||||
for instance_name in unsupported_storage_instances
|
||||
),
|
||||
)
|
||||
if not checked_instances:
|
||||
common.print_with_prefix_column(
|
||||
common.ICON_INFO,
|
||||
"There were no",
|
||||
(" other" if failed_instances or unsupported_storage_instances else ""),
|
||||
" instances to check cog compatibility for.",
|
||||
)
|
||||
self.console.print()
|
||||
|
||||
self.metadata.cog_compatibility = UpdaterCompatibilitySummary(
|
||||
checked=checked_instances, failed=failed_instances, skipped=skipped_instances
|
||||
)
|
||||
|
||||
    async def _make_backups(self) -> None:
        """Back up the current venv (as a tar.gz) and each instance in
        ``metadata.to_backup`` (via ``redbot.setup backup`` subprocesses).

        Records the outcome in ``metadata.backup_results``. On any instance
        backup failure, exits unless the user interactively chooses to go on.
        """
        self.metadata.backup_dir = backup_dir = self.options.backup_dir or Path(
            tempfile.mkdtemp(prefix="redbot-update-backup-")
        )
        console = common.get_console()
        console.print("Backups will be created at:", Text(str(backup_dir), style="bold"))
        venv_archive = backup_dir / "venv.tar.gz"
        with console.status("Making a backup of the virtual environment directory..."):
            # Collect (absolute source, archive-relative) pairs for every file
            # in the venv; done up front so the progress bar has a total.
            venv_dir = Path(sys.prefix)
            venv_files = []
            for current_dir, _, filenames in os.walk(venv_dir):
                target_dir = os.path.relpath(current_dir, venv_dir)
                if target_dir == ".":
                    # Files directly in the venv root get no directory prefix.
                    target_dir = ""
                for name in filenames:
                    venv_files.append(
                        (os.path.join(current_dir, name), os.path.join(target_dir, name))
                    )
        with tarfile.open(venv_archive, "w:gz", compresslevel=6) as tar:
            with detailed_progress(unit="files") as progress:
                for src, arcname in progress.track(venv_files, description="Compressing..."):
                    # recursive=False — directories were already expanded above.
                    tar.add(src, arcname=arcname, recursive=False)
        console.print(
            "Created a backup of the virtual environment directory at:",
            Text(str(venv_archive), style="bold"),
        )

        checked = []
        failed = []
        instance_backups_dir = backup_dir / "instance_backups"
        instance_backups_dir.mkdir()
        for instance_name in self.metadata.to_backup:
            console.print(
                "Making a backup of the", Text(instance_name, style="bold"), "instance..."
            )
            # presumably repeats the debug flag once per CLI verbosity level —
            # TODO(review): confirm `common.get_log_cli_level()` semantics
            debug_args = (cmd.arg_names.DEBUG,) * common.get_log_cli_level()
            proc = await asyncio.create_subprocess_exec(
                sys.executable,
                "-m",
                "redbot.setup",
                "backup",
                *debug_args,
                instance_name,
                str(instance_backups_dir),
            )
            # Non-zero exit code means the backup for this instance failed.
            if await proc.wait():
                failed.append(instance_name)
            else:
                checked.append(instance_name)

        self.metadata.backup_results = BackupResults(checked=checked, failed=failed)
        if self.metadata.cog_compatibility:
            # Instances skipped by the compatibility check were never backed up.
            self.metadata.backup_results.skipped.extend(self.metadata.cog_compatibility.skipped)

        if failed:
            common.print_with_prefix_column(
                common.ICON_ERROR,
                "The following instances failed during backup: ",
                Text(", ").join(Text(instance_name, style="bold") for instance_name in failed),
                "\nScroll above to find the errors.",
            )
            # If a backup fails, we cannot allow non-interactive update to continue.
            # The user can choose to use options such as `--no-backup`, `--instance`,
            # and `--exclude-instance` to not have the backup step try to backup something
            # that it can't.
            if not self.options.interactive or not Confirm.ask(
                "Do you want to continue with the update regardless?"
            ):
                raise SystemExit(1)
|
||||
|
||||
    async def _update_with_fresh_venv(self) -> NoReturn:
        """Move the current venv aside and hand off to the runner to recreate
        it and install the target Red version.

        Does not return: ``runner.make_exec_request`` replaces/continues the
        process using the interpreter saved in the moved-aside venv. The
        serialized metadata path is passed via an environment variable so the
        post-update step can pick up where we left off.
        """
        console = common.get_console()
        venv_dir = Path(sys.prefix)
        backup_dir = venv_dir / common.OLD_VENV_BACKUP_DIR_NAME
        try:
            backup_dir.mkdir()
        except FileExistsError:
            # A leftover from a previously failed update — refuse to clobber it.
            console.print(
                "Found that a partial backup of a virtual environment from a past failed update"
                " exists at",
                Text(str(backup_dir), style="bold"),
                "\nThe update will not proceed to avoid overriding it. If you are certain that"
                " you don't need to restore anything from it, remove it and try updating again.",
            )
            raise SystemExit(1)

        with console.status("Determining extras to install..."):
            try:
                metadata = await self.latest.fetch_core_metadata()
            except TypeError:
                # NOTE(review): fallback path when core metadata can't be
                # fetched/parsed — keep all currently installed extras.
                extras = get_installed_extras()
            else:
                # Only carry over extras the target version actually declares.
                known_extras = metadata.provides_extra or []
                extras = [extra for extra in get_installed_extras() if extra in known_extras]
        console.print("Extras to install have been determined.")

        # The old interpreter keeps working from its moved location; compute
        # where it will live inside the backup dir before moving anything.
        old_executable = Path(sys.executable)
        rel_executable = old_executable.relative_to(venv_dir)
        new_executable = backup_dir / rel_executable
        wrapper_exe = runner.get_wrapper_executable()

        with console.status("Moving old virtual environment..."):
            for path in venv_dir.iterdir():
                # Keep the backup dir itself and the wrapper executable in place.
                if path == backup_dir or path == wrapper_exe:
                    continue
                path.rename(backup_dir / path.name)
        console.print("Old virtual environment moved.")

        # delete=False: the file must outlive this process for the next stage.
        with tempfile.NamedTemporaryFile(
            "w", encoding="utf-8", prefix="redbot-update-metadata-", suffix=".json", delete=False
        ) as metadata_file:
            json.dump(self.metadata.to_json_dict(), metadata_file)

        console.print()
        runner.make_exec_request(
            str(new_executable),
            "reinstall",
            # base executable for venv creation
            self.metadata.interpreter_exe,
            # venv dir
            str(venv_dir),
            # scripts path
            self.metadata.interpreter_info.sysconfig_path("scripts", {"base": str(venv_dir)}),
            # Red dependency specifier
            common.get_red_dependency_specifier(self.latest.version, extras),
            set_env_vars={common.INTERNAL_UPDATER_METADATA_ENV_VAR: metadata_file.name},
        )
|
||||
|
||||
|
||||
def get_updater_metadata() -> UpdaterMetadata:
    """Load the serialized updater metadata.

    The path to the JSON file is taken from the environment variable that the
    previous update stage set before exec'ing into this one.
    """
    metadata_path = os.environ[common.INTERNAL_UPDATER_METADATA_ENV_VAR]
    with open(metadata_path, encoding="utf-8") as fp:
        raw_data = json.load(fp)
    return UpdaterMetadata.from_json_dict(raw_data)
|
||||
@@ -16,6 +16,7 @@ from redbot.core.i18n import Translator
|
||||
from redbot.core.utils.chat_formatting import box, humanize_number
|
||||
from redbot.core.utils.menus import menu, start_adding_reactions
|
||||
from redbot.core.utils.predicates import MessagePredicate, ReactionPredicate
|
||||
from redbot.core.utils.views import SetApiView
|
||||
|
||||
from ...audio_dataclasses import LocalPath
|
||||
from ...converters import ScopeParser
|
||||
@@ -1280,26 +1281,38 @@ class AudioSetCommands(MixinMeta, metaclass=CompositeMetaClass):
|
||||
"6. Click on Create Credential at the top.\n"
|
||||
'7. At the top click the link for "API key".\n'
|
||||
"8. No application restrictions are needed. Click Create at the bottom.\n"
|
||||
"9. You now have a key to add to `{prefix}set api youtube api_key <your_api_key_here>`"
|
||||
).format(prefix=ctx.prefix)
|
||||
await ctx.maybe_send_embed(message)
|
||||
"9. Click the button below this message and set your API key"
|
||||
" with the data shown in Google Developers Console."
|
||||
)
|
||||
await ctx.send(
|
||||
message,
|
||||
view=SetApiView(default_service="youtube", default_keys={"api_key": ""}),
|
||||
)
|
||||
|
||||
@command_audioset.command(name="spotifyapi")
|
||||
@commands.is_owner()
|
||||
async def command_audioset_spotifyapi(self, ctx: commands.Context):
|
||||
"""Instructions to set the Spotify API tokens."""
|
||||
message = _(
|
||||
"1. Go to Spotify developers and log in with your Spotify account.\n"
|
||||
"(https://developer.spotify.com/dashboard/applications)\n"
|
||||
'2. Click "Create An App".\n'
|
||||
"3. Fill out the form provided with your app name, etc.\n"
|
||||
'4. When asked if you\'re developing commercial integration select "No".\n'
|
||||
"5. Accept the terms and conditions.\n"
|
||||
"6. Copy your client ID and your client secret into:\n"
|
||||
"`{prefix}set api spotify client_id <your_client_id_here> "
|
||||
"client_secret <your_client_secret_here>`"
|
||||
).format(prefix=ctx.prefix)
|
||||
await ctx.maybe_send_embed(message)
|
||||
"1. Go to Spotify for Developers and log in with your Spotify account."
|
||||
" If this is your first time, you'll be asked to accept the terms and conditions.\n"
|
||||
"(https://developer.spotify.com/dashboard)\n"
|
||||
'2. Click "Create app".\n'
|
||||
"3. Fill out the form provided with your app name and description."
|
||||
" These can be anything you want. Website field can be left empty.\n"
|
||||
"4. Add `https://localhost` to your Redirect URIs. This will not be used"
|
||||
" but is required when filling out the form.\n"
|
||||
'5. Select "Web API" when asked which API/SDKs you are planning to use.\n'
|
||||
"6. Confirm that you agree to the terms and conditions and save the application.\n"
|
||||
"7. Click the button below this message and set your client ID and your client secret"
|
||||
" with the data shown in Spotify's dashboard."
|
||||
)
|
||||
await ctx.send(
|
||||
message,
|
||||
view=SetApiView(
|
||||
default_service="spotify", default_keys={"client_id": "", "client_secret": ""}
|
||||
),
|
||||
)
|
||||
|
||||
@command_audioset.command(name="countrycode")
|
||||
@commands.guild_only()
|
||||
|
||||
+74
-67
@@ -53,7 +53,7 @@ msgstr "Não foi possível tocar a música"
|
||||
#: redbot/cogs/audio/core/utilities/player.py:442
|
||||
#: redbot/cogs/audio/core/utilities/player.py:524
|
||||
msgid "Queue size limit reached."
|
||||
msgstr ""
|
||||
msgstr "Limite da fila atingido."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/formatting.py:154
|
||||
#: redbot/cogs/audio/core/utilities/player.py:599
|
||||
@@ -63,41 +63,41 @@ msgstr "Faixa Enfileirada"
|
||||
#: redbot/cogs/audio/core/utilities/formatting.py:168
|
||||
#: redbot/cogs/audio/core/utilities/player.py:548
|
||||
msgid "This track is not allowed in this server."
|
||||
msgstr ""
|
||||
msgstr "Esta faixa não é permitida neste servidor."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/formatting.py:185
|
||||
#: redbot/cogs/audio/core/utilities/player.py:570
|
||||
msgid "Track exceeds maximum length."
|
||||
msgstr ""
|
||||
msgstr "Faixa excede comprimento máximo."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/formatting.py:200
|
||||
#: redbot/cogs/audio/core/utilities/player.py:602
|
||||
msgid "{time} until track playback: #{position} in queue"
|
||||
msgstr ""
|
||||
msgstr "{time} até a reprodução da faixa: #{position} na fila"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/formatting.py:260
|
||||
msgid "Tracks Found:"
|
||||
msgstr ""
|
||||
msgstr "Faixas Encontradas:"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/formatting.py:261
|
||||
msgid "search results"
|
||||
msgstr ""
|
||||
msgstr "resultados da pesquisa"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/formatting.py:263
|
||||
msgid "Folders Found:"
|
||||
msgstr ""
|
||||
msgstr "Pastas Encontradas:"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/formatting.py:264
|
||||
msgid "local folders"
|
||||
msgstr ""
|
||||
msgstr "pastas locais"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/formatting.py:266
|
||||
msgid "Files Found:"
|
||||
msgstr ""
|
||||
msgstr "Arquivos Encontrados:"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/formatting.py:267
|
||||
msgid "local tracks"
|
||||
msgstr ""
|
||||
msgstr "faixas locais"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/formatting.py:379
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:240
|
||||
@@ -122,15 +122,15 @@ msgstr "Ambiente inválido"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/local_tracks.py:109
|
||||
msgid "No localtracks folder."
|
||||
msgstr ""
|
||||
msgstr "Sem pasta localtracks."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/miscellaneous.py:50
|
||||
msgid "Not enough {currency}"
|
||||
msgstr ""
|
||||
msgstr "Sem {currency} suficiente"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/miscellaneous.py:51
|
||||
msgid "{required_credits} {currency} required, but you have {bal}."
|
||||
msgstr ""
|
||||
msgstr "{required_credits} {currency} necessário, mas você possui {bal}."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:78
|
||||
msgid "music in {} servers"
|
||||
@@ -140,54 +140,56 @@ msgstr "música em {} servidores"
|
||||
#: redbot/cogs/audio/core/utilities/player.py:139
|
||||
#: redbot/cogs/audio/core/utilities/player.py:144
|
||||
msgid "There's nothing in the queue."
|
||||
msgstr ""
|
||||
msgstr "Não há nada na fila."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:141
|
||||
msgid "Currently livestreaming {track}"
|
||||
msgstr ""
|
||||
msgstr "Transmitindo agora {track}"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:146
|
||||
msgid "{time} left on {track}"
|
||||
msgstr ""
|
||||
msgstr "{time} restante de {track}"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:154
|
||||
#: redbot/cogs/audio/core/utilities/player.py:189
|
||||
msgid "Track Skipped"
|
||||
msgstr ""
|
||||
msgstr "Faixa Pulada"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:167
|
||||
msgid "Track number must be equal to or greater than 1."
|
||||
msgstr ""
|
||||
msgstr "O número da faixa deve ser igual ou maior que 1."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:173
|
||||
msgid "There are only {queuelen} songs currently queued."
|
||||
msgstr ""
|
||||
msgstr "Há apenas {queuelen} músicas na fila."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:179
|
||||
msgid "{skip_to_track} Tracks Skipped"
|
||||
msgstr ""
|
||||
msgstr "{skip_to_track} Faixas Puladas"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:235
|
||||
msgid "The owner needs to set the Spotify client ID and Spotify client secret, before Spotify URLs or codes can be used. \n"
|
||||
"See `{prefix}audioset spotifyapi` for instructions."
|
||||
msgstr ""
|
||||
msgstr "O proprietário precisa definir o ID do cliente do Spotify e o Spotify Client Secret, antes que possam ser usadas URLs ou códigos do Spotify. \n"
|
||||
"Veja `{prefix}audioset spotifyapi` para instruções."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:245
|
||||
msgid "The owner needs to set the YouTube API key before Spotify URLs or codes can be used.\n"
|
||||
"See `{prefix}audioset youtubeapi` for instructions."
|
||||
msgstr ""
|
||||
msgstr "O proprietário precisa definir a chave da API do YouTube antes que URLs ou códigos do Spotify possam ser usados.\n"
|
||||
"Veja `{prefix}audioset youtubeapi` para instruções."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:254
|
||||
#: redbot/cogs/audio/core/utilities/player.py:363
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:594
|
||||
msgid "Unable To Get Tracks"
|
||||
msgstr ""
|
||||
msgstr "Não foi possível obter as faixas"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:255
|
||||
#: redbot/cogs/audio/core/utilities/player.py:364
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:595
|
||||
msgid "Wait until the playlist has finished loading."
|
||||
msgstr ""
|
||||
msgstr "Aguarde até que a playlist termine de carregar."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:266
|
||||
#: redbot/cogs/audio/core/utilities/player.py:308
|
||||
@@ -203,7 +205,7 @@ msgstr "Nada encontrado."
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:607
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:640
|
||||
msgid "Track is not playable."
|
||||
msgstr ""
|
||||
msgstr "Faixa não é reproduzível."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:270
|
||||
#: redbot/cogs/audio/core/utilities/player.py:311
|
||||
@@ -211,7 +213,7 @@ msgstr ""
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:608
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:641
|
||||
msgid "**{suffix}** is not a fully supported format and some tracks may not play."
|
||||
msgstr ""
|
||||
msgstr "**{suffix}** não é um formato totalmente suportado e algumas faixas podem não reproduzir."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:300
|
||||
#: redbot/cogs/audio/core/utilities/player.py:393
|
||||
@@ -235,7 +237,7 @@ msgstr "A chave de API do Spotify ou segredo do cliente não foram definidos cor
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:351
|
||||
msgid "Unable To Find Tracks"
|
||||
msgstr ""
|
||||
msgstr "Não foi possível encontrar as faixas"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:352
|
||||
msgid "This doesn't seem to be a supported Spotify URL or code."
|
||||
@@ -243,26 +245,27 @@ msgstr "Isto não parece ser uma URL ou código do Spotify válido."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:378
|
||||
msgid "{query} is not an allowed query."
|
||||
msgstr ""
|
||||
msgstr "{query} não é uma solicitação permitida."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:394
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:627
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:656
|
||||
msgid "I'm unable to get a track from Lavalink node at the moment, try again in a few minutes."
|
||||
msgstr ""
|
||||
msgstr "Não foi possível obter uma faixa do Lavalink Node no momento, tente novamente em alguns minutos."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:416
|
||||
msgid "Local tracks will not work if the `Lavalink.jar` cannot see the track.\n"
|
||||
"This may be due to permissions or because Lavalink.jar is being run in a different machine than the local tracks."
|
||||
msgstr ""
|
||||
msgstr "As faixas locais não funcionarão se o `Lavalink.jar` não conseguir ver a faixa.\n"
|
||||
"Isto pode ser devido a permissões ou porque o Lavalink.jar está sendo executado em uma máquina diferente das faixas locais."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:486
|
||||
msgid " {bad_tracks} tracks cannot be queued."
|
||||
msgstr ""
|
||||
msgstr " {bad_tracks} faixas não puderam ser adicionadas."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:492
|
||||
msgid "No Title"
|
||||
msgstr ""
|
||||
msgstr "Sem Título"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:494
|
||||
msgid "Playlist Enqueued"
|
||||
@@ -270,7 +273,7 @@ msgstr "Lista de reprodução enfileirada"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:494
|
||||
msgid "Album Enqueued"
|
||||
msgstr ""
|
||||
msgstr "Álbum Adicionado"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:502
|
||||
msgid "Added {num} tracks to the queue.{maxlength_msg}"
|
||||
@@ -286,25 +289,25 @@ msgstr "Nada foi encontrado"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:623
|
||||
msgid "Please wait, finding tracks..."
|
||||
msgstr ""
|
||||
msgstr "Por favor, aguarde, encontrando faixas..."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:629
|
||||
msgid "Getting track {num}/{total}..."
|
||||
msgstr ""
|
||||
msgstr "Obtendo faixa {num}/{total}..."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:630
|
||||
msgid "Matching track {num}/{total}..."
|
||||
msgstr ""
|
||||
msgstr "Correspondendo faixa {num}/{total}..."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:631
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:341
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:414
|
||||
msgid "Loading track {num}/{total}..."
|
||||
msgstr ""
|
||||
msgstr "Carregando faixa {num}/{total}..."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:632
|
||||
msgid "Approximate time remaining: {seconds}"
|
||||
msgstr ""
|
||||
msgstr "Tempo restante aproximado: {seconds}"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/player.py:658
|
||||
msgid "I'm unable to get a track from Lavalink at the moment, try again in a few minutes."
|
||||
@@ -316,27 +319,27 @@ msgstr "A conexão foi redefinida durante o carregamento da lista de reproduçã
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:83
|
||||
msgid "You do not have the permissions to manage {name} (`{id}`) [**{scope}**]."
|
||||
msgstr ""
|
||||
msgstr "Você não tem as permissões para gerenciar {name} (`{id}`) [**{scope}**]."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:101
|
||||
msgid "You do not have the permissions to manage that playlist in {guild}."
|
||||
msgstr ""
|
||||
msgstr "Você não tem permissão para gerenciar essa playlist no {guild}."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:108
|
||||
msgid "You do not have the permissions to manage playlist owned by {user}."
|
||||
msgstr ""
|
||||
msgstr "Você não tem permissão para gerenciar a playlist de {user}."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:112
|
||||
msgid "You do not have the permissions to manage playlists in {scope} scope."
|
||||
msgstr ""
|
||||
msgstr "Você não tem as permissões para gerenciar playlists no escopo {scope}."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:116
|
||||
msgid "No access to playlist."
|
||||
msgstr ""
|
||||
msgstr "Sem acesso à playlist."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:224
|
||||
msgid "{match_count} playlists match {original_input}: Please try to be more specific, or use the playlist ID."
|
||||
msgstr ""
|
||||
msgstr "{match_count} playlists correspondem {original_input}: Por favor, tente ser mais específico, ou use o ID da playlist."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:241
|
||||
msgid "{number}. <{playlist.name}>\n"
|
||||
@@ -344,24 +347,28 @@ msgid "{number}. <{playlist.name}>\n"
|
||||
" - ID: < {playlist.id} >\n"
|
||||
" - Tracks: < {tracks} >\n"
|
||||
" - Author: < {author} >\n\n"
|
||||
msgstr ""
|
||||
msgstr "{number}. <{playlist.name}>\n"
|
||||
" - Escopo: < {scope} >\n"
|
||||
" - ID: < {playlist.id} >\n"
|
||||
" - Faixas: < {tracks} >\n"
|
||||
" - Autor: < {author} >\n\n"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:258
|
||||
msgid "{playlists} playlists found, which one would you like?"
|
||||
msgstr ""
|
||||
msgstr "{playlists} playlists encontradas, de qual você gostaria?"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:277
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:283
|
||||
msgid "Too many matches found and you did not select which one you wanted."
|
||||
msgstr ""
|
||||
msgstr "Muitas opções foram encontradas e você não selecionou qual você queria."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:308
|
||||
msgid "Playlists you can access in this server:"
|
||||
msgstr ""
|
||||
msgstr "Playlists que você pode acessar neste servidor:"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:314
|
||||
msgid "Playlists for {scope}:"
|
||||
msgstr ""
|
||||
msgstr "Playlists para {scope}:"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:318
|
||||
msgid "Page {page_num}/{total_pages} | {num} playlists."
|
||||
@@ -370,46 +377,46 @@ msgstr ""
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:334
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:412
|
||||
msgid "Please wait, adding tracks..."
|
||||
msgstr ""
|
||||
msgstr "Por favor, aguarde, adicionando faixas..."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:361
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:464
|
||||
msgid "Empty playlist {name} (`{id}`) [**{scope}**] created."
|
||||
msgstr ""
|
||||
msgstr "Playlist vazia {name} (`{id}`) [**{scope}**] criada."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:366
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:469
|
||||
msgid "Added {num} tracks from the {playlist_name} playlist. {num_bad} track(s) could not be loaded."
|
||||
msgstr ""
|
||||
msgstr "Adicionadas {num} músicas da lista {playlist_name} . Não foi possível carregar a(s) faixa(s) {num_bad}."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:371
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:474
|
||||
msgid "Added {num} tracks from the {playlist_name} playlist."
|
||||
msgstr ""
|
||||
msgstr "Adicionadas {num} músicas da lista {playlist_name}."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:375
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:478
|
||||
msgid "Playlist Saved"
|
||||
msgstr ""
|
||||
msgstr "Playlist salva"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:540
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:553
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:560
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:571
|
||||
msgid "Unable To Get Playlists"
|
||||
msgstr ""
|
||||
msgstr "Não foi possível obter as playlists"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:541
|
||||
msgid "I don't have permission to connect and speak in your channel."
|
||||
msgstr ""
|
||||
msgstr "Não tenho permissão para conectar e falar em seu canal."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:572
|
||||
msgid "You must be in the voice channel to use the playlist command."
|
||||
msgstr ""
|
||||
msgstr "Você deve estar no canal de voz para usar esse comando."
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:680
|
||||
msgid "the Global"
|
||||
msgstr ""
|
||||
msgstr "o Global"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:680
|
||||
msgid "Global"
|
||||
@@ -417,7 +424,7 @@ msgstr "Global"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:682
|
||||
msgid "the Server"
|
||||
msgstr ""
|
||||
msgstr "o Servidor"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:682
|
||||
msgid "Server"
|
||||
@@ -425,7 +432,7 @@ msgstr "Servidor"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:684
|
||||
msgid "the User"
|
||||
msgstr ""
|
||||
msgstr "o Usuário"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/playlists.py:684
|
||||
msgid "User"
|
||||
@@ -433,20 +440,20 @@ msgstr "Usuário"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/queue.py:40
|
||||
msgid "__Too many songs in the queue, only showing the first 500__.\n\n"
|
||||
msgstr ""
|
||||
msgstr "__Muitas músicas na fila, mostrando apenas os primeiros 500__.\n\n"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/queue.py:57
|
||||
msgid "**Currently livestreaming:**\n"
|
||||
msgstr ""
|
||||
msgstr "**Transmitindo agora:**\n"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/queue.py:59
|
||||
#: redbot/cogs/audio/core/utilities/queue.py:64
|
||||
msgid "Requested by: **{user}**"
|
||||
msgstr ""
|
||||
msgstr "Solicitado por: **{user}**"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/queue.py:62
|
||||
msgid "Playing: "
|
||||
msgstr ""
|
||||
msgstr "Reproduzindo: "
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/queue.py:76
|
||||
msgid "requested by **{user}**\n"
|
||||
@@ -454,11 +461,11 @@ msgstr ""
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/queue.py:80
|
||||
msgid "Queue for __{guild_name}__"
|
||||
msgstr ""
|
||||
msgstr "Fila para __{guild_name}__"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/queue.py:88
|
||||
msgid "Page {page_num}/{total_pages} | {num_tracks} tracks, {num_remaining} remaining\n"
|
||||
msgstr ""
|
||||
msgstr "Página {page_num}/{total_pages} {num_tracks} faixas, {num_remaining} restantes\n"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/queue.py:97
|
||||
msgid "Auto-Play"
|
||||
@@ -474,7 +481,7 @@ msgstr "Repetir"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/queue.py:161
|
||||
msgid "Matching Tracks:"
|
||||
msgstr ""
|
||||
msgstr "Faixas correspondentes:"
|
||||
|
||||
#: redbot/cogs/audio/core/utilities/queue.py:164
|
||||
msgid "Page {page_num}/{total_pages} | {num_tracks} tracks"
|
||||
|
||||
@@ -11,9 +11,9 @@ __all__ = (
|
||||
)
|
||||
|
||||
|
||||
JAR_VERSION: Final[LavalinkVersion] = LavalinkVersion(3, 7, 13, red=2)
|
||||
JAR_VERSION: Final[LavalinkVersion] = LavalinkVersion(3, 7, 13, red=5)
|
||||
YT_PLUGIN_VERSION: Final[str] = "1.18.0"
|
||||
# keep this sorted from oldest to latest
|
||||
SUPPORTED_JAVA_VERSIONS: Final[Tuple[int, ...]] = (11, 17)
|
||||
SUPPORTED_JAVA_VERSIONS: Final[Tuple[int, ...]] = (17, 21)
|
||||
LATEST_SUPPORTED_JAVA_VERSION: Final = SUPPORTED_JAVA_VERSIONS[-1]
|
||||
OLDER_SUPPORTED_JAVA_VERSIONS: Final[Tuple[int, ...]] = SUPPORTED_JAVA_VERSIONS[:-1]
|
||||
|
||||
@@ -6,4 +6,3 @@ from .downloader import Downloader
|
||||
async def setup(bot: Red) -> None:
|
||||
cog = Downloader(bot)
|
||||
await bot.add_cog(cog)
|
||||
cog.create_init_task()
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
import discord
|
||||
from redbot.core import commands
|
||||
from redbot.core import _downloader, commands
|
||||
from redbot.core.i18n import Translator
|
||||
from .installable import InstalledModule
|
||||
from redbot.core._downloader.installable import InstalledModule
|
||||
from redbot.core._downloader.repo_manager import Repo as _Repo
|
||||
|
||||
_ = Translator("Koala", __file__)
|
||||
|
||||
@@ -9,14 +10,21 @@ _ = Translator("Koala", __file__)
|
||||
class InstalledCog(InstalledModule):
|
||||
@classmethod
|
||||
async def convert(cls, ctx: commands.Context, arg: str) -> InstalledModule:
|
||||
downloader = ctx.bot.get_cog("Downloader")
|
||||
if downloader is None:
|
||||
raise commands.CommandError(_("No Downloader cog found."))
|
||||
|
||||
cog = discord.utils.get(await downloader.installed_cogs(), name=arg)
|
||||
cog = discord.utils.get(await _downloader.installed_cogs(), name=arg)
|
||||
if cog is None:
|
||||
raise commands.BadArgument(
|
||||
_("Cog `{cog_name}` is not installed.").format(cog_name=arg)
|
||||
)
|
||||
|
||||
return cog
|
||||
|
||||
|
||||
class Repo(_Repo):
|
||||
@classmethod
|
||||
async def convert(cls, ctx: commands.Context, argument: str) -> _Repo:
|
||||
poss_repo = _downloader._repo_manager.get_repo(argument)
|
||||
if poss_repo is None:
|
||||
raise commands.BadArgument(
|
||||
_('Repo by the name "{repo_name}" does not exist.').format(repo_name=argument)
|
||||
)
|
||||
return poss_repo
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
Generated
+8
-8
@@ -23,16 +23,16 @@ msgstr ""
|
||||
|
||||
#: redbot/cogs/downloader/checks.py:38
|
||||
msgid "Your response has timed out, please try again."
|
||||
msgstr ""
|
||||
msgstr "Sua resposta expirou. Por favor, tente novamente."
|
||||
|
||||
#: redbot/cogs/downloader/converters.py:14
|
||||
#: redbot/cogs/downloader/repo_manager.py:176
|
||||
msgid "No Downloader cog found."
|
||||
msgstr ""
|
||||
msgstr "Nenhum cog Downloader foi encontrado."
|
||||
|
||||
#: redbot/cogs/downloader/converters.py:19
|
||||
msgid "Cog `{cog_name}` is not installed."
|
||||
msgstr ""
|
||||
msgstr "O Cog `{cog_name}` não está instalado."
|
||||
|
||||
#: redbot/cogs/downloader/downloader.py:31
|
||||
msgid "\n"
|
||||
@@ -67,24 +67,24 @@ msgstr ""
|
||||
|
||||
#: redbot/cogs/downloader/downloader.py:508
|
||||
msgid "Libraries installed."
|
||||
msgstr ""
|
||||
msgstr "Bibliotecas instaladas."
|
||||
|
||||
#: redbot/cogs/downloader/downloader.py:508
|
||||
msgid "Library installed."
|
||||
msgstr ""
|
||||
msgstr "Biblioteca instalada."
|
||||
|
||||
#: redbot/cogs/downloader/downloader.py:511
|
||||
msgid "Some libraries failed to install. Please check your logs for a complete list."
|
||||
msgstr ""
|
||||
msgstr "Não foi possível instalar algumas bibliotecas. Verifique os seus logs para ter uma lista completa."
|
||||
|
||||
#: redbot/cogs/downloader/downloader.py:516
|
||||
msgid "The library failed to install. Please check your logs for a complete list."
|
||||
msgstr ""
|
||||
msgstr "A biblioteca não foi instalada. Por favor, verifique os seus logs para ter uma lista completa."
|
||||
|
||||
#: redbot/cogs/downloader/downloader.py:524
|
||||
#, docstring
|
||||
msgid "Base command for repository management."
|
||||
msgstr ""
|
||||
msgstr "Comando base para gerenciamento do repositório."
|
||||
|
||||
#: redbot/cogs/downloader/downloader.py:531
|
||||
#, docstring
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
Generated
+41
-31
@@ -18,87 +18,87 @@ msgstr ""
|
||||
#: redbot/cogs/general/general.py:49
|
||||
#, docstring
|
||||
msgid "General commands."
|
||||
msgstr ""
|
||||
msgstr "Comandos gerais."
|
||||
|
||||
#: redbot/cogs/general/general.py:54
|
||||
msgid "As I see it, yes"
|
||||
msgstr ""
|
||||
msgstr "Como eu vejo, sim"
|
||||
|
||||
#: redbot/cogs/general/general.py:55
|
||||
msgid "It is certain"
|
||||
msgstr ""
|
||||
msgstr "Com certeza"
|
||||
|
||||
#: redbot/cogs/general/general.py:56
|
||||
msgid "It is decidedly so"
|
||||
msgstr ""
|
||||
msgstr "É decididamente assim"
|
||||
|
||||
#: redbot/cogs/general/general.py:57
|
||||
msgid "Most likely"
|
||||
msgstr ""
|
||||
msgstr "Muito provável"
|
||||
|
||||
#: redbot/cogs/general/general.py:58
|
||||
msgid "Outlook good"
|
||||
msgstr ""
|
||||
msgstr "Perspectiva boa"
|
||||
|
||||
#: redbot/cogs/general/general.py:59
|
||||
msgid "Signs point to yes"
|
||||
msgstr ""
|
||||
msgstr "Os sinais indicam que sim"
|
||||
|
||||
#: redbot/cogs/general/general.py:60
|
||||
msgid "Without a doubt"
|
||||
msgstr ""
|
||||
msgstr "Sem dúvida"
|
||||
|
||||
#: redbot/cogs/general/general.py:61
|
||||
msgid "Yes"
|
||||
msgstr ""
|
||||
msgstr "Sim"
|
||||
|
||||
#: redbot/cogs/general/general.py:62
|
||||
msgid "Yes – definitely"
|
||||
msgstr ""
|
||||
msgstr "Sim – definitivamente"
|
||||
|
||||
#: redbot/cogs/general/general.py:63
|
||||
msgid "You may rely on it"
|
||||
msgstr ""
|
||||
msgstr "Você pode contar com isso"
|
||||
|
||||
#: redbot/cogs/general/general.py:64
|
||||
msgid "Reply hazy, try again"
|
||||
msgstr ""
|
||||
msgstr "Resposta confusa, tente novamente"
|
||||
|
||||
#: redbot/cogs/general/general.py:65
|
||||
msgid "Ask again later"
|
||||
msgstr ""
|
||||
msgstr "Pergunte novamente mais tarde"
|
||||
|
||||
#: redbot/cogs/general/general.py:66
|
||||
msgid "Better not tell you now"
|
||||
msgstr ""
|
||||
msgstr "Melhor não te contar agora"
|
||||
|
||||
#: redbot/cogs/general/general.py:67
|
||||
msgid "Cannot predict now"
|
||||
msgstr ""
|
||||
msgstr "Não consigo prever agora"
|
||||
|
||||
#: redbot/cogs/general/general.py:68
|
||||
msgid "Concentrate and ask again"
|
||||
msgstr ""
|
||||
msgstr "Concentre-se e pergunte de novo"
|
||||
|
||||
#: redbot/cogs/general/general.py:69
|
||||
msgid "Don't count on it"
|
||||
msgstr ""
|
||||
msgstr "Não conte com isso"
|
||||
|
||||
#: redbot/cogs/general/general.py:70
|
||||
msgid "My reply is no"
|
||||
msgstr ""
|
||||
msgstr "Minha resposta é não"
|
||||
|
||||
#: redbot/cogs/general/general.py:71
|
||||
msgid "My sources say no"
|
||||
msgstr ""
|
||||
msgstr "Minhas fontes dizem que não"
|
||||
|
||||
#: redbot/cogs/general/general.py:72
|
||||
msgid "Outlook not so good"
|
||||
msgstr ""
|
||||
msgstr "A previsão não é muito boa"
|
||||
|
||||
#: redbot/cogs/general/general.py:73
|
||||
msgid "Very doubtful"
|
||||
msgstr ""
|
||||
msgstr "Muito duvidoso"
|
||||
|
||||
#: redbot/cogs/general/general.py:88
|
||||
#, docstring
|
||||
@@ -107,11 +107,15 @@ msgid "Choose between multiple options.\n\n"
|
||||
" Options are separated by spaces.\n\n"
|
||||
" To denote options which include whitespace, you should enclose the options in double quotes.\n"
|
||||
" "
|
||||
msgstr ""
|
||||
msgstr "Escolha entre múltiplas opções.\n\n"
|
||||
" Deve haver pelo menos 2 opções para escolher.\n"
|
||||
" As opções são separadas por espaços.\n\n"
|
||||
" Para denotar opções que incluem espaços em branco, você deve colocar as opções entre aspas duplas.\n"
|
||||
" "
|
||||
|
||||
#: redbot/cogs/general/general.py:97
|
||||
msgid "Not enough options to pick from."
|
||||
msgstr ""
|
||||
msgstr "Opções insuficientes para escolher."
|
||||
|
||||
#: redbot/cogs/general/general.py:103
|
||||
#, docstring
|
||||
@@ -119,39 +123,45 @@ msgid "Roll a random number.\n\n"
|
||||
" The result will be between 1 and `<number>`.\n\n"
|
||||
" `<number>` defaults to 100.\n"
|
||||
" "
|
||||
msgstr ""
|
||||
msgstr "Role um número aleatório.\n\n"
|
||||
" O resultado será entre 1 e `<number>`.\n\n"
|
||||
" `<number>` o padrão é 100.\n"
|
||||
" "
|
||||
|
||||
#: redbot/cogs/general/general.py:118
|
||||
msgid "{author.mention} Maybe higher than 1? ;P"
|
||||
msgstr ""
|
||||
msgstr "{author.mention} Talvez maior que 1? ;P"
|
||||
|
||||
#: redbot/cogs/general/general.py:121
|
||||
msgid "{author.mention} Max allowed number is {maxamount}."
|
||||
msgstr ""
|
||||
msgstr "{author.mention} O número máximo permitido é {maxamount}."
|
||||
|
||||
#: redbot/cogs/general/general.py:128
|
||||
#, docstring
|
||||
msgid "Flip a coin... or a user.\n\n"
|
||||
" Defaults to a coin.\n"
|
||||
" "
|
||||
msgstr ""
|
||||
msgstr "Jogue uma moeda... ou um usuário.\n\n"
|
||||
" O padrão é uma moeda.\n"
|
||||
" "
|
||||
|
||||
#: redbot/cogs/general/general.py:136
|
||||
msgid "Nice try. You think this is funny?\n"
|
||||
" How about *this* instead:\n\n"
|
||||
msgstr ""
|
||||
msgstr "Boa tentativa. Você pensa que isso é engraçado?\n"
|
||||
" Que tal *isso* em vez disso:\n\n"
|
||||
|
||||
#: redbot/cogs/general/general.py:147
|
||||
msgid "*flips a coin and... "
|
||||
msgstr ""
|
||||
msgstr "*vira uma moeda e... "
|
||||
|
||||
#: redbot/cogs/general/general.py:147
|
||||
msgid "HEADS!*"
|
||||
msgstr ""
|
||||
msgstr "CARA!*"
|
||||
|
||||
#: redbot/cogs/general/general.py:147
|
||||
msgid "TAILS!*"
|
||||
msgstr ""
|
||||
msgstr "COROA!*"
|
||||
|
||||
#: redbot/cogs/general/general.py:151
|
||||
#, docstring
|
||||
|
||||
Generated
+1
-1
@@ -572,7 +572,7 @@ msgstr ""
|
||||
#: redbot/cogs/mod/settings.py:85 redbot/cogs/mod/settings.py:93
|
||||
#: redbot/cogs/mod/settings.py:96 redbot/cogs/mod/settings.py:108
|
||||
msgid "Yes"
|
||||
msgstr ""
|
||||
msgstr "Sim"
|
||||
|
||||
#: redbot/cogs/mod/settings.py:31 redbot/cogs/mod/settings.py:57
|
||||
#: redbot/cogs/mod/settings.py:62 redbot/cogs/mod/settings.py:67
|
||||
|
||||
Generated
+1
-1
@@ -532,7 +532,7 @@ msgstr ""
|
||||
|
||||
#: redbot/cogs/mutes/mutes.py:1794
|
||||
msgid "this server"
|
||||
msgstr ""
|
||||
msgstr "este servidor"
|
||||
|
||||
#: redbot/cogs/mutes/voicemutes.py:42
|
||||
msgid "That user is not in a voice channel."
|
||||
|
||||
Generated
+1
-1
@@ -171,7 +171,7 @@ msgstr ""
|
||||
|
||||
#: redbot/cogs/trivia/trivia.py:44
|
||||
msgid "Yes"
|
||||
msgstr ""
|
||||
msgstr "Sim"
|
||||
|
||||
#: redbot/cogs/trivia/trivia.py:46
|
||||
msgid "No"
|
||||
|
||||
+41
-1
@@ -3,13 +3,15 @@ import asyncio
|
||||
import logging
|
||||
import sys
|
||||
from enum import IntEnum
|
||||
from typing import Optional
|
||||
from typing import Any, Coroutine, Optional, TypeVar
|
||||
|
||||
import discord
|
||||
from discord import __version__ as discord_version
|
||||
|
||||
from redbot.core.utils._internal_utils import cli_level_to_log_level
|
||||
|
||||
_T = TypeVar("_T")
|
||||
|
||||
|
||||
# This needs to be an int enum to be used
|
||||
# with sys.exit
|
||||
@@ -244,6 +246,14 @@ def parse_cli_flags(args):
|
||||
dest="logging_level",
|
||||
help="Increase the verbosity of the logs, each usage of this flag increases the verbosity level by 1.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-verbose",
|
||||
"--no-debug",
|
||||
action="store_const",
|
||||
const=0,
|
||||
dest="logging_level",
|
||||
help="Set the verbosity level to 0.",
|
||||
)
|
||||
parser.add_argument("--dev", action="store_true", help="Enables developer mode")
|
||||
parser.add_argument(
|
||||
"--mentionable",
|
||||
@@ -360,3 +370,33 @@ def parse_cli_flags(args):
|
||||
args.logging_level = cli_level_to_log_level(args.logging_level)
|
||||
|
||||
return args
|
||||
|
||||
|
||||
def asyncio_run(coro: Coroutine[Any, Any, _T]) -> _T:
|
||||
if sys.version_info >= (3, 11):
|
||||
with asyncio.Runner(loop_factory=new_event_loop) as runner:
|
||||
return runner.run(coro)
|
||||
|
||||
if sys.implementation.name == "cpython":
|
||||
# Let's not force this dependency, uvloop is much faster on cpython
|
||||
try:
|
||||
import uvloop
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
|
||||
|
||||
return asyncio.run(coro)
|
||||
|
||||
|
||||
def new_event_loop() -> asyncio.AbstractEventLoop:
|
||||
if sys.implementation.name == "cpython":
|
||||
# Let's not force this dependency, uvloop is much faster on cpython
|
||||
try:
|
||||
import uvloop
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
return uvloop.new_event_loop()
|
||||
|
||||
return asyncio.new_event_loop()
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,9 +1,10 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Any, Callable, Dict, Tuple, Union, cast
|
||||
from typing import TYPE_CHECKING, Any, Callable, Dict, Tuple, Union
|
||||
|
||||
from redbot import VersionInfo, version_info as red_version_info
|
||||
from packaging.version import Version
|
||||
|
||||
from . import installable
|
||||
from .log import log
|
||||
@@ -21,6 +22,7 @@ class UseDefault:
|
||||
|
||||
# sentinel value
|
||||
USE_DEFAULT = UseDefault()
|
||||
RED_TAG_READY_PATTERN = re.compile(r"^red-(?:[3-9]|[1-9][0-9]+)\.(?:[1-9][0-9]*)-ready$")
|
||||
|
||||
|
||||
def ensure_tuple_of_str(
|
||||
@@ -67,10 +69,10 @@ def ensure_str(info_file: Path, key_name: str, value: Union[Any, UseDefault]) ->
|
||||
return value
|
||||
|
||||
|
||||
def ensure_red_version_info(
|
||||
def create_ensure_red_version(default: Version) -> EnsureCallable:
|
||||
def ensure_red_version(
|
||||
info_file: Path, key_name: str, value: Union[Any, UseDefault]
|
||||
) -> VersionInfo:
|
||||
default = red_version_info
|
||||
) -> Version:
|
||||
if value is USE_DEFAULT:
|
||||
return default
|
||||
if not isinstance(value, str):
|
||||
@@ -83,7 +85,7 @@ def ensure_red_version_info(
|
||||
)
|
||||
return default
|
||||
try:
|
||||
version_info = VersionInfo.from_str(value)
|
||||
version_info = Version(value)
|
||||
except ValueError:
|
||||
log.warning(
|
||||
"Invalid value of '%s' key (given value isn't a valid version string)"
|
||||
@@ -94,11 +96,13 @@ def ensure_red_version_info(
|
||||
return default
|
||||
return version_info
|
||||
|
||||
return ensure_red_version
|
||||
|
||||
def ensure_python_version_info(
|
||||
|
||||
def ensure_python_version(
|
||||
info_file: Path, key_name: str, value: Union[Any, UseDefault]
|
||||
) -> Tuple[int, int, int]:
|
||||
default = (3, 5, 1)
|
||||
) -> Version:
|
||||
default = Version("3.5.1")
|
||||
if value is USE_DEFAULT:
|
||||
return default
|
||||
if not isinstance(value, list):
|
||||
@@ -130,7 +134,7 @@ def ensure_python_version_info(
|
||||
info_file,
|
||||
)
|
||||
return default
|
||||
return cast(Tuple[int, int, int], tuple(value))
|
||||
return Version(".".join(map(str, value)))
|
||||
|
||||
|
||||
def ensure_bool(
|
||||
@@ -201,6 +205,48 @@ def ensure_installable_type(
|
||||
return installable.InstallableType.UNKNOWN
|
||||
|
||||
|
||||
def ensure_tags(info_file: Path, key_name: str, value: Union[Any, UseDefault]) -> Tuple[str, ...]:
|
||||
default: Tuple[str, ...] = ()
|
||||
if value is USE_DEFAULT:
|
||||
return default
|
||||
if not isinstance(value, list):
|
||||
log.warning(
|
||||
"Invalid value of '%s' key (expected list, got %s)"
|
||||
" in JSON information file at path: %s",
|
||||
key_name,
|
||||
type(value).__name__,
|
||||
info_file,
|
||||
)
|
||||
return default
|
||||
valid_tags = []
|
||||
for item in value:
|
||||
if not isinstance(item, str):
|
||||
log.warning(
|
||||
"Invalid item in '%s' list (expected str, got %s)"
|
||||
" in JSON information file at path: %s",
|
||||
key_name,
|
||||
type(item).__name__,
|
||||
info_file,
|
||||
)
|
||||
return default
|
||||
# `red-` tags are reserved for informational metadata we only support a subset of tags
|
||||
if not item.startswith("red-"):
|
||||
valid_tags.append(item)
|
||||
continue
|
||||
if RED_TAG_READY_PATTERN.match(item):
|
||||
valid_tags.append(item)
|
||||
else:
|
||||
log.warning(
|
||||
"Invalid value in '%s' list (tag starts with the reserved 'red-' prefix"
|
||||
" but does not use the only supported reserved tag format: 'red-X.Y-ready')"
|
||||
" in JSON information file at path: %s",
|
||||
key_name,
|
||||
info_file,
|
||||
)
|
||||
|
||||
return tuple(value)
|
||||
|
||||
|
||||
EnsureCallable = Callable[[Path, str, Union[Any, UseDefault]], Any]
|
||||
SchemaType = Dict[str, EnsureCallable]
|
||||
|
||||
@@ -211,14 +257,18 @@ REPO_SCHEMA: SchemaType = {
|
||||
"short": ensure_str,
|
||||
}
|
||||
INSTALLABLE_SCHEMA: SchemaType = {
|
||||
"min_bot_version": ensure_red_version_info,
|
||||
"max_bot_version": ensure_red_version_info,
|
||||
"min_python_version": ensure_python_version_info,
|
||||
"min_bot_version": create_ensure_red_version(Version("0.0.dev0")),
|
||||
# Using little-known version epoch feature to represent something that,
|
||||
# for all practical purposes, will be considered higher than any version number
|
||||
# that we may ever have.
|
||||
# https://packaging.python.org/en/latest/specifications/version-specifiers/#version-epochs
|
||||
"max_bot_version": create_ensure_red_version(Version("99999!99999.99999.post99999+hi.mom")),
|
||||
"min_python_version": ensure_python_version,
|
||||
"hidden": ensure_bool,
|
||||
"disabled": ensure_bool,
|
||||
"required_cogs": ensure_required_cogs_mapping,
|
||||
"requirements": ensure_tuple_of_str,
|
||||
"tags": ensure_tuple_of_str,
|
||||
"tags": ensure_tags,
|
||||
"type": ensure_installable_type,
|
||||
"end_user_data_statement": ensure_str,
|
||||
}
|
||||
@@ -2,22 +2,21 @@ from __future__ import annotations
|
||||
|
||||
import functools
|
||||
import shutil
|
||||
from enum import IntEnum
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Tuple, Union, cast
|
||||
|
||||
from packaging.version import Version
|
||||
|
||||
from .log import log
|
||||
from .info_schemas import INSTALLABLE_SCHEMA, update_mixin
|
||||
from .json_mixins import RepoJSONMixin
|
||||
|
||||
from redbot.core import VersionInfo
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .repo_manager import RepoManager, Repo
|
||||
|
||||
|
||||
class InstallableType(IntEnum):
|
||||
# using IntEnum, because hot-reload breaks its identity
|
||||
class InstallableType(Enum):
|
||||
UNKNOWN = 0
|
||||
COG = 1
|
||||
SHARED_LIBRARY = 2
|
||||
@@ -45,12 +44,12 @@ class Installable(RepoJSONMixin):
|
||||
Name(s) of the author(s).
|
||||
end_user_data_statement : `str`
|
||||
End user data statement of the module.
|
||||
min_bot_version : `VersionInfo`
|
||||
min_bot_version : `packaging.version.Version`
|
||||
The minimum bot version required for this Installable.
|
||||
max_bot_version : `VersionInfo`
|
||||
max_bot_version : `packaging.version.Version`
|
||||
The maximum bot version required for this Installable.
|
||||
Ignored if `min_bot_version` is newer than `max_bot_version`.
|
||||
min_python_version : `tuple` of `int`
|
||||
min_python_version : `packaging.version.Version`
|
||||
The minimum python version required for this cog.
|
||||
hidden : `bool`
|
||||
Whether or not this cog will be hidden from the user when they use
|
||||
@@ -88,9 +87,9 @@ class Installable(RepoJSONMixin):
|
||||
self.commit = commit
|
||||
|
||||
self.end_user_data_statement: str
|
||||
self.min_bot_version: VersionInfo
|
||||
self.max_bot_version: VersionInfo
|
||||
self.min_python_version: Tuple[int, int, int]
|
||||
self.min_bot_version: Version
|
||||
self.max_bot_version: Version
|
||||
self.min_python_version: Version
|
||||
self.hidden: bool
|
||||
self.disabled: bool
|
||||
self.required_cogs: Dict[str, str] # Cog name -> repo URL
|
||||
@@ -139,7 +138,7 @@ class Installable(RepoJSONMixin):
|
||||
super()._read_info_file()
|
||||
|
||||
update_mixin(self, INSTALLABLE_SCHEMA)
|
||||
if self.type == InstallableType.SHARED_LIBRARY:
|
||||
if self.type is InstallableType.SHARED_LIBRARY:
|
||||
self.hidden = True
|
||||
|
||||
|
||||
@@ -163,7 +162,7 @@ class InstalledModule(Installable):
|
||||
json_repo_name: str = "",
|
||||
):
|
||||
super().__init__(location=location, repo=repo, commit=commit)
|
||||
self.pinned: bool = pinned if self.type == InstallableType.COG else False
|
||||
self.pinned: bool = pinned if self.type is InstallableType.COG else False
|
||||
# this is here so that Downloader could use real repo name instead of "MISSING_REPO"
|
||||
self._json_repo_name = json_repo_name
|
||||
|
||||
@@ -173,7 +172,7 @@ class InstalledModule(Installable):
|
||||
"module_name": self.name,
|
||||
"commit": self.commit,
|
||||
}
|
||||
if self.type == InstallableType.COG:
|
||||
if self.type is InstallableType.COG:
|
||||
module_json["pinned"] = self.pinned
|
||||
return module_json
|
||||
|
||||
@@ -0,0 +1,3 @@
|
||||
import logging
|
||||
|
||||
log = logging.getLogger("red.core.downloader")
|
||||
File diff suppressed because it is too large
Load Diff
@@ -2,7 +2,7 @@ import enum
|
||||
from typing import Optional, Type
|
||||
|
||||
from .. import data_manager
|
||||
from .base import IdentifierData, BaseDriver, ConfigCategory
|
||||
from .base import IdentifierData, BaseDriver, ConfigCategory, MissingExtraRequirements
|
||||
from .json import JsonDriver
|
||||
from .postgres import PostgresDriver
|
||||
|
||||
@@ -12,6 +12,7 @@ __all__ = [
|
||||
"get_driver_class_include_old",
|
||||
"ConfigCategory",
|
||||
"IdentifierData",
|
||||
"MissingExtraRequirements",
|
||||
"BaseDriver",
|
||||
"JsonDriver",
|
||||
"PostgresDriver",
|
||||
|
||||
+36
-73
@@ -1,6 +1,7 @@
|
||||
import asyncio
|
||||
import contextlib
|
||||
import platform
|
||||
import shlex
|
||||
import sys
|
||||
import logging
|
||||
import traceback
|
||||
@@ -9,8 +10,9 @@ from typing import Tuple
|
||||
|
||||
import aiohttp
|
||||
import discord
|
||||
import importlib.metadata
|
||||
from packaging.requirements import Requirement
|
||||
import redbot_update
|
||||
from packaging.specifiers import SpecifierSet
|
||||
from packaging.version import Version
|
||||
from redbot.core import data_manager
|
||||
|
||||
from redbot.core.bot import ExitCodes
|
||||
@@ -19,14 +21,13 @@ from redbot.core.i18n import (
|
||||
Translator,
|
||||
set_contextual_locales_from_guild,
|
||||
)
|
||||
from .. import __version__ as red_version, version_info as red_version_info
|
||||
from .. import __version__ as red_version
|
||||
from . import commands
|
||||
from ._config import get_latest_confs
|
||||
from .utils._internal_utils import (
|
||||
fuzzy_command_search,
|
||||
format_fuzzy_results,
|
||||
expected_version,
|
||||
fetch_latest_red_version_info,
|
||||
fetch_latest_red_version,
|
||||
send_to_owners_with_prefix_replaced,
|
||||
)
|
||||
from .utils.chat_formatting import inline, format_perms_list
|
||||
@@ -52,7 +53,7 @@ ______ _ ______ _ _ ______ _
|
||||
_ = Translator(__name__, __file__)
|
||||
|
||||
|
||||
def get_outdated_red_messages(pypi_version: str, py_version_req: str) -> Tuple[str, str]:
|
||||
def get_outdated_red_messages(pypi_version: str) -> Tuple[str, str]:
|
||||
outdated_red_message = _(
|
||||
"Your Red instance is out of date! {} is the current version, however you are using {}!"
|
||||
).format(pypi_version, red_version)
|
||||
@@ -61,7 +62,6 @@ def get_outdated_red_messages(pypi_version: str, py_version_req: str) -> Tuple[s
|
||||
f"[red]!!![/red]Version [cyan]{pypi_version}[/] is available, "
|
||||
f"but you're using [cyan]{red_version}[/][red]!!![/red]"
|
||||
)
|
||||
current_python = platform.python_version()
|
||||
extra_update = _(
|
||||
"\n\nWhile the following command should work in most scenarios as it is "
|
||||
"based on your current OS, environment, and Python version, "
|
||||
@@ -70,64 +70,15 @@ def get_outdated_red_messages(pypi_version: str, py_version_req: str) -> Tuple[s
|
||||
"needs to be done during the update.**"
|
||||
).format(docs="https://docs.discord.red/en/stable/update_red.html")
|
||||
|
||||
if not expected_version(current_python, py_version_req):
|
||||
extra_update += _(
|
||||
"\n\nYou have Python `{py_version}` and this update "
|
||||
"requires `{req_py}`; you cannot simply run the update command.\n\n"
|
||||
"You will need to follow the update instructions in our docs above, "
|
||||
"if you still need help updating after following the docs go to our "
|
||||
"#support channel in <https://discord.gg/red>"
|
||||
).format(py_version=current_python, req_py=py_version_req)
|
||||
outdated_red_message += extra_update
|
||||
return outdated_red_message, rich_outdated_message
|
||||
|
||||
red_dist = importlib.metadata.distribution("Red-DiscordBot")
|
||||
installed_extras = red_dist.metadata.get_all("Provides-Extra")
|
||||
installed_extras.remove("dev")
|
||||
installed_extras.remove("all")
|
||||
distributions = {}
|
||||
for req_str in red_dist.requires:
|
||||
req = Requirement(req_str)
|
||||
if req.marker is None or req.marker.evaluate():
|
||||
continue
|
||||
for extra in reversed(installed_extras):
|
||||
if not req.marker.evaluate({"extra": extra}):
|
||||
continue
|
||||
|
||||
# Check that the requirement is met.
|
||||
# This is a bit simplified for our purposes and does not check
|
||||
# whether the requirements of our requirements are met as well.
|
||||
# This could potentially be an issue if we'll ever depend on
|
||||
# a dependency's extra in our extra when we already depend on that
|
||||
# in our base dependencies. However, considering that right now, all
|
||||
# our dependencies are also fully pinned, this should not ever matter.
|
||||
if req.name in distributions:
|
||||
dist = distributions[req.name]
|
||||
else:
|
||||
try:
|
||||
dist = importlib.metadata.distribution(req.name)
|
||||
except importlib.metadata.PackageNotFoundError:
|
||||
dist = None
|
||||
distributions[req.name] = dist
|
||||
if dist is None or not req.specifier.contains(dist.version, prereleases=True):
|
||||
installed_extras.remove(extra)
|
||||
|
||||
if installed_extras:
|
||||
package_extras = f"[{','.join(installed_extras)}]"
|
||||
else:
|
||||
package_extras = ""
|
||||
|
||||
redbot_update_bin = redbot_update.find_redbot_update_bin()
|
||||
is_windows = platform.system() == "Windows"
|
||||
update_command = f'"{redbot_update_bin}"' if is_windows else shlex.quote(redbot_update_bin)
|
||||
extra_update += _(
|
||||
"\n\nTo update your bot, first shutdown your bot"
|
||||
" then open a window of {console} (Not as admin) and run the following:"
|
||||
"{command_1}\n"
|
||||
"Once you've started up your bot again, we recommend that"
|
||||
" you update any installed 3rd-party cogs with this command in Discord:"
|
||||
"{command_2}"
|
||||
" then open a window of {console} (Not as admin) and run the following: {command}"
|
||||
).format(
|
||||
console=_("Command Prompt") if platform.system() == "Windows" else _("Terminal"),
|
||||
command_1=f'```"{sys.executable}" -m pip install -U "Red-DiscordBot{package_extras}"```',
|
||||
command_2=f"```[p]cog update```",
|
||||
console=_("Command Prompt") if is_windows else _("Terminal"),
|
||||
command=f"```{update_command}```",
|
||||
)
|
||||
outdated_red_message += extra_update
|
||||
return outdated_red_message, rich_outdated_message
|
||||
@@ -176,14 +127,15 @@ def init_events(bot, cli_flags):
|
||||
if bot.intents.members: # Lets avoid 0 Unique Users
|
||||
table_counts.add_row("Unique Users", str(users))
|
||||
|
||||
outdated_red_message = ""
|
||||
rich_outdated_message = ""
|
||||
pypi_version, py_version_req = await fetch_latest_red_version_info()
|
||||
outdated = pypi_version and pypi_version > red_version_info
|
||||
if outdated:
|
||||
outdated_red_message, rich_outdated_message = get_outdated_red_messages(
|
||||
pypi_version, py_version_req
|
||||
)
|
||||
fetch_version_task = asyncio.create_task(fetch_latest_red_version())
|
||||
log.info("Fetching information about latest Red version...")
|
||||
try:
|
||||
await asyncio.wait_for(asyncio.shield(fetch_version_task), timeout=5)
|
||||
except asyncio.TimeoutError:
|
||||
log.info("Version information will continue to be fetched in the background...")
|
||||
except Exception:
|
||||
# these will be logged later
|
||||
pass
|
||||
|
||||
rich_console = rich.get_console()
|
||||
rich_console.print(INTRO, style="red", markup=False, highlight=False)
|
||||
@@ -209,11 +161,22 @@ def init_events(bot, cli_flags):
|
||||
rich_console.print(
|
||||
f"Looking for a quick guide on setting up Red? Checkout {Text('https://start.discord.red', style='link https://start.discord.red}')}"
|
||||
)
|
||||
if rich_outdated_message:
|
||||
rich_console.print(rich_outdated_message)
|
||||
|
||||
bot._red_ready.set()
|
||||
if outdated_red_message:
|
||||
|
||||
try:
|
||||
latest = await fetch_version_task
|
||||
except (aiohttp.ClientError, asyncio.TimeoutError) as exc:
|
||||
log.error("Failed to fetch latest version information from PyPI.", exc_info=exc)
|
||||
except (KeyError, ValueError) as exc:
|
||||
log.error("Failed to parse version metadata received from PyPI.", exc_info=exc)
|
||||
else:
|
||||
outdated = latest.version > Version(red_version)
|
||||
if outdated:
|
||||
outdated_red_message, rich_outdated_message = get_outdated_red_messages(
|
||||
latest.version
|
||||
)
|
||||
rich_console.print(rich_outdated_message)
|
||||
await send_to_owners_with_prefix_replaced(bot, outdated_red_message)
|
||||
|
||||
@bot.event
|
||||
|
||||
@@ -97,9 +97,6 @@ class RPC:
|
||||
self._runner,
|
||||
host="127.0.0.1",
|
||||
port=port,
|
||||
shutdown_timeout=120
|
||||
# Give the RPC server 2 minutes to finish up, else slap it!
|
||||
# Seems like a reasonable time. See Red#6391
|
||||
),
|
||||
)
|
||||
except Exception as exc:
|
||||
|
||||
+53
-8
@@ -1,4 +1,5 @@
|
||||
from __future__ import annotations
|
||||
import argparse
|
||||
import asyncio
|
||||
import inspect
|
||||
import logging
|
||||
@@ -37,7 +38,18 @@ import discord
|
||||
from discord.ext import commands as dpy_commands
|
||||
from discord.ext.commands import when_mentioned_or
|
||||
|
||||
from . import Config, _i18n, i18n, app_commands, commands, errors, _drivers, modlog, bank
|
||||
from . import (
|
||||
Config,
|
||||
_i18n,
|
||||
i18n,
|
||||
app_commands,
|
||||
commands,
|
||||
errors,
|
||||
_drivers,
|
||||
modlog,
|
||||
bank,
|
||||
_downloader,
|
||||
)
|
||||
from ._cli import ExitCodes
|
||||
from ._cog_manager import CogManager, CogManagerUI
|
||||
from .core_commands import Core
|
||||
@@ -69,6 +81,8 @@ CUSTOM_GROUPS = "CUSTOM_GROUPS"
|
||||
COMMAND_SCOPE = "COMMAND"
|
||||
SHARED_API_TOKENS = "SHARED_API_TOKENS"
|
||||
|
||||
_DEFAULT_DESCRIPTION = "Red V3"
|
||||
|
||||
log = logging.getLogger("red")
|
||||
|
||||
__all__ = ("Red",)
|
||||
@@ -101,7 +115,9 @@ class Red(
|
||||
): # pylint: disable=no-member # barely spurious warning caused by shadowing
|
||||
"""Our subclass of discord.ext.commands.AutoShardedBot"""
|
||||
|
||||
def __init__(self, *args, cli_flags=None, bot_dir: Path = Path.cwd(), **kwargs):
|
||||
def __init__(
|
||||
self, *args: Any, cli_flags: argparse.Namespace, bot_dir: Path = Path.cwd(), **kwargs: Any
|
||||
) -> None:
|
||||
self._shutdown_mode = ExitCodes.CRITICAL
|
||||
self._cli_flags = cli_flags
|
||||
self._config = Config.get_core_conf(force_registration=False)
|
||||
@@ -132,7 +148,7 @@ class Red(
|
||||
help__tagline="",
|
||||
help__use_tick=False,
|
||||
help__react_timeout=30,
|
||||
description="Red V3",
|
||||
description=_DEFAULT_DESCRIPTION,
|
||||
invite_public=False,
|
||||
invite_perm=0,
|
||||
invite_commands_scope=False,
|
||||
@@ -141,6 +157,7 @@ class Red(
|
||||
invoke_error_msg=None,
|
||||
extra_owner_destinations=[],
|
||||
owner_opt_out_list=[],
|
||||
last_system_info__python_prefix=None,
|
||||
last_system_info__python_version=[3, 7],
|
||||
last_system_info__machine=None,
|
||||
last_system_info__system=None,
|
||||
@@ -238,7 +255,13 @@ class Red(
|
||||
self._main_dir = bot_dir
|
||||
self._cog_mgr = CogManager()
|
||||
self._use_team_features = cli_flags.use_team_features
|
||||
super().__init__(*args, help_command=None, tree_cls=RedTree, **kwargs)
|
||||
super().__init__(
|
||||
*args,
|
||||
description=kwargs.pop("description", _DEFAULT_DESCRIPTION),
|
||||
help_command=None,
|
||||
tree_cls=RedTree,
|
||||
**kwargs,
|
||||
)
|
||||
# Do not manually use the help formatter attribute here, see `send_help_for`,
|
||||
# for a documented API. The internals of this object are still subject to change.
|
||||
self._help_formatter = commands.help.RedHelpFormatter()
|
||||
@@ -1198,14 +1221,35 @@ class Red(
|
||||
|
||||
last_system_info = await self._config.last_system_info()
|
||||
|
||||
last_python_prefix = last_system_info["python_prefix"]
|
||||
if last_python_prefix is None:
|
||||
await self._config.last_system_info.python_prefix.set(sys.prefix)
|
||||
elif last_python_prefix != sys.prefix:
|
||||
await self._config.last_system_info.python_prefix.set(sys.prefix)
|
||||
try:
|
||||
same_install = os.path.samefile(last_python_prefix, sys.prefix)
|
||||
except OSError:
|
||||
same_install = False
|
||||
if not same_install:
|
||||
if sys.prefix != sys.base_prefix:
|
||||
install_info = "in the currently used virtual environment"
|
||||
else:
|
||||
install_info = "with the currently used Python installation"
|
||||
log.warning(
|
||||
"Red seems to have been started with a different Python installation"
|
||||
" and/or virtual environment. This is not, in itself, an issue but is often"
|
||||
" done unintentionally and may explain some, otherwise unexpected, behavior."
|
||||
" This message will not be shown again, if you start Red %s again.",
|
||||
install_info,
|
||||
)
|
||||
|
||||
ver_info = list(sys.version_info[:2])
|
||||
python_version_changed = False
|
||||
LIB_PATH = cog_data_path(raw_name="Downloader") / "lib"
|
||||
if ver_info != last_system_info["python_version"]:
|
||||
await self._config.last_system_info.python_version.set(ver_info)
|
||||
if any(LIB_PATH.iterdir()):
|
||||
shutil.rmtree(str(LIB_PATH))
|
||||
LIB_PATH.mkdir()
|
||||
if any(_downloader.LIB_PATH.iterdir()):
|
||||
shutil.rmtree(str(_downloader.LIB_PATH))
|
||||
_downloader.LIB_PATH.mkdir()
|
||||
asyncio.create_task(
|
||||
send_to_owners_with_prefix_replaced(
|
||||
self,
|
||||
@@ -2502,6 +2546,7 @@ class Red(
|
||||
n_remaining = len(messages) - idx
|
||||
files_perm = (
|
||||
isinstance(channel, discord.abc.User)
|
||||
or channel.guild is None
|
||||
or channel.permissions_for(channel.guild.me).attach_files
|
||||
)
|
||||
options = ("more", "file") if files_perm else ("more",)
|
||||
|
||||
@@ -38,6 +38,7 @@ from typing import (
|
||||
|
||||
import aiohttp
|
||||
import discord
|
||||
from packaging.version import Version
|
||||
from redbot.core.data_manager import storage_type
|
||||
|
||||
from . import (
|
||||
@@ -49,10 +50,11 @@ from . import (
|
||||
i18n,
|
||||
bank,
|
||||
modlog,
|
||||
_downloader,
|
||||
)
|
||||
from ._diagnoser import IssueDiagnoser
|
||||
from .utils import AsyncIter, can_user_send_messages_in
|
||||
from .utils._internal_utils import fetch_latest_red_version_info
|
||||
from .utils._internal_utils import fetch_latest_red_version
|
||||
from .utils.predicates import MessagePredicate
|
||||
from .utils.chat_formatting import (
|
||||
box,
|
||||
@@ -215,12 +217,8 @@ class CoreLogic:
|
||||
else:
|
||||
await bot.add_loaded_package(name)
|
||||
loaded_packages.append(name)
|
||||
# remove in Red 3.4
|
||||
downloader = bot.get_cog("Downloader")
|
||||
if downloader is None:
|
||||
continue
|
||||
try:
|
||||
maybe_repo = await downloader._shared_lib_load_check(name)
|
||||
maybe_repo = await _downloader._shared_lib_load_check(name)
|
||||
except Exception:
|
||||
log.exception(
|
||||
"Shared library check failed,"
|
||||
@@ -424,8 +422,14 @@ class Core(commands.commands._RuleDropper, commands.Cog, CoreLogic):
|
||||
owner = app_info.owner
|
||||
custom_info = await self.bot._config.custom_info()
|
||||
|
||||
pypi_version, py_version_req = await fetch_latest_red_version_info()
|
||||
outdated = pypi_version and pypi_version > red_version_info
|
||||
try:
|
||||
latest = await fetch_latest_red_version()
|
||||
except (aiohttp.ClientError, TimeoutError) as exc:
|
||||
log.error("Failed to fetch latest version information from PyPI.", exc_info=exc)
|
||||
pypi_version = None
|
||||
else:
|
||||
pypi_version = latest.version
|
||||
outdated = pypi_version and pypi_version > Version(__version__)
|
||||
|
||||
if embed_links:
|
||||
dpy_version = "[{}]({})".format(discord.__version__, dpy_repo)
|
||||
|
||||
@@ -158,7 +158,7 @@ class DevOutput:
|
||||
output.append(self.formatted_exc)
|
||||
elif self.always_include_result or self.result is not None:
|
||||
try:
|
||||
result = str(self.result)
|
||||
result = str(self.result) if isinstance(self.result, str) else repr(self.result)
|
||||
# ensure that the result can be encoded (GH-6485)
|
||||
result.encode("utf-8")
|
||||
except Exception as exc:
|
||||
|
||||
Generated
+604
-604
File diff suppressed because it is too large
Load Diff
Generated
+607
-607
File diff suppressed because it is too large
Load Diff
Generated
+604
-604
File diff suppressed because it is too large
Load Diff
Generated
+607
-607
File diff suppressed because it is too large
Load Diff
Generated
+604
-604
File diff suppressed because it is too large
Load Diff
Generated
+604
-604
File diff suppressed because it is too large
Load Diff
Generated
+607
-607
File diff suppressed because it is too large
Load Diff
Generated
+604
-604
File diff suppressed because it is too large
Load Diff
Generated
+604
-604
File diff suppressed because it is too large
Load Diff
Generated
+607
-607
File diff suppressed because it is too large
Load Diff
Generated
+604
-604
File diff suppressed because it is too large
Load Diff
Generated
+604
-604
File diff suppressed because it is too large
Load Diff
Generated
+604
-604
File diff suppressed because it is too large
Load Diff
Generated
+604
-604
File diff suppressed because it is too large
Load Diff
Generated
+607
-607
File diff suppressed because it is too large
Load Diff
Generated
+604
-604
File diff suppressed because it is too large
Load Diff
Generated
+604
-604
File diff suppressed because it is too large
Load Diff
Generated
+604
-604
File diff suppressed because it is too large
Load Diff
Generated
+604
-604
File diff suppressed because it is too large
Load Diff
Generated
+1444
-897
File diff suppressed because it is too large
Load Diff
Generated
+604
-604
File diff suppressed because it is too large
Load Diff
Generated
+604
-604
File diff suppressed because it is too large
Load Diff
Generated
+607
-607
File diff suppressed because it is too large
Load Diff
Generated
+604
-604
File diff suppressed because it is too large
Load Diff
Generated
+604
-604
File diff suppressed because it is too large
Load Diff
Generated
+604
-604
File diff suppressed because it is too large
Load Diff
Generated
+604
-604
File diff suppressed because it is too large
Load Diff
Generated
+607
-607
File diff suppressed because it is too large
Load Diff
Generated
+604
-604
File diff suppressed because it is too large
Load Diff
Generated
+604
-604
File diff suppressed because it is too large
Load Diff
@@ -3,27 +3,35 @@ from __future__ import annotations
|
||||
import asyncio
|
||||
import collections.abc
|
||||
import contextlib
|
||||
import importlib.metadata
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import tarfile
|
||||
import time
|
||||
import warnings
|
||||
from datetime import datetime
|
||||
from io import BytesIO
|
||||
from pathlib import Path
|
||||
from tarfile import TarInfo
|
||||
from typing import (
|
||||
Any,
|
||||
AsyncIterable,
|
||||
AsyncIterator,
|
||||
Awaitable,
|
||||
Callable,
|
||||
Dict,
|
||||
Generator,
|
||||
Iterable,
|
||||
Iterator,
|
||||
List,
|
||||
Optional,
|
||||
Union,
|
||||
TypedDict,
|
||||
TypeVar,
|
||||
TypedDict,
|
||||
TYPE_CHECKING,
|
||||
Tuple,
|
||||
cast,
|
||||
@@ -31,13 +39,20 @@ from typing import (
|
||||
|
||||
import aiohttp
|
||||
import discord
|
||||
import yarl
|
||||
from packaging.metadata import Metadata
|
||||
from packaging.requirements import Requirement
|
||||
from packaging.specifiers import SpecifierSet
|
||||
from packaging.utils import parse_sdist_filename
|
||||
from packaging.version import Version
|
||||
import rapidfuzz
|
||||
from rich.progress import ProgressColumn
|
||||
from rich.progress_bar import ProgressBar
|
||||
import rich.progress
|
||||
from rich.console import Console
|
||||
from rich.text import Text
|
||||
from red_commons.logging import VERBOSE, TRACE
|
||||
from typing_extensions import NotRequired, Self
|
||||
|
||||
from redbot import VersionInfo
|
||||
from redbot import __version__
|
||||
from redbot.core import data_manager
|
||||
from redbot.core.utils.chat_formatting import box
|
||||
|
||||
@@ -54,15 +69,27 @@ __all__ = (
|
||||
"create_backup",
|
||||
"send_to_owners_with_preprocessor",
|
||||
"send_to_owners_with_prefix_replaced",
|
||||
"expected_version",
|
||||
"fetch_latest_red_version_info",
|
||||
"ReleaseFile",
|
||||
"AvailableVersion",
|
||||
"fetch_available_red_versions",
|
||||
"fetch_latest_red_version",
|
||||
"deprecated_removed",
|
||||
"RichIndefiniteBarColumn",
|
||||
"RichSpeedColumn",
|
||||
"detailed_progress",
|
||||
"cli_level_to_log_level",
|
||||
)
|
||||
|
||||
_T = TypeVar("_T")
|
||||
|
||||
# I guess there's nothing in allowing people to use an alternative index.
|
||||
_SIMPLE_API_URL = os.getenv("RED_SIMPLE_API_URL") or "https://pypi.org/simple/"
|
||||
# This variable should only be used for debugging purposes (hence why it starts with `_`).
|
||||
# You can debug the behavior by e.g. creating a "Red-DiscordBot.json" file,
|
||||
# starting a server with `python -m http.server` and starting Red with the following env vars:
|
||||
# RED_SIMPLE_API_URL=http://localhost:8000 _RED_SIMPLE_API_ENDPOINT_PATH=Red-DiscordBot.json
|
||||
_SIMPLE_API_ENDPOINT_PATH = os.getenv("_RED_SIMPLE_API_ENDPOINT_PATH") or "Red-DiscordBot"
|
||||
|
||||
|
||||
def safe_delete(pth: Path):
|
||||
if pth.exists():
|
||||
@@ -216,7 +243,27 @@ async def format_fuzzy_results(
|
||||
return "Perhaps you wanted one of these? " + box("\n".join(lines), lang="vhdl")
|
||||
|
||||
|
||||
def _tar_addfile_from_string(tar: tarfile.TarFile, name: str, string: str) -> None:
|
||||
encoded = string.encode("utf-8")
|
||||
fp = BytesIO(encoded)
|
||||
|
||||
# TarInfo needs `mtime` and `size`
|
||||
# https://stackoverflow.com/q/53306000
|
||||
tar_info = tarfile.TarInfo(name)
|
||||
tar_info.mtime = time.time()
|
||||
tar_info.size = len(encoded)
|
||||
|
||||
tar.addfile(tar_info, fp)
|
||||
|
||||
|
||||
class BackupDetails(TypedDict):
|
||||
backup_version: int
|
||||
|
||||
|
||||
async def create_backup(dest: Path = Path.home()) -> Optional[Path]:
|
||||
# version of backup
|
||||
BACKUP_VERSION = 2
|
||||
|
||||
data_path = Path(data_manager.core_data_path().parent)
|
||||
if not data_path.exists():
|
||||
return None
|
||||
@@ -226,36 +273,66 @@ async def create_backup(dest: Path = Path.home()) -> Optional[Path]:
|
||||
backup_fpath = dest / f"redv3_{data_manager.instance_name()}_{timestr}.tar.gz"
|
||||
|
||||
to_backup = []
|
||||
# we need trailing separator to not exclude files and folders that only start with these names
|
||||
exclusions = [
|
||||
"__pycache__",
|
||||
# Lavalink will be downloaded on Audio load
|
||||
"Lavalink.jar",
|
||||
os.path.join("Downloader", "lib"),
|
||||
os.path.join("CogManager", "cogs"),
|
||||
os.path.join("RepoManager", "repos"),
|
||||
os.path.join("Audio", "logs"),
|
||||
# cogs and repos installed through Downloader can be reinstalled using restore command
|
||||
os.path.join("Downloader", "lib", ""),
|
||||
os.path.join("CogManager", "cogs", ""),
|
||||
os.path.join("RepoManager", "repos", ""),
|
||||
os.path.join("Audio", "logs", ""),
|
||||
# these files are created during backup so we exclude them from data path backup
|
||||
os.path.join("RepoManager", "repos.json"),
|
||||
"instance.json",
|
||||
"backup_details.json",
|
||||
]
|
||||
|
||||
# Avoiding circular imports
|
||||
from ...cogs.downloader.repo_manager import RepoManager
|
||||
from redbot.core._downloader.repo_manager import RepoManager
|
||||
|
||||
repo_mgr = RepoManager()
|
||||
await repo_mgr.initialize()
|
||||
repo_output = []
|
||||
for repo in repo_mgr.repos:
|
||||
repo_output.append({"url": repo.url, "name": repo.name, "branch": repo.branch})
|
||||
repos_file = data_path / "cogs" / "RepoManager" / "repos.json"
|
||||
with repos_file.open("w") as fs:
|
||||
json.dump(repo_output, fs, indent=4)
|
||||
instance_file = data_path / "instance.json"
|
||||
with instance_file.open("w") as fs:
|
||||
json.dump({data_manager.instance_name(): data_manager.basic_config}, fs, indent=4)
|
||||
for f in data_path.glob("**/*"):
|
||||
|
||||
with rich.progress.Progress(
|
||||
rich.progress.SpinnerColumn(),
|
||||
rich.progress.TextColumn("[progress.description]{task.description}"),
|
||||
RichIndefiniteBarColumn(),
|
||||
rich.progress.TextColumn("{task.completed} files processed"),
|
||||
rich.progress.TimeElapsedColumn(),
|
||||
) as progress:
|
||||
for f in progress.track(
|
||||
data_path.glob("**/*"), description="Preparing files for backup..."
|
||||
):
|
||||
if not any(ex in str(f) for ex in exclusions) and f.is_file():
|
||||
to_backup.append(f)
|
||||
|
||||
with tarfile.open(str(backup_fpath), "w:gz") as tar:
|
||||
for f in to_backup:
|
||||
backup_details: BackupDetails = {
|
||||
"backup_version": BACKUP_VERSION,
|
||||
}
|
||||
|
||||
with tarfile.open(str(backup_fpath), "w:gz", dereference=True) as tar:
|
||||
with detailed_progress(unit="files") as progress:
|
||||
progress_tracker = progress.track(to_backup, description="Compressing data")
|
||||
for f in progress_tracker:
|
||||
tar.add(str(f), arcname=str(f.relative_to(data_path)), recursive=False)
|
||||
|
||||
# add repos backup
|
||||
repos_data = json.dumps(repo_output, indent=4)
|
||||
_tar_addfile_from_string(tar, "cogs/RepoManager/repos.json", repos_data)
|
||||
|
||||
# add instance's original data
|
||||
instance_data = json.dumps(
|
||||
{data_manager.instance_name(): data_manager.basic_config}, indent=4
|
||||
)
|
||||
_tar_addfile_from_string(tar, "instance.json", instance_data)
|
||||
|
||||
# add info about backup version
|
||||
_tar_addfile_from_string(tar, "backup_details.json", json.dumps(backup_details))
|
||||
return backup_fpath
|
||||
|
||||
|
||||
@@ -321,23 +398,232 @@ async def send_to_owners_with_prefix_replaced(bot: Red, content: str, **kwargs):
|
||||
await send_to_owners_with_preprocessor(bot, content, content_preprocessor=preprocessor)
|
||||
|
||||
|
||||
def expected_version(current: str, expected: str) -> bool:
|
||||
# Requirement needs a regular requirement string, so "x" serves as requirement's name here
|
||||
return Requirement(f"x{expected}").specifier.contains(current, prereleases=True)
|
||||
# gotta use functional TypedDict syntax due to hyphens in keys
|
||||
ReleaseFile = TypedDict(
|
||||
"ReleaseFile",
|
||||
{
|
||||
"filename": str,
|
||||
"url": str,
|
||||
"hashes": Dict[str, str],
|
||||
"requires-python": NotRequired[str],
|
||||
"core-metadata": NotRequired[Union[bool, Dict[str, str]]],
|
||||
"yanked": bool,
|
||||
"size": int,
|
||||
"upload-time": NotRequired[str],
|
||||
"provenance": NotRequired[Optional[str]],
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
async def fetch_latest_red_version_info() -> Tuple[Optional[VersionInfo], Optional[str]]:
|
||||
try:
|
||||
class AvailableVersion:
|
||||
def __init__(self, version: Version, files: Dict[str, ReleaseFile]) -> None:
|
||||
self.version = version
|
||||
self.files = files
|
||||
required_pythons = {f.get("requires-python") or "" for f in files.values()}
|
||||
if len(required_pythons) > 1:
|
||||
raise ValueError("found multiple files with different Requires-Python values")
|
||||
self.requires_python = SpecifierSet(required_pythons.pop())
|
||||
|
||||
@classmethod
|
||||
def from_json_dict(cls, data: Dict[str, Any]) -> Self:
|
||||
ret = cls(Version(data["version"]), data["files"])
|
||||
if str(ret.requires_python) != data["requires_python"]:
|
||||
raise ValueError("requires_python key in given data is inconsistent with files")
|
||||
return ret
|
||||
|
||||
def to_json_dict(self) -> Dict[str, Any]:
|
||||
return {
|
||||
"version": str(self.version),
|
||||
"requires_python": str(self.requires_python),
|
||||
"files": self.files,
|
||||
}
|
||||
|
||||
async def fetch_core_metadata(self) -> Metadata:
|
||||
for release_file in self.files.values():
|
||||
core_metadata_hashes = release_file.get("core-metadata", False)
|
||||
if core_metadata_hashes is False:
|
||||
continue
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with session.get("https://pypi.org/pypi/Red-DiscordBot/json") as r:
|
||||
data = await r.json()
|
||||
except (aiohttp.ClientError, asyncio.TimeoutError):
|
||||
return None, None
|
||||
else:
|
||||
release = VersionInfo.from_str(data["info"]["version"])
|
||||
required_python = data["info"]["requires_python"]
|
||||
async with session.get(f"{release_file['url']}.metadata") as resp:
|
||||
return Metadata.from_email(await resp.read(), validate=False)
|
||||
raise TypeError("Could not find core metadata for any of the release files.")
|
||||
|
||||
return release, required_python
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
if isinstance(other, self.__class__):
|
||||
return self.version == other.version
|
||||
return NotImplemented
|
||||
|
||||
def __ne__(self, other: Any) -> bool:
|
||||
if isinstance(other, self.__class__):
|
||||
return self.version != other.version
|
||||
return NotImplemented
|
||||
|
||||
def __lt__(self, other: Any) -> bool:
|
||||
if isinstance(other, self.__class__):
|
||||
return self.version < other.version
|
||||
return NotImplemented
|
||||
|
||||
def __le__(self, other: Any) -> bool:
|
||||
if isinstance(other, self.__class__):
|
||||
return self.version <= other.version
|
||||
return NotImplemented
|
||||
|
||||
def __gt__(self, other: Any) -> bool:
|
||||
if isinstance(other, self.__class__):
|
||||
return self.version > other.version
|
||||
return NotImplemented
|
||||
|
||||
def __ge__(self, other: Any) -> bool:
|
||||
if isinstance(other, self.__class__):
|
||||
return self.version >= other.version
|
||||
return NotImplemented
|
||||
|
||||
|
||||
async def fetch_available_red_versions(
|
||||
*, include_prereleases: Optional[bool] = None
|
||||
) -> List[AvailableVersion]:
|
||||
"""
|
||||
Fetch information about Red releases available on PyPI,
|
||||
sorted by version (latest first).
|
||||
|
||||
Parameters
|
||||
----------
|
||||
include_prereleases : bool, optional
|
||||
Whether the pre-releases should be included in the list.
|
||||
If ``None`` (the default), the pre-releases will only be included,
|
||||
if the currently running Red version is considered a pre-release.
|
||||
|
||||
Raises
|
||||
------
|
||||
aiohttp.ClientError
|
||||
An error occurred during request to PyPI.
|
||||
TimeoutError
|
||||
The request to PyPI timed out.
|
||||
ValueError
|
||||
Some part of the response was considered invalid.
|
||||
This includes issues such as incorrect response content type,
|
||||
invalid version strings, inability to find files for a release,
|
||||
and mismatching Requires-Python values.
|
||||
KeyError
|
||||
The PyPI metadata is missing some of the required information.
|
||||
"""
|
||||
if include_prereleases is None:
|
||||
include_prereleases = Version(__version__).is_prerelease
|
||||
expected_content_type = "application/vnd.pypi.simple.v1+json"
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with session.get(
|
||||
yarl.URL(_SIMPLE_API_URL) / _SIMPLE_API_ENDPOINT_PATH,
|
||||
headers={"Accept": expected_content_type},
|
||||
) as resp:
|
||||
data = await resp.json()
|
||||
content_type = resp.headers["Content-Type"]
|
||||
if not (
|
||||
content_type.startswith(expected_content_type)
|
||||
or (
|
||||
content_type.startswith("application/json")
|
||||
and data["meta"]["api-version"].startswith("1.")
|
||||
)
|
||||
):
|
||||
raise ValueError("got unexpected response from Simple Repository API")
|
||||
|
||||
files: Dict[Version, Dict[str, ReleaseFile]] = {}
|
||||
f: ReleaseFile
|
||||
for f in data["files"]:
|
||||
if f.get("yanked"):
|
||||
continue
|
||||
filename = f["filename"]
|
||||
if filename.endswith((".tar.gz", ".zip")):
|
||||
_, version = parse_sdist_filename(filename)
|
||||
elif filename.endswith(".whl"):
|
||||
# https://packaging.python.org/en/latest/specifications/binary-distribution-format/#file-name-convention
|
||||
_, raw_version, _ = filename.split("-", 2)
|
||||
version = Version(raw_version)
|
||||
else:
|
||||
continue
|
||||
if version.is_prerelease and not include_prereleases:
|
||||
continue
|
||||
version_files = files.setdefault(version, {})
|
||||
version_files[f["filename"]] = f
|
||||
|
||||
if not files:
|
||||
raise ValueError("could not find any files")
|
||||
|
||||
available_versions = [
|
||||
AvailableVersion(version, version_files) for version, version_files in files.items()
|
||||
]
|
||||
available_versions.sort(reverse=True)
|
||||
|
||||
return available_versions
|
||||
|
||||
|
||||
async def fetch_latest_red_version(
|
||||
*, include_prereleases: Optional[bool] = None
|
||||
) -> AvailableVersion:
|
||||
"""
|
||||
Fetch information about latest Red release on PyPI.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
include_prereleases : bool, optional
|
||||
Whether the pre-releases should be considered when finding the latest version.
|
||||
If ``None`` (the default), the pre-releases will only be considered,
|
||||
if the currently running Red version is considered a pre-release.
|
||||
|
||||
Raises
|
||||
------
|
||||
aiohttp.ClientError
|
||||
An error occurred during request to PyPI.
|
||||
TimeoutError
|
||||
The request to PyPI timed out.
|
||||
ValueError
|
||||
Some part of the response was considered invalid.
|
||||
This includes issues such as incorrect response content type,
|
||||
invalid version strings, inability to find files for a release,
|
||||
and mismatching Requires-Python values.
|
||||
KeyError
|
||||
The PyPI metadata is missing some of the required information.
|
||||
"""
|
||||
available_versions = await fetch_available_red_versions(
|
||||
include_prereleases=include_prereleases
|
||||
)
|
||||
return available_versions[0]
|
||||
|
||||
|
||||
def get_installed_extras() -> List[str]:
|
||||
red_dist = importlib.metadata.distribution("Red-DiscordBot")
|
||||
installed_extras = red_dist.metadata.get_all("Provides-Extra")
|
||||
if installed_extras is None:
|
||||
return []
|
||||
installed_extras.remove("dev")
|
||||
installed_extras.remove("all")
|
||||
distributions: Dict[str, Optional[importlib.metadata.Distribution]] = {}
|
||||
for req_str in red_dist.requires or []:
|
||||
req = Requirement(req_str)
|
||||
if req.marker is None or req.marker.evaluate():
|
||||
continue
|
||||
for extra in reversed(installed_extras):
|
||||
if not req.marker.evaluate({"extra": extra}):
|
||||
continue
|
||||
|
||||
# Check that the requirement is met.
|
||||
# This is a bit simplified for our purposes and does not check
|
||||
# whether the requirements of our requirements are met as well.
|
||||
# This could potentially be an issue if we'll ever depend on
|
||||
# a dependency's extra in our extra when we already depend on that
|
||||
# in our base dependencies. However, considering that right now, all
|
||||
# our dependencies are also fully pinned, this should not ever matter.
|
||||
if req.name in distributions:
|
||||
dist = distributions[req.name]
|
||||
else:
|
||||
try:
|
||||
dist = importlib.metadata.distribution(req.name)
|
||||
except importlib.metadata.PackageNotFoundError:
|
||||
dist = None
|
||||
distributions[req.name] = dist
|
||||
if dist is None or not req.specifier.contains(dist.version, prereleases=True):
|
||||
installed_extras.remove(extra)
|
||||
|
||||
return installed_extras
|
||||
|
||||
|
||||
def deprecated_removed(
|
||||
@@ -356,10 +642,10 @@ def deprecated_removed(
|
||||
)
|
||||
|
||||
|
||||
class RichIndefiniteBarColumn(ProgressColumn):
|
||||
def render(self, task):
|
||||
return ProgressBar(
|
||||
pulse=task.completed < task.total,
|
||||
class RichIndefiniteBarColumn(rich.progress.ProgressColumn):
|
||||
def render(self, task: rich.progress.Task) -> rich.progress.ProgressBar:
|
||||
return rich.progress.ProgressBar(
|
||||
pulse=task.completed < task.total if task.total is not None else True,
|
||||
animation_time=task.get_time(),
|
||||
width=40,
|
||||
total=task.total,
|
||||
@@ -367,6 +653,33 @@ class RichIndefiniteBarColumn(ProgressColumn):
|
||||
)
|
||||
|
||||
|
||||
class RichSpeedColumn(rich.progress.ProgressColumn):
|
||||
def __init__(self, *, unit: str) -> None:
|
||||
self.unit = unit
|
||||
super().__init__()
|
||||
|
||||
def render(self, task: rich.progress.Task) -> Text:
|
||||
speed = task.finished_speed or task.speed
|
||||
if speed is None:
|
||||
return Text("?", style="progress.data.speed")
|
||||
return Text(f"{int(speed)} {self.unit}/s", style="progress.data.speed")
|
||||
|
||||
|
||||
def detailed_progress(*, unit: str, console: Optional[Console] = None) -> rich.progress.Progress:
|
||||
return rich.progress.Progress(
|
||||
rich.progress.SpinnerColumn(),
|
||||
rich.progress.TextColumn("[progress.description]{task.description}"),
|
||||
rich.progress.BarColumn(bar_width=None),
|
||||
RichSpeedColumn(unit=unit),
|
||||
rich.progress.TaskProgressColumn(),
|
||||
rich.progress.TextColumn("eta"),
|
||||
rich.progress.TimeRemainingColumn(),
|
||||
rich.progress.TextColumn("elapsed"),
|
||||
rich.progress.TimeElapsedColumn(),
|
||||
console=console,
|
||||
)
|
||||
|
||||
|
||||
def cli_level_to_log_level(level: int) -> int:
|
||||
if level == 0:
|
||||
log_level = logging.INFO
|
||||
@@ -377,3 +690,15 @@ def cli_level_to_log_level(level: int) -> int:
|
||||
else:
|
||||
log_level = TRACE
|
||||
return log_level
|
||||
|
||||
|
||||
def log_level_to_cli_level(log_level: int) -> int:
|
||||
if log_level == TRACE:
|
||||
level = 3
|
||||
elif log_level == VERBOSE:
|
||||
level = 2
|
||||
elif log_level == logging.DEBUG:
|
||||
level = 1
|
||||
else:
|
||||
level = 0
|
||||
return level
|
||||
|
||||
+33
-17
@@ -282,7 +282,20 @@ class RedRichHandler(RichHandler):
|
||||
self.console.print(traceback)
|
||||
|
||||
|
||||
def init_logging(level: int, location: pathlib.Path, cli_flags: argparse.Namespace) -> None:
|
||||
_FILE_FORMATTER = logging.Formatter(
|
||||
"[{asctime}] [{levelname}] {name}: {message}", datefmt="%Y-%m-%d %H:%M:%S", style="{"
|
||||
)
|
||||
|
||||
|
||||
def init_logging(
|
||||
level: int,
|
||||
*,
|
||||
location: Optional[pathlib.Path] = None,
|
||||
rich_logging: Optional[bool] = None,
|
||||
rich_tracebacks: bool = False,
|
||||
rich_traceback_extra_lines: int = 0,
|
||||
rich_traceback_show_locals: bool = False,
|
||||
) -> None:
|
||||
root_logger = logging.getLogger()
|
||||
root_logger.setLevel(level)
|
||||
# DEBUG logging for discord.py is a bit too ridiculous :)
|
||||
@@ -312,24 +325,21 @@ def init_logging(level: int, location: pathlib.Path, cli_flags: argparse.Namespa
|
||||
|
||||
enable_rich_logging = False
|
||||
|
||||
if isatty(0) and cli_flags.rich_logging is None:
|
||||
if isatty(0) and rich_logging is None:
|
||||
# Check if the bot thinks it has a active terminal.
|
||||
enable_rich_logging = True
|
||||
elif cli_flags.rich_logging is True:
|
||||
elif rich_logging is True:
|
||||
enable_rich_logging = True
|
||||
|
||||
file_formatter = logging.Formatter(
|
||||
"[{asctime}] [{levelname}] {name}: {message}", datefmt="%Y-%m-%d %H:%M:%S", style="{"
|
||||
)
|
||||
if enable_rich_logging is True:
|
||||
rich_formatter = logging.Formatter("{message}", datefmt="[%X]", style="{")
|
||||
|
||||
stdout_handler = RedRichHandler(
|
||||
rich_tracebacks=cli_flags.rich_tracebacks,
|
||||
rich_tracebacks=rich_tracebacks,
|
||||
show_path=False,
|
||||
highlighter=NullHighlighter(),
|
||||
tracebacks_extra_lines=cli_flags.rich_traceback_extra_lines,
|
||||
tracebacks_show_locals=cli_flags.rich_traceback_show_locals,
|
||||
tracebacks_extra_lines=rich_traceback_extra_lines,
|
||||
tracebacks_show_locals=rich_traceback_show_locals,
|
||||
tracebacks_theme=(
|
||||
PygmentsSyntaxTheme(FixedMonokaiStyle)
|
||||
if rich_console.color_system == "truecolor"
|
||||
@@ -339,11 +349,22 @@ def init_logging(level: int, location: pathlib.Path, cli_flags: argparse.Namespa
|
||||
stdout_handler.setFormatter(rich_formatter)
|
||||
else:
|
||||
stdout_handler = logging.StreamHandler(sys.stdout)
|
||||
stdout_handler.setFormatter(file_formatter)
|
||||
stdout_handler.setFormatter(_FILE_FORMATTER)
|
||||
|
||||
root_logger.addHandler(stdout_handler)
|
||||
logging.captureWarnings(True)
|
||||
|
||||
if location is not None:
|
||||
init_file_logging(location)
|
||||
|
||||
if not enable_rich_logging and rich_tracebacks:
|
||||
log.warning(
|
||||
"Rich tracebacks were requested but they will not be enabled"
|
||||
" as Rich logging is not active."
|
||||
)
|
||||
|
||||
|
||||
def init_file_logging(location: pathlib.Path) -> None:
|
||||
if not location.exists():
|
||||
location.mkdir(parents=True, exist_ok=True)
|
||||
# Rotate latest logs to previous logs
|
||||
@@ -379,12 +400,7 @@ def init_logging(level: int, location: pathlib.Path, cli_flags: argparse.Namespa
|
||||
encoding="utf-8",
|
||||
)
|
||||
|
||||
root_logger = logging.getLogger()
|
||||
for fhandler in (latest_fhandler, all_fhandler):
|
||||
fhandler.setFormatter(file_formatter)
|
||||
fhandler.setFormatter(_FILE_FORMATTER)
|
||||
root_logger.addHandler(fhandler)
|
||||
|
||||
if not enable_rich_logging and cli_flags.rich_tracebacks:
|
||||
log.warning(
|
||||
"Rich tracebacks were requested but they will not be enabled"
|
||||
" as Rich logging is not active."
|
||||
)
|
||||
|
||||
@@ -172,11 +172,9 @@ def red(config_fr):
|
||||
|
||||
cli_flags = parse_cli_flags(["ignore_me"])
|
||||
|
||||
description = "Red v3 - Alpha"
|
||||
|
||||
Config.get_core_conf = lambda *args, **kwargs: config_fr
|
||||
|
||||
red = Red(cli_flags=cli_flags, description=description, dm_help=None, owner_ids=set())
|
||||
red = Red(cli_flags=cli_flags)
|
||||
|
||||
yield red
|
||||
|
||||
|
||||
@@ -6,8 +6,8 @@ import shutil
|
||||
|
||||
import pytest
|
||||
|
||||
from redbot.cogs.downloader.repo_manager import RepoManager, Repo, ProcessFormatter
|
||||
from redbot.cogs.downloader.installable import Installable, InstalledModule
|
||||
from redbot.core._downloader.repo_manager import RepoManager, Repo, ProcessFormatter
|
||||
from redbot.core._downloader.installable import Installable, InstalledModule
|
||||
|
||||
__all__ = [
|
||||
"GIT_VERSION",
|
||||
@@ -87,6 +87,7 @@ INFO_JSON = {
|
||||
"author": ("tekulvw",),
|
||||
"min_bot_version": "3.0.0",
|
||||
"max_bot_version": "3.0.2",
|
||||
"min_python_version": [3, 7, 1],
|
||||
"description": "A long description",
|
||||
"hidden": False,
|
||||
"install_msg": "A post-installation message",
|
||||
@@ -101,6 +102,7 @@ LIBRARY_INFO_JSON = {
|
||||
"author": ("seputaes",),
|
||||
"min_bot_version": "3.0.0",
|
||||
"max_bot_version": "3.0.2",
|
||||
"min_python_version": [3, 7, 1],
|
||||
"description": "A long library description",
|
||||
"hidden": False, # libraries are always hidden, this tests it will be flipped
|
||||
"install_msg": "A library install message",
|
||||
|
||||
+516
-47
@@ -1,28 +1,37 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from redbot import _early_init
|
||||
|
||||
# this needs to be called as early as possible
|
||||
_early_init()
|
||||
|
||||
import asyncio
|
||||
import functools
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
import re
|
||||
import tarfile
|
||||
from copy import deepcopy
|
||||
from pathlib import Path
|
||||
from typing import Dict, Any, Optional, Union
|
||||
from typing import Any, Dict, IO, List, NoReturn, Optional, Set, Tuple, Union
|
||||
|
||||
import click
|
||||
|
||||
import redbot.logging
|
||||
from redbot.core._cli import confirm
|
||||
from redbot.core.utils._internal_utils import (
|
||||
BackupDetails,
|
||||
safe_delete,
|
||||
create_backup as red_create_backup,
|
||||
cli_level_to_log_level,
|
||||
detailed_progress,
|
||||
)
|
||||
from redbot.core import config, data_manager
|
||||
from redbot.core import config, data_manager, _downloader
|
||||
from redbot.core._cog_manager import CogManager
|
||||
from redbot.core._config import migrate
|
||||
from redbot.core._cli import ExitCodes
|
||||
from redbot.core._cli import ExitCodes, asyncio_run
|
||||
from redbot.core.data_manager import appdir, config_dir, config_file
|
||||
from redbot.core._drivers import (
|
||||
BackendType,
|
||||
@@ -57,12 +66,16 @@ def save_config(name, data, remove=False):
|
||||
json.dump(_config, fs, indent=4)
|
||||
|
||||
|
||||
def get_default_data_path(instance_name: str) -> Path:
|
||||
return Path(appdir.user_data_dir) / "data" / instance_name
|
||||
|
||||
|
||||
def get_data_dir(*, instance_name: str, data_path: Optional[Path], interactive: bool) -> str:
|
||||
if data_path is not None:
|
||||
return str(data_path.resolve())
|
||||
data_path = Path(appdir.user_data_dir) / "data" / instance_name
|
||||
default_data_path = get_default_data_path(instance_name)
|
||||
if not interactive:
|
||||
return str(data_path.resolve())
|
||||
return str(default_data_path.resolve())
|
||||
|
||||
print(
|
||||
"We've attempted to figure out a sane default data location which is printed below."
|
||||
@@ -70,12 +83,15 @@ def get_data_dir(*, instance_name: str, data_path: Optional[Path], interactive:
|
||||
" otherwise input your desired data location."
|
||||
)
|
||||
print()
|
||||
print("Default: {}".format(data_path))
|
||||
print(f"Default: {default_data_path}")
|
||||
|
||||
while True:
|
||||
data_path_input = input("> ")
|
||||
|
||||
if data_path_input != "":
|
||||
data_path = Path(data_path_input)
|
||||
else:
|
||||
data_path = default_data_path
|
||||
|
||||
try:
|
||||
exists = data_path.exists()
|
||||
@@ -84,7 +100,7 @@ def get_data_dir(*, instance_name: str, data_path: Optional[Path], interactive:
|
||||
"We were unable to check your chosen directory."
|
||||
" Provided path may contain an invalid character."
|
||||
)
|
||||
sys.exit(ExitCodes.INVALID_CLI_USAGE)
|
||||
continue
|
||||
|
||||
if not exists:
|
||||
try:
|
||||
@@ -95,12 +111,12 @@ def get_data_dir(*, instance_name: str, data_path: Optional[Path], interactive:
|
||||
" You may need to create the directory and set proper permissions"
|
||||
" for it manually before it can be used as the data directory."
|
||||
)
|
||||
sys.exit(ExitCodes.INVALID_CLI_USAGE)
|
||||
continue
|
||||
|
||||
print(f"You have chosen {str(data_path)!r} to be your data directory.")
|
||||
if click.confirm("Please confirm", default=True):
|
||||
break
|
||||
|
||||
print("You have chosen {} to be your data directory.".format(data_path))
|
||||
if not click.confirm("Please confirm", default=True):
|
||||
print("Please start the process over.")
|
||||
sys.exit(ExitCodes.CRITICAL)
|
||||
return str(data_path.resolve())
|
||||
|
||||
|
||||
@@ -131,7 +147,6 @@ def get_storage_type(backend: Optional[str], *, interactive: bool):
|
||||
return storage_dict[storage]
|
||||
|
||||
|
||||
def get_name(name: str) -> str:
|
||||
INSTANCE_NAME_RE = re.compile(
|
||||
r"""
|
||||
[a-z0-9] # starts with letter or digit
|
||||
@@ -143,6 +158,9 @@ def get_name(name: str) -> str:
|
||||
""",
|
||||
re.VERBOSE | re.IGNORECASE,
|
||||
)
|
||||
|
||||
|
||||
def get_name(name: str = "", *, confirm_overwrite: bool = False) -> str:
|
||||
if name:
|
||||
if INSTANCE_NAME_RE.fullmatch(name) is None:
|
||||
print(
|
||||
@@ -151,9 +169,17 @@ def get_name(name: str) -> str:
|
||||
" and non-consecutive underscores (_) and periods (.)."
|
||||
)
|
||||
sys.exit(ExitCodes.INVALID_CLI_USAGE)
|
||||
if name in instance_data and not confirm_overwrite:
|
||||
print(
|
||||
"An instance with this name already exists.\n"
|
||||
"If you want to remove the existing instance and replace it with this one,"
|
||||
" run this command with --overwrite-existing-instance flag."
|
||||
)
|
||||
sys.exit(ExitCodes.INVALID_CLI_USAGE)
|
||||
return name
|
||||
|
||||
while len(name) == 0:
|
||||
name = ""
|
||||
while not name:
|
||||
print(
|
||||
"Please enter a name for your instance,"
|
||||
" it will be used to run your bot from here on out.\n"
|
||||
@@ -176,6 +202,16 @@ def get_name(name: str) -> str:
|
||||
default=False,
|
||||
):
|
||||
name = ""
|
||||
elif name in instance_data and not confirm_overwrite:
|
||||
print(
|
||||
"WARNING: An instance already exists with this name."
|
||||
" Continuing will overwrite the existing instance config."
|
||||
)
|
||||
if not click.confirm(
|
||||
"Are you absolutely certain you want to continue with this instance name?",
|
||||
default=False,
|
||||
):
|
||||
name = ""
|
||||
|
||||
print() # new line for aesthetics
|
||||
return name
|
||||
@@ -205,7 +241,7 @@ def basic_setup(
|
||||
"Hello! Before we begin, we need to gather some initial information"
|
||||
" for the new instance."
|
||||
)
|
||||
name = get_name(name)
|
||||
name = get_name(name, confirm_overwrite=overwrite_existing_instance)
|
||||
|
||||
default_data_dir = get_data_dir(
|
||||
instance_name=name, data_path=data_path, interactive=interactive
|
||||
@@ -220,26 +256,6 @@ def basic_setup(
|
||||
driver_cls = get_driver_class(storage_type)
|
||||
default_dirs["STORAGE_DETAILS"] = driver_cls.get_config_details()
|
||||
|
||||
if name in instance_data:
|
||||
if overwrite_existing_instance:
|
||||
pass
|
||||
elif interactive:
|
||||
print(
|
||||
"WARNING: An instance already exists with this name. "
|
||||
"Continuing will overwrite the existing instance config."
|
||||
)
|
||||
if not click.confirm(
|
||||
"Are you absolutely certain you want to continue?", default=False
|
||||
):
|
||||
print("Not continuing")
|
||||
sys.exit(ExitCodes.SHUTDOWN)
|
||||
else:
|
||||
print(
|
||||
"An instance with this name already exists.\n"
|
||||
"If you want to remove the existing instance and replace it with this one,"
|
||||
" run this command with --overwrite-existing-instance flag."
|
||||
)
|
||||
sys.exit(ExitCodes.INVALID_CLI_USAGE)
|
||||
save_config(name, default_dirs)
|
||||
|
||||
if interactive:
|
||||
@@ -266,11 +282,14 @@ def get_target_backend(backend: str) -> BackendType:
|
||||
|
||||
|
||||
async def do_migration(
|
||||
current_backend: BackendType, target_backend: BackendType
|
||||
current_backend: BackendType,
|
||||
target_backend: BackendType,
|
||||
new_storage_details: Optional[dict] = None,
|
||||
) -> Dict[str, Any]:
|
||||
cur_driver_cls = get_driver_class_include_old(current_backend)
|
||||
new_driver_cls = get_driver_class(target_backend)
|
||||
cur_storage_details = data_manager.storage_details()
|
||||
if new_storage_details is None:
|
||||
new_storage_details = new_driver_cls.get_config_details()
|
||||
|
||||
await cur_driver_cls.initialize(**cur_storage_details)
|
||||
@@ -368,6 +387,379 @@ async def remove_instance_interaction() -> None:
|
||||
await remove_instance(selected, interactive=True)
|
||||
|
||||
|
||||
def open_file_from_tar(tar: tarfile.TarFile, arcname: str) -> Optional[IO[bytes]]:
|
||||
try:
|
||||
fp = tar.extractfile(arcname)
|
||||
except (KeyError, tarfile.StreamError):
|
||||
return None
|
||||
return fp
|
||||
|
||||
|
||||
class RestoreInfo:
|
||||
STORAGE_BACKENDS = {
|
||||
BackendType.JSON: "JSON",
|
||||
BackendType.POSTGRES: "PostgreSQL",
|
||||
BackendType.MONGOV1: "MongoDB (unavailable)",
|
||||
BackendType.MONGO: "MongoDB (unavailable)",
|
||||
}
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
tar: tarfile.TarFile,
|
||||
backup_details: BackupDetails,
|
||||
name: str,
|
||||
data_path: Path,
|
||||
storage_type: BackendType,
|
||||
storage_details: dict,
|
||||
restore_downloader: Optional[bool] = None,
|
||||
):
|
||||
self.tar = tar
|
||||
self.backup_details = backup_details
|
||||
self.backup_version = backup_details["backup_version"]
|
||||
self.name = name
|
||||
self._data_path = data_path
|
||||
self.storage_type = storage_type
|
||||
self.storage_details = storage_details
|
||||
self._restore_downloader: Optional[bool] = restore_downloader
|
||||
self._data_path_ensure_result: Optional[bool] = None
|
||||
|
||||
@classmethod
|
||||
def from_tar(
|
||||
cls, tar: tarfile.TarFile, *, restore_downloader: Optional[bool] = None
|
||||
) -> RestoreInfo:
|
||||
instance_name, raw_data = cls.get_instance_from_backup(tar)
|
||||
backup_details = cls.get_backup_details(tar)
|
||||
|
||||
return cls(
|
||||
tar=tar,
|
||||
backup_details=backup_details,
|
||||
name=instance_name,
|
||||
data_path=Path(raw_data["DATA_PATH"]),
|
||||
storage_type=BackendType(raw_data["STORAGE_TYPE"]),
|
||||
storage_details=raw_data["STORAGE_DETAILS"],
|
||||
restore_downloader=restore_downloader,
|
||||
)
|
||||
|
||||
@staticmethod
|
||||
def get_instance_from_backup(tar: tarfile.TarFile) -> Tuple[str, dict]:
|
||||
if (fp := open_file_from_tar(tar, "instance.json")) is None:
|
||||
print("This isn't a valid backup file!")
|
||||
sys.exit(1)
|
||||
with fp:
|
||||
return json.load(fp).popitem()
|
||||
|
||||
@staticmethod
|
||||
def get_backup_details(tar: tarfile.TarFile) -> BackupDetails:
|
||||
if (fp := open_file_from_tar(tar, "backup_details.json")) is None:
|
||||
# backup version 1 doesn't have the details file
|
||||
return {"backup_version": 1}
|
||||
with fp:
|
||||
backup_details = json.load(fp)
|
||||
backup_version = backup_details.get("backup_version")
|
||||
if not isinstance(backup_version, int):
|
||||
print("This does not appear to be a valid backup.")
|
||||
sys.exit(1)
|
||||
if backup_version > 2:
|
||||
print("This backup was created using newer version of Red. Update Red to restore it.")
|
||||
sys.exit(1)
|
||||
return backup_details
|
||||
|
||||
@property
|
||||
def data_path(self) -> Path:
|
||||
return self._data_path
|
||||
|
||||
@data_path.setter
|
||||
def data_path(self, value: Path) -> None:
|
||||
self._data_path_ensure_result = None
|
||||
self._data_path = value
|
||||
|
||||
@property
|
||||
def name_used(self) -> bool:
|
||||
return self.name in instance_list
|
||||
|
||||
def ensure_data_path(self) -> bool:
|
||||
if self._data_path_ensure_result is not None:
|
||||
return self._data_path_ensure_result
|
||||
if self.data_path.is_absolute():
|
||||
try:
|
||||
# try making the dir since that's most reliant access check, if path does not exist
|
||||
self.data_path.mkdir(parents=True, exist_ok=True)
|
||||
except OSError:
|
||||
self._data_path_ensure_result = False
|
||||
else:
|
||||
# if path exists, mkdir above is a no-op so we still have to check for write access
|
||||
self._data_path_ensure_result = os.access(self.data_path, os.W_OK)
|
||||
else:
|
||||
# if path is not absolute, it's not valid on the current OS, e.g.
|
||||
# Path('D:\\data').is_absolute() is False on Linux/macOS
|
||||
# Path('/some/path').is_absolute() is False on Windows
|
||||
self._data_path_ensure_result = False
|
||||
return self._data_path_ensure_result
|
||||
|
||||
@property
|
||||
def data_path_not_empty(self) -> bool:
|
||||
if not self.ensure_data_path():
|
||||
return True
|
||||
try:
|
||||
return next(self.data_path.glob("*"), None) is not None
|
||||
except OSError:
|
||||
return True
|
||||
|
||||
@property
|
||||
def backend_unavailable(self) -> bool:
|
||||
return self.storage_type in (BackendType.MONGOV1, BackendType.MONGO)
|
||||
|
||||
@functools.cached_property
|
||||
def can_restore_downloader(self) -> bool:
|
||||
return "cogs/RepoManager/repos.json" in self.all_tar_member_names
|
||||
|
||||
@functools.cached_property
|
||||
def restore_downloader(self) -> bool:
|
||||
if self._restore_downloader is not None:
|
||||
return self.can_restore_downloader
|
||||
return self.can_restore_downloader and click.confirm(
|
||||
"Do you want to restore 3rd-party repos and cogs installed through Downloader?",
|
||||
default=True,
|
||||
)
|
||||
|
||||
@functools.cached_property
|
||||
def all_tar_members(self) -> List[tarfile.TarInfo]:
|
||||
return self.tar.getmembers()
|
||||
|
||||
@functools.cached_property
|
||||
def all_tar_member_names(self) -> List[str]:
|
||||
return [tarinfo.name for tarinfo in self.all_tar_members]
|
||||
|
||||
def get_tar_members_to_extract(self) -> List[tarfile.TarInfo]:
|
||||
ignored_members: Set[str] = {"backup_details.json", "instance.json"}
|
||||
if not self.restore_downloader:
|
||||
ignored_members |= {
|
||||
"cogs/RepoManager/repos.json",
|
||||
"cogs/RepoManager/settings.json",
|
||||
"cogs/Downloader/settings.json",
|
||||
}
|
||||
return [member for member in self.all_tar_members if member.name not in ignored_members]
|
||||
|
||||
def print_instance_data(self) -> None:
|
||||
print("\nWhen the instance was backed up, it was using these settings:")
|
||||
print(" Original instance name:", self.name)
|
||||
print(" Original data path:", self.data_path)
|
||||
print(" Original storage backend:", self.STORAGE_BACKENDS[self.storage_type])
|
||||
self.print_storage_details()
|
||||
|
||||
def print_storage_details(self, *, original: bool = True) -> None:
|
||||
if self.storage_type is BackendType.POSTGRES:
|
||||
if original:
|
||||
print(" Original storage details:")
|
||||
else:
|
||||
print(" Storage details:")
|
||||
for key in ("host", "port", "database", "user"):
|
||||
print(f" - DB {key}:", self.storage_details[key])
|
||||
print(" - DB password: ***")
|
||||
|
||||
def ask_for_changes(self, *, interactive: bool) -> None:
|
||||
if interactive:
|
||||
self._ask_for_optional_changes()
|
||||
self._ask_for_required_changes(interactive=interactive)
|
||||
|
||||
def _ask_for_optional_changes(self) -> None:
|
||||
if click.confirm("\nWould you like to change anything?"):
|
||||
if not self.name_used and click.confirm("Do you want to use different instance name?"):
|
||||
self._ask_for_name()
|
||||
if not self.data_path_not_empty and click.confirm(
|
||||
"Do you want to use different data path?"
|
||||
):
|
||||
self._ask_for_data_path()
|
||||
if not self.backend_unavailable and click.confirm(
|
||||
"Do you want to use different storage backend or change storage details?"
|
||||
):
|
||||
self._ask_for_storage()
|
||||
|
||||
@staticmethod
|
||||
def _error_and_exit(message: str) -> NoReturn:
|
||||
print(f"ERROR: {message}")
|
||||
sys.exit(1)
|
||||
|
||||
@staticmethod
|
||||
def _warning(message: str) -> None:
|
||||
print(f"WARNING: {message}")
|
||||
|
||||
@staticmethod
|
||||
def _info(message: str) -> None:
|
||||
print(f"INFO: {message}")
|
||||
|
||||
def _ask_for_required_changes(self, interactive: bool) -> None:
|
||||
p = self._warning if interactive else self._error_and_exit
|
||||
if self.name_used:
|
||||
p("Original instance name is already used by a different instance.")
|
||||
p("Continuing will overwrite the existing instance config.")
|
||||
if click.confirm("Do you want to use different instance name?", default=True):
|
||||
self._ask_for_name()
|
||||
if not self.ensure_data_path():
|
||||
p(
|
||||
"Original data path can't be used as it cannot be written to by the current user."
|
||||
" You have to choose a different path."
|
||||
)
|
||||
self._ask_for_data_path()
|
||||
elif self.data_path_not_empty:
|
||||
p(
|
||||
"Original data path can't be used as it's not empty."
|
||||
" You have to choose a different path."
|
||||
)
|
||||
self._ask_for_data_path()
|
||||
if self.backend_unavailable:
|
||||
p(
|
||||
"Original storage backend is no longer available in Red."
|
||||
" You have to choose a different backend."
|
||||
)
|
||||
self._ask_for_storage()
|
||||
|
||||
def _ask_for_name(self) -> None:
|
||||
self.name = get_name("")
|
||||
|
||||
def _ask_for_data_path(self) -> None:
|
||||
while True:
|
||||
self.data_path = Path(
|
||||
get_data_dir(instance_name=self.name, data_path=None, interactive=True)
|
||||
)
|
||||
if not self.ensure_data_path():
|
||||
print("Given path can't be used as it cannot be written to by the current user.")
|
||||
elif self.data_path_not_empty:
|
||||
print("Given path can't be used as it's not empty.")
|
||||
else:
|
||||
return
|
||||
|
||||
def _ask_for_storage(self) -> None:
|
||||
self.storage_type = get_storage_type(None, interactive=True)
|
||||
driver_cls = get_driver_class(self.storage_type)
|
||||
self.storage_details = driver_cls.get_config_details()
|
||||
|
||||
def extractall(self) -> None:
|
||||
to_extract = self.get_tar_members_to_extract()
|
||||
with detailed_progress(unit="files") as progress:
|
||||
progress_tracker = progress.track(to_extract, description="Extracting data")
|
||||
# tar.errorlevel == 0 so errors are printed to stderr
|
||||
self.tar.extractall(path=self.data_path, members=progress_tracker)
|
||||
|
||||
def get_basic_config(self, use_json: bool = False) -> dict:
|
||||
default_dirs = deepcopy(data_manager.basic_config_default)
|
||||
default_dirs["DATA_PATH"] = str(self.data_path)
|
||||
if use_json:
|
||||
default_dirs["STORAGE_TYPE"] = BackendType.JSON.value
|
||||
default_dirs["STORAGE_DETAILS"] = {}
|
||||
else:
|
||||
default_dirs["STORAGE_TYPE"] = self.storage_type.value
|
||||
default_dirs["STORAGE_DETAILS"] = self.storage_details
|
||||
return default_dirs
|
||||
|
||||
async def restore_data(self) -> None:
|
||||
self.extractall()
|
||||
|
||||
# data in backup file is using json
|
||||
save_config(self.name, self.get_basic_config(use_json=True))
|
||||
data_manager.load_basic_configuration(self.name)
|
||||
|
||||
if self.storage_type is not BackendType.JSON:
|
||||
await do_migration(BackendType.JSON, self.storage_type, self.storage_details)
|
||||
save_config(self.name, self.get_basic_config())
|
||||
data_manager.load_basic_configuration(self.name)
|
||||
|
||||
if self.restore_downloader:
|
||||
driver_cls = get_driver_class(self.storage_type)
|
||||
await driver_cls.initialize(**self.storage_details)
|
||||
try:
|
||||
await _downloader._init_without_bot(CogManager())
|
||||
await _downloader._restore_from_backup()
|
||||
finally:
|
||||
await driver_cls.teardown()
|
||||
elif self.backup_version == 1:
|
||||
self._info(
|
||||
"Downloader's data isn't included in the backup file"
|
||||
" - this backup was created with Red 3.5.24 or older."
|
||||
)
|
||||
elif not self.can_restore_downloader:
|
||||
self._warning("Downloader's data isn't included in the backup file.")
|
||||
|
||||
async def run(
|
||||
self,
|
||||
*,
|
||||
interactive: bool,
|
||||
instance_name: str = "",
|
||||
data_path: Optional[Path] = None,
|
||||
backend: Optional[BackendType] = None,
|
||||
use_sane_default_data_path: bool = False,
|
||||
) -> None:
|
||||
storage_details = {}
|
||||
if backend:
|
||||
driver_cls = get_driver_class(backend)
|
||||
storage_details = driver_cls.get_config_details()
|
||||
print("\n---")
|
||||
self.print_instance_data()
|
||||
|
||||
if use_sane_default_data_path:
|
||||
data_path = get_default_data_path(instance_name or self.name)
|
||||
if instance_name or data_path or backend:
|
||||
print("\nThe following settings have been overridden with command options:")
|
||||
if instance_name:
|
||||
self.name = instance_name
|
||||
print(" Instance name:", instance_name)
|
||||
if data_path:
|
||||
self.data_path = data_path
|
||||
print(" Data path:", data_path)
|
||||
if backend:
|
||||
self.storage_type = backend
|
||||
self.storage_details = storage_details
|
||||
print(" Storage backend:", self.STORAGE_BACKENDS[backend])
|
||||
self.print_storage_details(original=False)
|
||||
|
||||
self.ask_for_changes(interactive=interactive)
|
||||
await self.restore_data()
|
||||
|
||||
print("Restore process has been completed.")
|
||||
|
||||
|
||||
async def restore_instance(
|
||||
backup_path: Path,
|
||||
*,
|
||||
interactive: bool,
|
||||
skip_downloader_restore: bool,
|
||||
instance_name: str,
|
||||
data_path: Optional[Path],
|
||||
use_sane_default_data_path: bool = False,
|
||||
backend: Optional[str],
|
||||
) -> None:
|
||||
try:
|
||||
tar = tarfile.open(backup_path)
|
||||
except tarfile.ReadError:
|
||||
print(
|
||||
"We couldn't open the given backup file. Make sure that you're passing correct file."
|
||||
)
|
||||
return
|
||||
|
||||
print("Hello! This command will guide you through restore process.")
|
||||
if interactive:
|
||||
restore_downloader = False if skip_downloader_restore else None
|
||||
else:
|
||||
restore_downloader = not skip_downloader_restore
|
||||
with tar:
|
||||
# The filter functionality exists on Python 3.11.4+.
|
||||
# We'll use the value consistent with the 3.11's default
|
||||
# since there's no reason we shouldn't trust the archive
|
||||
# that we generated ourselves.
|
||||
tar.extraction_filter = getattr(tarfile, "fully_trusted_filter", None)
|
||||
restore_info = RestoreInfo.from_tar(
|
||||
tar,
|
||||
restore_downloader=restore_downloader,
|
||||
)
|
||||
await restore_info.run(
|
||||
interactive=interactive,
|
||||
instance_name=instance_name,
|
||||
data_path=data_path,
|
||||
use_sane_default_data_path=use_sane_default_data_path,
|
||||
backend=get_target_backend(backend) if backend else None,
|
||||
)
|
||||
|
||||
|
||||
@click.group(invoke_without_command=True)
|
||||
@click.option(
|
||||
"--debug",
|
||||
@@ -436,15 +828,9 @@ def cli(
|
||||
overwrite_existing_instance: bool,
|
||||
) -> None:
|
||||
"""Create a new instance."""
|
||||
|
||||
level = cli_level_to_log_level(debug)
|
||||
base_logger = logging.getLogger("red")
|
||||
base_logger.setLevel(level)
|
||||
formatter = logging.Formatter(
|
||||
"[{asctime}] [{levelname}] {name}: {message}", datefmt="%Y-%m-%d %H:%M:%S", style="{"
|
||||
)
|
||||
stdout_handler = logging.StreamHandler(sys.stdout)
|
||||
stdout_handler.setFormatter(formatter)
|
||||
base_logger.addHandler(stdout_handler)
|
||||
redbot.logging.init_logging(level)
|
||||
|
||||
if ctx.invoked_subcommand is None:
|
||||
basic_setup(
|
||||
@@ -514,7 +900,7 @@ def delete(
|
||||
remove_datapath: Optional[bool],
|
||||
) -> None:
|
||||
"""Removes an instance."""
|
||||
asyncio.run(
|
||||
asyncio_run(
|
||||
remove_instance(
|
||||
instance, interactive, delete_data, _create_backup, drop_db, remove_datapath
|
||||
)
|
||||
@@ -536,7 +922,7 @@ def convert(instance: str, backend: str) -> None:
|
||||
if current_backend == BackendType.MONGOV1:
|
||||
raise RuntimeError("Please see the 3.2 release notes for upgrading a bot using mongo.")
|
||||
else:
|
||||
new_storage_details = asyncio.run(do_migration(current_backend, target))
|
||||
new_storage_details = asyncio_run(do_migration(current_backend, target))
|
||||
|
||||
if new_storage_details is not None:
|
||||
default_dirs["STORAGE_TYPE"] = target.value
|
||||
@@ -560,7 +946,90 @@ def convert(instance: str, backend: str) -> None:
|
||||
)
|
||||
def backup(instance: str, destination_folder: Path) -> None:
|
||||
"""Backup instance's data."""
|
||||
asyncio.run(create_backup(instance, destination_folder))
|
||||
asyncio_run(create_backup(instance, destination_folder))
|
||||
|
||||
|
||||
@cli.command()
|
||||
@click.argument(
|
||||
"backup_file",
|
||||
type=click.Path(file_okay=True, resolve_path=True, readable=True, path_type=Path),
|
||||
metavar="<BACKUP_FILE>",
|
||||
)
|
||||
@click.option(
|
||||
"--no-prompt",
|
||||
"interactive",
|
||||
is_flag=True,
|
||||
default=True,
|
||||
help="Don't ask for user input during the process. Most of the values",
|
||||
)
|
||||
@click.option(
|
||||
"--no-restore-downloader",
|
||||
"skip_downloader_restore",
|
||||
is_flag=True,
|
||||
default=False,
|
||||
help="Skip restoring of 3rd-party repos and cogs installed through Downloader.",
|
||||
)
|
||||
@click.option(
|
||||
"--instance-name",
|
||||
type=str,
|
||||
default="",
|
||||
help=(
|
||||
"Name of the new instance. By default, the name stored in the backup will be used"
|
||||
" and, if the --no-prompt option was not specified, you will be able to change this"
|
||||
" before restoring"
|
||||
),
|
||||
)
|
||||
@click.option(
|
||||
"--data-path",
|
||||
type=click.Path(exists=False, dir_okay=True, file_okay=False, writable=True, path_type=Path),
|
||||
default=None,
|
||||
help=(
|
||||
"Data path of the new instance. If this option and --no-prompt are omitted,"
|
||||
" you will be asked for this."
|
||||
),
|
||||
)
|
||||
@click.option(
|
||||
"--use-sane-default-data-path",
|
||||
is_flag=True,
|
||||
default=False,
|
||||
help=(
|
||||
"Use the sane default data path derived from the instance name instead of using data path"
|
||||
" from the backup or specifying --data-path option."
|
||||
),
|
||||
)
|
||||
@click.option(
|
||||
"--backend",
|
||||
type=click.Choice(["json", "postgres"]),
|
||||
default=None,
|
||||
help=(
|
||||
"Choose a backend type for the new instance."
|
||||
" By default, the backend of the backed up instance will be used"
|
||||
" and, if the --no-prompt option was not specified, you will be able to change this"
|
||||
" before restoring.\n"
|
||||
"Note: Choosing PostgreSQL will prevent the setup from being completely non-interactive."
|
||||
),
|
||||
)
|
||||
def restore(
|
||||
backup_file: Path,
|
||||
interactive: bool,
|
||||
skip_downloader_restore: bool,
|
||||
instance_name: str,
|
||||
data_path: Optional[Path],
|
||||
use_sane_default_data_path: bool,
|
||||
backend: Optional[str],
|
||||
) -> None:
|
||||
"""Restore instance."""
|
||||
asyncio.run(
|
||||
restore_instance(
|
||||
backup_file,
|
||||
interactive=interactive,
|
||||
skip_downloader_restore=skip_downloader_restore,
|
||||
instance_name=instance_name,
|
||||
data_path=data_path,
|
||||
use_sane_default_data_path=use_sane_default_data_path,
|
||||
backend=backend,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def run_cli():
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user