[V3] Optimize the backup command (#1666)

* [V3 Core] Enhance [p]backup to exclude some files

* Backup the repo list too

* Lol Sinbad's pre-commit hook

* Add option of sending the backup to the owner via DM

* Drop an unnecessary config object in RepoManager

* Move the backup functionality in redbot-setup to the new stuff

* More work on implementation, including backing up the instance data
This commit is contained in:
palmtree5 2018-05-27 20:56:28 -08:00 committed by Kowlin
parent f01d48f9ae
commit 7775b16199
5 changed files with 99 additions and 38 deletions

View File

@@ -50,7 +50,7 @@ class Downloader:
if str(self.LIB_PATH) not in syspath: if str(self.LIB_PATH) not in syspath:
syspath.insert(1, str(self.LIB_PATH)) syspath.insert(1, str(self.LIB_PATH))
self._repo_manager = RepoManager(self.conf) self._repo_manager = RepoManager()
async def cog_install_path(self): async def cog_install_path(self):
"""Get the current cog install path. """Get the current cog install path.

View File

@@ -496,8 +496,7 @@ class Repo(RepoJSONMixin):
class RepoManager: class RepoManager:
def __init__(self, downloader_config: Config): def __init__(self):
self.downloader_config = downloader_config
self._repos = {} self._repos = {}

View File

@@ -2,6 +2,7 @@ import asyncio
import datetime import datetime
import importlib import importlib
import itertools import itertools
import json
import logging import logging
import os import os
import sys import sys
@@ -951,7 +952,7 @@ class Core:
@commands.command() @commands.command()
@checks.is_owner() @checks.is_owner()
async def backup(self, ctx): async def backup(self, ctx, backup_path: str = None):
"""Creates a backup of all data for the instance.""" """Creates a backup of all data for the instance."""
from redbot.core.data_manager import basic_config, instance_name from redbot.core.data_manager import basic_config, instance_name
from redbot.core.drivers.red_json import JSON from redbot.core.drivers.red_json import JSON
@@ -979,13 +980,58 @@ class Core:
instance_name, ctx.message.created_at.strftime("%Y-%m-%d %H-%M-%S") instance_name, ctx.message.created_at.strftime("%Y-%m-%d %H-%M-%S")
) )
if data_dir.exists(): if data_dir.exists():
home = data_dir.home() if not backup_path:
backup_file = home / backup_filename backup_pth = data_dir.home()
os.chdir(str(data_dir.parent)) else:
backup_pth = Path(backup_path)
backup_file = backup_pth / backup_filename
to_backup = []
exclusions = [
"__pycache__",
"Lavalink.jar",
os.path.join("Downloader", "lib"),
os.path.join("CogManager", "cogs"),
os.path.join("RepoManager", "repos"),
]
downloader_cog = ctx.bot.get_cog("Downloader")
if downloader_cog and hasattr(downloader_cog, "_repo_manager"):
repo_output = []
repo_mgr = downloader_cog._repo_manager
for n, repo in repo_mgr._repos:
repo_output.append(
{"url": repo.url, "name": repo.name, "branch": repo.branch}
)
repo_filename = data_dir / "cogs" / "RepoManager" / "repos.json"
with open(str(repo_filename), "w") as f:
f.write(json.dumps(repo_output, indent=4))
instance_data = {instance_name: basic_config}
instance_file = data_dir / "instance.json"
with open(str(instance_file), "w") as instance_out:
instance_out.write(json.dumps(instance_data, indent=4))
for f in data_dir.glob("**/*"):
if not any(ex in str(f) for ex in exclusions):
to_backup.append(f)
with tarfile.open(str(backup_file), "w:gz") as tar: with tarfile.open(str(backup_file), "w:gz") as tar:
tar.add(data_dir.stem) for f in to_backup:
tar.add(str(f), recursive=False)
print(str(backup_file))
await ctx.send( await ctx.send(
_("A backup has been made of this instance. It is at {}.").format(backup_file) _("A backup has been made of this instance. It is at {}.").format((backup_file))
)
await ctx.send(_("Would you like to receive a copy via DM? (y/n)"))
def same_author_check(m):
return m.author == ctx.author and m.channel == ctx.channel
try:
msg = await ctx.bot.wait_for("message", check=same_author_check, timeout=60)
except asyncio.TimeoutError:
await ctx.send(_("Ok then."))
else:
if msg.content.lower().strip() == "y":
await ctx.author.send(
_("Here's a copy of the backup"), file=discord.File(str(backup_file))
) )
else: else:
await ctx.send(_("That directory doesn't seem to exist...")) await ctx.send(_("That directory doesn't seem to exist..."))

View File

@@ -12,7 +12,12 @@ from pathlib import Path
import appdirs import appdirs
from redbot.core.cli import confirm from redbot.core.cli import confirm
from redbot.core.data_manager import basic_config_default from redbot.core.data_manager import (
basic_config_default,
load_basic_configuration,
instance_name,
basic_config,
)
from redbot.core.json_io import JsonIO from redbot.core.json_io import JsonIO
from redbot.core.utils import safe_delete from redbot.core.utils import safe_delete
from redbot.core.drivers.red_json import JSON from redbot.core.drivers.red_json import JSON
@@ -287,33 +292,45 @@ async def edit_instance():
async def create_backup(selected, instance_data): async def create_backup(selected, instance_data):
if confirm("Would you like to make a backup of the data for this instance? (y/n)"): if confirm("Would you like to make a backup of the data for this instance? (y/n)"):
load_basic_configuration(selected)
if instance_data["STORAGE_TYPE"] == "MongoDB": if instance_data["STORAGE_TYPE"] == "MongoDB":
print("Backing up the instance's data...")
await mongo_to_json(instance_data["DATA_PATH"], instance_data["STORAGE_DETAILS"]) await mongo_to_json(instance_data["DATA_PATH"], instance_data["STORAGE_DETAILS"])
backup_filename = "redv3-{}-{}.tar.gz".format(
selected, dt.utcnow().strftime("%Y-%m-%d %H-%M-%S")
)
pth = Path(instance_data["DATA_PATH"])
if pth.exists():
home = pth.home()
backup_file = home / backup_filename
os.chdir(str(pth.parent))
with tarfile.open(str(backup_file), "w:gz") as tar:
tar.add(pth.stem)
print("A backup of {} has been made. It is at {}".format(selected, backup_file))
else:
print("Backing up the instance's data...") print("Backing up the instance's data...")
backup_filename = "redv3-{}-{}.tar.gz".format( backup_filename = "redv3-{}-{}.tar.gz".format(
selected, dt.utcnow().strftime("%Y-%m-%d %H-%M-%S") selected, dt.utcnow().strftime("%Y-%m-%d %H-%M-%S")
) )
pth = Path(instance_data["DATA_PATH"]) pth = Path(instance_data["DATA_PATH"])
if pth.exists(): if pth.exists():
home = pth.home() backup_pth = pth.home()
backup_file = home / backup_filename backup_file = backup_pth / backup_filename
os.chdir(str(pth.parent)) # str is used here because 3.5 support
to_backup = []
exclusions = [
"__pycache__",
"Lavalink.jar",
os.path.join("Downloader", "lib"),
os.path.join("CogManager", "cogs"),
os.path.join("RepoManager", "repos"),
]
from redbot.cogs.downloader.repo_manager import RepoManager
repo_mgr = RepoManager()
repo_output = []
for _, repo in repo_mgr._repos:
repo_output.append({"url": repo.url, "name": repo.name, "branch": repo.branch})
repo_filename = pth / "cogs" / "RepoManager" / "repos.json"
with open(str(repo_filename), "w") as f:
f.write(json.dumps(repo_output, indent=4))
instance_data = {instance_name: basic_config}
instance_file = pth / "instance.json"
with open(str(instance_file), "w") as instance_out:
instance_out.write(json.dumps(instance_data, indent=4))
for f in pth.glob("**/*"):
if not any(ex in str(f) for ex in exclusions):
to_backup.append(f)
with tarfile.open(str(backup_file), "w:gz") as tar: with tarfile.open(str(backup_file), "w:gz") as tar:
tar.add(pth.stem) # add all files in that directory for f in to_backup:
tar.add(str(f), recursive=False)
print("A backup of {} has been made. It is at {}".format(selected, backup_file)) print("A backup of {} has been made. It is at {}".format(selected, backup_file))

View File

@@ -33,9 +33,8 @@ def patch_relative_to(monkeysession):
@pytest.fixture @pytest.fixture
def repo_manager(tmpdir_factory, config): def repo_manager(tmpdir_factory):
config.register_global(repos={}) rm = RepoManager()
rm = RepoManager(config)
# rm.repos_folder = Path(str(tmpdir_factory.getbasetemp())) / 'repos' # rm.repos_folder = Path(str(tmpdir_factory.getbasetemp())) / 'repos'
return rm return rm