Mirror of https://github.com/Cog-Creators/Red-DiscordBot.git, synced 2025-11-21 10:17:59 -05:00
[V3 Setup] Overhaul backend conversion process through setup scripts (#2579)
* swap to click for setup
* Initial changes
* expose some stuff to allow for per-driver optimizations
* overwrite base config
* add red log
* add one print juuuust in case
* fix this
* thanks kowlin
* damn
* oops
* fix thing
* partial commit
* Working mongo -> json conversion, it sucks tho
* remove unused line
* Wrote initial optimized json importer
* optimized json importer
* remove useless line
* update mongo to json converter
* lets try writing the correct entry
* oops
* style fix
* add some garbage data filters going from old mongo to json
* ignore garbage data in mongov2 conversions
* simplify code a bit and add a completion message
* missed one
* Update pipfile lock
* Lock click version
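For orientation: the diff below replaces the argparse-based main() with a click group named cli that exposes delete and convert subcommands. What follows is a minimal, hypothetical sketch (not part of the commit) of driving that group programmatically through click's CliRunner; the instance name "myinstance" is a placeholder and is assumed to already exist in Red's instance config.

# Hypothetical usage sketch, not part of this commit. It exercises the click
# group added in the diff below via click's test runner.
from click.testing import CliRunner

from redbot.setup import cli  # the click group defined in redbot/setup.py

runner = CliRunner()

# Roughly equivalent to `redbot-setup convert myinstance json` on a shell,
# assuming the redbot-setup console script points at this group: converts the
# instance's storage backend to JSON and saves the updated config on success.
result = runner.invoke(cli, ["convert", "myinstance", "json"])
print(result.exit_code, result.output)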
redbot/setup.py (306 changed lines)
@@ -1,5 +1,4 @@
#!/usr/bin/env python3
import argparse
import asyncio
import json
import os
@@ -8,19 +7,30 @@ import tarfile
from copy import deepcopy
from datetime import datetime as dt
from pathlib import Path
import logging

import appdirs
import click

import redbot.logging
from redbot.core.cli import confirm
from redbot.core.data_manager import (
    basic_config_default,
    load_basic_configuration,
    instance_name,
    basic_config,
    cog_data_path,
    core_data_path,
    storage_details,
)
from redbot.core.json_io import JsonIO
from redbot.core.utils import safe_delete
from redbot.core import Config
from redbot.core.drivers import BackendType, IdentifierData
from redbot.core.drivers.red_json import JSON

conversion_log = logging.getLogger("red.converter")

config_dir = None
appdir = appdirs.AppDirs("Red-DiscordBot")
if sys.platform == "linux":
@@ -36,15 +46,6 @@ except PermissionError:
config_file = config_dir / "config.json"


def parse_cli_args():
    parser = argparse.ArgumentParser(description="Red - Discord Bot's instance manager (V3)")
    parser.add_argument(
        "--delete", "-d", help="Interactively delete an instance", action="store_true"
    )
    parser.add_argument("--edit", "-e", help="Interactively edit an instance", action="store_true")
    return parser.parse_known_args()


def load_existing_config():
    if not config_file.exists():
        return {}
@@ -52,6 +53,13 @@ def load_existing_config():
    return JsonIO(config_file)._load_json()


instance_data = load_existing_config()
if instance_data is None:
    instance_list = []
else:
    instance_list = list(instance_data.keys())


def save_config(name, data, remove=False):
    config = load_existing_config()
    if remove and name in config:
@@ -173,37 +181,138 @@ def basic_setup():
    )


async def json_to_mongo(current_data_dir: Path, storage_details: dict):
    from redbot.core.drivers.red_mongo import Mongo
def get_current_backend(instance) -> BackendType:
    return BackendType(instance_data[instance]["STORAGE_TYPE"])


def get_target_backend(backend) -> BackendType:
    if backend == "json":
        return BackendType.JSON
    elif backend == "mongo":
        return BackendType.MONGO


async def json_to_mongov2(instance):
    instance_vals = instance_data[instance]
    current_data_dir = Path(instance_vals["DATA_PATH"])

    load_basic_configuration(instance)

    from redbot.core.drivers import red_mongo

    storage_details = red_mongo.get_config_details()

    core_conf = Config.get_core_conf()
    new_driver = red_mongo.Mongo(cog_name="Core", identifier="0", **storage_details)

    core_conf.init_custom("CUSTOM_GROUPS", 2)
    custom_group_data = await core_conf.custom("CUSTOM_GROUPS").all()

    curr_custom_data = custom_group_data.get("Core", {}).get("0", {})
    exported_data = await core_conf.driver.export_data(curr_custom_data)
    conversion_log.info("Starting Core conversion...")
    await new_driver.import_data(exported_data, curr_custom_data)
    conversion_log.info("Core conversion complete.")

    core_data_file = current_data_dir / "core" / "settings.json"
    driver = Mongo(cog_name="Core", identifier="0", **storage_details)
    with core_data_file.open(mode="r") as f:
        core_data = json.loads(f.read())
    data = core_data.get("0", {})
    for key, value in data.items():
        await driver.set(key, value=value)
    for p in current_data_dir.glob("cogs/**/settings.json"):
        cog_name = p.parent.stem
        if "." in cog_name:
            # Garbage handler
            continue
        with p.open(mode="r") as f:
            cog_data = json.load(f)
        for identifier, data in cog_data.items():
            driver = Mongo(cog_name, identifier, **storage_details)
            for key, value in data.items():
                await driver.set(key, value=value)
        for identifier, all_data in cog_data.items():
            try:
                conf = Config.get_conf(None, int(identifier), cog_name=cog_name)
            except ValueError:
                continue
            new_driver = red_mongo.Mongo(
                cog_name=cog_name, identifier=conf.driver.unique_cog_identifier, **storage_details
            )

            curr_custom_data = custom_group_data.get(cog_name, {}).get(identifier, {})

            exported_data = await conf.driver.export_data(curr_custom_data)
            conversion_log.info(f"Converting {cog_name} with identifier {identifier}...")
            await new_driver.import_data(exported_data, curr_custom_data)

    conversion_log.info("Cog conversion complete.")

    return storage_details


async def mongo_to_json(current_data_dir: Path, storage_details: dict):
async def mongov2_to_json(instance):
    load_basic_configuration(instance)

    core_path = core_data_path()

    from redbot.core.drivers import red_json

    core_conf = Config.get_core_conf()
    new_driver = red_json.JSON(cog_name="Core", identifier="0", data_path_override=core_path)

    core_conf.init_custom("CUSTOM_GROUPS", 2)
    custom_group_data = await core_conf.custom("CUSTOM_GROUPS").all()

    curr_custom_data = custom_group_data.get("Core", {}).get("0", {})
    exported_data = await core_conf.driver.export_data(curr_custom_data)
    conversion_log.info("Starting Core conversion...")
    await new_driver.import_data(exported_data, curr_custom_data)
    conversion_log.info("Core conversion complete.")

    collection_names = await core_conf.driver.db.list_collection_names()
    splitted_names = list(
        filter(
            lambda elem: elem[1] != "" and elem[0] != "Core",
            [n.split(".") for n in collection_names],
        )
    )

    ident_map = {}  # Cogname: idents list
    for cog_name, category in splitted_names:
        if cog_name not in ident_map:
            ident_map[cog_name] = set()

        idents = await core_conf.driver.db[cog_name][category].distinct("_id.RED_uuid")
        ident_map[cog_name].update(set(idents))

    for cog_name, idents in ident_map.items():
        for identifier in idents:
            curr_custom_data = custom_group_data.get(cog_name, {}).get(identifier, {})
            try:
                conf = Config.get_conf(None, int(identifier), cog_name=cog_name)
            except ValueError:
                continue
            exported_data = await conf.driver.export_data(curr_custom_data)

            new_path = cog_data_path(raw_name=cog_name)
            new_driver = red_json.JSON(cog_name, identifier, data_path_override=new_path)
            conversion_log.info(f"Converting {cog_name} with identifier {identifier}...")
            await new_driver.import_data(exported_data, curr_custom_data)

    # cog_data_path(raw_name=cog_name)

    conversion_log.info("Cog conversion complete.")

    return {}


async def mongo_to_json(instance):
    load_basic_configuration(instance)

    from redbot.core.drivers.red_mongo import Mongo

    m = Mongo("Core", "0", **storage_details)
    m = Mongo("Core", "0", **storage_details())
    db = m.db
    collection_names = await db.list_collection_names()
    for collection_name in collection_names:
        if collection_name == "Core":
            c_data_path = current_data_dir / "core"
        if "." in collection_name:
            # Fix for one of Zeph's problems
            continue
        elif collection_name == "Core":
            c_data_path = core_data_path()
        else:
            c_data_path = current_data_dir / "cogs" / collection_name
            c_data_path = cog_data_path(raw_name=collection_name)
        c_data_path.mkdir(parents=True, exist_ok=True)
        # Every cog name has its own collection
        collection = db[collection_name]
@@ -212,9 +321,16 @@ async def mongo_to_json(current_data_dir: Path, storage_details: dict):
            # This means if two cogs have the same name but different identifiers, they will
            # be two separate documents in the same collection
            cog_id = document.pop("_id")
            if not isinstance(cog_id, str):
                # Another garbage data check
                continue
            elif not str(cog_id).isdigit():
                continue
            driver = JSON(collection_name, cog_id, data_path_override=c_data_path)
            for key, value in document.items():
                await driver.set(key, value=value)
            for category, value in document.items():
                ident_data = IdentifierData(str(cog_id), category, (), (), {})
                await driver.set(ident_data, value=value)
    return {}


async def edit_instance():
@@ -257,32 +373,6 @@ async def edit_instance():
    else:
        default_dirs["DATA_PATH"] = str(current_data_dir.resolve())

    if confirm("Would you like to change the storage type? (y/n):"):
        storage = get_storage_type()

        storage_dict = {1: "JSON", 2: "MongoDBV2"}
        default_dirs["STORAGE_TYPE"] = storage_dict[storage]
        if storage_dict.get(storage, 1) == "MongoDBV2":
            from redbot.core.drivers.red_mongo import get_config_details

            storage_details = get_config_details()
            default_dirs["STORAGE_DETAILS"] = storage_details

            if instance_data["STORAGE_TYPE"] == "JSON":
                raise NotImplementedError("We cannot convert from JSON to MongoDB at this time.")
                # if confirm("Would you like to import your data? (y/n) "):
                # await json_to_mongo(current_data_dir, storage_details)
        elif storage_dict.get(storage, 1) == "JSON":
            storage_details = instance_data["STORAGE_DETAILS"]
            default_dirs["STORAGE_DETAILS"] = {}
            if instance_data["STORAGE_TYPE"] == "MongoDB":
                if confirm("Would you like to import your data? (y/n) "):
                    await mongo_to_json(current_data_dir, storage_details)
            elif instance_data["STORAGE_TYPE"] == "MongoDBV2":
                raise NotImplementedError(
                    "We cannot convert from this version of MongoDB to JSON at this time."
                )

    if name != selected:
        save_config(selected, {}, remove=True)
    save_config(name, default_dirs)
@@ -290,16 +380,17 @@ async def edit_instance():
    print("Your basic configuration has been edited")


async def create_backup(selected, instance_data):
async def create_backup(instance):
    instance_vals = instance_data[instance]
    if confirm("Would you like to make a backup of the data for this instance? (y/n)"):
        load_basic_configuration(selected)
        if instance_data["STORAGE_TYPE"] == "MongoDB":
            await mongo_to_json(instance_data["DATA_PATH"], instance_data["STORAGE_DETAILS"])
        load_basic_configuration(instance)
        if instance_vals["STORAGE_TYPE"] == "MongoDB":
            await mongo_to_json(instance)
        print("Backing up the instance's data...")
        backup_filename = "redv3-{}-{}.tar.gz".format(
            selected, dt.utcnow().strftime("%Y-%m-%d %H-%M-%S")
            instance, dt.utcnow().strftime("%Y-%m-%d %H-%M-%S")
        )
        pth = Path(instance_data["DATA_PATH"])
        pth = Path(instance_vals["DATA_PATH"])
        if pth.exists():
            backup_pth = pth.home()
            backup_file = backup_pth / backup_filename
@@ -321,24 +412,27 @@ async def create_backup(selected, instance_data):
            repo_filename = pth / "cogs" / "RepoManager" / "repos.json"
            with open(str(repo_filename), "w") as f:
                f.write(json.dumps(repo_output, indent=4))
            instance_data = {instance_name: basic_config}
            instance_vals = {instance_name: basic_config}
            instance_file = pth / "instance.json"
            with open(str(instance_file), "w") as instance_out:
                instance_out.write(json.dumps(instance_data, indent=4))
                instance_out.write(json.dumps(instance_vals, indent=4))
            for f in pth.glob("**/*"):
                if not any(ex in str(f) for ex in exclusions):
                    to_backup.append(f)
            with tarfile.open(str(backup_file), "w:gz") as tar:
                for f in to_backup:
                    tar.add(str(f), recursive=False)
            print("A backup of {} has been made. It is at {}".format(selected, backup_file))
            print("A backup of {} has been made. It is at {}".format(instance, backup_file))


async def remove_instance(selected, instance_data):
    if instance_data["STORAGE_TYPE"] == "MongoDB":
async def remove_instance(instance):
    await create_backup(instance)

    instance_vals = instance_data[instance]
    if instance_vals["STORAGE_TYPE"] == "MongoDB":
        from redbot.core.drivers.red_mongo import Mongo

        m = Mongo("Core", **instance_data["STORAGE_DETAILS"])
        m = Mongo("Core", **instance_vals["STORAGE_DETAILS"])
        db = m.db
        collections = await db.collection_names(include_system_collections=False)
        for name in collections:
@@ -347,12 +441,11 @@ async def remove_instance(selected, instance_data):
    else:
        pth = Path(instance_data["DATA_PATH"])
        safe_delete(pth)
    save_config(selected, {}, remove=True)
    print("The instance {} has been removed\n".format(selected))
    save_config(instance, {}, remove=True)
    print("The instance {} has been removed\n".format(instance))


async def remove_instance_interaction():
    instance_list = load_existing_config()
    if not instance_list:
        print("No instances have been set up!")
        return
@@ -361,35 +454,76 @@ async def remove_instance_interaction():
        "You have chosen to remove an instance. The following "
        "is a list of instances that currently exist:\n"
    )
    for instance in instance_list.keys():
    for instance in instance_data.keys():
        print("{}\n".format(instance))
    print("Please select one of the above by entering its name")
    selected = input("> ")

    if selected not in instance_list.keys():
    if selected not in instance_data.keys():
        print("That isn't a valid instance!")
        return
    instance_data = instance_list[selected]

    await create_backup(selected, instance_data)
    await remove_instance(selected, instance_data)
    await create_backup(selected)
    await remove_instance(selected)


def main():
    args, _ = parse_cli_args()
    if args.delete:
        loop = asyncio.get_event_loop()
        loop.run_until_complete(remove_instance_interaction())
    elif args.edit:
        loop = asyncio.get_event_loop()
        loop.run_until_complete(edit_instance())
    else:
@click.group(invoke_without_command=True)
@click.option("--debug", type=bool)
@click.pass_context
def cli(ctx, debug):
    level = logging.DEBUG if debug else logging.INFO
    redbot.logging.init_logging(level=level, location=Path.cwd() / "red_setup_logs")
    if ctx.invoked_subcommand is None:
        basic_setup()


@cli.command()
@click.argument("instance", type=click.Choice(instance_list))
def delete(instance):
    loop = asyncio.get_event_loop()
    loop.run_until_complete(remove_instance(instance))


@cli.command()
@click.argument("instance", type=click.Choice(instance_list))
@click.argument("backend", type=click.Choice(["json", "mongo"]))
def convert(instance, backend):
    current_backend = get_current_backend(instance)
    target = get_target_backend(backend)

    default_dirs = deepcopy(basic_config_default)
    default_dirs["DATA_PATH"] = str(Path(instance_data[instance]["DATA_PATH"]))

    loop = asyncio.get_event_loop()

    new_storage_details = None

    if current_backend == BackendType.MONGOV1:
        if target == BackendType.MONGO:
            raise RuntimeError(
                "Please see conversion docs for updating to the latest mongo version."
            )
        elif target == BackendType.JSON:
            new_storage_details = loop.run_until_complete(mongo_to_json(instance))
    elif current_backend == BackendType.JSON:
        if target == BackendType.MONGO:
            new_storage_details = loop.run_until_complete(json_to_mongov2(instance))
    elif current_backend == BackendType.MONGO:
        if target == BackendType.JSON:
            new_storage_details = loop.run_until_complete(mongov2_to_json(instance))

    if new_storage_details is not None:
        default_dirs["STORAGE_TYPE"] = target.value
        default_dirs["STORAGE_DETAILS"] = new_storage_details
        save_config(instance, default_dirs)
        conversion_log.info(f"Conversion to {target} complete.")
    else:
        conversion_log.info(f"Cannot convert {current_backend} to {target} at this time.")


if __name__ == "__main__":
    try:
        main()
        cli()
    except KeyboardInterrupt:
        print("Exiting...")
    else: