Mirror of https://github.com/Cog-Creators/Red-DiscordBot.git (synced 2025-11-06 11:18:54 -05:00)
[V3 Setup] Overhaul backend conversion process through setup scripts (#2579)
* swap to click for setup
* Initial changes
* expose some stuff to allow for per-driver optimizations
* overwrite base config
* add red log
* add one print juuuust in case
* fix this
* thanks kowlin
* damn
* oops
* fix thing
* partial commit
* Working mongo -> json conversion, it sucks tho
* remove unused line
* Wrote initial optimized json importer
* optimized json importer
* remove useless line
* update mongo to json converter
* lets try writing the correct entry
* oops
* style fix
* add some garbage data filters going from old mongo to json
* ignore garbage data in mongov2 conversions
* simplify code a bit and add a completion message
* missed one
* Update pipfile lock
* Lock click version
This commit is contained in:
parent ad06b0e723
commit 6c296a9a17
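The commit replaces the argparse-based redbot-setup entry point with a click command group (see the redbot/setup.py and setup.cfg hunks below). As a rough usage sketch based on the commands defined in this diff, with <instance> standing in for an instance name:

    redbot-setup                            # no subcommand: runs the interactive basic setup
    redbot-setup delete <instance>          # removes an instance (offering a backup first)
    redbot-setup convert <instance> json    # converts the instance's backend to JSON
    redbot-setup convert <instance> mongo   # converts the instance's backend to MongoDB (v2)

The convert command looks up the instance's current backend in the existing config, runs the matching converter coroutine (mongo_to_json, mongov2_to_json, or json_to_mongov2), and saves the resulting storage details back into the instance's config entry.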
Pipfile.lock (generated): 19 lines changed
@@ -76,6 +76,13 @@
             ],
             "version": "==3.0.4"
         },
+        "click": {
+            "hashes": [
+                "sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13",
+                "sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7"
+            ],
+            "version": "==7.0"
+        },
         "colorama": {
             "hashes": [
                 "sha256:05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d",
@@ -747,11 +754,11 @@
         },
         "tox": {
             "hashes": [
-                "sha256:04f8f1aa05de8e76d7a266ccd14e0d665d429977cd42123bc38efa9b59964e9e",
-                "sha256:25ef928babe88c71e3ed3af0c464d1160b01fca2dd1870a5bb26c2dea61a17fc"
+                "sha256:1b166b93d2ce66bb7b253ba944d2be89e0c9d432d49eeb9da2988b4902a4684e",
+                "sha256:665cbdd99f5c196dd80d1d8db8c8cf5d48b1ae1f778bccd1bdf14d5aaf4ca0fc"
             ],
             "index": "pypi",
-            "version": "==3.7.0"
+            "version": "==3.9.0"
         },
         "urllib3": {
             "hashes": [
@@ -762,10 +769,10 @@
         },
         "virtualenv": {
             "hashes": [
-                "sha256:5a3ecdfbde67a4a3b3111301c4d64a5b71cf862c8c42958d30cf3253df1f29dd",
-                "sha256:dffd40d19ab0168c02cf936de59590a3c0f2c8c4a36f363fcf3dae18728dc94e"
+                "sha256:6aebaf4dd2568a0094225ebbca987859e369e3e5c22dc7d52e5406d504890417",
+                "sha256:984d7e607b0a5d1329425dd8845bd971b957424b5ba664729fab51ab8c11bc39"
             ],
-            "version": "==16.4.1"
+            "version": "==16.4.3"
         },
         "websockets": {
             "hashes": [
@@ -1,6 +1,14 @@
+import enum
+
 from .red_base import IdentifierData
 
-__all__ = ["get_driver", "IdentifierData"]
+__all__ = ["get_driver", "IdentifierData", "BackendType"]
+
+
+class BackendType(enum.Enum):
+    JSON = "JSON"
+    MONGO = "MongoDBV2"
+    MONGOV1 = "MongoDB"
 
 
 def get_driver(type, *args, **kwargs):
@@ -1,8 +1,18 @@
+import enum
 from typing import Tuple
 
 __all__ = ["BaseDriver", "IdentifierData"]
 
 
+class ConfigCategory(enum.Enum):
+    GLOBAL = "GLOBAL"
+    GUILD = "GUILD"
+    CHANNEL = "TEXTCHANNEL"
+    ROLE = "ROLE"
+    USER = "USER"
+    MEMBER = "MEMBER"
+
+
 class IdentifierData:
     def __init__(
         self,
@@ -72,6 +82,9 @@ class BaseDriver:
         self.cog_name = cog_name
         self.unique_cog_identifier = identifier
 
+    async def has_valid_connection(self) -> bool:
+        raise NotImplementedError
+
     async def get(self, identifier_data: IdentifierData):
         """
         Finds the value indicate by the given identifiers.
@@ -121,3 +134,75 @@ class BaseDriver:
         identifier_data
         """
         raise NotImplementedError
+
+    def _get_levels(self, category, custom_group_data):
+        if category == ConfigCategory.GLOBAL.value:
+            return 0
+        elif category in (
+            ConfigCategory.USER.value,
+            ConfigCategory.GUILD.value,
+            ConfigCategory.CHANNEL.value,
+            ConfigCategory.ROLE.value,
+        ):
+            return 1
+        elif category == ConfigCategory.MEMBER.value:
+            return 2
+        elif category in custom_group_data:
+            return custom_group_data[category]
+        else:
+            raise RuntimeError(f"Cannot convert due to group: {category}")
+
+    def _split_primary_key(self, category, custom_group_data, data):
+        levels = self._get_levels(category, custom_group_data)
+        if levels == 0:
+            return (((), data),)
+
+        def flatten(levels_remaining, currdata, parent_key=()):
+            items = []
+            for k, v in currdata.items():
+                new_key = parent_key + (k,)
+                if levels_remaining > 1:
+                    items.extend(flatten(levels_remaining - 1, v, new_key).items())
+                else:
+                    items.append((new_key, v))
+            return dict(items)
+
+        ret = []
+        for k, v in flatten(levels, data).items():
+            ret.append((k, v))
+        return tuple(ret)
+
+    async def export_data(self, custom_group_data):
+        categories = [c.value for c in ConfigCategory]
+        categories.extend(custom_group_data.keys())
+
+        ret = []
+        for c in categories:
+            ident_data = IdentifierData(
+                self.unique_cog_identifier,
+                c,
+                (),
+                (),
+                custom_group_data.get(c, {}),
+                is_custom=c in custom_group_data,
+            )
+            try:
+                data = await self.get(ident_data)
+            except KeyError:
+                continue
+            ret.append((c, data))
+        return ret
+
+    async def import_data(self, cog_data, custom_group_data):
+        for category, all_data in cog_data:
+            splitted_pkey = self._split_primary_key(category, custom_group_data, all_data)
+            for pkey, data in splitted_pkey:
+                ident_data = IdentifierData(
+                    self.unique_cog_identifier,
+                    category,
+                    pkey,
+                    (),
+                    custom_group_data.get(category, {}),
+                    is_custom=category in custom_group_data,
+                )
+                await self.set(ident_data, data)
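As a quick, made-up illustration of the new BaseDriver helpers above (not part of the commit): export_data gathers (category, data) pairs per config category, and _split_primary_key flattens a category's nested data into primary-key tuples that import_data then writes through set. For a MEMBER category, which uses two primary-key levels (guild ID, then member ID):

    # hypothetical values, for illustration only
    data = {"112233": {"445566": {"points": 3}}}
    # _split_primary_key("MEMBER", {}, data) returns:
    #     ((("112233", "445566"), {"points": 3}),)
    # a GLOBAL category uses zero levels and returns (((), data),)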
@@ -69,6 +69,9 @@ class JSON(BaseDriver):
 
         self._load_data()
 
+    async def has_valid_connection(self) -> bool:
+        return True
+
     @property
     def data(self):
         return _shared_datastore.get(self.cog_name)
@@ -123,5 +126,29 @@ class JSON(BaseDriver):
         else:
             await self.jsonIO._threadsafe_save_json(self.data)
 
+    async def import_data(self, cog_data, custom_group_data):
+        def update_write_data(identifier_data: IdentifierData, data):
+            partial = self.data
+            idents = identifier_data.to_tuple()
+            for ident in idents[:-1]:
+                if ident not in partial:
+                    partial[ident] = {}
+                partial = partial[ident]
+            partial[idents[-1]] = data
+
+        for category, all_data in cog_data:
+            splitted_pkey = self._split_primary_key(category, custom_group_data, all_data)
+            for pkey, data in splitted_pkey:
+                ident_data = IdentifierData(
+                    self.unique_cog_identifier,
+                    category,
+                    pkey,
+                    (),
+                    custom_group_data.get(category, {}),
+                    is_custom=category in custom_group_data,
+                )
+                update_write_data(ident_data, data)
+        await self.jsonIO._threadsafe_save_json(self.data)
+
     def get_config_details(self):
         return
@@ -49,6 +49,10 @@ class Mongo(BaseDriver):
         if _conn is None:
             _initialize(**kwargs)
 
+    async def has_valid_connection(self) -> bool:
+        # Maybe fix this?
+        return True
+
     @property
     def db(self) -> motor.core.Database:
         """
@@ -267,11 +267,11 @@ async def reset_red():
     if confirm("\nDo you want to create a backup for an instance? (y/n) "):
         for index, instance in instances.items():
             print("\nRemoving {}...".format(index))
-            await create_backup(index, instance)
-            await remove_instance(index, instance)
+            await create_backup(index)
+            await remove_instance(index)
     else:
         for index, instance in instances.items():
-            await remove_instance(index, instance)
+            await remove_instance(index)
     print("All instances have been removed.")
 
 
redbot/setup.py: 306 lines changed
@@ -1,5 +1,4 @@
 #!/usr/bin/env python3
-import argparse
 import asyncio
 import json
 import os
@@ -8,19 +7,30 @@ import tarfile
 from copy import deepcopy
 from datetime import datetime as dt
 from pathlib import Path
+import logging
 
 import appdirs
+import click
+
+import redbot.logging
 from redbot.core.cli import confirm
 from redbot.core.data_manager import (
     basic_config_default,
     load_basic_configuration,
     instance_name,
     basic_config,
+    cog_data_path,
+    core_data_path,
+    storage_details,
 )
 from redbot.core.json_io import JsonIO
 from redbot.core.utils import safe_delete
+from redbot.core import Config
+from redbot.core.drivers import BackendType, IdentifierData
 from redbot.core.drivers.red_json import JSON
 
+conversion_log = logging.getLogger("red.converter")
+
 config_dir = None
 appdir = appdirs.AppDirs("Red-DiscordBot")
 if sys.platform == "linux":
@@ -36,15 +46,6 @@ except PermissionError:
 config_file = config_dir / "config.json"
 
 
-def parse_cli_args():
-    parser = argparse.ArgumentParser(description="Red - Discord Bot's instance manager (V3)")
-    parser.add_argument(
-        "--delete", "-d", help="Interactively delete an instance", action="store_true"
-    )
-    parser.add_argument("--edit", "-e", help="Interactively edit an instance", action="store_true")
-    return parser.parse_known_args()
-
-
 def load_existing_config():
     if not config_file.exists():
         return {}
@@ -52,6 +53,13 @@ def load_existing_config():
     return JsonIO(config_file)._load_json()
 
 
+instance_data = load_existing_config()
+if instance_data is None:
+    instance_list = []
+else:
+    instance_list = list(instance_data.keys())
+
+
 def save_config(name, data, remove=False):
     config = load_existing_config()
     if remove and name in config:
@@ -173,37 +181,138 @@ def basic_setup():
     )
 
 
-async def json_to_mongo(current_data_dir: Path, storage_details: dict):
-    from redbot.core.drivers.red_mongo import Mongo
+def get_current_backend(instance) -> BackendType:
+    return BackendType(instance_data[instance]["STORAGE_TYPE"])
+
+
+def get_target_backend(backend) -> BackendType:
+    if backend == "json":
+        return BackendType.JSON
+    elif backend == "mongo":
+        return BackendType.MONGO
+
+
+async def json_to_mongov2(instance):
+    instance_vals = instance_data[instance]
+    current_data_dir = Path(instance_vals["DATA_PATH"])
+
+    load_basic_configuration(instance)
+
+    from redbot.core.drivers import red_mongo
+
+    storage_details = red_mongo.get_config_details()
+
+    core_conf = Config.get_core_conf()
+    new_driver = red_mongo.Mongo(cog_name="Core", identifier="0", **storage_details)
+
+    core_conf.init_custom("CUSTOM_GROUPS", 2)
+    custom_group_data = await core_conf.custom("CUSTOM_GROUPS").all()
+
+    curr_custom_data = custom_group_data.get("Core", {}).get("0", {})
+    exported_data = await core_conf.driver.export_data(curr_custom_data)
+    conversion_log.info("Starting Core conversion...")
+    await new_driver.import_data(exported_data, curr_custom_data)
+    conversion_log.info("Core conversion complete.")
 
-    core_data_file = current_data_dir / "core" / "settings.json"
-    driver = Mongo(cog_name="Core", identifier="0", **storage_details)
-    with core_data_file.open(mode="r") as f:
-        core_data = json.loads(f.read())
-    data = core_data.get("0", {})
-    for key, value in data.items():
-        await driver.set(key, value=value)
     for p in current_data_dir.glob("cogs/**/settings.json"):
         cog_name = p.parent.stem
+        if "." in cog_name:
+            # Garbage handler
+            continue
         with p.open(mode="r") as f:
             cog_data = json.load(f)
-        for identifier, data in cog_data.items():
-            driver = Mongo(cog_name, identifier, **storage_details)
-            for key, value in data.items():
-                await driver.set(key, value=value)
+        for identifier, all_data in cog_data.items():
+            try:
+                conf = Config.get_conf(None, int(identifier), cog_name=cog_name)
+            except ValueError:
+                continue
+            new_driver = red_mongo.Mongo(
+                cog_name=cog_name, identifier=conf.driver.unique_cog_identifier, **storage_details
+            )
+
+            curr_custom_data = custom_group_data.get(cog_name, {}).get(identifier, {})
+
+            exported_data = await conf.driver.export_data(curr_custom_data)
+            conversion_log.info(f"Converting {cog_name} with identifier {identifier}...")
+            await new_driver.import_data(exported_data, curr_custom_data)
+
+    conversion_log.info("Cog conversion complete.")
+
+    return storage_details
 
 
-async def mongo_to_json(current_data_dir: Path, storage_details: dict):
+async def mongov2_to_json(instance):
+    load_basic_configuration(instance)
+
+    core_path = core_data_path()
+
+    from redbot.core.drivers import red_json
+
+    core_conf = Config.get_core_conf()
+    new_driver = red_json.JSON(cog_name="Core", identifier="0", data_path_override=core_path)
+
+    core_conf.init_custom("CUSTOM_GROUPS", 2)
+    custom_group_data = await core_conf.custom("CUSTOM_GROUPS").all()
+
+    curr_custom_data = custom_group_data.get("Core", {}).get("0", {})
+    exported_data = await core_conf.driver.export_data(curr_custom_data)
+    conversion_log.info("Starting Core conversion...")
+    await new_driver.import_data(exported_data, curr_custom_data)
+    conversion_log.info("Core conversion complete.")
+
+    collection_names = await core_conf.driver.db.list_collection_names()
+    splitted_names = list(
+        filter(
+            lambda elem: elem[1] != "" and elem[0] != "Core",
+            [n.split(".") for n in collection_names],
+        )
+    )
+
+    ident_map = {}  # Cogname: idents list
+    for cog_name, category in splitted_names:
+        if cog_name not in ident_map:
+            ident_map[cog_name] = set()
+
+        idents = await core_conf.driver.db[cog_name][category].distinct("_id.RED_uuid")
+        ident_map[cog_name].update(set(idents))
+
+    for cog_name, idents in ident_map.items():
+        for identifier in idents:
+            curr_custom_data = custom_group_data.get(cog_name, {}).get(identifier, {})
+            try:
+                conf = Config.get_conf(None, int(identifier), cog_name=cog_name)
+            except ValueError:
+                continue
+            exported_data = await conf.driver.export_data(curr_custom_data)
+
+            new_path = cog_data_path(raw_name=cog_name)
+            new_driver = red_json.JSON(cog_name, identifier, data_path_override=new_path)
+            conversion_log.info(f"Converting {cog_name} with identifier {identifier}...")
+            await new_driver.import_data(exported_data, curr_custom_data)
+
+        # cog_data_path(raw_name=cog_name)
+
+    conversion_log.info("Cog conversion complete.")
+
+    return {}
+
+
+async def mongo_to_json(instance):
+    load_basic_configuration(instance)
+
     from redbot.core.drivers.red_mongo import Mongo
 
-    m = Mongo("Core", "0", **storage_details)
+    m = Mongo("Core", "0", **storage_details())
     db = m.db
     collection_names = await db.list_collection_names()
     for collection_name in collection_names:
-        if collection_name == "Core":
-            c_data_path = current_data_dir / "core"
+        if "." in collection_name:
+            # Fix for one of Zeph's problems
+            continue
+        elif collection_name == "Core":
+            c_data_path = core_data_path()
         else:
-            c_data_path = current_data_dir / "cogs" / collection_name
+            c_data_path = cog_data_path(raw_name=collection_name)
         c_data_path.mkdir(parents=True, exist_ok=True)
         # Every cog name has its own collection
         collection = db[collection_name]
@@ -212,9 +321,16 @@ async def mongo_to_json(current_data_dir: Path, storage_details: dict):
             # This means if two cogs have the same name but different identifiers, they will
             # be two separate documents in the same collection
             cog_id = document.pop("_id")
+            if not isinstance(cog_id, str):
+                # Another garbage data check
+                continue
+            elif not str(cog_id).isdigit():
+                continue
             driver = JSON(collection_name, cog_id, data_path_override=c_data_path)
-            for key, value in document.items():
-                await driver.set(key, value=value)
+            for category, value in document.items():
+                ident_data = IdentifierData(str(cog_id), category, (), (), {})
+                await driver.set(ident_data, value=value)
+    return {}
 
 
 async def edit_instance():
@@ -257,32 +373,6 @@ async def edit_instance():
     else:
         default_dirs["DATA_PATH"] = str(current_data_dir.resolve())
 
-    if confirm("Would you like to change the storage type? (y/n):"):
-        storage = get_storage_type()
-
-        storage_dict = {1: "JSON", 2: "MongoDBV2"}
-        default_dirs["STORAGE_TYPE"] = storage_dict[storage]
-        if storage_dict.get(storage, 1) == "MongoDBV2":
-            from redbot.core.drivers.red_mongo import get_config_details
-
-            storage_details = get_config_details()
-            default_dirs["STORAGE_DETAILS"] = storage_details
-
-            if instance_data["STORAGE_TYPE"] == "JSON":
-                raise NotImplementedError("We cannot convert from JSON to MongoDB at this time.")
-                # if confirm("Would you like to import your data? (y/n) "):
-                #     await json_to_mongo(current_data_dir, storage_details)
-        elif storage_dict.get(storage, 1) == "JSON":
-            storage_details = instance_data["STORAGE_DETAILS"]
-            default_dirs["STORAGE_DETAILS"] = {}
-            if instance_data["STORAGE_TYPE"] == "MongoDB":
-                if confirm("Would you like to import your data? (y/n) "):
-                    await mongo_to_json(current_data_dir, storage_details)
-            elif instance_data["STORAGE_TYPE"] == "MongoDBV2":
-                raise NotImplementedError(
-                    "We cannot convert from this version of MongoDB to JSON at this time."
-                )
-
     if name != selected:
         save_config(selected, {}, remove=True)
         save_config(name, default_dirs)
@@ -290,16 +380,17 @@
     print("Your basic configuration has been edited")
 
 
-async def create_backup(selected, instance_data):
+async def create_backup(instance):
+    instance_vals = instance_data[instance]
     if confirm("Would you like to make a backup of the data for this instance? (y/n)"):
-        load_basic_configuration(selected)
-        if instance_data["STORAGE_TYPE"] == "MongoDB":
-            await mongo_to_json(instance_data["DATA_PATH"], instance_data["STORAGE_DETAILS"])
+        load_basic_configuration(instance)
+        if instance_vals["STORAGE_TYPE"] == "MongoDB":
+            await mongo_to_json(instance)
         print("Backing up the instance's data...")
         backup_filename = "redv3-{}-{}.tar.gz".format(
-            selected, dt.utcnow().strftime("%Y-%m-%d %H-%M-%S")
+            instance, dt.utcnow().strftime("%Y-%m-%d %H-%M-%S")
         )
-        pth = Path(instance_data["DATA_PATH"])
+        pth = Path(instance_vals["DATA_PATH"])
         if pth.exists():
             backup_pth = pth.home()
             backup_file = backup_pth / backup_filename
@@ -321,24 +412,27 @@ async def create_backup(selected, instance_data):
         repo_filename = pth / "cogs" / "RepoManager" / "repos.json"
         with open(str(repo_filename), "w") as f:
             f.write(json.dumps(repo_output, indent=4))
-        instance_data = {instance_name: basic_config}
+        instance_vals = {instance_name: basic_config}
         instance_file = pth / "instance.json"
         with open(str(instance_file), "w") as instance_out:
-            instance_out.write(json.dumps(instance_data, indent=4))
+            instance_out.write(json.dumps(instance_vals, indent=4))
         for f in pth.glob("**/*"):
             if not any(ex in str(f) for ex in exclusions):
                 to_backup.append(f)
         with tarfile.open(str(backup_file), "w:gz") as tar:
             for f in to_backup:
                 tar.add(str(f), recursive=False)
-        print("A backup of {} has been made. It is at {}".format(selected, backup_file))
+        print("A backup of {} has been made. It is at {}".format(instance, backup_file))
 
 
-async def remove_instance(selected, instance_data):
-    if instance_data["STORAGE_TYPE"] == "MongoDB":
+async def remove_instance(instance):
+    await create_backup(instance)
+
+    instance_vals = instance_data[instance]
+    if instance_vals["STORAGE_TYPE"] == "MongoDB":
         from redbot.core.drivers.red_mongo import Mongo
 
-        m = Mongo("Core", **instance_data["STORAGE_DETAILS"])
+        m = Mongo("Core", **instance_vals["STORAGE_DETAILS"])
         db = m.db
         collections = await db.collection_names(include_system_collections=False)
         for name in collections:
@@ -347,12 +441,11 @@ async def remove_instance(selected, instance_data):
     else:
         pth = Path(instance_data["DATA_PATH"])
         safe_delete(pth)
-    save_config(selected, {}, remove=True)
-    print("The instance {} has been removed\n".format(selected))
+    save_config(instance, {}, remove=True)
+    print("The instance {} has been removed\n".format(instance))
 
 
 async def remove_instance_interaction():
-    instance_list = load_existing_config()
     if not instance_list:
         print("No instances have been set up!")
         return
@@ -361,35 +454,76 @@
         "You have chosen to remove an instance. The following "
         "is a list of instances that currently exist:\n"
     )
-    for instance in instance_list.keys():
+    for instance in instance_data.keys():
         print("{}\n".format(instance))
     print("Please select one of the above by entering its name")
     selected = input("> ")
 
-    if selected not in instance_list.keys():
+    if selected not in instance_data.keys():
         print("That isn't a valid instance!")
         return
-    instance_data = instance_list[selected]
-    await create_backup(selected, instance_data)
-    await remove_instance(selected, instance_data)
+
+    await create_backup(selected)
+    await remove_instance(selected)
 
 
-def main():
-    args, _ = parse_cli_args()
-    if args.delete:
-        loop = asyncio.get_event_loop()
-        loop.run_until_complete(remove_instance_interaction())
-    elif args.edit:
-        loop = asyncio.get_event_loop()
-        loop.run_until_complete(edit_instance())
-    else:
+@click.group(invoke_without_command=True)
+@click.option("--debug", type=bool)
+@click.pass_context
+def cli(ctx, debug):
+    level = logging.DEBUG if debug else logging.INFO
+    redbot.logging.init_logging(level=level, location=Path.cwd() / "red_setup_logs")
+    if ctx.invoked_subcommand is None:
         basic_setup()
+
+
+@cli.command()
+@click.argument("instance", type=click.Choice(instance_list))
+def delete(instance):
+    loop = asyncio.get_event_loop()
+    loop.run_until_complete(remove_instance(instance))
+
+
+@cli.command()
+@click.argument("instance", type=click.Choice(instance_list))
+@click.argument("backend", type=click.Choice(["json", "mongo"]))
+def convert(instance, backend):
+    current_backend = get_current_backend(instance)
+    target = get_target_backend(backend)
+
+    default_dirs = deepcopy(basic_config_default)
+    default_dirs["DATA_PATH"] = str(Path(instance_data[instance]["DATA_PATH"]))
+
+    loop = asyncio.get_event_loop()
+
+    new_storage_details = None
+
+    if current_backend == BackendType.MONGOV1:
+        if target == BackendType.MONGO:
+            raise RuntimeError(
+                "Please see conversion docs for updating to the latest mongo version."
+            )
+        elif target == BackendType.JSON:
+            new_storage_details = loop.run_until_complete(mongo_to_json(instance))
+    elif current_backend == BackendType.JSON:
+        if target == BackendType.MONGO:
+            new_storage_details = loop.run_until_complete(json_to_mongov2(instance))
+    elif current_backend == BackendType.MONGO:
+        if target == BackendType.JSON:
+            new_storage_details = loop.run_until_complete(mongov2_to_json(instance))
+
+    if new_storage_details is not None:
+        default_dirs["STORAGE_TYPE"] = target.value
+        default_dirs["STORAGE_DETAILS"] = new_storage_details
+        save_config(instance, default_dirs)
+        conversion_log.info(f"Conversion to {target} complete.")
+    else:
+        conversion_log.info(f"Cannot convert {current_backend} to {target} at this time.")
 
 
 if __name__ == "__main__":
     try:
-        main()
+        cli()
     except KeyboardInterrupt:
         print("Exiting...")
     else:
@@ -32,6 +32,7 @@ install_requires =
    async-timeout==3.0.1
    attrs==18.2.0
    chardet==3.0.4
+    click==7.0
    colorama==0.4.1
    distro==1.4.0; sys_platform == "linux"
    fuzzywuzzy==0.17.0
@@ -86,7 +87,7 @@ test =
 [options.entry_points]
 console_scripts =
    redbot=redbot.__main__:main
-    redbot-setup=redbot.setup:main
+    redbot-setup=redbot.setup:cli
    redbot-launcher=redbot.launcher:main
 pytest11 =
    red-discordbot=redbot.pytest