Add data_manager.instance_name() public API (#6146)

This commit is contained in:
Jakub Kuczys 2023-05-12 12:53:53 +02:00 committed by GitHub
parent 91f19c7410
commit 2369017f6a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 23 additions and 10 deletions

View File

@@ -108,7 +108,7 @@ class LavalinkTasks(MixinMeta, metaclass=CompositeMetaClass):
password=password, password=password,
port=port, port=port,
timeout=timeout, timeout=timeout,
resume_key=f"Red-Core-Audio-{self.bot.user.id}-{data_manager.instance_name}", resume_key=f"Red-Core-Audio-{self.bot.user.id}-{data_manager.instance_name()}",
secured=secured, secured=secured,
) )
except lavalink.AbortingNodeConnection: except lavalink.AbortingNodeConnection:

View File

@@ -131,13 +131,14 @@ class DebugInfo:
) )
async def _get_red_vars_section(self) -> DebugInfoSection: async def _get_red_vars_section(self) -> DebugInfoSection:
if data_manager.instance_name is None: instance_name = data_manager.instance_name()
if instance_name is None:
return DebugInfoSection( return DebugInfoSection(
"Red variables", "Red variables",
f"Metadata file: {data_manager.config_file}", f"Metadata file: {data_manager.config_file}",
) )
parts = [f"Instance name: {data_manager.instance_name}"] parts = [f"Instance name: {instance_name}"]
if self.bot is not None: if self.bot is not None:
owners = [] owners = []
for uid in self.bot.owner_ids: for uid in self.bot.owner_ids:

View File

@@ -18,16 +18,17 @@ __all__ = (
"core_data_path", "core_data_path",
"bundled_data_path", "bundled_data_path",
"data_path", "data_path",
"instance_name",
"metadata_file", "metadata_file",
"storage_details",
"storage_type", "storage_type",
"storage_details",
) )
log = logging.getLogger("red.data_manager") log = logging.getLogger("red.data_manager")
basic_config = None basic_config = None
instance_name = None _instance_name = None
basic_config_default: Dict[str, Any] = { basic_config_default: Dict[str, Any] = {
"DATA_PATH": None, "DATA_PATH": None,
@@ -106,8 +107,8 @@ def load_basic_configuration(instance_name_: str):
redbot setup. redbot setup.
""" """
global basic_config global basic_config
global instance_name global _instance_name
instance_name = instance_name_ _instance_name = instance_name_
try: try:
with config_file.open(encoding="utf-8") as fs: with config_file.open(encoding="utf-8") as fs:
@@ -119,7 +120,7 @@ def load_basic_configuration(instance_name_: str):
) )
sys.exit(ExitCodes.CONFIGURATION_ERROR) sys.exit(ExitCodes.CONFIGURATION_ERROR)
try: try:
basic_config = config[instance_name] basic_config = config[_instance_name]
except KeyError: except KeyError:
print( print(
"Instance with this name doesn't exist." "Instance with this name doesn't exist."
@@ -234,6 +235,17 @@ def data_path() -> Path:
return _base_data_path() return _base_data_path()
def instance_name() -> str:
    """Get the current instance's name.

    Returns
    -------
    str
        The instance name that was passed to `load_basic_configuration`.

    .. note::
        NOTE(review): the backing module global starts as ``None`` and is
        only set by `load_basic_configuration`, and callers (DebugInfo)
        explicitly check this return value against ``None`` — the return
        annotation arguably should be ``Optional[str]``; confirm against
        the module's typing imports.
    """
    return _instance_name
def metadata_file() -> Path: def metadata_file() -> Path:
"""Gets the path of metadata file. """Gets the path of metadata file.

View File

@@ -217,7 +217,7 @@ async def create_backup(dest: Path = Path.home()) -> Optional[Path]:
dest.mkdir(parents=True, exist_ok=True) dest.mkdir(parents=True, exist_ok=True)
timestr = datetime.utcnow().strftime("%Y-%m-%dT%H-%M-%S") timestr = datetime.utcnow().strftime("%Y-%m-%dT%H-%M-%S")
backup_fpath = dest / f"redv3_{data_manager.instance_name}_{timestr}.tar.gz" backup_fpath = dest / f"redv3_{data_manager.instance_name()}_{timestr}.tar.gz"
to_backup = [] to_backup = []
exclusions = [ exclusions = [
@@ -242,7 +242,7 @@ async def create_backup(dest: Path = Path.home()) -> Optional[Path]:
json.dump(repo_output, fs, indent=4) json.dump(repo_output, fs, indent=4)
instance_file = data_path / "instance.json" instance_file = data_path / "instance.json"
with instance_file.open("w") as fs: with instance_file.open("w") as fs:
json.dump({data_manager.instance_name: data_manager.basic_config}, fs, indent=4) json.dump({data_manager.instance_name(): data_manager.basic_config}, fs, indent=4)
for f in data_path.glob("**/*"): for f in data_path.glob("**/*"):
if not any(ex in str(f) for ex in exclusions) and f.is_file(): if not any(ex in str(f) for ex in exclusions) and f.is_file():
to_backup.append(f) to_backup.append(f)