Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add redbot-setup restore cli command for restoring instance from backup #3681

Draft
wants to merge 13 commits into
base: V3/develop
Choose a base branch
from
17 changes: 1 addition & 16 deletions redbot/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -221,22 +221,7 @@ def _edit_instance_name(old_name, new_name, confirm_overwrite, no_prompt):
)
elif not no_prompt and confirm("Would you like to change the instance name?", default=False):
name = get_name()
if name in _get_instance_names():
print(
"WARNING: An instance already exists with this name. "
"Continuing will overwrite the existing instance config."
)
if not confirm(
"Are you absolutely certain you want to continue with this instance name?",
default=False,
):
print("Instance name will remain unchanged.")
name = old_name
else:
print("Instance name updated.")
else:
print("Instance name updated.")
print()
print("Instance name updated.\n")
else:
name = old_name
return name
Expand Down
22 changes: 13 additions & 9 deletions redbot/cogs/downloader/downloader.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@ def __init__(self, bot: Red):
super().__init__()
self.bot = bot

# any changes to Config here need to also be applied to RepoManager._restore_from_backup()
self.config = Config.get_conf(self, identifier=998240343, force_registration=True)

self.config.register_global(schema_version=0, installed_cogs={}, installed_libraries={})
Expand Down Expand Up @@ -95,26 +96,29 @@ def _done_callback(task: asyncio.Task) -> None:

async def initialize(self) -> None:
await self._repo_manager.initialize()
await self._maybe_update_config()
await self._maybe_update_config(self.config)
self._ready.set()

async def _maybe_update_config(self) -> None:
schema_version = await self.config.schema_version()
@classmethod
async def _maybe_update_config(cls, config: Config) -> None:
# this method might also be called from RepoManager._restore_from_backup()
schema_version = await config.schema_version()

if schema_version == 0:
await self._schema_0_to_1()
await cls._schema_0_to_1(config)
schema_version += 1
await self.config.schema_version.set(schema_version)
await config.schema_version.set(schema_version)

async def _schema_0_to_1(self):
@classmethod
async def _schema_0_to_1(cls, config: Config) -> None:
"""
This contains migration to allow saving state
of both installed cogs and shared libraries.
"""
old_conf = await self.config.get_raw("installed", default=[])
old_conf = await config.get_raw("installed", default=[])
if not old_conf:
return
async with self.config.installed_cogs() as new_cog_conf:
async with config.installed_cogs() as new_cog_conf:
for cog_json in old_conf:
repo_name = cog_json["repo_name"]
module_name = cog_json["cog_name"]
Expand All @@ -126,7 +130,7 @@ async def _schema_0_to_1(self):
"commit": "",
"pinned": False,
}
await self.config.clear_raw("installed")
await config.clear_raw("installed")
# no reliable way to get installed libraries (i.a. missing repo name)
# but it only helps `[p]cog update` run faster so it's not an issue

Expand Down
43 changes: 43 additions & 0 deletions redbot/cogs/downloader/repo_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import asyncio
import functools
import json
import os
import pkgutil
import shlex
Expand Down Expand Up @@ -1216,3 +1217,45 @@ def _parse_url(self, url: str, branch: Optional[str]) -> Tuple[str, Optional[str
if branch is None:
branch = tree_url_match["branch"]
return url, branch

async def _restore_from_backup(self) -> None:
    """Restore cogs using `repos.json` in cog's data path.

    Used by `redbot-setup restore` cli command.

    Re-clones every repo listed in the backup's ``repos.json`` and then
    rebuilds Downloader's config so that ``[p]cog update`` reinstalls
    all previously installed cogs. Cloning failures are logged and
    skipped so one bad repo doesn't abort the whole restore.
    """
    with open(data_manager.cog_data_path(self) / "repos.json") as fp:
        raw_repos = json.load(fp)

    # imported lazily - tqdm is only needed on this cli-driven code path
    from tqdm import tqdm

    progress_bar = tqdm(raw_repos, desc="Downloading repos", unit="repo", dynamic_ncols=True)

    for repo_data in progress_bar:
        try:
            await self.add_repo(repo_data["url"], repo_data["name"], repo_data["branch"])
        except errors.CloningError:
            log.exception(
                "Something went wrong whilst cloning %s (to branch: %s)",
                repo_data["url"],
                repo_data["branch"],
            )
        except OSError:
            log.exception(
                "Something went wrong trying to add repo %s under name %s",
                repo_data["url"],
                repo_data["name"],
            )

    # imported here to avoid a circular import at module load time
    from .downloader import Downloader

    # this solution is far from perfect, but a better one requires a rewrite
    # NOTE: this must be Downloader's config (identifier matches the one used
    # in Downloader.__init__), not RepoManager's own - use the local `conf`
    # consistently below (the original code wrongly touched `self.conf`).
    conf = Config.get_conf(None, identifier=998240343, cog_name="Downloader")
    conf.register_global(schema_version=0, installed_cogs={}, installed_libraries={})
    await Downloader._maybe_update_config(conf)
    # clear out saved commit so that `[p]cog update` triggers install for all cogs
    async with conf.installed_cogs() as installed_cogs:
        for repo_data in installed_cogs.values():
            for cog_data in repo_data.values():
                cog_data["commit"] = ""
                cog_data["pinned"] = False
    # installed libraries can't be reliably restored; Downloader re-fetches them
    await conf.installed_libraries.set({})
56 changes: 46 additions & 10 deletions redbot/core/utils/_internal_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,11 @@
import re
import shutil
import tarfile
import time
from datetime import datetime
from io import BytesIO
from pathlib import Path
from tarfile import TarInfo
from typing import (
AsyncIterator,
Awaitable,
Expand All @@ -23,6 +26,7 @@

import discord
from fuzzywuzzy import fuzz, process
from tqdm import tqdm

from redbot.core import data_manager
from redbot.core.utils.chat_formatting import box
Expand Down Expand Up @@ -182,7 +186,23 @@ async def format_fuzzy_results(
return "Perhaps you wanted one of these? " + box("\n".join(lines), lang="vhdl")


def _tar_addfile_from_string(tar: tarfile.TarFile, name: str, string: str) -> None:
encoded = string.encode("utf-8")
fp = BytesIO(encoded)

# TarInfo needs `mtime` and `size`
# https://stackoverflow.com/q/53306000
tar_info = tarfile.TarInfo(name)
tar_info.mtime = time.time()
tar_info.size = len(encoded)

tar.addfile(tar_info, fp)


async def create_backup(dest: Path = Path.home()) -> Optional[Path]:
# version of backup
BACKUP_VERSION = 2

data_path = Path(data_manager.core_data_path().parent)
if not data_path.exists():
return None
Expand All @@ -192,12 +212,19 @@ async def create_backup(dest: Path = Path.home()) -> Optional[Path]:
backup_fpath = dest / f"redv3_{data_manager.instance_name}_{timestr}.tar.gz"

to_backup = []
# we need trailing separator to not exclude files and folders that only start with these names
exclusions = [
"__pycache__",
# Lavalink will be downloaded on Audio load
"Lavalink.jar",
os.path.join("Downloader", "lib"),
os.path.join("CogManager", "cogs"),
os.path.join("RepoManager", "repos"),
# cogs and repos installed through Downloader can be reinstalled using restore command
os.path.join("Downloader", "lib", ""),
os.path.join("CogManager", "cogs", ""),
os.path.join("RepoManager", "repos", ""),
# these files are created during backup so we exclude them from data path backup
os.path.join("RepoManager", "repos.json"),
"instance.json",
"backup.version",
]

# Avoiding circular imports
Expand All @@ -208,19 +235,28 @@ async def create_backup(dest: Path = Path.home()) -> Optional[Path]:
repo_output = []
for repo in repo_mgr.repos:
repo_output.append({"url": repo.url, "name": repo.name, "branch": repo.branch})
repos_file = data_path / "cogs" / "RepoManager" / "repos.json"
with repos_file.open("w") as fs:
json.dump(repo_output, fs, indent=4)
instance_file = data_path / "instance.json"
with instance_file.open("w") as fs:
json.dump({data_manager.instance_name: data_manager.basic_config}, fs, indent=4)

for f in data_path.glob("**/*"):
if not any(ex in str(f) for ex in exclusions) and f.is_file():
to_backup.append(f)

with tarfile.open(str(backup_fpath), "w:gz") as tar:
for f in to_backup:
progress_bar = tqdm(to_backup, desc="Compressing data", unit=" files", dynamic_ncols=True)
for f in progress_bar:
tar.add(str(f), arcname=str(f.relative_to(data_path)), recursive=False)

# add repos backup
repos_data = json.dumps(repo_output, indent=4)
_tar_addfile_from_string(tar, "cogs/RepoManager/repos.json", repos_data)

# add instance's original data
instance_data = json.dumps(
{data_manager.instance_name: data_manager.basic_config}, indent=4
)
_tar_addfile_from_string(tar, "instance.json", instance_data)

# add info about backup version
_tar_addfile_from_string(tar, "backup.version", str(BACKUP_VERSION))
return backup_fpath


Expand Down
Loading