Add conan cache backup-upload (#15013)
* Add conan cache backup-upload

* Even prettier
AbrilRBS authored Oct 26, 2023
1 parent cd401c7 commit 255171f
Showing 5 changed files with 86 additions and 33 deletions.
22 changes: 14 additions & 8 deletions conan/api/subapi/upload.py
@@ -57,22 +57,28 @@ def upload(self, package_list, remote):
         executor = UploadExecutor(app)
         executor.upload(package_list, remote)
 
-    def upload_backup_sources(self, package_list):
+    def get_backup_sources(self, package_list=None):
+        """Get list of backup source files currently present in the cache,
+        either all of them if no argument, else filter by those belonging to the references in the package_list"""
         app = ConanApp(self.conan_api.cache_folder)
         config = app.cache.new_config
-        url = config.get("core.sources:upload_url")
+        download_cache_path = config.get("core.sources:download_cache")
+        download_cache_path = download_cache_path or HomePaths(
+            self.conan_api.cache_folder).default_sources_backup_folder
+        excluded_urls = config.get("core.sources:exclude_urls", check_type=list, default=[])
+        download_cache = DownloadCache(download_cache_path)
+        return download_cache.get_backup_sources_files_to_upload(excluded_urls, package_list)
+
+    def upload_backup_sources(self, files):
+        app = ConanApp(self.conan_api.cache_folder)
+        config = app.cache.new_config
+        url = config.get("core.sources:upload_url", check_type=str)
         if url is None:
             return
         url = url if url.endswith("/") else url + "/"
-        download_cache_path = config.get("core.sources:download_cache")
-        download_cache_path = download_cache_path or HomePaths(self.conan_api.cache_folder).default_sources_backup_folder
-        excluded_urls = config.get("core.sources:exclude_urls", check_type=list, default=[])
 
         output = ConanOutput()
         output.subtitle("Uploading backup sources")
-        output.info("Gathering files to upload")
-        files = DownloadCache(download_cache_path).get_backup_sources_files_to_upload(package_list,
-                                                                                      excluded_urls)
        if not files:
            output.info("No backup sources files to upload")
            return files
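The change splits the old single entry point into a query step (get_backup_sources) and an upload step (upload_backup_sources), so the new cache backup-upload subcommand can reuse both. A minimal sketch of driving the split API from Python, assuming a default Conan home; the ConanAPI import is the public Conan 2 entry point, everything else is shown in the diff above:

    from conan.api.conan_api import ConanAPI

    conan_api = ConanAPI()  # default cache folder; pass a path to target another home
    # No package_list argument: collect every backup source file present in the cache
    files = conan_api.upload.get_backup_sources()
    # Returns early (no-op) when core.sources:upload_url is not configured
    conan_api.upload.upload_backup_sources(files)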
11 changes: 10 additions & 1 deletion conan/cli/commands/cache.py
@@ -126,10 +126,19 @@ def cache_save(conan_api: ConanAPI, parser, subparser, *args):
                            "json": print_list_json})
 def cache_restore(conan_api: ConanAPI, parser, subparser, *args):
     """
-    Put the artifacts from a an archive into the cache
+    Put the artifacts from an archive into the cache
     """
     subparser.add_argument("file", help="Path to archive to restore")
     args = parser.parse_args(*args)
     path = make_abs_path(args.file)
     package_list = conan_api.cache.restore(path)
     return {"results": {"Local Cache": package_list.serialize()}}
+
+
+@conan_subcommand()
+def cache_backup_upload(conan_api: ConanAPI, parser, subparser, *args):
+    """
+    Upload all the source backups present in the cache
+    """
+    files = conan_api.upload.get_backup_sources()
+    conan_api.upload.upload_backup_sources(files)
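With the subcommand registered, every backup source in the local cache can be pushed in one shot with conan cache backup-upload. A hedged usage sketch in the style of this repo's integration tests; the conf values and server URL are illustrative assumptions, and the command is a silent no-op unless core.sources:upload_url is set:

    import textwrap
    from conans.test.utils.tools import TestClient

    client = TestClient()
    client.save({"global.conf": textwrap.dedent("""\
        core.sources:download_cache=/tmp/backups
        core.sources:upload_url=https://myserver.example/backup/
        """)}, path=client.cache.cache_folder)
    client.run("cache backup-upload")  # uploads every backup source found in the cache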
3 changes: 2 additions & 1 deletion conan/cli/commands/upload.py
@@ -106,7 +106,8 @@ def upload(conan_api: ConanAPI, parser, *args):
 
     if not args.dry_run:
         conan_api.upload.upload(package_list, remote)
-        conan_api.upload.upload_backup_sources(package_list)
+        backup_files = conan_api.upload.get_backup_sources(package_list)
+        conan_api.upload.upload_backup_sources(backup_files)
     elif args.list:
         # Don't error on no recipes for automated workflows using list,
         # but warn to tell the user that no packages were uploaded
52 changes: 29 additions & 23 deletions conans/client/downloaders/download_cache.py
@@ -44,14 +44,11 @@ def lock(self, lock_id):
         finally:
             thread_lock.release()
 
-    def get_backup_sources_files_to_upload(self, package_list, excluded_urls):
+    def get_backup_sources_files_to_upload(self, excluded_urls, package_list=None):
         """ from a package_list of packages to upload, collect from the backup-sources cache
-        the matching references to upload those backups too
+        the matching references to upload those backups too.
+        If no package_list is passed, it gets all
         """
-        def should_upload_sources(package):
-            return any(prev["upload"] for prev in package["revisions"].values())
-
-        files_to_upload = []
         path_backups = os.path.join(self._path, self._SOURCE_BACKUP)
 
         if not os.path.exists(path_backups):
@@ -60,25 +57,34 @@ def should_upload_sources(package):
         if excluded_urls is None:
             excluded_urls = []
 
+        def has_excluded_urls(backup_urls):
+            return all(any(url.startswith(excluded_url)
+                           for excluded_url in excluded_urls)
+                       for url in backup_urls)
+
+        def should_upload_sources(package):
+            return any(prev["upload"] for prev in package["revisions"].values())
+
         all_refs = set()
-        for k, ref in package_list.refs().items():
-            packages = ref.get("packages", {}).values()
-            if ref.get("upload") or any(should_upload_sources(p) for p in packages):
-                all_refs.add(str(k))
-
-        for f in os.listdir(path_backups):
-            if f.endswith(".json"):
-                f = os.path.join(path_backups, f)
-                content = json.loads(load(f))
-                refs = content["references"]
-                # unknown entries are not uploaded at this moment, the flow is not expected.
+        if package_list is not None:
+            for k, ref in package_list.refs().items():
+                packages = ref.get("packages", {}).values()
+                if ref.get("upload") or any(should_upload_sources(p) for p in packages):
+                    all_refs.add(str(k))
+
+        files_to_upload = []
+
+        for path in os.listdir(path_backups):
+            if not path.endswith(".json"):
+                blob_path = os.path.join(path_backups, path)
+                metadata_path = os.path.join(blob_path + ".json")
+                metadata = json.loads(load(metadata_path))
+                refs = metadata["references"]
+                # unknown entries are not uploaded at this moment unless no package_list is passed
                 for ref, urls in refs.items():
-                    is_excluded = all(any(url.startswith(excluded_url)
-                                          for excluded_url in excluded_urls)
-                                      for url in urls)
-                    if not is_excluded and ref in all_refs:
-                        files_to_upload.append(f)
-                        files_to_upload.append(f[:-5])
+                    if not has_excluded_urls(urls) and (package_list is None or ref in all_refs):
+                        files_to_upload.append(metadata_path)
+                        files_to_upload.append(blob_path)
                         break
         return files_to_upload
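The rewritten loop walks the backup folder blob-first and pairs each blob with its sibling metadata file, instead of iterating the .json files. A minimal sketch of the on-disk layout it expects, under stated assumptions: the folder path and reference name here are illustrative; the blob being stored under its sha256 checksum with a "references" map in the .json sidecar is what the code above reads:

    import json
    import os

    backups_dir = "/tmp/backups/s"  # hypothetical <download_cache>/s folder
    os.makedirs(backups_dir, exist_ok=True)

    sha256 = "315f5bdb76d078c43b8ac0064e4a0164612b1fce77c869345bfc94c75894edd3"
    # The cached source blob itself, stored under its checksum name
    with open(os.path.join(backups_dir, sha256), "w") as f:
        f.write("Hello, world!")
    # Sidecar metadata: which references fetched this blob, and from which URLs
    metadata = {"references": {"pkg/1.0": ["https://example.com/internet/myfile.txt"]}}
    with open(os.path.join(backups_dir, sha256 + ".json"), "w") as f:
        json.dump(metadata, f)
    # get_backup_sources_files_to_upload() would return both the .json and the blob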
31 changes: 31 additions & 0 deletions conans/test/integration/cache/backup_sources_test.py
@@ -628,3 +628,34 @@ def source(self):
         self.client.run("upload * --only-recipe -c -r=default")
         # Ensure we are testing for an already uploaded recipe
         assert f"Recipe 'pkg/1.0#{exported_rev}' already in server, skipping upload" in self.client.out
+
+    def test_source_then_upload_workflow(self):
+        mkdir(os.path.join(self.download_cache_folder, "s"))
+
+        http_server_base_folder_internet = os.path.join(self.file_server.store, "internet")
+        http_server_base_folder_backup = os.path.join(self.file_server.store, "backup")
+
+        sha256 = "315f5bdb76d078c43b8ac0064e4a0164612b1fce77c869345bfc94c75894edd3"
+        save(os.path.join(http_server_base_folder_internet, "myfile.txt"), "Hello, world!")
+
+        conanfile = textwrap.dedent(f"""
+            from conan import ConanFile
+            from conan.tools.files import download
+            class Pkg2(ConanFile):
+                def source(self):
+                    download(self, "{self.file_server.fake_url}/internet/myfile.txt", "myfile.txt",
+                             sha256="{sha256}")
+            """)
+
+        self.client.save(
+            {"global.conf": f"core.sources:download_cache={self.download_cache_folder}\n"
+                            f"core.sources:download_urls=['{self.file_server.fake_url}/backup/', 'origin']\n"
+                            f"core.sources:upload_url={self.file_server.fake_url}/backup/"},
+            path=self.client.cache.cache_folder)
+
+        self.client.save({"conanfile.py": conanfile})
+        self.client.run("source .")
+        self.client.run("cache backup-upload")
+        # This used to crash because we were trying to list a missing dir if only exports were made
+        assert "[Errno 2] No such file or directory" not in self.client.out
+        assert sha256 in os.listdir(http_server_base_folder_backup)
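For reference, the three confs this feature relies on, as exercised by the test above; a sketch in global.conf form, with an illustrative server URL:

    core.sources:download_cache=/path/to/local/backups
    core.sources:download_urls=['https://myserver.example/backup/', 'origin']
    core.sources:upload_url=https://myserver.example/backup/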