Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[Debugging] Print info about link_only items #348

Closed
wants to merge 11 commits into from
71 changes: 36 additions & 35 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,51 +16,52 @@ concurrency:
cancel-in-progress: true

jobs:
test-windows:
env:
PYTHONUNBUFFERED: "1"
FORCE_COLOR: "1"
runs-on: windows-latest
steps:
- uses: actions/checkout@v3
- uses: conda-incubator/setup-miniconda@v2
with:
python-version: ${{ matrix.python-version }}
channels: conda-forge
miniforge-variant: Mambaforge
miniforge-version: 22.9.0-1
auto-update-conda: false
# test-windows:
# env:
# PYTHONUNBUFFERED: "1"
# FORCE_COLOR: "1"
# runs-on: windows-latest
# steps:
# - uses: actions/checkout@v3
# - uses: conda-incubator/setup-miniconda@v2
# with:
# python-version: ${{ matrix.python-version }}
# channels: conda-forge
# miniforge-variant: Mambaforge
# miniforge-version: 22.9.0-1
# auto-update-conda: false

- name: Install dev deps
run: |
conda activate test
mamba install pip pytest-cov pytest-xdist
python -m pip install -r requirements-dev.txt
# - name: Install dev deps
# run: |
# conda activate test
# mamba install pip pytest-cov pytest-xdist
# python -m pip install -r requirements-dev.txt

- name: install conda-lock
run: |
conda activate test
pip install -e . --force-reinstall
# - name: install conda-lock
# run: |
# conda activate test
# pip install -e . --force-reinstall

- name: run-test
run: |
conda activate test
copy pyproject.toml "%RUNNER_TEMP%"
Xcopy /E /I tests "%RUNNER_TEMP%\\tests"
pushd "${RUNNER_TEMP}"
set TMPDIR="%RUNNER_TEMP%"
dir
pytest --cov=conda_lock --cov-branch --cov-report=xml --cov-report=term tests
copy coverage.xml %GITHUB_WORKSPACE%
- uses: codecov/codecov-action@v3
# - name: run-test
# run: |
# conda activate test
# copy pyproject.toml "%RUNNER_TEMP%"
# Xcopy /E /I tests "%RUNNER_TEMP%\\tests"
# pushd "${RUNNER_TEMP}"
# set TMPDIR="%RUNNER_TEMP%"
# dir
# pytest tests
# copy coverage.xml %GITHUB_WORKSPACE%
# - uses: codecov/codecov-action@v3

test:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ ubuntu-latest, macos-latest ]
python-version: [ "3.8", "3.11" ]
python-version: [ "3.8", "3.9", "3.10", "3.11" ]
run-multiplier: [ A, B ]
defaults:
run:
shell: bash -l {0}
Expand Down
170 changes: 138 additions & 32 deletions conda_lock/conda_solver.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
import subprocess
import sys
import tempfile
import time

from contextlib import contextmanager
from typing import (
Expand Down Expand Up @@ -210,8 +211,47 @@ def normalize_url(url: str) -> str:
return planned


def print_pkgs_dirs_contents(pkgs_dirs: List[pathlib.Path]) -> None:
    """Dump the top-level contents of each conda package cache directory.

    Debugging helper: for every directory in *pkgs_dirs*, print a banner
    identifying the directory, followed by one line per entry inside it.
    """
    for pkgs_dir in pkgs_dirs:
        # Banner first, so the entries printed below are attributable
        # to their cache directory.
        banner = f"\n\n---\n{pkgs_dir=}\n\n---\n"
        print(banner)
        for cache_entry in pkgs_dir.iterdir():
            print(cache_entry)


def _get_repodata_record(
pkgs_dirs: List[pathlib.Path], dist_name: str
) -> Optional[FetchAction]:
"""Get the repodata_record.json of a given distribution from the package cache.
On rare occasion during the CI tests, conda fails to find a package in the
package cache, perhaps because the package is still being processed? Waiting for
0.1 seconds seems to solve the issue. Here we allow for a full second to elapse
before giving up.
"""
NUM_RETRIES = 10
for retry in range(1, NUM_RETRIES + 1):
for pkgs_dir in pkgs_dirs:
record = pkgs_dir / dist_name / "info" / "repodata_record.json"
if record.exists():
with open(record) as f:
repodata: FetchAction = json.load(f)
return repodata
logger.warn(
f"Failed to find repodata_record.json for {dist_name}. "
f"Retrying in 0.1 seconds ({retry}/{NUM_RETRIES})"
)
time.sleep(0.1)
logger.warn(f"Failed to find repodata_record.json for {dist_name}. Giving up.")
return None


def _reconstruct_fetch_actions(
conda: PathLike, platform: str, dry_run_install: DryRunInstall
conda: PathLike,
platform: str,
dry_run_install: DryRunInstall,
proc: Optional[subprocess.CompletedProcess],
solve_or_update: str,
args: Optional[List[str]],
env: Optional[Dict[str, str]],
) -> DryRunInstall:
"""
Conda may choose to link a previously downloaded distribution from pkgs_dirs rather
Expand Down Expand Up @@ -246,19 +286,11 @@ def _reconstruct_fetch_actions(

for link_pkg_name in link_only_names:
link_action = link_actions[link_pkg_name]
for pkgs_dir in pkgs_dirs:
record = (
pkgs_dir
/ link_action["dist_name"]
/ "info"
/ "repodata_record.json"
)
if record.exists():
with open(record) as f:
repodata: FetchAction = json.load(f)
break
else:
raise FileExistsError(
repodata = _get_repodata_record(pkgs_dirs, link_action["dist_name"])
if repodata is None:
print(f"\n\n---\n{link_pkg_name=}\n\n---\n{link_action=}")
print_pkgs_dirs_contents(pkgs_dirs)
raise FileNotFoundError(
f'Distribution \'{link_action["dist_name"]}\' not found in pkgs_dirs {pkgs_dirs}'
)
dry_run_install["actions"]["FETCH"].append(repodata)
Expand All @@ -269,20 +301,85 @@ def _reconstruct_fetch_actions(
# this change in the future.
for link_pkg_name in link_only_names:
item = cast(Dict[str, Any], link_actions[link_pkg_name])
repodata = {
"channel": item["channel"],
"constrains": item.get("constrains"),
"depends": item.get("depends"),
"fn": item["fn"],
"md5": item["md5"],
"name": item["name"],
"subdir": item["subdir"],
"timestamp": item["timestamp"],
"url": item["url"],
"version": item["version"],
"sha256": item.get("sha256"),
}

try:
repodata = {
"channel": item["channel"],
"constrains": item.get("constrains"),
"depends": item.get("depends"),
"fn": item["fn"],
"md5": item["md5"],
"name": item["name"],
"subdir": item["subdir"],
"timestamp": item["timestamp"],
"url": item["url"],
"version": item["version"],
"sha256": item.get("sha256"),
}
except KeyError as e:
print(f"\n\n---\n{link_actions=}\n\n---\n{fetch_actions=}")
print(f"\n\n---\n{link_pkg_name=}, {link_only_names=}")
print(f"\n\n---\n{item=}")
print(f"\n\n---\n{args=}")
print(f"\n\n---\n{env=}")
if proc is not None:
print(f"\n\n---\n{proc.stdout=}")
print(f"\n\n---\n{proc.stderr=}")
if args is not None:

def print_proc(proc: subprocess.CompletedProcess) -> None:
print(f" Command: {proc.args}", file=sys.stderr)
if proc.stdout:
print(f" STDOUT:\n{proc.stdout}", file=sys.stderr)
if proc.stderr:
print(f" STDERR:\n{proc.stderr}", file=sys.stderr)

print("\n\n---\nWith mamba:\n")
proc = subprocess.run(
["mamba"] + args[1:],
env=env,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
print_proc(proc)
print("\n\n---\nWith conda:\n")
proc = subprocess.run(
["conda"] + args[1:],
env=env,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
print_proc(proc)
print("\n\n---\nAgain with micromamba:\n")
proc = subprocess.run(
args, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
print_proc(proc)
print("\n\n---\n")
pkgs_dirs = [
pathlib.Path(d)
for d in json.loads(
extract_json_object(
subprocess.check_output(
["conda", "info", "--json"],
env=conda_env_override(platform),
).decode()
)
)["pkgs_dirs"]
]
dist_name = item["fn"].removesuffix(".tar.bz2").removesuffix(".conda")
for pkgs_dir in pkgs_dirs:
record = pkgs_dir / dist_name / "info" / "repodata_record.json"
if record.exists():
with open(record) as f:
repodata2: FetchAction = json.load(f)
break
else:
raise FileExistsError(
f"Distribution '{dist_name}' not found in pkgs_dirs {pkgs_dirs}"
)
print(f"{record=}, {repodata2=}")
print("\n\n---\n")
raise e
dry_run_install["actions"]["FETCH"].append(repodata)
return dry_run_install

Expand Down Expand Up @@ -331,9 +428,10 @@ def solve_specs_for_arch(
args.extend(["--channel", "msys2"])
args.extend(specs)
logger.info("%s using specs %s", platform, specs)
env = conda_env_override(platform)
proc = subprocess.run(
[str(arg) for arg in args],
env=conda_env_override(platform),
env=env,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
encoding="utf8",
Expand Down Expand Up @@ -371,7 +469,9 @@ def print_proc(proc: subprocess.CompletedProcess) -> None:

try:
dryrun_install: DryRunInstall = json.loads(extract_json_object(proc.stdout))
return _reconstruct_fetch_actions(conda, platform, dryrun_install)
return _reconstruct_fetch_actions(
conda, platform, dryrun_install, proc, "solve", list(args), env
)
except json.JSONDecodeError:
raise

Expand Down Expand Up @@ -454,12 +554,13 @@ def update_specs_for_arch(
"update" if is_micromamba(conda) else "install",
*_get_conda_flags(channels=channels, platform=platform),
]
env = conda_env_override(platform)
proc = subprocess.run(
[
str(arg)
for arg in args + ["-p", prefix, "--json", "--dry-run", *to_update]
],
env=conda_env_override(platform),
env=env,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
encoding="utf8",
Expand All @@ -476,6 +577,9 @@ def update_specs_for_arch(
dryrun_install: DryRunInstall = json.loads(proc.stdout)
else:
dryrun_install = {"actions": {"LINK": [], "FETCH": []}}
env = None
proc = None
args = None

if "actions" not in dryrun_install:
dryrun_install["actions"] = {"LINK": [], "FETCH": []}
Expand Down Expand Up @@ -512,7 +616,9 @@ def update_specs_for_arch(
}
)
dryrun_install["actions"]["LINK"].append(entry)
return _reconstruct_fetch_actions(conda, platform, dryrun_install)
return _reconstruct_fetch_actions(
conda, platform, dryrun_install, proc, "update", args, env
)


@contextmanager
Expand Down