import argparse
import platform
import subprocess
import sys
from pathlib import Path


def describe() -> dict:
    """Collect interpreter/platform identifiers used to tag the generated files."""
    meta = {
        "python": "py" + ".".join(platform.python_version_tuple()[:2]),
        "platform": sys.platform,
        "machine": platform.machine(),
    }
    meta["file-prefix"] = "{python}-{platform}-{machine}-".format_map(meta)
    return meta


def generate_in_files(args) -> list[Path]:
    """Write a platform-prefixed copy of each source file, expanding placeholders."""
    meta = describe()
    outpaths = []
    for fname in args.SRC_FILES:
        inpath = Path(fname)
        outpath = inpath.parent / f"{meta['file-prefix']}{inpath.name}"
        outpaths.append(outpath)
        print(f"Generating {outpath}")
        # Any {python}/{platform}/{machine} placeholders in the file content are expanded.
        with open(inpath, "r") as f:
            data = f.read().format_map(meta)
        with open(outpath, "w") as of:
            of.write(data)
    return outpaths


def compile(paths: list[Path], pipcompile_args: list[str]):
    """Run pip-compile on each generated file, forwarding any extra CLI arguments."""
    for fpath in paths:
        finalargs = ["pip-compile"] + pipcompile_args + [str(fpath)]
        print(" ".join(finalargs))
        subprocess.check_call(finalargs)


def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("SRC_FILES", nargs="+")
    parser.add_argument("--compile", action="store_true")
    # Unrecognized options are collected in `unknown` and passed through to pip-compile.
    args, unknown = parser.parse_known_args()
    outpaths = generate_in_files(args)
    if args.compile:
        compile(outpaths, unknown)


if __name__ == "__main__":
    main()
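For illustration, a minimal usage sketch follows (the script name pin_envs.py and the listed packages are hypothetical; the generated names assume CPython 3.11 on Linux/x86_64). The script writes a prefixed copy of each template next to the original, expanding any {python}/{platform}/{machine} placeholders in its content, and with --compile hands the copy to pip-compile, which by default writes the pinned result to a .txt file with the same stem. Unrecognized command-line options such as --generate-hashes are forwarded to pip-compile.

# requirements.in (the template; placeholders in the content are optional)
numpy
requests

$ python pin_envs.py requirements.in --compile --generate-hashes
Generating py3.11-linux-x86_64-requirements.in
pip-compile --generate-hashes py3.11-linux-x86_64-requirements.in
# pip-compile then writes py3.11-linux-x86_64-requirements.txt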
@webknjaz Interesting, thanks. Is the latter comment your personal opinion or an official statement (concerning when to pin and when not to)? I mostly agree, but I have also found rare use-cases for library projects having pinned deps. Anyhow, what's the current state of managing pinned dependencies cross-platform with pip-tools?
Kinda personal, though some PyPA folks agree that pinning is widely misunderstood. I don't think there are "official statements" in the project, short of "the ecosystem doesn't support cross-compilation, so it's impossible; it might be once such support is in place". I know that Filipe Laíns is working on a PEP to make CPython expose better details of the runtimes, which is one of the blockers, and projects like Beeware/Briefcase are looking into the cross-compilation side of things.
Here are some notes from this year's packaging summit https://hackmd.io/@CAM-Gerlach/py-packaging-summit-2023.
So the current state hasn't changed, except for maybe the new ability to pin the build-env deps, which isn't released yet.
As for libraries pinning their runtime deps: that may work for somebody who doesn't expect their code to be used outside their own application, which might be a legit use-case for that specific person/project, but it wouldn't really be usable by the wider community. FWIW, I'm happy with my per-env pinning setup, short of the non-reusable tox integration that I need to fix.
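For context, per-env pinning means keeping one fully pinned lockfile per interpreter/platform combination and selecting the matching one at install time. Here is a sketch of the consumption side, reusing the prefix scheme from the script above (the helper name pick_lock.py is hypothetical and this isn't necessarily the setup referred to here):

# pick_lock.py: print the lockfile name matching the current environment
import platform
import sys

prefix = "py" + ".".join(platform.python_version_tuple()[:2])
print(f"{prefix}-{sys.platform}-{platform.machine()}-requirements.txt")
# e.g. py3.11-linux-x86_64-requirements.txt; install it with:
#   pip install -r "$(python pick_lock.py)"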
See also: