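# Augments conda packages with an info/imports.json vendoring/overlinking record.
# The script extracts each package, recreates its build and host environments with conda-build,
# inspects every binary's DSO and static-library linkage (via LIEF/pyldd), and repacks the
# package with the resulting record. The subdir is hard-coded to osx-64 below.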
import argparse
try:
    from conda.base.constants import CONDA_TARBALL_EXTENSIONS
except Exception:
    from conda.base.constants import CONDA_TARBALL_EXTENSION
    CONDA_TARBALL_EXTENSIONS = (CONDA_TARBALL_EXTENSION,)
from conda_build import environ
from conda_build import utils
from conda_build.conda_interface import memoized, iteritems
from conda_build.os_utils.liefldd import (get_exports, get_imports,
                                          get_relocations, get_runpaths, get_symbols)
from conda_build.os_utils.pyldd import codefile_type, inspect_linkages
from conda_build.inspect_pkg import which_package
import contextlib
import fnmatch
import glob2
from glob2 import glob
import json
import libarchive
import logging
import os
from os import chdir, getcwd, makedirs
from os.path import exists, isabs, join
import re
import subprocess
import shutil
import sys
import tempfile
from tempfile import NamedTemporaryFile
import yaml

conda_subdir = 'osx-64'


@contextlib.contextmanager
def tmp_chdir(dest):
    curdir = getcwd()
    try:
        chdir(dest)
        yield
    finally:
        chdir(curdir)


def tar_xf(tarball, dir_path):
    flags = libarchive.extract.EXTRACT_TIME | \
            libarchive.extract.EXTRACT_PERM | \
            libarchive.extract.EXTRACT_SECURE_NODOTDOT | \
            libarchive.extract.EXTRACT_SECURE_SYMLINKS | \
            libarchive.extract.EXTRACT_SECURE_NOABSOLUTEPATHS
    if not isabs(tarball):
        tarball = join(getcwd(), tarball)
    with tmp_chdir(dir_path):
        libarchive.extract_file(tarball, flags)


def print_msg(errors, text):
    if text.startswith(" ERROR"):
        errors.append(text)
    print(text)


@memoized
def get_exports_sha256(filename, sha256, arch='native'):
    return get_exports(filename, arch=arch)


def get_exports_cached(filename, arch='native'):
    from conda_build.utils import sha256_checksum
    # Use realpath so memoize hashes symlinks as their targets.
    return get_exports_sha256(os.path.realpath(filename), sha256_checksum(filename), arch=arch)
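

# check_overlinking() walks the package's files, resolves each binary's needed DSOs, maps those
# DSOs (and any static libraries whose symbols appear in the binary) to the conda packages that
# own them, and reports whitelisted, overlinked, vendored and missing dependencies. It returns
# the vendoring record for the package when one was produced.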
def check_overlinking(pkg_name, pkg_version, build_str, build_number, ignore_run_exports,
                      requirements_run, requirements_build, host_prefix, build_prefix,
                      missing_dso_whitelist, files):
    error_static_linking = False
    error_overlinking = True
    verbose = True
    errors = []
    vendoring_record = dict()
    pkg_vendoring_name = pkg_name
    pkg_vendoring_version = pkg_version
    pkg_vendoring_build_str = build_str
    pkg_vendoring_build_number = build_number
    pkg_vendoring_key = '-'.join([pkg_vendoring_name, pkg_vendoring_version, pkg_vendoring_build_str])
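
    # Stand-in for a conda Dist record so that the package being inspected can appear as an
    # "owner" of its own files.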
    class FakeDist:
        def __init__(self, name, version, build_number, build_str):
            self.name = name
            self.quad = [name]
            self.version = version
            self.build_number = build_number
            self.build_string = build_str
    pkg_vendored_dist = FakeDist(pkg_vendoring_name,
                                 pkg_vendoring_version,
                                 pkg_vendoring_build_number,
                                 pkg_vendoring_build_str)

    ignore_list = utils.ensure_list(ignore_run_exports)
    ignore_list_syms = ['main', '_main', '*get_pc_thunk*', '___clang_call_terminate', '_timeout']
    ignore_for_statics = ['gcc_impl_linux*', 'compiler-rt*', 'llvm-openmp*', 'gfortran_osx*']
    run_reqs = [req.split(' ')[0] for req in requirements_run]
    # For checking if static linking has happened
    build_reqs = [req.split(' ')[0] for req in requirements_build]
    # sysroots and whitelists are similar, but the subtle distinctions are important.
    sysroots = glob(os.path.join(build_prefix, '**', 'sysroot'))
    whitelist = []

    # LIEF is very slow at decoding some DSOs, so we only let it look at ones that we link to
    # (and ones we have built).
    all_needed_dsos = set()
    needed_dsos_for_file = dict()
    for f in files:
        path = os.path.join(host_prefix, f)
        if not codefile_type(path):
            continue
        needed = inspect_linkages(path, resolve_filenames=True, recurse=False)
        needed_dsos_for_file[f] = needed
        needed = [os.path.relpath(n, host_prefix) if n.startswith(host_prefix)
                  else n for n in needed]
        all_needed_dsos = all_needed_dsos.union(needed)
        all_needed_dsos.add(f)

    # Form a mapping of file => package
    prefix_owners = {}
    contains_dsos = {}
    contains_static_libs = {}
    # Used for both dsos and static_libs
    all_lib_exports = {}
    for prefix in (host_prefix, build_prefix):
        for subdir, dirs, filez in os.walk(prefix):
            for file in filez:
                fp = os.path.join(subdir, file)
                dynamic_lib = any(glob2.fnmatch.fnmatch(fp, ext) for ext in ('*.so*', '*.dylib*', '*.dll')) and \
                    codefile_type(fp, skip_symlinks=False) is not None
                static_lib = any(glob2.fnmatch.fnmatch(fp, ext) for ext in ('*.a', '*.lib'))
                # Looking at all the files is very slow.
                if not dynamic_lib and not static_lib:
                    continue
                rp = os.path.relpath(fp, prefix)
                if dynamic_lib and rp not in all_needed_dsos:
                    continue
                if rp in all_lib_exports:
                    continue
                owners = prefix_owners[rp] if rp in prefix_owners else []
                # Self-vendoring, not such a big deal but may as well report it?
                if not len(owners):
                    if rp in files:
                        owners.append(pkg_vendored_dist)
                new_pkgs = list(which_package(rp, prefix))
                # Cannot filter here as this means the DSO (eg libomp.dylib) will not be found in any package
                # [owners.append(new_pkg) for new_pkg in new_pkgs if new_pkg not in owners
                #  and not any([glob2.fnmatch.fnmatch(new_pkg.name, i) for i in ignore_for_statics])]
                for new_pkg in new_pkgs:
                    if new_pkg not in owners:
                        owners.append(new_pkg)
                prefix_owners[rp] = owners
                if len(prefix_owners[rp]):
                    exports = set(e for e in get_exports_cached(fp) if not
                                  any(glob2.fnmatch.fnmatch(e, pattern) for pattern in ignore_list_syms))
                    all_lib_exports[rp] = exports
                    # Check codefile_type to filter out linker scripts.
                    if dynamic_lib:
                        contains_dsos[prefix_owners[rp][0]] = True
                    elif static_lib:
                        if 'sysroot' in fp:
                            if (prefix_owners[rp][0].name.startswith('gcc_impl_linux') or
                                    prefix_owners[rp][0].name == 'llvm'):
                                continue
                            print("sysroot in {}, owner is {}".format(fp, prefix_owners[rp][0]))
                        contains_static_libs[prefix_owners[rp][0]] = True

    for f in files:
        path = os.path.join(host_prefix, f)
        if not codefile_type(path):
            continue
        needed = needed_dsos_for_file[f]
        for needed_dso in needed:
            if needed_dso.startswith(host_prefix):
                in_prefix_dso = os.path.normpath(needed_dso.replace(host_prefix + '/', ''))
                if in_prefix_dso not in prefix_owners:
                    print("What a terrible failure {} not in prefix_owners".format(in_prefix_dso))
                    sys.exit(1)

    if conda_subdir == 'osx-64':
        if not len(sysroots):
            sysroots = ['/usr/lib', '/opt/X11', '/System/Library/Frameworks']
        whitelist = ['/opt/X11/',
                     '/usr/lib/libSystem.B.dylib',
                     '/usr/lib/libcrypto.0.9.8.dylib',
                     '/usr/lib/libobjc.A.dylib',
                     '/System/Library/Frameworks/Accelerate.framework/*',
                     '/System/Library/Frameworks/AGL.framework/*',
                     '/System/Library/Frameworks/AppKit.framework/*',
                     '/System/Library/Frameworks/ApplicationServices.framework/*',
                     '/System/Library/Frameworks/AudioToolbox.framework/*',
                     '/System/Library/Frameworks/AudioUnit.framework/*',
                     '/System/Library/Frameworks/AVFoundation.framework/*',
                     '/System/Library/Frameworks/CFNetwork.framework/*',
                     '/System/Library/Frameworks/Carbon.framework/*',
                     '/System/Library/Frameworks/Cocoa.framework/*',
                     '/System/Library/Frameworks/CoreAudio.framework/*',
                     '/System/Library/Frameworks/CoreFoundation.framework/*',
                     '/System/Library/Frameworks/CoreGraphics.framework/*',
                     '/System/Library/Frameworks/CoreMedia.framework/*',
                     '/System/Library/Frameworks/CoreBluetooth.framework/*',
                     '/System/Library/Frameworks/CoreMIDI.framework/*',
                     '/System/Library/Frameworks/CoreMedia.framework/*',
                     '/System/Library/Frameworks/CoreServices.framework/*',
                     '/System/Library/Frameworks/CoreText.framework/*',
                     '/System/Library/Frameworks/CoreVideo.framework/*',
                     '/System/Library/Frameworks/CoreWLAN.framework/*',
                     '/System/Library/Frameworks/DiskArbitration.framework/*',
                     '/System/Library/Frameworks/Foundation.framework/*',
                     '/System/Library/Frameworks/GameController.framework/*',
                     '/System/Library/Frameworks/GLKit.framework/*',
                     '/System/Library/Frameworks/ImageIO.framework/*',
                     '/System/Library/Frameworks/IOBluetooth.framework/*',
                     '/System/Library/Frameworks/IOKit.framework/*',
                     '/System/Library/Frameworks/IOSurface.framework/*',
                     '/System/Library/Frameworks/OpenAL.framework/*',
                     '/System/Library/Frameworks/OpenGL.framework/*',
                     '/System/Library/Frameworks/Quartz.framework/*',
                     '/System/Library/Frameworks/QuartzCore.framework/*',
                     '/System/Library/Frameworks/Security.framework/*',
                     '/System/Library/Frameworks/StoreKit.framework/*',
                     '/System/Library/Frameworks/SystemConfiguration.framework/*',
                     '/System/Library/Frameworks/WebKit.framework/*']
    whitelist += missing_dso_whitelist

    runpath_whitelist = []
    usage_of_run_req = dict()
    for f in files:
        path = os.path.join(host_prefix, f)
        if not codefile_type(path):
            continue
        warn_prelude = "WARNING ({},{})".format(pkg_name, f)
        err_prelude = " ERROR ({},{})".format(pkg_name, f)
        info_prelude = " INFO ({},{})".format(pkg_name, f)
        msg_prelude = err_prelude if error_overlinking else warn_prelude
        try:
            runpaths = get_runpaths(path)
        except:
            print_msg(errors, '{}: pyldd.py failed to process'.format(warn_prelude))
            continue
        if runpaths and not (runpath_whitelist or
                             any(fnmatch.fnmatch(f, w) for w in runpath_whitelist)):
            print_msg(errors, '{}: runpaths {} found in {}'.format(msg_prelude,
                                                                   runpaths,
                                                                   path))
        needed = inspect_linkages(path, resolve_filenames=True, recurse=False)
        imps = get_imports(path, None)
        exps = get_exports(path, None)
        relocs = get_relocations(path, None)
        imported_syms = set(get_symbols(path, defined=False, undefined=True, arch=None))
        defined_syms = set(get_symbols(path, defined=True, undefined=False, arch=None))
        # Need to remove symbols that are found in any DSOs from syms? Which will get used though?
        # We first check for satisfaction from dylibs and then remove those from consideration during
        # the second static lib pass.
        dso_satisfied = set()
        for linkage_type in ('dynamic', 'static'):
            for lib_name, lib_exp in iteritems(all_lib_exports):
                if any(glob2.fnmatch.fnmatch(lib_name, ext) for ext in ('*.so*', '*.dylib*', '*.dll')):
                    type = 'dynamic'
                elif any(glob2.fnmatch.fnmatch(lib_name, ext) for ext in ('*.a', '*.lib')):
                    type = 'static'
                else:
                    print("ERROR :: What type of file is this? seems to be a code file {}".format(lib_name))
                    sys.exit(1)
                if type != linkage_type:
                    continue
                # As it stands, the code will find exports and imports from a DSO to itself. These are irrelevant.
                full_lib_name = os.path.realpath(os.path.join(host_prefix, lib_name))
                full_path = os.path.realpath(path)
                if full_lib_name == full_path:
                    continue
                links_to_this_dylib = full_lib_name in needed
                if type == 'dynamic' and not links_to_this_dylib:
                    continue
                if len(lib_exp):
                    exports_copy = lib_exp.copy()
                    # Fetch this now so the static pass can filter out dynamically satisfied symbols.
                    old_records = vendoring_record[pkg_vendoring_key] if pkg_vendoring_key in vendoring_record \
                        else dict({})
                    if type == 'dynamic':
                        isect = exports_copy.intersection(imps)
                        perc_used = float(len(isect)) / float(len(exports_copy))
                    if type == 'dynamic':
                        dso_satisfied.update(isect)
                    elif type == 'static':
                        # Remove symbols satisfied by DSOs (imported_syms)
                        isect = exports_copy.intersection(defined_syms - dso_satisfied)
                        perc_used = float(len(isect)) / float(len(exports_copy))
                    if perc_used > 0.0:
                        pkg_vendored = prefix_owners[lib_name][0]
                        if any([glob2.fnmatch.fnmatch(pkg_vendored.quad[0], i) for i in ignore_for_statics]):
                            continue
                        # We never report self-vendoring as a warning or an error.
                        vendoring = 'VENDORING'
                        if pkg_vendored.quad[0] and not isinstance(pkg_vendored, FakeDist):
                            static_prelude = err_prelude if error_static_linking else warn_prelude
                        else:
                            # Only ever informational.
                            static_prelude = info_prelude
                        if isinstance(pkg_vendored, FakeDist):
                            vendoring = 'SELFVENDORING'
                        vendoring_details = dict({'from_file': lib_name,
                                                  'from_name': pkg_vendored.quad[0],
                                                  'from_version': pkg_vendored.version,
                                                  'from_build_number': pkg_vendored.build_number,
                                                  'from_build_string': pkg_vendored.build_string,
                                                  'total_symbols': len(exports_copy),
                                                  'used_symbols': sorted(list(isect)),
                                                  })
                        if f in old_records:
                            old_records[f].append(dict({type: vendoring_details}))
                        else:
                            old_records[f] = [dict({type: vendoring_details})]
                        vendoring_record[pkg_vendoring_key] = old_records
                        from_pkg = '-'.join([pkg_vendored.quad[0], pkg_vendored.version, pkg_vendored.build_string])
                        print_msg(errors, "{pre}: {type}: {perc:.2%} of {total} entry-point symbols from {ln} used: {l}".format(
                            pre=static_prelude if type == 'static' else info_prelude,
                            type=vendoring if type == 'static' else 'IMPORTING',
                            total=vendoring_details['total_symbols'],
                            perc=perc_used,
                            ln=lib_name + ' in ' + from_pkg,
                            l=sorted(list(isect))[:10]))
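
        # Report each needed DSO below: whitelisted, satisfied by a run requirement, owned by
        # another package (overlinking), found in a sysroot/CDT package, or not found at all.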
        for needed_dso in needed:
            if needed_dso.startswith(host_prefix):
                in_prefix_dso = os.path.normpath(needed_dso.replace(host_prefix + '/', ''))
                n_dso_p = "Needed DSO {}".format(in_prefix_dso)
                and_also = " (and also in this package)" if in_prefix_dso in files else ""
                # Filter out the package itself here.
                pkgs = [p for p in prefix_owners[in_prefix_dso] if not isinstance(p, FakeDist)]
                in_pkgs_in_run_reqs = [pkg.quad[0] for pkg in pkgs if pkg.quad[0] in run_reqs]
                if len(in_pkgs_in_run_reqs) == 1 and in_pkgs_in_run_reqs[0]:
                    if in_pkgs_in_run_reqs[0] in usage_of_run_req:
                        usage_of_run_req[in_pkgs_in_run_reqs[0]].append(f)
                    else:
                        usage_of_run_req[in_pkgs_in_run_reqs[0]] = [f]
                in_whitelist = any([glob2.fnmatch.fnmatch(in_prefix_dso, w) for w in whitelist])
                if in_whitelist:
                    print_msg(errors, '{}: {} found in the whitelist'.
                              format(info_prelude, n_dso_p))
                elif len(in_pkgs_in_run_reqs) == 1 and verbose:
                    print_msg(errors, '{}: {} found in {}{}'.format(info_prelude,
                                                                    n_dso_p,
                                                                    in_pkgs_in_run_reqs[0],
                                                                    and_also))
                elif len(in_pkgs_in_run_reqs) == 0 and len(pkgs) > 0:
                    print_msg(errors, '{}: {} found in {}{}'.format(msg_prelude,
                                                                    n_dso_p,
                                                                    [p.quad[0] for p in pkgs],
                                                                    and_also))
                    print_msg(errors, '{}: .. but {} not in reqs/run, i.e. it is overlinked'
                                      ' (likely) or a missing dependency (less likely)'.
                              format(msg_prelude, [p.quad[0] for p in pkgs]))
                elif len(in_pkgs_in_run_reqs) > 1:
                    # in_pkgs_in_run_reqs already holds package names (strings), so report it directly.
                    print_msg(errors, '{}: {} found in multiple packages in run/reqs: {}{}'
                              .format(warn_prelude,
                                      in_prefix_dso,
                                      in_pkgs_in_run_reqs,
                                      and_also))
                else:
                    if in_prefix_dso not in files:
                        print_msg(errors, '{}: {} not found in any packages'.format(msg_prelude,
                                                                                    in_prefix_dso))
                    elif verbose:
                        print_msg(errors, '{}: {} found in this package'.format(info_prelude,
                                                                                in_prefix_dso))
            elif needed_dso.startswith(build_prefix):
                print_msg(errors, "ERROR: {} found in build prefix; should never happen".format(
                    needed_dso))
            else:
                # A system or ignored dependency. We should be able to find it in one of the CDT or
                # compiler packages on linux or in a sysroot folder on other OSes. These usually
                # start with '$RPATH/' which indicates pyldd did not find them, so remove that now.
                if needed_dso.startswith('$RPATH/'):
                    needed_dso = needed_dso.replace('$RPATH/', '')
                in_whitelist = any([glob2.fnmatch.fnmatch(needed_dso, w) for w in whitelist])
                if in_whitelist:
                    n_dso_p = "Needed DSO {}".format(needed_dso)
                    print_msg(errors, '{}: {} found in the whitelist'.
                              format(info_prelude, n_dso_p))
                elif verbose and len(sysroots):
                    # Check if we have a CDT package.
                    dso_fname = os.path.basename(needed_dso)
                    sysroot_files = []
                    for sysroot in sysroots:
                        sysroot_files.extend(glob(os.path.join(sysroot, '**', dso_fname)))
                    if len(sysroot_files):
                        # Removing the sysroot prefix is only *really* for Linux, though we could
                        # use CONDA_BUILD_SYSROOT for macOS. We should figure out what to do about
                        # /opt/X11 too.
                        # Find the longest suffix match.
                        rev_needed_dso = needed_dso[::-1]
                        match_lens = [len(os.path.commonprefix([s[::-1], rev_needed_dso]))
                                      for s in sysroot_files]
                        idx = max(range(len(match_lens)), key=match_lens.__getitem__)
                        # The sysroots above were globbed under build_prefix, so that is the
                        # prefix assumed (and stripped) here.
                        in_prefix_dso = os.path.normpath(sysroot_files[idx].replace(
                            build_prefix + '/', ''))
                        n_dso_p = "Needed DSO {}".format(in_prefix_dso)
                        pkgs = list(which_package(in_prefix_dso, build_prefix))
                        if len(pkgs):
                            print_msg(errors, '{}: {} found in CDT/compiler package {}'.
                                      format(info_prelude, n_dso_p, pkgs[0]))
                        else:
                            print_msg(errors, '{}: {} not found in any CDT/compiler package,'
                                              ' nor the whitelist?!'.
                                      format(msg_prelude, n_dso_p))
                    else:
                        print_msg(errors, "{}: {} not found in sysroot, is this binary repackaging?"
                                          " .. do you need to use install_name_tool/patchelf?".
                                  format(msg_prelude, needed_dso))
                else:
                    print_msg(errors, "{}: did not find - or even know where to look for: {}".
                              format(msg_prelude, needed_dso))

    if pkg_vendoring_key in vendoring_record:
        imports = vendoring_record[pkg_vendoring_key]
        return imports
    if len(errors):
        sys.exit(1)
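

# augment_extracted_package() reads info/files and info/recipe/meta.yaml from an extracted
# package, recreates the build and host environments the package was built against, and runs
# check_overlinking() to produce the vendoring record that ends up in info/imports.json.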
def augment_extracted_package(package_dir, prefixes_dir):
    print("Augmenting .. ")
    files = join(package_dir, 'info', 'files')
    if exists(files):
        with open(files) as fh:
            files = fh.read().splitlines()
    else:
        files = utils.prefix_files(package_dir)
    myt = join(package_dir, 'info', 'recipe', 'meta.yaml')
    if not exists(myt):
        logging.warning("{} does not exist".format(myt))
        return
    with open(myt) as fh:
        m = yaml.safe_load(fh)
    build_deps = m['requirements']['build']
    host_deps = m['requirements']['host']
    package = m['package']
    build = m['build']
    new_host_deps = host_deps.copy()
    name = package.get('name', "unknown_package")
    version = package.get('version', "0")
    string = build.get('string', "0")
    number = build.get('number', 0)
    missing_dso_whitelist = build.get('missing_dso_whitelist', [])
    host_deps.append('{} {} {}'.format(name, version, string))
    hash_host_deps = hash(frozenset(host_deps))
    hash_build_deps = hash(frozenset(build_deps))
    host_prefix = join(prefixes_dir, format(abs(hash_host_deps), 'x'))
    build_prefix = join(prefixes_dir, format(abs(hash_build_deps), 'x'))
    try:
        makedirs(host_prefix)
        makedirs(build_prefix)
    except:
        pass

    class Config(object):
        def __init__(self):
            self.debug = True
            self.timeout = 90
            self.verbose = True
            self.locking = True
            self.bldpkgs_dir = {'/tmp'}  # '/opt/conda/conda-bld/osx-64' '/opt/conda/conda-bld/noarch'
            self.max_env_retry = 3
            self.output_folder = '/tmp'
            self.disable_pip = True
            # TODO :: msys2 for Windows subdirs
            # TODO :: Detect subdir = what about noarch? Skip 'em.
            self.channel_urls = ('r', 'https://repo.anaconda.com/pkgs/main')
    config = Config()

    for env, prefix, deps in (('build', build_prefix, build_deps),
                              ('host', host_prefix, host_deps)):
        actions = environ.get_install_actions(prefix,
                                              tuple(deps), env,
                                              subdir=conda_subdir,
                                              debug=config.debug,
                                              verbose=config.verbose,
                                              locking=config.locking,
                                              bldpkgs_dirs=config.bldpkgs_dir,
                                              timeout=config.timeout,
                                              disable_pip=config.disable_pip,
                                              max_env_retry=config.max_env_retry,
                                              output_folder=config.output_folder,
                                              channel_urls=config.channel_urls)
        environ.create_env(prefix, actions, env=env, config=config,
                           subdir=conda_subdir, is_cross=True, is_conda=False)
    return check_overlinking(name,
                             version,
                             string,
                             number,
                             build.get('ignore_run_exports', []),
                             host_deps,
                             build_deps,
                             host_prefix,
                             build_prefix,
                             missing_dso_whitelist,
                             files)
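

# augment_packages() extracts each package into a temporary directory, obtains its vendoring
# record via augment_extracted_package(), writes it to info/imports.json, re-orders the file
# list (with binsort when available), and repacks one archive per supported tarball extension.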
def augment_packages(outdir, packages):
    tmpdir_package = tempfile.mkdtemp()
    tmpdir_prefixes = tempfile.mkdtemp()
    tmp_archives = []
    for package in packages:
        out_package = package
        for ext2 in CONDA_TARBALL_EXTENSIONS:
            if out_package.endswith(ext2):
                out_package = out_package[:-len(ext2)]
                break
        out_package = join(outdir, os.path.basename(out_package))
        tar_xf(package, tmpdir_package)
        imports = augment_extracted_package(tmpdir_package, tmpdir_prefixes)
        with open(os.path.join(tmpdir_package, 'info', "imports.json"), 'w') as fh:
            fh.write(json.dumps(imports, sort_keys=True, indent=2, separators=(',', ': ')))
        files = utils.prefix_files(tmpdir_package)

        def order(f):
            # we don't care about empty files so send them back via 100000
            fsize = os.stat(join(tmpdir_package, f)).st_size or 100000
            # info/* records will be False == 0, others will be 1.
            info_order = int(os.path.dirname(f) != 'info')
            if info_order:
                _, ext = os.path.splitext(f)
                # Strip any .dylib.* and .so.* and rename .dylib to .so
                ext = re.sub(r'(\.dylib|\.so).*$', r'.so', ext)
                if not ext:
                    # Files without extensions should be sorted by dirname
                    info_order = 1 + hash(os.path.dirname(f)) % (10 ** 8)
                else:
                    info_order = 1 + abs(hash(ext)) % (10 ** 8)
            return info_order, fsize

        binsort = os.path.join(sys.prefix, 'bin', 'binsort')
        if os.path.exists(binsort):
            with NamedTemporaryFile(mode='w', suffix='.filelist', delete=False) as fl:
                with tmp_chdir(tmpdir_package):
                    fl.writelines(map(lambda x: '.' + os.sep + x + '\n', files))
                    fl.close()
                    cmd = binsort + ' -t 1 -q -d -o 1000 {}'.format(fl.name)
                    out, _ = subprocess.Popen(cmd, shell=True,
                                              stdout=subprocess.PIPE).communicate()
                    files_list = out.decode('utf-8').strip().split('\n')
                    # binsort returns the absolute paths.
                    files_list = [f.split(tmpdir_package + os.sep, 1)[-1]
                                  for f in files_list]
                    os.unlink(fl.name)
        else:
            files_list = list(f for f in sorted(files, key=order))
        with tmp_chdir(tmpdir_package):
            for (ext, filter, opts) in (('.tar.bz2', 'bzip2', ''), ('.tar.zst', 'zstd', 'zstd:compression-level=22')):
                if ext not in CONDA_TARBALL_EXTENSIONS:
                    continue
                out_tar = out_package + ext
                with libarchive.file_writer(out_tar, 'gnutar', filter_name=filter, options=opts) as archive:
                    archive.add_files(*files_list)
                tmp_archives.append(out_tar)
    print('\n'.join(tmp_archives))
    shutil.rmtree(tmpdir_package)
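

# Command-line front end: parse the package paths and the optional output directory, then hand
# off to augment_packages().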
def cli():
    parser = argparse.ArgumentParser(description='Augments a package with Enterprise-specific information.')
    parser.add_argument('packages', metavar='PACKAGE', type=str, nargs='+',
                        help='packages to convert')
    parser.add_argument('--outdir', nargs='?', dest='outdir', default=getcwd(),
                        help='directory to write the augmented packages to (default: current directory)')
    args = parser.parse_args()
    print(args.packages)
    print(args.outdir)
    augment_packages(args.outdir, args.packages)
    # No return value means no error.
    # Return a value of 1 or higher to signify an error.
    # See https://docs.python.org/3/library/sys.html#sys.exit
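

# Hypothetical entry point (an assumption; the gist defines cli() but does not show how it is
# invoked). cli() returns None on success, which sys.exit() treats as exit code 0.
if __name__ == '__main__':
    sys.exit(cli())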