Skip to content
Open
Show file tree
Hide file tree
Changes from 6 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
88 changes: 71 additions & 17 deletions conda_smithy/configure_feedstock.py
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@
import conda_build.utils
import conda_build.variants
from conda_build import __version__ as conda_build_version
from conda_build.metadata import MetaData
from jinja2 import FileSystemLoader
from jinja2.sandbox import SandboxedEnvironment

Expand Down Expand Up @@ -89,6 +90,24 @@
os.environ.get("CONDA_FORGE_PINNING_LIFETIME", 15 * 60)
)

# Variant variables that must always survive variant-matrix reduction,
# even when a recipe does not appear to use them directly.  This is a
# module-level constant shared by `_collapse_subpackage_variants` and
# `reduce_variants`; it is a frozenset so it cannot be mutated by accident
# (callers copy it via `set(...)` / `set.update(...)` before modifying).
ALWAYS_KEEP_KEYS = frozenset(
    {
        "zip_keys",
        "pin_run_as_build",
        "MACOSX_DEPLOYMENT_TARGET",
        "MACOSX_SDK_VERSION",
        "macos_min_version",
        "macos_machine",
        "channel_sources",
        "channel_targets",
        "docker_image",
        "build_number_decrement",
        # The following keys are required for some of our aarch64 builds
        # Added in https://github.com/conda-forge/conda-forge-pinning-feedstock/pull/180
        "cdt_arch",
        "cdt_name",
        "BUILD",
    }
)


# use lru_cache to avoid repeating warnings endlessly;
# this keeps track of 10 different messages and then warns again
Expand Down Expand Up @@ -601,23 +620,7 @@ def _collapse_subpackage_variants(
)

# Add in some variables that should always be preserved
always_keep_keys = {
"zip_keys",
"pin_run_as_build",
"MACOSX_DEPLOYMENT_TARGET",
"MACOSX_SDK_VERSION",
"macos_min_version",
"macos_machine",
"channel_sources",
"channel_targets",
"docker_image",
"build_number_decrement",
# The following keys are required for some of our aarch64 builds
# Added in https://github.com/conda-forge/conda-forge-pinning-feedstock/pull/180
"cdt_arch",
"cdt_name",
"BUILD",
}
always_keep_keys = set() | ALWAYS_KEEP_KEYS

if not is_noarch:
always_keep_keys.add("target_platform")
Expand Down Expand Up @@ -878,6 +881,49 @@ def migrate_combined_spec(combined_spec, forge_dir, config, forge_config):
return combined_spec


def reduce_variants(recipe_path, config, input_variants):
    """Compute a reduced variant matrix for a recipe before rendering.

    Runs the first steps of ``conda_build.render.render_recipe`` (load the
    recipe metadata, explode the variants dict into a list of variants) to
    discover which variant variables the recipe actually uses, then drops
    any unused variable that would add a render dimension.  Large numbers
    of unused variants greatly increase render time.

    Parameters
    ----------
    recipe_path:
        Path to the recipe to render.
    config:
        conda-build ``Config`` used for rendering.
    input_variants:
        Mapping of variant variable names to their candidate values.

    Returns
    -------
    dict
        A copy of ``input_variants`` with unused multi-valued variables
        removed and ``zip_keys`` trimmed to match.
    """

    # from render_recipe: load the recipe's metadata
    with conda_build.render.open_recipe(recipe_path) as recipe:
        metadata = MetaData(str(recipe), config=config)

    # from distribute_variants:
    # explode variants dict to list of variants
    variants = conda_build.variants.get_package_variants(
        metadata, variants=input_variants
    )
    logger.debug(f"Starting with {len(variants)} input variants")
    metadata.config.variant = variants[0]
    metadata.config.variants = variants

    # force_global finds variables in the whole recipe, not just a single output.
    # Without this, dependencies won't be found for multi-output recipes
    # This may not be comprehensive!
    all_used = metadata.get_used_vars(force_global=True)
    # trim unused dimensions, these cost a lot in render_recipe!
    # because `get_used_vars` above _may_ not catch all possible used variables,
    # only trim unused variables that increase dimensionality, and never trim
    # the keys that must always be preserved.
    all_used.update(ALWAYS_KEEP_KEYS)
    all_used.update(
        {"target_platform", "extend_keys", "ignore_build_only_deps"}
    )
    new_input_variants = input_variants.copy()
    for key, value in input_variants.items():
        # only consider keys that increase render dimensionality for trimming
        # at this stage, so we don't have to trust all_used to find _everything_
        if len(value) > 1 and key not in all_used:
            logger.debug(f"Trimming unused dimension: {key}")
            new_input_variants.pop(key)
    # drop zip_keys groups that now reference removed variables
    _trim_unused_zip_keys(new_input_variants)
    return new_input_variants


def _conda_build_api_render_for_smithy(
recipe_path,
config=None,
Expand Down Expand Up @@ -911,6 +957,12 @@ def _conda_build_api_render_for_smithy(

config = get_or_merge_config(config, **kwargs)

# reduce unused variants first, they get very expensive in render_recipe
if variants:
variants = reduce_variants(
recipe_path, config=config, input_variants=variants
)

metadata_tuples = render_recipe(
recipe_path,
bypass_env_check=bypass_env_check,
Expand All @@ -920,6 +972,7 @@ def _conda_build_api_render_for_smithy(
permit_unsatisfiable_variants=permit_unsatisfiable_variants,
)
output_metas = []
# reduce input variant set to those that are actually used
for meta, download, render_in_env in metadata_tuples:
if not meta.skip() or not config.trim_skip:
for od, om in meta.get_output_metadata_set(
Expand Down Expand Up @@ -1061,6 +1114,7 @@ def _render_ci_provider(
# CBC yaml files where variants in the migrators are not in the CBC.
# Thus we move it out of the way.
# TODO: upstream this as a flag in conda-build
logger.info(f"Getting variants for {platform}-{arch}")
try:
_recipe_cbc = os.path.join(
forge_dir,
Expand Down
3 changes: 3 additions & 0 deletions news/1968_input_variants.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
**Fixed:**

* Improved render time for recipes whose conda-build config contains large numbers of unused variants, by trimming unused variant dimensions before rendering