fix ci/pypi (#30)

This commit is contained in:
Scott Lessans
2025-08-05 17:44:04 -07:00
committed by GitHub
parent d8db548846
commit f1774c5110
3 changed files with 146 additions and 8 deletions

View File

@@ -152,7 +152,7 @@ If you want to modify the code or try the metal implementation set the project u
```shell ```shell
git clone https://github.com/openai/gpt-oss.git git clone https://github.com/openai/gpt-oss.git
pip install -e ".[metal]" GPTOSS_BUILD_METAL=1 pip install -e ".[metal]"
``` ```
## Download the model ## Download the model
@@ -228,6 +228,7 @@ python gpt_oss/metal/scripts/create-local-model.py -s <model_dir> -d <output_fil
``` ```
Or download the pre-converted weights: Or download the pre-converted weights:
```shell ```shell
huggingface-cli download openai/gpt-oss-120b --include "metal/*" --local-dir gpt-oss-120b/metal/ huggingface-cli download openai/gpt-oss-120b --include "metal/*" --local-dir gpt-oss-120b/metal/
huggingface-cli download openai/gpt-oss-20b --include "metal/*" --local-dir gpt-oss-20b/metal/ huggingface-cli download openai/gpt-oss-20b --include "metal/*" --local-dir gpt-oss-20b/metal/

View File

@@ -0,0 +1,140 @@
"""
Build backend for gpt-oss that supports two modes:
1) Default (pure wheel for PyPI)
- Delegates to setuptools.build_meta.
- Produces a py3-none-any wheel so PyPI accepts it (no linux_x86_64 tag).
2) Optional Metal/C extension build (local only)
- If the environment variable GPTOSS_BUILD_METAL is set to a truthy value
(1/true/on/yes), delegates to scikit_build_core.build.
- Dynamically injects build requirements (scikit-build-core, cmake, ninja,
pybind11) only for this mode.
Why this is needed
- PyPI rejects Linux wheels tagged linux_x86_64; manylinux/musllinux is required
for binary wheels. We ship a pure wheel by default, but still allow developers
to build/install the native Metal backend locally when needed.
Typical usage
- Publish pure wheel: `python -m build` (do not set GPTOSS_BUILD_METAL).
- Local Metal dev: `GPTOSS_BUILD_METAL=1 pip install -e ".[metal]"`.
- CI: keep GPTOSS_BUILD_METAL unset for releases; set it in internal jobs that
exercise the extension.
Notes
- The base package remains importable without the extension. The Metal backend
is only used when `gpt_oss.metal` is explicitly imported.
- This file is discovered via `backend-path = ["_build"]` and
`build-backend = "gpt_oss_build_backend.backend"` in pyproject.toml.
"""
import os
from importlib import import_module
from typing import Any, Mapping, Sequence
# Spellings of GPTOSS_BUILD_METAL that enable the Metal/C extension build.
# Matching in _use_metal_backend is case-insensitive, so the lowercase
# entries are canonical; the uppercase variants are retained for any code
# that inspects this set directly.
TRUE_VALUES = {"1", "true", "TRUE", "on", "ON", "yes", "YES"}


def _use_metal_backend() -> bool:
    """Return True when GPTOSS_BUILD_METAL requests the Metal backend.

    Fix: the comparison is now case-insensitive, so common shell spellings
    such as "True", "On" or "Yes" are honored. Previously only the exact
    members of TRUE_VALUES matched, so e.g. GPTOSS_BUILD_METAL=True was
    silently treated as false despite the module docstring promising
    "1/true/on/yes" truthiness.
    """
    return str(os.environ.get("GPTOSS_BUILD_METAL", "")).strip().lower() in TRUE_VALUES
def _setuptools_backend():
    """Import and return the standard setuptools PEP 517 backend.

    Imported lazily (inside the function) so module import stays cheap and
    side-effect free.
    """
    from setuptools import build_meta  # type: ignore

    return build_meta
def _scikit_build_backend():
    """Import scikit-build-core's PEP 517 backend on demand.

    The import is deferred so the default (pure-wheel) path never requires
    scikit-build-core to be installed.
    """
    backend_module = import_module("scikit_build_core.build")
    return backend_module
def _backend():
    """Select the active PEP 517 backend based on GPTOSS_BUILD_METAL."""
    if _use_metal_backend():
        return _scikit_build_backend()
    return _setuptools_backend()
# Required PEP 517 hooks
def build_wheel(
    wheel_directory: str,
    config_settings: Mapping[str, Any] | None = None,
    metadata_directory: str | None = None,
) -> str:
    """PEP 517 ``build_wheel`` hook: delegate to the selected backend."""
    backend = _backend()
    return backend.build_wheel(wheel_directory, config_settings, metadata_directory)
def build_sdist(
    sdist_directory: str, config_settings: Mapping[str, Any] | None = None
) -> str:
    """PEP 517 ``build_sdist`` hook: delegate to the selected backend."""
    backend = _backend()
    return backend.build_sdist(sdist_directory, config_settings)
def prepare_metadata_for_build_wheel(
    metadata_directory: str, config_settings: Mapping[str, Any] | None = None
) -> str:
    """PEP 517 ``prepare_metadata_for_build_wheel`` hook.

    Delegates to the selected backend when it implements the hook; falls
    back to setuptools' implementation otherwise.
    """
    # Resolve the hook dynamically: it is optional under PEP 517 and the
    # selected backend (scikit-build-core in Metal mode) may not expose it.
    be = _backend()
    fn = getattr(be, "prepare_metadata_for_build_wheel", None)
    if fn is None:
        # setuptools exposes it; scikit-build-core may not.
        # NOTE(review): PEP 517's fallback for an absent hook is for the
        # frontend to build a full wheel and extract metadata from it.
        # Using setuptools' metadata here assumes it matches what the
        # scikit-build wheel would declare — confirm this stays in sync
        # with pyproject.toml.
        return _setuptools_backend().prepare_metadata_for_build_wheel(
            metadata_directory, config_settings
        )
    return fn(metadata_directory, config_settings)
# Optional hooks
def build_editable(
    editable_directory: str, config_settings: Mapping[str, Any] | None = None
) -> str:
    """Optional PEP 517 ``build_editable`` hook: delegate when available."""
    backend = _backend()
    hook = getattr(backend, "build_editable", None)
    if hook is not None:
        return hook(editable_directory, config_settings)
    # setuptools implements build_editable; if the selected backend does
    # not, surface a clear error instead of an AttributeError.
    raise RuntimeError("Editable installs not supported by the selected backend")
def get_requires_for_build_wheel(
    config_settings: Mapping[str, Any] | None = None,
) -> Sequence[str]:
    """Report extra build requirements for a wheel build.

    In Metal mode the native toolchain (scikit-build-core, pybind11,
    cmake, ninja) is injected dynamically so the static ``requires`` in
    pyproject.toml can stay setuptools-only for pure-wheel publishing.
    """
    if not _use_metal_backend():
        # setuptools usually returns []
        return list(_setuptools_backend().get_requires_for_build_wheel(config_settings))
    return [
        "scikit-build-core>=0.10",
        "pybind11>=2.12",
        "cmake>=3.26",
        "ninja",
    ]
def get_requires_for_build_sdist(
    config_settings: Mapping[str, Any] | None = None,
) -> Sequence[str]:
    """Report extra build requirements for an sdist build.

    Bug fix: this hook runs BEFORE the dynamic build requirements are
    installed, so it must not call ``_backend()`` — in Metal mode that
    imports ``scikit_build_core``, which is not yet present in the build
    environment (pyproject's static ``requires`` is setuptools-only) and
    would raise ImportError. Instead, mirror the wheel/editable hooks:
    return the toolchain requirements directly in Metal mode, and defer
    to setuptools otherwise.
    """
    if _use_metal_backend():
        # Ensure scikit-build-core is installed before build_sdist runs.
        return [
            "scikit-build-core>=0.10",
            "pybind11>=2.12",
            "cmake>=3.26",
            "ninja",
        ]
    be = _setuptools_backend()
    fn = getattr(be, "get_requires_for_build_sdist", None)
    if fn is None:
        return []
    return list(fn(config_settings))
def get_requires_for_build_editable(
    config_settings: Mapping[str, Any] | None = None,
) -> Sequence[str]:
    """Report extra build requirements for an editable install."""
    if _use_metal_backend():
        # Same native toolchain as the wheel build.
        return [
            "scikit-build-core>=0.10",
            "pybind11>=2.12",
            "cmake>=3.26",
            "ninja",
        ]
    hook = getattr(_setuptools_backend(), "get_requires_for_build_editable", None)
    return [] if hook is None else list(hook(config_settings))

View File

@@ -23,19 +23,16 @@ requires-python = ">=3.12,<3.13"
version = "0.0.1" version = "0.0.1"
[project.optional-dependencies] [project.optional-dependencies]
triton = [ triton = ["triton", "safetensors>=0.5.3", "torch>=2.7.0"]
"triton",
"safetensors>=0.5.3",
"torch>=2.7.0",
]
torch = ["safetensors>=0.5.3", "torch>=2.7.0"] torch = ["safetensors>=0.5.3", "torch>=2.7.0"]
metal = ["numpy", "tqdm", "safetensors", "torch"] metal = ["numpy", "tqdm", "safetensors", "torch"]
test = ["pytest>=8.4.1", "httpx>=0.28.1"] test = ["pytest>=8.4.1", "httpx>=0.28.1"]
eval = ["pandas", "numpy", "openai", "jinja2", "tqdm", "blobfile"] eval = ["pandas", "numpy", "openai", "jinja2", "tqdm", "blobfile"]
[build-system] [build-system]
requires = ["scikit-build-core>=0.9", "pybind11>=2.12", "cmake>=3.26", "ninja"] requires = ["setuptools>=68"]
build-backend = "scikit_build_core.build" build-backend = "gpt_oss_build_backend.backend"
backend-path = ["_build"]
[tool.setuptools] [tool.setuptools]
packages = ["gpt_oss"] packages = ["gpt_oss"]