treewide: remove openai-triton (alias) references

The repository has moved out of the openai org, so it no longer makes sense
to prefix the package name with it.

(cherry picked from commit af13bb4513647eec3c3790c5272dbd4aa190d208)
Dennis Wuitz 2024-07-18 22:24:12 +02:00 committed by Someone Serge
parent 1b23dc7dc2
commit 4542cc7e33
13 changed files with 31 additions and 31 deletions
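For downstream Nix expressions the rename is intended to be non-breaking: the old attribute names are kept as aliases (see the aliases.nix hunks below), so existing references keep evaluating while new code uses the plain `triton` names. A minimal sketch of a hypothetical consumer expression, for illustration only:

{ pkgs ? import <nixpkgs> { } }:

# Hypothetical consumer environment; `ps.openai-triton` from before this
# commit still resolves through the alias added here (when aliases are
# enabled), but the canonical attribute is now `ps.triton`.
pkgs.python3.withPackages (ps: [
  ps.triton        # previously ps.openai-triton
])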

View File

@@ -36,7 +36,7 @@ let
 llvmTargetsToBuild' = [ "AMDGPU" "NVPTX" ] ++ builtins.map inferNativeTarget llvmTargetsToBuild;
 # This LLVM version can't seem to find pygments/pyyaml,
-# but a later update will likely fix this (openai-triton-2.1.0)
+# but a later update will likely fix this (triton-2.1.0)
 python =
 if buildTests
 then python3Packages.python.withPackages (p: with p; [ psutil pygments pyyaml ])
@@ -44,7 +44,7 @@ let
 isNative = stdenv.hostPlatform == stdenv.buildPlatform;
 in stdenv.mkDerivation (finalAttrs: {
-pname = "openai-triton-llvm";
+pname = "triton-llvm";
 version = "17.0.0-c5dede880d17";
 outputs = [
@@ -55,7 +55,7 @@ in stdenv.mkDerivation (finalAttrs: {
 "man"
 ];
-# See https://github.com/openai/triton/blob/main/python/setup.py
+# See https://github.com/triton-lang/triton/blob/main/python/setup.py
 # and https://github.com/ptillet/triton-llvm-releases/releases
 src = fetchFromGitHub {
 owner = "llvm";

View File

@@ -15,7 +15,7 @@
 more-itertools,
 numba,
 numpy,
-openai-triton,
+triton,
 tiktoken,
 torch,
 tqdm,
@@ -53,7 +53,7 @@ buildPythonPackage rec {
 tiktoken
 torch
 tqdm
-] ++ lib.optionals (lib.meta.availableOn stdenv.hostPlatform openai-triton) [ openai-triton ];
+] ++ lib.optionals (lib.meta.availableOn stdenv.hostPlatform triton) [ triton ];
 preCheck = ''
 export HOME=$TMPDIR

View File

@@ -40,7 +40,7 @@
 tabulate,
 tiktoken,
 transformers,
-openai-triton,
+triton,
 xformers,
 }:
@@ -117,7 +117,7 @@ buildPythonPackage rec {
 # auto-gptq
 ]; # ++ autogptq.optional-dependencies.triton;
 grpc = [ bentoml ] ++ bentoml.optional-dependencies.grpc;
-mpt = [ openai-triton ];
+mpt = [ triton ];
 openai = [
 openai
 tiktoken

View File

@@ -22,7 +22,7 @@
 jinja2,
 networkx,
 filelock,
-openai-triton,
+triton,
 }:
 let
@@ -88,7 +88,7 @@ buildPythonPackage {
 jinja2
 networkx
 filelock
-] ++ lib.optionals (stdenv.isLinux && stdenv.isx86_64) [ openai-triton ];
+] ++ lib.optionals (stdenv.isLinux && stdenv.isx86_64) [ triton ];
 postInstall = ''
 # ONNX conversion
View File

@@ -53,9 +53,9 @@
 cffi,
 click,
 typing-extensions,
-# ROCm build and `torch.compile` requires `openai-triton`
+# ROCm build and `torch.compile` requires `triton`
 tritonSupport ? (!stdenv.isDarwin),
-openai-triton,
+triton,
 # Unit tests
 hypothesis,
@@ -486,7 +486,7 @@ buildPythonPackage rec {
 CoreServices
 libobjc
 ]
-++ lib.optionals tritonSupport [ openai-triton ]
+++ lib.optionals tritonSupport [ triton ]
 ++ lib.optionals MPISupport [ mpi ]
 ++ lib.optionals rocmSupport [ rocmtoolkit_joined ];
@@ -514,7 +514,7 @@ buildPythonPackage rec {
 # torch/csrc requires `pybind11` at runtime
 pybind11
-] ++ lib.optionals tritonSupport [ openai-triton ];
+] ++ lib.optionals tritonSupport [ triton ];
 propagatedCxxBuildInputs =
 [ ] ++ lib.optionals MPISupport [ mpi ] ++ lib.optionals rocmSupport [ rocmtoolkit_joined ];

View File

@@ -80,12 +80,12 @@ buildPythonPackage rec {
 meta = with lib; {
 description = "Language and compiler for custom Deep Learning operations";
-homepage = "https://github.com/openai/triton/";
-changelog = "https://github.com/openai/triton/releases/tag/v${version}";
+homepage = "https://github.com/triton-lang/triton/";
+changelog = "https://github.com/triton-lang/triton/releases/tag/v${version}";
 # Includes NVIDIA's ptxas, but redistributions of the binary are not limited.
 # https://docs.nvidia.com/cuda/eula/index.html
 # triton's license is MIT.
-# openai-triton-bin includes ptxas binary, therefore unfreeRedistributable is set.
+# triton-bin includes ptxas binary, therefore unfreeRedistributable is set.
 license = with licenses; [
 unfreeRedistributable
 mit

View File

@@ -53,7 +53,7 @@ buildPythonPackage rec {
 ./0000-dont-download-ptxas.patch
 ]
 ++ lib.optionals (!cudaSupport) [
-# openai-triton wants to get ptxas version even if ptxas is not
+# triton wants to get ptxas version even if ptxas is not
 # used, resulting in ptxas not found error.
 ./0001-ptxas-disable-version-key-for-non-cuda-targets.patch
 ];
@@ -127,7 +127,7 @@ buildPythonPackage rec {
 propagatedBuildInputs = [
 filelock
-# openai-triton uses setuptools at runtime:
+# triton uses setuptools at runtime:
 # https://github.com/NixOS/nixpkgs/pull/286763/#discussion_r1480392652
 setuptools
 ];

View File

@@ -18,7 +18,7 @@
 fairscale,
 scipy,
 cmake,
-openai-triton,
+triton,
 networkx,
 #, apex
 einops,
@@ -103,7 +103,7 @@ buildPythonPackage {
 scipy
 cmake
 networkx
-openai-triton
+triton
 # apex
 einops
 transformers

View File

@@ -67,7 +67,7 @@ in stdenv.mkDerivation (finalAttrs: {
 openblas
 ] ++ lib.optionals buildBenchmarks [
 clblast
-python3Packages.openai-triton
+python3Packages.triton
 ];
 cmakeFlags = [

View File

@@ -1026,7 +1026,6 @@ mapAliases ({
 onevpl-intel-gpu = lib.warn "onevpl-intel-gpu has been renamed to vpl-gpu-rt" vpl-gpu-rt; # Added 2024-06-04
 opa = throw "opa has been removed from nixpkgs as upstream has abandoned the project"; # Added 2023-03-21
 opam_1_2 = throw "'opam_1_2' has been renamed to/replaced by 'opam'"; # Added 2023-03-08
-openai-triton-llvm = triton-llvm;
 openafs_1_8 = openafs; # Added 2022-08-22
 openapi-generator-cli-unstable = throw "openapi-generator-cli-unstable was removed as it was not being updated; consider openapi-generator-cli instead"; # Added 2024-01-02
 openbangla-keyboard = throw "openbangla-keyboard has been replaced by ibus-engines.openbangla-keyboard and fcitx5-openbangla-keyboard"; # added 2023-10-10
@@ -1400,6 +1399,7 @@ mapAliases ({
 transfig = fig2dev; # Added 2022-02-15
 transifex-client = transifex-cli; # Added 2023-12-29
 trezor_agent = trezor-agent; # Added 2024-01-07
+openai-triton-llvm = triton-llvm; # added 2024-07-18
 trustedGrub = throw "trustedGrub has been removed, because it is not maintained upstream anymore"; # Added 2023-05-10
 trustedGrub-for-HP = throw "trustedGrub-for-HP has been removed, because it is not maintained upstream anymore"; # Added 2023-05-10
 tumpa = throw "tumpa has been removed, as it is broken"; # Added 2024-07-15

View File

@@ -339,10 +339,10 @@ mapAliases ({
 notifymuch = throw "notifymuch has been promoted to a top-level attribute name: `pkgs.notifymuch`"; # added 2022-10-02
 Nuitka = nuitka; # added 2023-02-19
 ntlm-auth = throw "ntlm-auth has been removed, because it relies on the md4 implementation provided by openssl. Use pyspnego instead.";
-openai-triton = triton;
-openai-triton-cuda = triton-cuda;
-openai-triton-no-cuda = triton-no-cuda;
-openai-triton-bin = triton-bin;
+openai-triton = triton; # added 2024-07-18
+openai-triton-bin = triton-bin; # added 2024-07-18
+openai-triton-cuda = triton-cuda; # added 2024-07-18
+openai-triton-no-cuda = triton-no-cuda; # added 2024-07-18
 openapi-schema-pydantic = throw "openapi-schema-pydantic has been removed, since it is no longer maintained"; # added 2023-10-30
 opencv3 = throw "opencv3 has been removed as it is obsolete"; # added 2023-10-12
 opsdroid_get_image_size = opsdroid-get-image-size; # added 2023-10-16

View File

@@ -4572,7 +4572,7 @@ self: super: with self; {
 oelint-parser = callPackage ../development/python-modules/oelint-parser { };
 openllm = callPackage ../development/python-modules/openllm {
-openai-triton = self.openai-triton-cuda;
+triton = self.triton-cuda;
 };
 openllm-client = callPackage ../development/python-modules/openllm-client { };
@@ -15566,13 +15566,13 @@ self: super: with self; {
 torch-pitch-shift = callPackage ../development/python-modules/torch-pitch-shift { };
 torch-bin = callPackage ../development/python-modules/torch/bin.nix {
-openai-triton = self.openai-triton-bin;
+triton = self.triton-bin;
 };
 torchsnapshot = callPackage ../development/python-modules/torchsnapshot { };
 torchWithCuda = self.torch.override {
-openai-triton = self.openai-triton-cuda;
+triton = self.triton-cuda;
 cudaSupport = true;
 rocmSupport = false;
 };
@@ -15582,7 +15582,7 @@ self: super: with self; {
 };
 torchWithRocm = self.torch.override {
-openai-triton = self.openai-triton-no-cuda;
+triton = self.triton-no-cuda;
 rocmSupport = true;
 cudaSupport = false;
 };

View File

@@ -132,7 +132,7 @@ let
 mxnet = linux;
 numpy = linux; # Only affected by MKL?
 onnx = linux;
-openai-triton = linux;
+triton = linux;
 openai-whisper = linux;
 opencv4 = linux;
 opensfm = linux;