build with docker and improve CUDA compute capability support (#714)

This commit is contained in:
ReenigneArcher 2023-01-15 09:31:37 -05:00 committed by GitHub
parent 9fe539f87d
commit 3510b8636a
16 changed files with 1456 additions and 206 deletions


@ -1 +0,0 @@
linux/amd64,linux/arm64/v8


@ -6,7 +6,6 @@
# ignore repo directories and files
docs/
packaging/
scripts/
tools/
crowdin.yml
@ -14,3 +13,6 @@ crowdin.yml
# ignore dev directories
build/
venv/
# ignore artifacts
artifacts/


@ -363,12 +363,6 @@ jobs:
fail-fast: false # false to test all, true to fail entire job if any fail
matrix:
include: # package these differently
- type: cpack
EXTRA_ARGS: ''
dist: 20.04
- type: cpack
EXTRA_ARGS: ''
dist: 22.04
- type: appimage
EXTRA_ARGS: '-DSUNSHINE_CONFIGURE_APPIMAGE=ON'
dist: 20.04
@ -430,6 +424,7 @@ jobs:
libcurl4-openssl-dev \
libdrm-dev \
libevdev-dev \
libmfx-dev \
libnuma-dev \
libopus-dev \
libpulse-dev \
@ -459,9 +454,9 @@ jobs:
--slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-10 \
--slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-10
# Install CuDA
# Install CUDA
sudo wget \
https://developer.download.nvidia.com/compute/cuda/11.4.2/local_installers/cuda_11.4.2_470.57.02_linux.run \
https://developer.download.nvidia.com/compute/cuda/11.8.0/local_installers/cuda_11.8.0_520.61.05_linux.run \
--progress=bar:force:noscroll -q --show-progress -O /root/cuda.run
sudo chmod a+x /root/cuda.run
sudo /root/cuda.run --silent --toolkit --toolkitpath=/usr --no-opengl-libs --no-man-page --no-drm


@ -3,10 +3,18 @@
# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in
# the above-mentioned repo.
# This workflow is intended to work with all our organization Docker projects. Docker platforms/architectures should be
# listed in a file named `.docker_platforms`, comma separated list with no spaces. A readme named `DOCKER_README.md`
# This workflow is intended to work with all our organization Docker projects. A readme named `DOCKER_README.md`
# will be used to update the description on Docker hub.
# custom comments in dockerfiles:
# `# platforms: `
# Comma separated list of platforms, i.e. `# platforms: linux/386,linux/amd64`. Docker platforms can alternatively
# be listed in a file named `.docker_platforms`.
# `# artifacts: `
# `true` to build in two steps, stopping at `artifacts` build stage and extracting the image from there to the
# GitHub runner.
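# For reference, the dockerfiles added in this commit carry both directives in their first two lines;
# docker/fedora-36.dockerfile, for example, starts with:
#   # artifacts: true
#   # platforms: linux/amd64,linux/arm64/v8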
name: CI Docker
on:
@ -22,56 +30,55 @@ concurrency:
cancel-in-progress: true
jobs:
check_dockerfile:
name: Check Dockerfile
check_dockerfiles:
name: Check Dockerfiles
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Check
id: check
- name: Find dockerfiles
id: find
run: |
if [ -f "./Dockerfile" ]
then
FOUND=true
else
FOUND=false
fi
dockerfiles=$(find . -type f -iname "Dockerfile" -o -iname "*.dockerfile")
echo "dockerfile=${FOUND}" >> $GITHUB_OUTPUT
echo "found dockerfiles: ${dockerfiles}"
# do not quote to keep this as a single line
echo dockerfiles=${dockerfiles} >> $GITHUB_OUTPUT
MATRIX_COMBINATIONS=""
for FILE in ${dockerfiles}; do
# extract tag from file name
tag=$(echo $FILE | sed -r -z -e 's/(\.\/)*.*\/(Dockerfile)/None/gm')
if [[ $tag == "None" ]]; then
MATRIX_COMBINATIONS="$MATRIX_COMBINATIONS {\"dockerfile\": \"$FILE\"},"
else
tag=$(echo $FILE | sed -r -z -e 's/(\.\/)*.*\/(.+)(\.dockerfile)/-\2/gm')
MATRIX_COMBINATIONS="$MATRIX_COMBINATIONS {\"dockerfile\": \"$FILE\", \"tag\": \"$tag\"},"
fi
done
# removes the last character (i.e. comma)
MATRIX_COMBINATIONS=${MATRIX_COMBINATIONS::-1}
# setup matrix for later jobs
matrix=$((
echo "{ \"include\": [$MATRIX_COMBINATIONS] }"
) | jq -c .)
echo $matrix
echo $matrix | jq .
echo "matrix=$matrix" >> $GITHUB_OUTPUT
outputs:
dockerfile: ${{ steps.check.outputs.dockerfile }}
lint_dockerfile:
name: Lint Dockerfile
needs: [check_dockerfile]
if: ${{ needs.check_dockerfile.outputs.dockerfile == 'true' }}
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Hadolint
id: hadolint
uses: hadolint/hadolint-action@v3.0.0
with:
dockerfile: ./Dockerfile
ignore: DL3008,DL3013,DL3016,DL3018,DL3028,DL3059
output-file: ./hadolint.log
verbose: true
- name: Log
if: failure()
run: |
echo "Hadolint outcome: ${{ steps.hadolint.outcome }}" >> $GITHUB_STEP_SUMMARY
cat "./hadolint.log" >> $GITHUB_STEP_SUMMARY
dockerfiles: ${{ steps.find.outputs.dockerfiles }}
matrix: ${{ steps.find.outputs.matrix }}
check_changelog:
name: Check Changelog
needs: [check_dockerfile]
if: ${{ needs.check_dockerfile.outputs.dockerfile == 'true' }}
needs: [check_dockerfiles]
if: ${{ needs.check_dockerfiles.outputs.dockerfiles }}
runs-on: ubuntu-latest
steps:
- name: Checkout
@ -87,15 +94,99 @@ jobs:
token: ${{ secrets.GITHUB_TOKEN }}
outputs:
next_version: ${{ steps.verify_changelog.outputs.changelog_parser_version }}
next_version_bare: ${{ steps.verify_changelog.outputs.changelog_parser_version_bare }}
last_version: ${{ steps.verify_changelog.outputs.latest_release_tag_name }}
release_body: ${{ steps.verify_changelog.outputs.changelog_parser_description }}
setup_release:
name: Setup Release
needs: check_changelog
runs-on: ubuntu-latest
steps:
- name: Set release details
id: release_details
env:
RELEASE_BODY: ${{ needs.check_changelog.outputs.release_body }}
run: |
# determine to create a release or not
if [[ $GITHUB_EVENT_NAME == "push" ]]; then
RELEASE=true
else
RELEASE=false
fi
# set the release tag
COMMIT=${{ github.sha }}
if [[ $GITHUB_REF == refs/heads/master ]]; then
TAG="${{ needs.check_changelog.outputs.next_version }}"
RELEASE_NAME="${{ needs.check_changelog.outputs.next_version }}"
RELEASE_BODY="$RELEASE_BODY"
PRE_RELEASE="false"
elif [[ $GITHUB_REF == refs/heads/nightly ]]; then
TAG="nightly-dev"
RELEASE_NAME="nightly"
RELEASE_BODY="automated nightly release - $(date -u +'%Y-%m-%dT%H:%M:%SZ') - ${COMMIT}"
PRE_RELEASE="true"
fi
echo "create_release=${RELEASE}" >> $GITHUB_OUTPUT
echo "release_tag=${TAG}" >> $GITHUB_OUTPUT
echo "release_commit=${COMMIT}" >> $GITHUB_OUTPUT
echo "release_name=${RELEASE_NAME}" >> $GITHUB_OUTPUT
echo "pre_release=${PRE_RELEASE}" >> $GITHUB_OUTPUT
# this is stupid but works for multiline strings
echo "RELEASE_BODY<<EOF" >> $GITHUB_ENV
echo "$RELEASE_BODY" >> $GITHUB_ENV
echo "EOF" >> $GITHUB_ENV
outputs:
create_release: ${{ steps.release_details.outputs.create_release }}
release_tag: ${{ steps.release_details.outputs.release_tag }}
release_commit: ${{ steps.release_details.outputs.release_commit }}
release_name: ${{ steps.release_details.outputs.release_name }}
release_body: ${{ env.RELEASE_BODY }}
pre_release: ${{ steps.release_details.outputs.pre_release }}
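# For example, a push to the nightly branch resolves the outputs above to roughly (sketch; timestamp and sha are illustrative):
#   create_release=true  release_tag=nightly-dev  release_name=nightly  pre_release=true
#   release_body="automated nightly release - 2023-01-15T14:31:37Z - <sha>"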
lint_dockerfile:
needs: [check_dockerfiles]
if: ${{ needs.check_dockerfiles.outputs.dockerfiles }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix: ${{ fromJson(needs.check_dockerfiles.outputs.matrix) }}
name: Lint Dockerfile${{ matrix.tag }}
steps:
- name: Checkout
uses: actions/checkout@v3
- name: Hadolint
id: hadolint
uses: hadolint/hadolint-action@v3.0.0
with:
dockerfile: ${{ matrix.dockerfile }}
ignore: DL3008,DL3013,DL3016,DL3018,DL3028,DL3059
output-file: ./hadolint.log
verbose: true
- name: Log
if: failure()
run: |
echo "Hadolint outcome: ${{ steps.hadolint.outcome }}" >> $GITHUB_STEP_SUMMARY
cat "./hadolint.log" >> $GITHUB_STEP_SUMMARY
docker:
name: Docker
needs: [check_dockerfile, check_changelog]
if: ${{ needs.check_dockerfile.outputs.dockerfile == 'true' }}
needs: [check_dockerfiles, check_changelog, setup_release]
if: ${{ needs.check_dockerfiles.outputs.dockerfiles }}
runs-on: ubuntu-latest
permissions:
packages: write
contents: write
strategy:
fail-fast: false
matrix: ${{ fromJson(needs.check_dockerfiles.outputs.matrix) }}
name: Docker${{ matrix.tag }}
steps:
- name: Checkout
@ -106,7 +197,7 @@ jobs:
- name: Prepare
id: prepare
env:
NEXT_VERSION: ${{ needs.check_changelog.outputs.next_version }}
NV: ${{ needs.check_changelog.outputs.next_version }}
run: |
# get branch name
BRANCH=${GITHUB_HEAD_REF}
@ -129,27 +220,49 @@ jobs:
BASE_TAG=$(echo $REPOSITORY | tr '[:upper:]' '[:lower:]')
COMMIT=${{ github.sha }}
TAGS="${BASE_TAG}:${COMMIT:0:7},ghcr.io/${BASE_TAG}:${COMMIT:0:7}"
TAGS="${BASE_TAG}:${COMMIT:0:7}${{ matrix.tag }},ghcr.io/${BASE_TAG}:${COMMIT:0:7}${{ matrix.tag }}"
if [[ $GITHUB_REF == refs/heads/master ]]; then
TAGS="${TAGS},${BASE_TAG}:latest,ghcr.io/${BASE_TAG}:latest"
TAGS="${TAGS},${BASE_TAG}:master,ghcr.io/${BASE_TAG}:master"
TAGS="${TAGS},${BASE_TAG}:latest${{ matrix.tag }},ghcr.io/${BASE_TAG}:latest${{ matrix.tag }}"
TAGS="${TAGS},${BASE_TAG}:master${{ matrix.tag }},ghcr.io/${BASE_TAG}:master${{ matrix.tag }}"
elif [[ $GITHUB_REF == refs/heads/nightly ]]; then
TAGS="${TAGS},${BASE_TAG}:nightly,ghcr.io/${BASE_TAG}:nightly"
TAGS="${TAGS},${BASE_TAG}:nightly${{ matrix.tag }},ghcr.io/${BASE_TAG}:nightly${{ matrix.tag }}"
else
TAGS="${TAGS},${BASE_TAG}:test,ghcr.io/${BASE_TAG}:test"
TAGS="${TAGS},${BASE_TAG}:test${{ matrix.tag }},ghcr.io/${BASE_TAG}:test${{ matrix.tag }}"
fi
if [[ ${NEXT_VERSION} != "" ]]; then
TAGS="${TAGS},${BASE_TAG}:${NEXT_VERSION},ghcr.io/${BASE_TAG}:${NEXT_VERSION}"
if [[ ${NV} != "" ]]; then
TAGS="${TAGS},${BASE_TAG}:${NV}${{ matrix.tag }},ghcr.io/${BASE_TAG}:${NV}${{ matrix.tag }}"
fi
# read the platforms from `.docker_platforms`
PLATFORMS=$(<.docker_platforms)
# parse custom directives out of dockerfile
# try to get the platforms from the dockerfile custom directive, i.e. `# platforms: xxx,yyy`
while read -r line; do
if [[ $line == "# platforms: "* && $PLATFORMS == "" ]]; then
# echo the line and use `sed` to remove the custom directive
PLATFORMS=$(echo -e "$line" | sed 's/# platforms: //')
elif [[ $line == "# artifacts: "* && $ARTIFACTS == "" ]]; then
# echo the line and use `sed` to remove the custom directive
ARTIFACTS=$(echo -e "$line" | sed 's/# artifacts: //')
elif [[ $PLATFORMS != "" && $ARTIFACTS != "" ]]; then
# break while loop once all custom directives are found
break
fi
done <"${{ matrix.dockerfile }}"
# if PLATFORMS is blank, fall back to the legacy method of reading from the `.docker_platforms` file
if [[ $PLATFORMS == "" ]]; then
# read the platforms from `.docker_platforms`
PLATFORMS=$(<.docker_platforms)
fi
# if PLATFORMS is still blank, fall back to `linux/amd64`
if [[ $PLATFORMS == "" ]]; then
PLATFORMS="linux/amd64"
fi
echo "branch=${BRANCH}" >> $GITHUB_OUTPUT
echo "build_date=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_OUTPUT
echo "commit=${COMMIT}" >> $GITHUB_OUTPUT
echo "artifacts=${ARTIFACTS}" >> $GITHUB_OUTPUT
echo "platforms=${PLATFORMS}" >> $GITHUB_OUTPUT
echo "push=${PUSH}" >> $GITHUB_OUTPUT
echo "tags=${TAGS}" >> $GITHUB_OUTPUT
@ -165,9 +278,9 @@ jobs:
uses: actions/cache@v3
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
key: Docker-buildx${{ matrix.tag }}-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-
Docker-buildx${{ matrix.tag }}-
- name: Log in to Docker Hub
if: ${{ steps.prepare.outputs.push == 'true' }} # PRs do not have access to secrets
@ -184,11 +297,32 @@ jobs:
username: ${{ secrets.GH_BOT_NAME }}
password: ${{ secrets.GH_BOT_TOKEN }}
- name: Build and push
- name: Build artifacts
if: ${{ steps.prepare.outputs.artifacts == 'true' }}
id: build_artifacts
uses: docker/build-push-action@v3
with:
context: ./
file: ./Dockerfile
file: ${{ matrix.dockerfile }}
target: artifacts
outputs: type=local,dest=artifacts
push: false
platforms: ${{ steps.prepare.outputs.platforms }}
build-args: |
BRANCH=${{ steps.prepare.outputs.branch }}
BUILD_DATE=${{ steps.prepare.outputs.build_date }}
BUILD_VERSION=${{ needs.check_changelog.outputs.next_version }}
COMMIT=${{ steps.prepare.outputs.commit }}
tags: ${{ steps.prepare.outputs.tags }}
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache
- name: Build and push
id: build
uses: docker/build-push-action@v3
with:
context: ./
file: ${{ matrix.dockerfile }}
push: ${{ steps.prepare.outputs.push }}
platforms: ${{ steps.prepare.outputs.platforms }}
build-args: |
@ -200,6 +334,36 @@ jobs:
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache
- name: Arrange Artifacts
if: ${{ steps.prepare.outputs.artifacts == 'true' }}
working-directory: artifacts
run: |
# artifacts will be in sub directories named after the docker target platform, e.g. `linux_amd64`
# so move files to the artifacts directory
# https://unix.stackexchange.com/a/52816
find ./ -type f -exec mv -t ./ -n '{}' +
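# In other words, the per-platform sub-directories produced by the local export are flattened
# (sketch; exact names depend on BASE/TAG/TARGETARCH):
#   before: artifacts/linux_amd64/sunshine-debian-bullseye-amd64.deb
#   after:  artifacts/sunshine-debian-bullseye-amd64.deb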
- name: Upload Artifacts
if: ${{ steps.prepare.outputs.artifacts == 'true' }}
uses: actions/upload-artifact@v3
with:
name: sunshine${{ matrix.tag }}
path: artifacts/
- name: Create/Update GitHub Release
if: ${{ needs.setup_release.outputs.create_release == 'true' && steps.prepare.outputs.artifacts == 'true' }}
uses: ncipollo/release-action@v1
with:
name: ${{ needs.setup_release.outputs.release_name }}
tag: ${{ needs.setup_release.outputs.release_tag }}
commit: ${{ needs.setup_release.outputs.release_commit }}
artifacts: "*artifacts/*"
token: ${{ secrets.GH_BOT_TOKEN }}
allowUpdates: true
body: ${{ needs.setup_release.outputs.release_body }}
discussionCategory: announcements
prerelease: ${{ needs.setup_release.outputs.pre_release }}
- name: Update Docker Hub Description
if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/master' }}
uses: peter-evans/dockerhub-description@v3


@ -1,4 +1,5 @@
cmake_minimum_required(VERSION 3.0)
cmake_minimum_required(VERSION 3.18)
# `CMAKE_CUDA_ARCHITECTURES` requires 3.18
project(Sunshine VERSION 0.17.0
DESCRIPTION "Sunshine is a self-hosted game stream host for Moonlight."
@ -64,7 +65,7 @@ include_directories(third-party/miniupnp/miniupnpc/include)
find_package(Threads REQUIRED)
find_package(OpenSSL REQUIRED)
find_package(PkgConfig REQUIRED)
pkg_check_modules (CURL REQUIRED libcurl)
pkg_check_modules(CURL REQUIRED libcurl)
if(NOT APPLE)
set(Boost_USE_STATIC_LIBS ON) # cmake-lint: disable=C0103
@ -197,12 +198,70 @@ else()
check_language(CUDA)
if(CMAKE_CUDA_COMPILER)
if(NOT DEFINED CMAKE_CUDA_ARCHITECTURES)
set(CMAKE_CUDA_ARCHITECTURES 35)
endif()
set(CUDA_FOUND ON)
enable_language(CUDA)
message(STATUS "CUDA Compiler Version: ${CMAKE_CUDA_COMPILER_VERSION}")
set(CMAKE_CUDA_ARCHITECTURES "")
# https://tech.amikelive.com/node-930/cuda-compatibility-of-nvidia-display-gpu-drivers/
if(CMAKE_CUDA_COMPILER_VERSION VERSION_LESS 6.5)
list(APPEND CMAKE_CUDA_ARCHITECTURES 10)
# set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS} -gencode arch=compute_10,code=sm_10")
elseif(CMAKE_CUDA_COMPILER_VERSION VERSION_GREATER_EQUAL 6.5)
list(APPEND CMAKE_CUDA_ARCHITECTURES 50 52)
# set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS} -gencode arch=compute_50,code=sm_50")
# set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS} -gencode arch=compute_52,code=sm_52")
endif()
if(CMAKE_CUDA_COMPILER_VERSION VERSION_LESS 7.0)
list(APPEND CMAKE_CUDA_ARCHITECTURES 11)
# set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS} -gencode arch=compute_11,code=sm_11")
elseif(CMAKE_CUDA_COMPILER_VERSION VERSION_GREATER 7.6)
list(APPEND CMAKE_CUDA_ARCHITECTURES 60 61 62)
# set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS} -gencode arch=compute_60,code=sm_60")
# set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS} -gencode arch=compute_61,code=sm_61")
# set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS} -gencode arch=compute_62,code=sm_62")
endif()
if(CMAKE_CUDA_COMPILER_VERSION VERSION_LESS 9.0)
list(APPEND CMAKE_CUDA_ARCHITECTURES 20)
# set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS} -gencode arch=compute_20,code=sm_20")
elseif(CMAKE_CUDA_COMPILER_VERSION VERSION_GREATER_EQUAL 9.0)
list(APPEND CMAKE_CUDA_ARCHITECTURES 70)
# set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS} -gencode arch=compute_70,code=sm_70")
endif()
if(CMAKE_CUDA_COMPILER_VERSION VERSION_GREATER_EQUAL 10.0)
list(APPEND CMAKE_CUDA_ARCHITECTURES 75)
# set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS} -gencode arch=compute_75,code=sm_75")
endif()
if(CMAKE_CUDA_COMPILER_VERSION VERSION_LESS 11.0)
list(APPEND CMAKE_CUDA_ARCHITECTURES 30)
# set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS} -gencode arch=compute_30,code=sm_30")
elseif(CMAKE_CUDA_COMPILER_VERSION VERSION_GREATER_EQUAL 11.0)
list(APPEND CMAKE_CUDA_ARCHITECTURES 80)
# set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS} -gencode arch=compute_80,code=sm_80")
endif()
if(CMAKE_CUDA_COMPILER_VERSION VERSION_GREATER_EQUAL 11.1)
list(APPEND CMAKE_CUDA_ARCHITECTURES 86)
# set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS} -gencode arch=compute_86,code=sm_86")
endif()
if(CMAKE_CUDA_COMPILER_VERSION VERSION_GREATER_EQUAL 11.8)
list(APPEND CMAKE_CUDA_ARCHITECTURES 90)
# set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS} -gencode arch=compute_90,code=sm_90")
endif()
if(CMAKE_CUDA_COMPILER_VERSION VERSION_LESS 12.0)
list(APPEND CMAKE_CUDA_ARCHITECTURES 35)
# set(CUDA_NVCC_FLAGS "${CUDA_NVCC_FLAGS} -gencode arch=compute_35,code=sm_35")
endif()
# message(STATUS "CUDA NVCC Flags: ${CUDA_NVCC_FLAGS}")
message(STATUS "CUDA Architectures: ${CMAKE_CUDA_ARCHITECTURES}")
endif()
endif()
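# Walking the version checks above against the toolkits pinned in this commit yields the same
# compute-capability lists the install docs advertise; with nvcc on PATH and SUNSHINE_ENABLE_CUDA=ON,
# configure prints (sketch):
#   CUDA 11.8 (debian/ubuntu images, AppImage, flatpak):
#     -- CUDA Architectures: 50;52;60;61;62;70;75;80;86;90;35
#   CUDA 12.0 (fedora 36/37 images) skips the <12.0 branch, dropping sm_35:
#     -- CUDA Architectures: 50;52;60;61;62;70;75;80;86;90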
if(${SUNSHINE_ENABLE_DRM})


@ -1,102 +0,0 @@
FROM ubuntu:22.04 AS sunshine-base
ARG DEBIAN_FRONTEND=noninteractive
FROM sunshine-base as sunshine-build
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
RUN apt-get update -y \
&& apt-get install -y --no-install-recommends \
build-essential=12.9* \
cmake=3.22.1* \
libavdevice-dev=7:4.4.* \
libboost-filesystem-dev=1.74.0* \
libboost-log-dev=1.74.0* \
libboost-thread-dev=1.74.0* \
libboost-program-options-dev=1.74.0* \
libcap-dev=1:2.44* \
libcurl4-openssl-dev=7.81.0* \
libdrm-dev=2.4.110* \
libevdev-dev=1.12.1* \
libnuma-dev=2.0.14* \
libopus-dev=1.3.1* \
libpulse-dev=1:15.99.1* \
libssl-dev=3.0.2* \
libva-dev=2.14.0* \
libvdpau-dev=1.4* \
libwayland-dev=1.20.0* \
libx11-dev=2:1.7.5* \
libxcb-shm0-dev=1.14* \
libxcb-xfixes0-dev=1.14* \
libxcb1-dev=1.14* \
libxfixes-dev=1:6.0.0* \
libxrandr-dev=2:1.5.2* \
libxtst-dev=2:1.2.3* \
nodejs=12.22.9* \
npm=8.5.1* \
nvidia-cuda-dev=11.5.1* \
nvidia-cuda-toolkit=11.5.1* \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
# copy repository
WORKDIR /root/sunshine-build/
COPY . .
# setup npm and dependencies
RUN npm install
# setup build directory
WORKDIR /root/sunshine-build/build
# cmake and cpack
RUN cmake -DCMAKE_BUILD_TYPE=Release \
-DCMAKE_INSTALL_PREFIX=/usr \
-DSUNSHINE_ASSETS_DIR=share/sunshine \
-DSUNSHINE_EXECUTABLE_PATH=/usr/bin/sunshine \
-DSUNSHINE_ENABLE_WAYLAND=ON \
-DSUNSHINE_ENABLE_X11=ON \
-DSUNSHINE_ENABLE_DRM=ON \
-DSUNSHINE_ENABLE_CUDA=ON \
/root/sunshine-build \
&& make -j "$(nproc)" \
&& cpack -G DEB
FROM sunshine-base as sunshine
# copy deb from builder
COPY --from=sunshine-build /root/sunshine-build/build/cpack_artifacts/Sunshine.deb /sunshine.deb
# install sunshine
RUN apt-get update -y \
&& apt-get install -y --no-install-recommends /sunshine.deb \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
# network setup
EXPOSE 47984-47990/tcp
EXPOSE 48010
EXPOSE 47998-48000/udp
# setup user
ARG PGID=1000
ENV PGID=${PGID}
ARG PUID=1000
ENV PUID=${PUID}
ENV TZ="UTC"
ARG UNAME=lizard
ENV UNAME=${UNAME}
ENV HOME=/home/$UNAME
RUN groupadd -f -g "${PGID}" "${UNAME}" && \
useradd -lm -d ${HOME} -s /bin/bash -g "${PGID}" -G input -u "${PUID}" "${UNAME}" && \
mkdir -p ${HOME}/.config/sunshine && \
ln -s ${HOME}/.config/sunshine /config && \
chown -R ${UNAME} ${HOME}
USER ${UNAME}
WORKDIR ${HOME}
# entrypoint
ENTRYPOINT ["/usr/bin/sunshine"]


@ -0,0 +1,154 @@
# artifacts: true
# platforms: linux/amd64,linux/arm64/v8
ARG BASE=debian
ARG TAG=bullseye
FROM ${BASE}:${TAG} AS sunshine-base
ENV DEBIAN_FRONTEND=noninteractive
FROM sunshine-base as sunshine-build
ARG TARGETPLATFORM
RUN echo "target_platform: ${TARGETPLATFORM}"
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
# install dependencies
RUN <<_DEPS
#!/bin/bash
apt-get update -y
apt-get install -y --no-install-recommends \
build-essential=12.9* \
cmake=3.18.4* \
libavdevice-dev=7:4.3.* \
libboost-filesystem-dev=1.74.0* \
libboost-log-dev=1.74.0* \
libboost-program-options-dev=1.74.0* \
libboost-thread-dev=1.74.0* \
libcap-dev=1:2.44* \
libcurl4-openssl-dev=7.74.0* \
libdrm-dev=2.4.104* \
libevdev-dev=1.11.0* \
libnuma-dev=2.0.12* \
libopus-dev=1.3.1* \
libpulse-dev=14.2* \
libssl-dev=1.1.1* \
libva-dev=2.10.0* \
libvdpau-dev=1.4* \
libwayland-dev=1.18.0* \
libx11-dev=2:1.7.2* \
libxcb-shm0-dev=1.14* \
libxcb-xfixes0-dev=1.14* \
libxcb1-dev=1.14* \
libxfixes-dev=1:5.0.3* \
libxrandr-dev=2:1.5.1* \
libxtst-dev=2:1.2.3* \
nodejs=12.22* \
npm=7.5.2* \
wget=1.21*
if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then
apt-get install -y --no-install-recommends \
libmfx-dev=21.1.0*
fi
apt-get clean
rm -rf /var/lib/apt/lists/*
_DEPS
# install cuda
WORKDIR /build/cuda
# versions: https://developer.nvidia.com/cuda-toolkit-archive
ENV CUDA_VERSION="11.8.0"
ENV CUDA_BUILD="520.61.05"
# hadolint ignore=SC3010
RUN <<_INSTALL_CUDA
#!/bin/bash
cuda_prefix="https://developer.download.nvidia.com/compute/cuda/"
cuda_suffix=""
if [[ "${TARGETPLATFORM}" == 'linux/arm64' ]]; then
cuda_suffix="_sbsa"
fi
url="${cuda_prefix}${CUDA_VERSION}/local_installers/cuda_${CUDA_VERSION}_${CUDA_BUILD}_linux${cuda_suffix}.run"
echo "cuda url: ${url}"
wget "$url" --progress=bar:force:noscroll -q --show-progress -O ./cuda.run
chmod a+x ./cuda.run
./cuda.run --silent --toolkit --toolkitpath=/build/cuda --no-opengl-libs --no-man-page --no-drm
rm ./cuda.run
_INSTALL_CUDA
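# The URL assembled above resolves per target platform as follows (the same installers referenced elsewhere in this commit):
#   linux/amd64: https://developer.download.nvidia.com/compute/cuda/11.8.0/local_installers/cuda_11.8.0_520.61.05_linux.run
#   linux/arm64: https://developer.download.nvidia.com/compute/cuda/11.8.0/local_installers/cuda_11.8.0_520.61.05_linux_sbsa.run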
# copy repository
WORKDIR /build/sunshine/
COPY .. .
# setup npm dependencies
RUN npm install
# setup build directory
WORKDIR /build/sunshine/build
# cmake and cpack
RUN <<_MAKE
#!/bin/bash
cmake \
-DCMAKE_CUDA_COMPILER:PATH=/build/cuda/bin/nvcc \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_INSTALL_PREFIX=/usr \
-DSUNSHINE_ASSETS_DIR=share/sunshine \
-DSUNSHINE_EXECUTABLE_PATH=/usr/bin/sunshine \
-DSUNSHINE_ENABLE_WAYLAND=ON \
-DSUNSHINE_ENABLE_X11=ON \
-DSUNSHINE_ENABLE_DRM=ON \
-DSUNSHINE_ENABLE_CUDA=ON \
/build/sunshine
make -j "$(nproc)"
cpack -G DEB
_MAKE
FROM scratch AS artifacts
ARG BASE
ARG TAG
ARG TARGETARCH
COPY --from=sunshine-build /build/sunshine/build/cpack_artifacts/Sunshine.deb /sunshine-${BASE}-${TAG}-${TARGETARCH}.deb
FROM sunshine-base as sunshine
# copy deb from builder
COPY --from=artifacts /sunshine*.deb /sunshine.deb
# install sunshine
RUN <<_INSTALL_SUNSHINE
#!/bin/bash
apt-get update -y
apt-get install -y --no-install-recommends /sunshine.deb
apt-get clean
rm -rf /var/lib/apt/lists/*
_INSTALL_SUNSHINE
# network setup
EXPOSE 47984-47990/tcp
EXPOSE 48010
EXPOSE 47998-48000/udp
# setup user
ARG PGID=1000
ENV PGID=${PGID}
ARG PUID=1000
ENV PUID=${PUID}
ENV TZ="UTC"
ARG UNAME=lizard
ENV UNAME=${UNAME}
ENV HOME=/home/$UNAME
# setup user
RUN <<_SETUP_USER
groupadd -f -g "${PGID}" "${UNAME}"
useradd -lm -d ${HOME} -s /bin/bash -g "${PGID}" -G input -u "${PUID}" "${UNAME}"
mkdir -p ${HOME}/.config/sunshine
ln -s ${HOME}/.config/sunshine /config
chown -R ${UNAME} ${HOME}
_SETUP_USER
USER ${UNAME}
WORKDIR ${HOME}
# entrypoint
ENTRYPOINT ["/usr/bin/sunshine"]

docker/fedora-36.dockerfile Normal file

@ -0,0 +1,155 @@
# artifacts: true
# platforms: linux/amd64,linux/arm64/v8
ARG BASE=fedora
ARG TAG=36
FROM ${BASE}:${TAG} AS sunshine-base
FROM sunshine-base as sunshine-build
ARG TARGETPLATFORM
RUN echo "target_platform: ${TARGETPLATFORM}"
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
# install dependencies
# hadolint ignore=DL3041
RUN <<_DEPS
#!/bin/bash
dnf -y update
dnf -y group install "Development Tools"
dnf -y install \
boost-devel-1.76.0* \
boost-static-1.76.0* \
cmake-3.22.2* \
gcc-12.0.1* \
gcc-c++-12.0.1* \
libcap-devel-2.48* \
libcurl-devel-7.82.0* \
libdrm-devel-2.4.110* \
libevdev-devel-1.12.0* \
libva-devel-2.14.0* \
libvdpau-devel-1.5* \
libX11-devel-1.7.3* \
libxcb-devel-1.13.1* \
libXcursor-devel-1.2.0* \
libXfixes-devel-6.0.0* \
libXi-devel-1.8* \
libXinerama-devel-1.1.4* \
libXrandr-devel-1.5.2* \
libXtst-devel-1.2.3* \
mesa-libGL-devel-22.0.1* \
npm-8.3.1* \
numactl-devel-2.0.14* \
openssl-devel-3.0.2* \
opus-devel-1.3.1* \
pulseaudio-libs-devel-15.0* \
rpm-build-4.17.0* \
wget-1.21.3* \
which-2.21*
if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then
# libmfx-devel is not listed for fedora 36/37
dnf -y install \
https://kojipkgs.fedoraproject.org//packages/libmfx/1.25/4.el8/x86_64/libmfx-devel-1.25-4.el8.x86_64.rpm
fi
dnf clean all
rm -rf /var/cache/yum
_DEPS
# install cuda
WORKDIR /build/cuda
# versions: https://developer.nvidia.com/cuda-toolkit-archive
ENV CUDA_VERSION="12.0.0"
ENV CUDA_BUILD="525.60.13"
# hadolint ignore=SC3010
RUN <<_INSTALL_CUDA
#!/bin/bash
cuda_prefix="https://developer.download.nvidia.com/compute/cuda/"
cuda_suffix=""
if [[ "${TARGETPLATFORM}" == 'linux/arm64' ]]; then
cuda_suffix="_sbsa"
fi
url="${cuda_prefix}${CUDA_VERSION}/local_installers/cuda_${CUDA_VERSION}_${CUDA_BUILD}_linux${cuda_suffix}.run"
echo "cuda url: ${url}"
wget "$url" --progress=bar:force:noscroll -q --show-progress -O ./cuda.run
chmod a+x ./cuda.run
./cuda.run --silent --toolkit --toolkitpath=/build/cuda --no-opengl-libs --no-man-page --no-drm
rm ./cuda.run
_INSTALL_CUDA
# copy repository
WORKDIR /build/sunshine/
COPY .. .
# setup npm dependencies
RUN npm install
# setup build directory
WORKDIR /build/sunshine/build
# cmake and cpack
RUN <<_MAKE
#!/bin/bash
cmake \
-DCMAKE_CUDA_COMPILER:PATH=/build/cuda/bin/nvcc \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_INSTALL_PREFIX=/usr \
-DSUNSHINE_ASSETS_DIR=share/sunshine \
-DSUNSHINE_EXECUTABLE_PATH=/usr/bin/sunshine \
-DSUNSHINE_ENABLE_WAYLAND=ON \
-DSUNSHINE_ENABLE_X11=ON \
-DSUNSHINE_ENABLE_DRM=ON \
-DSUNSHINE_ENABLE_CUDA=ON \
/build/sunshine
make -j "$(nproc)"
cpack -G RPM
_MAKE
FROM scratch AS artifacts
ARG BASE
ARG TAG
ARG TARGETARCH
COPY --from=sunshine-build /build/sunshine/build/cpack_artifacts/Sunshine.rpm /sunshine-${BASE}-${TAG}-${TARGETARCH}.rpm
FROM sunshine-base as sunshine
# copy rpm from builder
COPY --from=artifacts /sunshine*.rpm /sunshine.rpm
# install sunshine
RUN <<_INSTALL_SUNSHINE
#!/bin/bash
dnf -y update
dnf -y install /sunshine.rpm
dnf clean all
rm -rf /var/cache/yum
_INSTALL_SUNSHINE
# network setup
EXPOSE 47984-47990/tcp
EXPOSE 48010
EXPOSE 47998-48000/udp
# setup user
ARG PGID=1000
ENV PGID=${PGID}
ARG PUID=1000
ENV PUID=${PUID}
ENV TZ="UTC"
ARG UNAME=lizard
ENV UNAME=${UNAME}
ENV HOME=/home/$UNAME
# setup user
RUN <<_SETUP_USER
groupadd -f -g "${PGID}" "${UNAME}"
useradd -lm -d ${HOME} -s /bin/bash -g "${PGID}" -G input -u "${PUID}" "${UNAME}"
mkdir -p ${HOME}/.config/sunshine
ln -s ${HOME}/.config/sunshine /config
chown -R ${UNAME} ${HOME}
_SETUP_USER
USER ${UNAME}
WORKDIR ${HOME}
# entrypoint
ENTRYPOINT ["/usr/bin/sunshine"]

docker/fedora-37.dockerfile Normal file

@ -0,0 +1,155 @@
# artifacts: true
# platforms: linux/amd64,linux/arm64/v8
ARG BASE=fedora
ARG TAG=37
FROM ${BASE}:${TAG} AS sunshine-base
FROM sunshine-base as sunshine-build
ARG TARGETPLATFORM
RUN echo "target_platform: ${TARGETPLATFORM}"
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
# install dependencies
# hadolint ignore=DL3041
RUN <<_DEPS
#!/bin/bash
dnf -y update
dnf -y group install "Development Tools"
dnf -y install \
boost-devel-1.78.0* \
boost-static-1.78.0* \
cmake-3.24.1* \
gcc-12.2.1* \
gcc-c++-12.2.1* \
libcap-devel-2.48* \
libcurl-devel-7.85.0* \
libdrm-devel-2.4.112* \
libevdev-devel-1.13.0* \
libva-devel-2.15.0* \
libvdpau-devel-1.5* \
libX11-devel-1.8.1* \
libxcb-devel-1.13.1* \
libXcursor-devel-1.2.1* \
libXfixes-devel-6.0.0* \
libXi-devel-1.8* \
libXinerama-devel-1.1.4* \
libXrandr-devel-1.5.2* \
libXtst-devel-1.2.3* \
mesa-libGL-devel-22.2.2* \
npm-8.15.0* \
numactl-devel-2.0.14* \
openssl-devel-3.0.5* \
opus-devel-1.3.1* \
pulseaudio-libs-devel-16.1* \
rpm-build-4.18.0* \
wget-1.21.3* \
which-2.21*
if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then
# libmfx-devel is not listed for fedora 36/37
dnf -y install \
https://kojipkgs.fedoraproject.org//packages/libmfx/1.25/4.el8/x86_64/libmfx-devel-1.25-4.el8.x86_64.rpm
fi
dnf clean all
rm -rf /var/cache/yum
_DEPS
# install cuda
WORKDIR /build/cuda
# versions: https://developer.nvidia.com/cuda-toolkit-archive
ENV CUDA_VERSION="12.0.0"
ENV CUDA_BUILD="525.60.13"
# hadolint ignore=SC3010
RUN <<_INSTALL_CUDA
#!/bin/bash
cuda_prefix="https://developer.download.nvidia.com/compute/cuda/"
cuda_suffix=""
if [[ "${TARGETPLATFORM}" == 'linux/arm64' ]]; then
cuda_suffix="_sbsa"
fi
url="${cuda_prefix}${CUDA_VERSION}/local_installers/cuda_${CUDA_VERSION}_${CUDA_BUILD}_linux${cuda_suffix}.run"
echo "cuda url: ${url}"
wget "$url" --progress=bar:force:noscroll -q --show-progress -O ./cuda.run
chmod a+x ./cuda.run
./cuda.run --silent --toolkit --toolkitpath=/build/cuda --no-opengl-libs --no-man-page --no-drm
rm ./cuda.run
_INSTALL_CUDA
# copy repository
WORKDIR /build/sunshine/
COPY .. .
# setup npm dependencies
RUN npm install
# setup build directory
WORKDIR /build/sunshine/build
# cmake and cpack
RUN <<_MAKE
#!/bin/bash
cmake \
-DCMAKE_CUDA_COMPILER:PATH=/build/cuda/bin/nvcc \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_INSTALL_PREFIX=/usr \
-DSUNSHINE_ASSETS_DIR=share/sunshine \
-DSUNSHINE_EXECUTABLE_PATH=/usr/bin/sunshine \
-DSUNSHINE_ENABLE_WAYLAND=ON \
-DSUNSHINE_ENABLE_X11=ON \
-DSUNSHINE_ENABLE_DRM=ON \
-DSUNSHINE_ENABLE_CUDA=ON \
/build/sunshine
make -j "$(nproc)"
cpack -G RPM
_MAKE
FROM scratch AS artifacts
ARG BASE
ARG TAG
ARG TARGETARCH
COPY --from=sunshine-build /build/sunshine/build/cpack_artifacts/Sunshine.rpm /sunshine-${BASE}-${TAG}-${TARGETARCH}.rpm
FROM sunshine-base as sunshine
# copy rpm from builder
COPY --from=artifacts /sunshine*.rpm /sunshine.rpm
# install sunshine
RUN <<_INSTALL_SUNSHINE
#!/bin/bash
dnf -y update
dnf -y install /sunshine.rpm
dnf clean all
rm -rf /var/cache/yum
_INSTALL_SUNSHINE
# network setup
EXPOSE 47984-47990/tcp
EXPOSE 48010
EXPOSE 47998-48000/udp
# setup user
ARG PGID=1000
ENV PGID=${PGID}
ARG PUID=1000
ENV PUID=${PUID}
ENV TZ="UTC"
ARG UNAME=lizard
ENV UNAME=${UNAME}
ENV HOME=/home/$UNAME
# setup user
RUN <<_SETUP_USER
groupadd -f -g "${PGID}" "${UNAME}"
useradd -lm -d ${HOME} -s /bin/bash -g "${PGID}" -G input -u "${PUID}" "${UNAME}"
mkdir -p ${HOME}/.config/sunshine
ln -s ${HOME}/.config/sunshine /config
chown -R ${UNAME} ${HOME}
_SETUP_USER
USER ${UNAME}
WORKDIR ${HOME}
# entrypoint
ENTRYPOINT ["/usr/bin/sunshine"]


@ -0,0 +1,208 @@
# artifacts: true
# platforms: linux/amd64,linux/arm64/v8
ARG BASE=ubuntu
ARG TAG=18.04
FROM ${BASE}:${TAG} AS sunshine-base
ENV DEBIAN_FRONTEND=noninteractive
FROM sunshine-base as sunshine-build
ARG TARGETPLATFORM
RUN echo "target_platform: ${TARGETPLATFORM}"
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
# install dependencies
RUN <<_DEPS
#!/bin/bash
apt-get update -y
apt-get install -y --no-install-recommends \
software-properties-common=0.96.24.32.18
add-apt-repository ppa:ubuntu-toolchain-r/test
apt-get install -y --no-install-recommends \
bison=2:3.0.4* \
build-essential=12.4* \
gcc-10=10.3.0* \
g++-10=10.3.0* \
libavdevice-dev=7:3.4.* \
libcap-dev=1:2.25* \
libcurl-openssl1.0-dev=7.58.0* \
libdrm-dev=2.4.101* \
libevdev-dev=1.5.8* \
libnuma-dev=2.0.11* \
libopus-dev=1.1.2* \
libpulse-dev=1:11.1* \
libssl1.0-dev=1.0.2* \
libva-dev=2.1.0* \
libvdpau-dev=1.1.1* \
libwayland-dev=1.16.0* \
libx11-dev=2:1.6.4* \
libxcb-shm0-dev=1.13* \
libxcb-xfixes0-dev=1.13* \
libxcb1-dev=1.13* \
libxfixes-dev=1:5.0.3* \
libxrandr-dev=2:1.5.1* \
libxtst-dev=2:1.2.3* \
npm=3.5.2* \
node-gyp=3.6.2* \
nodejs-dev=8.10.0* \
wget=1.19.4*
apt-get clean
rm -rf /var/lib/apt/lists/*
_DEPS
# Update gcc alias
# https://stackoverflow.com/a/70653945/11214013
RUN <<_GCC_ALIAS
#!/bin/bash
update-alternatives --install \
/usr/bin/gcc gcc /usr/bin/gcc-10 100 \
--slave /usr/bin/g++ g++ /usr/bin/g++-10 \
--slave /usr/bin/gcov gcov /usr/bin/gcov-10 \
--slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-10 \
--slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-10
_GCC_ALIAS
# install boost
# cannot install boost for aarch64 using ppa:savoury1/boost-defaults-1.71
# otherwise add the repository and the following packages
# libboost-filesystem1.71-dev=1.71.0* \
# libboost-log1.71-dev=1.71.0* \
# libboost-program-options1.71-dev=1.71.0* \
# libboost-regex1.71-dev=1.71.0* \
# libboost-thread1.71-dev=1.71.0* \
WORKDIR /build/tmp
RUN <<_INSTALL_BOOST
url="https://boostorg.jfrog.io/artifactory/main/release/1.74.0/source/boost_1_74_0.tar.bz2"
wget "${url}" --progress=bar:force:noscroll -q --show-progress -O ./boost.tar.bz2
tar --bzip2 -xf boost.tar.bz2 --directory /build
mv /build/boost_*/ /build/boost
ls -a /build/boost
cd /build/boost
./bootstrap.sh --with-libraries=system,thread,log,program_options && \
./b2 install variant=release link=static,shared runtime-link=shared -j "$(nproc)"
_INSTALL_BOOST
# install cmake
# sunshine requires cmake >= 3.18
WORKDIR /build/cmake
# https://cmake.org/download/
ENV CMAKE_VERSION="3.25.1"
# hadolint ignore=SC3010
RUN <<_INSTALL_CMAKE
#!/bin/bash
cmake_prefix="https://github.com/Kitware/CMake/releases/download/v"
if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then
cmake_arch="x86_64"
elif [[ "${TARGETPLATFORM}" == 'linux/arm64' ]]; then
cmake_arch="aarch64"
fi
url="${cmake_prefix}${CMAKE_VERSION}/cmake-${CMAKE_VERSION}-linux-${cmake_arch}.sh"
echo "cmake url: ${url}"
wget "$url" --progress=bar:force:noscroll -q --show-progress -O ./cmake.sh
sh ./cmake.sh --prefix=/usr/local --skip-license
cmake --version
_INSTALL_CMAKE
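# The resolved download URL per platform looks like (derived from the pattern above; asset names follow Kitware's release naming):
#   linux/amd64: https://github.com/Kitware/CMake/releases/download/v3.25.1/cmake-3.25.1-linux-x86_64.sh
#   linux/arm64: https://github.com/Kitware/CMake/releases/download/v3.25.1/cmake-3.25.1-linux-aarch64.sh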
# install cuda
WORKDIR /build/cuda
# versions: https://developer.nvidia.com/cuda-toolkit-archive
ENV CUDA_VERSION="11.8.0"
ENV CUDA_BUILD="520.61.05"
# hadolint ignore=SC3010
RUN <<_INSTALL_CUDA
#!/bin/bash
cuda_prefix="https://developer.download.nvidia.com/compute/cuda/"
cuda_suffix=""
if [[ "${TARGETPLATFORM}" == 'linux/arm64' ]]; then
cuda_suffix="_sbsa"
fi
url="${cuda_prefix}${CUDA_VERSION}/local_installers/cuda_${CUDA_VERSION}_${CUDA_BUILD}_linux${cuda_suffix}.run"
echo "cuda url: ${url}"
wget "$url" --progress=bar:force:noscroll -q --show-progress -O ./cuda.run
chmod a+x ./cuda.run
./cuda.run --silent --toolkit --toolkitpath=/build/cuda --no-opengl-libs --no-man-page --no-drm
rm ./cuda.run
_INSTALL_CUDA
# todo - install libmfx
# https://github.com/Intel-Media-SDK/MediaSDK
# copy repository
WORKDIR /build/sunshine/
COPY .. .
# setup npm dependencies
RUN npm install
# setup build directory
WORKDIR /build/sunshine/build
# cmake and cpack
RUN <<_MAKE
#!/bin/bash
cmake \
-DCMAKE_CUDA_COMPILER:PATH=/build/cuda/bin/nvcc \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_INSTALL_PREFIX=/usr \
-DSUNSHINE_ASSETS_DIR=share/sunshine \
-DSUNSHINE_EXECUTABLE_PATH=/usr/bin/sunshine \
-DSUNSHINE_ENABLE_WAYLAND=ON \
-DSUNSHINE_ENABLE_X11=ON \
-DSUNSHINE_ENABLE_DRM=ON \
-DSUNSHINE_ENABLE_CUDA=ON \
/build/sunshine
make -j "$(nproc)"
cpack -G DEB
_MAKE
FROM scratch AS artifacts
ARG BASE
ARG TAG
ARG TARGETARCH
COPY --from=sunshine-build /build/sunshine/build/cpack_artifacts/Sunshine.deb /sunshine-${BASE}-${TAG}-${TARGETARCH}.deb
FROM sunshine-base as sunshine
# copy deb from builder
COPY --from=artifacts /sunshine*.deb /sunshine.deb
# install sunshine
RUN <<_INSTALL_SUNSHINE
#!/bin/bash
apt-get update -y
apt-get install -y --no-install-recommends /sunshine.deb
apt-get clean
rm -rf /var/lib/apt/lists/*
_INSTALL_SUNSHINE
# network setup
EXPOSE 47984-47990/tcp
EXPOSE 48010
EXPOSE 47998-48000/udp
# setup user
ARG PGID=1000
ENV PGID=${PGID}
ARG PUID=1000
ENV PUID=${PUID}
ENV TZ="UTC"
ARG UNAME=lizard
ENV UNAME=${UNAME}
ENV HOME=/home/$UNAME
# setup user
RUN <<_SETUP_USER
groupadd -f -g "${PGID}" "${UNAME}"
useradd -lm -d ${HOME} -s /bin/bash -g "${PGID}" -G input -u "${PUID}" "${UNAME}"
mkdir -p ${HOME}/.config/sunshine
ln -s ${HOME}/.config/sunshine /config
chown -R ${UNAME} ${HOME}
_SETUP_USER
USER ${UNAME}
WORKDIR ${HOME}
# entrypoint
ENTRYPOINT ["/usr/bin/sunshine"]


@ -0,0 +1,188 @@
# artifacts: true
# platforms: linux/amd64,linux/arm64/v8
ARG BASE=ubuntu
ARG TAG=20.04
FROM ${BASE}:${TAG} AS sunshine-base
ENV DEBIAN_FRONTEND=noninteractive
FROM sunshine-base as sunshine-build
ARG TARGETPLATFORM
RUN echo "target_platform: ${TARGETPLATFORM}"
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
# install dependencies
RUN <<_DEPS
#!/bin/bash
apt-get update -y
apt-get install -y --no-install-recommends \
build-essential=12.8* \
gcc-10=10.3.0* \
g++-10=10.3.0* \
libavdevice-dev=7:4.2.* \
libboost-filesystem-dev=1.71.0* \
libboost-log-dev=1.71.0* \
libboost-program-options-dev=1.71.0* \
libboost-thread-dev=1.71.0* \
libcap-dev=1:2.32* \
libcurl4-openssl-dev=7.68.0* \
libdrm-dev=2.4.107* \
libevdev-dev=1.9.0* \
libnuma-dev=2.0.12* \
libopus-dev=1.3.1* \
libpulse-dev=1:13.99.1* \
libssl-dev=1.1.1* \
libva-dev=2.7.0* \
libvdpau-dev=1.3* \
libwayland-dev=1.18.0* \
libx11-dev=2:1.6.9* \
libxcb-shm0-dev=1.14* \
libxcb-xfixes0-dev=1.14* \
libxcb1-dev=1.14* \
libxfixes-dev=1:5.0.3* \
libxrandr-dev=2:1.5.2* \
libxtst-dev=2:1.2.3* \
nodejs=10.19.0* \
npm=6.14.4* \
wget=1.20.3*
if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then
apt-get install -y --no-install-recommends \
libmfx-dev=20.1.0*
fi
apt-get clean
rm -rf /var/lib/apt/lists/*
_DEPS
# Update gcc alias
# https://stackoverflow.com/a/70653945/11214013
RUN <<_GCC_ALIAS
#!/bin/bash
update-alternatives --install \
/usr/bin/gcc gcc /usr/bin/gcc-10 100 \
--slave /usr/bin/g++ g++ /usr/bin/g++-10 \
--slave /usr/bin/gcov gcov /usr/bin/gcov-10 \
--slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-10 \
--slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-10
_GCC_ALIAS
# install cmake
# sunshine requires cmake >= 3.18
WORKDIR /build/cmake
# https://cmake.org/download/
ENV CMAKE_VERSION="3.25.1"
# hadolint ignore=SC3010
RUN <<_INSTALL_CMAKE
#!/bin/bash
cmake_prefix="https://github.com/Kitware/CMake/releases/download/v"
if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then
cmake_arch="x86_64"
elif [[ "${TARGETPLATFORM}" == 'linux/arm64' ]]; then
cmake_arch="aarch64"
fi
url="${cmake_prefix}${CMAKE_VERSION}/cmake-${CMAKE_VERSION}-linux-${cmake_arch}.sh"
echo "cmake url: ${url}"
wget "$url" --progress=bar:force:noscroll -q --show-progress -O ./cmake.sh
sh ./cmake.sh --prefix=/usr/local --skip-license
cmake --version
_INSTALL_CMAKE
# install cuda
WORKDIR /build/cuda
# versions: https://developer.nvidia.com/cuda-toolkit-archive
ENV CUDA_VERSION="11.8.0"
ENV CUDA_BUILD="520.61.05"
# hadolint ignore=SC3010
RUN <<_INSTALL_CUDA
#!/bin/bash
cuda_prefix="https://developer.download.nvidia.com/compute/cuda/"
cuda_suffix=""
if [[ "${TARGETPLATFORM}" == 'linux/arm64' ]]; then
cuda_suffix="_sbsa"
fi
url="${cuda_prefix}${CUDA_VERSION}/local_installers/cuda_${CUDA_VERSION}_${CUDA_BUILD}_linux${cuda_suffix}.run"
echo "cuda url: ${url}"
wget "$url" --progress=bar:force:noscroll -q --show-progress -O ./cuda.run
chmod a+x ./cuda.run
./cuda.run --silent --toolkit --toolkitpath=/build/cuda --no-opengl-libs --no-man-page --no-drm
rm ./cuda.run
_INSTALL_CUDA
# copy repository
WORKDIR /build/sunshine/
COPY .. .
# setup npm dependencies
RUN npm install
# setup build directory
WORKDIR /build/sunshine/build
# cmake and cpack
RUN <<_MAKE
#!/bin/bash
cmake \
-DCMAKE_CUDA_COMPILER:PATH=/build/cuda/bin/nvcc \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_INSTALL_PREFIX=/usr \
-DSUNSHINE_ASSETS_DIR=share/sunshine \
-DSUNSHINE_EXECUTABLE_PATH=/usr/bin/sunshine \
-DSUNSHINE_ENABLE_WAYLAND=ON \
-DSUNSHINE_ENABLE_X11=ON \
-DSUNSHINE_ENABLE_DRM=ON \
-DSUNSHINE_ENABLE_CUDA=ON \
/build/sunshine
make -j "$(nproc)"
cpack -G DEB
_MAKE
FROM scratch AS artifacts
ARG BASE
ARG TAG
ARG TARGETARCH
COPY --from=sunshine-build /build/sunshine/build/cpack_artifacts/Sunshine.deb /sunshine-${BASE}-${TAG}-${TARGETARCH}.deb
FROM sunshine-base as sunshine
# copy deb from builder
COPY --from=artifacts /sunshine*.deb /sunshine.deb
# install sunshine
RUN <<_INSTALL_SUNSHINE
#!/bin/bash
apt-get update -y
apt-get install -y --no-install-recommends /sunshine.deb
apt-get clean
rm -rf /var/lib/apt/lists/*
_INSTALL_SUNSHINE
# network setup
EXPOSE 47984-47990/tcp
EXPOSE 48010
EXPOSE 47998-48000/udp
# setup user
ARG PGID=1000
ENV PGID=${PGID}
ARG PUID=1000
ENV PUID=${PUID}
ENV TZ="UTC"
ARG UNAME=lizard
ENV UNAME=${UNAME}
ENV HOME=/home/$UNAME
# setup user
RUN <<_SETUP_USER
groupadd -f -g "${PGID}" "${UNAME}"
useradd -lm -d ${HOME} -s /bin/bash -g "${PGID}" -G input -u "${PUID}" "${UNAME}"
mkdir -p ${HOME}/.config/sunshine
ln -s ${HOME}/.config/sunshine /config
chown -R ${UNAME} ${HOME}
_SETUP_USER
USER ${UNAME}
WORKDIR ${HOME}
# entrypoint
ENTRYPOINT ["/usr/bin/sunshine"]


@ -0,0 +1,154 @@
# artifacts: true
# platforms: linux/amd64,linux/arm64/v8
ARG BASE=ubuntu
ARG TAG=22.04
FROM ${BASE}:${TAG} AS sunshine-base
ENV DEBIAN_FRONTEND=noninteractive
FROM sunshine-base as sunshine-build
ARG TARGETPLATFORM
RUN echo "target_platform: ${TARGETPLATFORM}"
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
# install dependencies
RUN <<_DEPS
#!/bin/bash
apt-get update -y
apt-get install -y --no-install-recommends \
build-essential=12.9* \
cmake=3.22.1* \
libavdevice-dev=7:4.4.* \
libboost-filesystem-dev=1.74.0* \
libboost-log-dev=1.74.0* \
libboost-program-options-dev=1.74.0* \
libboost-thread-dev=1.74.0* \
libcap-dev=1:2.44* \
libcurl4-openssl-dev=7.81.0* \
libdrm-dev=2.4.110* \
libevdev-dev=1.12.1* \
libnuma-dev=2.0.14* \
libopus-dev=1.3.1* \
libpulse-dev=1:15.99.1* \
libssl-dev=3.0.2* \
libva-dev=2.14.0* \
libvdpau-dev=1.4* \
libwayland-dev=1.20.0* \
libx11-dev=2:1.7.5* \
libxcb-shm0-dev=1.14* \
libxcb-xfixes0-dev=1.14* \
libxcb1-dev=1.14* \
libxfixes-dev=1:6.0.0* \
libxrandr-dev=2:1.5.2* \
libxtst-dev=2:1.2.3* \
nodejs=12.22.9* \
npm=8.5.1* \
wget=1.21.2*
if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then
apt-get install -y --no-install-recommends \
libmfx-dev=22.3.0*
fi
apt-get clean
rm -rf /var/lib/apt/lists/*
_DEPS
# install cuda
WORKDIR /build/cuda
# versions: https://developer.nvidia.com/cuda-toolkit-archive
ENV CUDA_VERSION="11.8.0"
ENV CUDA_BUILD="520.61.05"
# hadolint ignore=SC3010
RUN <<_INSTALL_CUDA
#!/bin/bash
cuda_prefix="https://developer.download.nvidia.com/compute/cuda/"
cuda_suffix=""
if [[ "${TARGETPLATFORM}" == 'linux/arm64' ]]; then
cuda_suffix="_sbsa"
fi
url="${cuda_prefix}${CUDA_VERSION}/local_installers/cuda_${CUDA_VERSION}_${CUDA_BUILD}_linux${cuda_suffix}.run"
echo "cuda url: ${url}"
wget "$url" --progress=bar:force:noscroll -q --show-progress -O ./cuda.run
chmod a+x ./cuda.run
./cuda.run --silent --toolkit --toolkitpath=/build/cuda --no-opengl-libs --no-man-page --no-drm
rm ./cuda.run
_INSTALL_CUDA
# copy repository
WORKDIR /build/sunshine/
COPY .. .
# setup npm dependencies
RUN npm install
# setup build directory
WORKDIR /build/sunshine/build
# cmake and cpack
RUN <<_MAKE
#!/bin/bash
cmake \
-DCMAKE_CUDA_COMPILER:PATH=/build/cuda/bin/nvcc \
-DCMAKE_BUILD_TYPE=Release \
-DCMAKE_INSTALL_PREFIX=/usr \
-DSUNSHINE_ASSETS_DIR=share/sunshine \
-DSUNSHINE_EXECUTABLE_PATH=/usr/bin/sunshine \
-DSUNSHINE_ENABLE_WAYLAND=ON \
-DSUNSHINE_ENABLE_X11=ON \
-DSUNSHINE_ENABLE_DRM=ON \
-DSUNSHINE_ENABLE_CUDA=ON \
/build/sunshine
make -j "$(nproc)"
cpack -G DEB
_MAKE
FROM scratch AS artifacts
ARG BASE
ARG TAG
ARG TARGETARCH
COPY --from=sunshine-build /build/sunshine/build/cpack_artifacts/Sunshine.deb /sunshine-${BASE}-${TAG}-${TARGETARCH}.deb
FROM sunshine-base as sunshine
# copy deb from builder
COPY --from=artifacts /sunshine*.deb /sunshine.deb
# install sunshine
RUN <<_INSTALL_SUNSHINE
#!/bin/bash
apt-get update -y
apt-get install -y --no-install-recommends /sunshine.deb
apt-get clean
rm -rf /var/lib/apt/lists/*
_INSTALL_SUNSHINE
# network setup
EXPOSE 47984-47990/tcp
EXPOSE 48010
EXPOSE 47998-48000/udp
# setup user
ARG PGID=1000
ENV PGID=${PGID}
ARG PUID=1000
ENV PUID=${PUID}
ENV TZ="UTC"
ARG UNAME=lizard
ENV UNAME=${UNAME}
ENV HOME=/home/$UNAME
# setup user
RUN <<_SETUP_USER
groupadd -f -g "${PGID}" "${UNAME}"
useradd -lm -d ${HOME} -s /bin/bash -g "${PGID}" -G input -u "${PUID}" "${UNAME}"
mkdir -p ${HOME}/.config/sunshine
ln -s ${HOME}/.config/sunshine /config
chown -R ${UNAME} ${HOME}
_SETUP_USER
USER ${UNAME}
WORKDIR ${HOME}
# entrypoint
ENTRYPOINT ["/usr/bin/sunshine"]


@ -23,6 +23,28 @@ Linux
-----
Follow the instructions for your preferred package type below.
**CUDA Compatibility**
CUDA is used for NvFBC capture.
.. Tip:: See `CUDA GPUs <https://developer.nvidia.com/cuda-gpus>`_ to cross-reference Compute Capability with your GPU.
.. table::
:widths: auto
=========================================== ============== ============== ================================
Package CUDA Version Min Driver CUDA Compute Capabilities
=========================================== ============== ============== ================================
https://aur.archlinux.org/packages/sunshine User dependent User dependent User dependent
sunshine.AppImage 11.8.0 450.80.02 50;52;60;61;62;70;75;80;86;90;35
sunshine_{arch}.flatpak 11.8.0 450.80.02 50;52;60;61;62;70;75;80;86;90;35
sunshine-debian-bullseye-{arch}.deb 11.8.0 450.80.02 50;52;60;61;62;70;75;80;86;90;35
sunshine-fedora-36-{arch}.rpm 12.0.0 525.60.13 50;52;60;61;62;70;75;80;86;90
sunshine-fedora-37-{arch}.rpm 12.0.0 525.60.13 50;52;60;61;62;70;75;80;86;90
sunshine-ubuntu-20.04-{arch}.deb 11.8.0 450.80.02 50;52;60;61;62;70;75;80;86;90;35
sunshine-ubuntu-22.04-{arch}.deb 11.8.0 450.80.02 50;52;60;61;62;70;75;80;86;90;35
=========================================== ============== ============== ================================
AppImage
^^^^^^^^
According to AppImageLint the supported distro matrix of the AppImage is below.


@ -17,11 +17,13 @@ Install Requirements
libavdevice-dev \
libboost-filesystem-dev \
libboost-log-dev \
libboost-thread-dev \
libboost-program-options-dev \
libboost-thread-dev \
libcap-dev \ # KMS
libcurl4-openssl-dev \
libdrm-dev \ # KMS
libevdev-dev \
libmfx-dev \ # x86_64 only
libnuma-dev \
libopus-dev \
libpulse-dev \
@ -41,25 +43,24 @@ Install Requirements
nvidia-cuda-dev \ # Cuda, NvFBC
nvidia-cuda-toolkit # Cuda, NvFBC
Fedora 36
^^^^^^^^^
Fedora 36, 37
^^^^^^^^^^^^^
End of Life: TBD
Install Repositories
.. code-block:: bash
sudo dnf update && \
sudo dnf group install "Development Tools" && \
sudo dnf install https://mirrors.rpmfusion.org/free/fedora/rpmfusion-free-release-$(rpm -E %fedora).noarch.rpm https://mirrors.rpmfusion.org/nonfree/fedora/rpmfusion-nonfree-release-$(rpm -E %fedora).noarch.rpm
Install Requirements
.. code-block:: bash
sudo dnf update && \
sudo dnf group install "Development Tools" && \
sudo dnf install \
boost-devel \
boost-static.x86_64 \
boost-static \
cmake \
gcc \
gcc-c++ \
libcap-devel \
libcurl-devel \
libdrm-devel \
libevdev-devel \
libva-devel \
libvdpau-devel \
@ -67,18 +68,21 @@ Install Requirements
libxcb-devel \ # X11
libXcursor-devel \ # X11
libXfixes-devel \ # X11
libXinerama-devel \ # X11
libXi-devel \ # X11
libXinerama-devel \ # X11
libXrandr-devel \ # X11
libXtst-devel \ # X11
mesa-libGL-devel \
nodejs \
npm \
numactl-devel \
openssl-devel \
opus-devel \
pulseaudio-libs-devel \
rpm-build # if you want to build an RPM binary package
rpm-build \ # if you want to build an RPM binary package
wget \ # necessary for cuda install with `run` file
which \ # necessary for cuda install with `run` file
# libmfx-devel is not listed for fedora, this is for x86_64 only
https://kojipkgs.fedoraproject.org//packages/libmfx/1.25/4.el8/x86_64/libmfx-devel-1.25-4.el8.x86_64.rpm
Ubuntu 20.04
^^^^^^^^^^^^
@ -99,6 +103,7 @@ Install Requirements
libcap-dev \ # KMS
libdrm-dev \ # KMS
libevdev-dev \
libmfx-dev \ # x86_64 only
libnuma-dev \
libopus-dev \
libpulse-dev \
@ -115,18 +120,17 @@ Install Requirements
libxtst-dev \ # X11
nodejs \
npm \
wget
wget # necessary for cuda install with `run` file
Update gcc alias
.. code-block:: bash
update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 100 --slave /usr/bin/g++ g++ /usr/bin/g++-10
Install CuDA
.. code-block:: bash
wget https://developer.download.nvidia.com/compute/cuda/11.4.2/local_installers/cuda_11.4.2_470.57.02_linux.run --progress=bar:force:noscroll -q --show-progress -O ./cuda.run && chmod a+x ./cuda.run
./cuda.run --silent --toolkit --toolkitpath=/usr --no-opengl-libs --no-man-page --no-drm && rm ./cuda.run
update-alternatives --install \
/usr/bin/gcc gcc /usr/bin/gcc-10 100 \
--slave /usr/bin/g++ g++ /usr/bin/g++-10 \
--slave /usr/bin/gcov gcov /usr/bin/gcov-10 \
--slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-10 \
--slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-10
Ubuntu 22.04
^^^^^^^^^^^^
@ -146,6 +150,7 @@ Install Requirements
libcap-dev \ # KMS
libdrm-dev \ # KMS
libevdev-dev \
libmfx-dev \ # x86_64 only
libnuma-dev \
libopus-dev \
libpulse-dev \
@ -160,8 +165,26 @@ Install Requirements
libxtst-dev \ # X11
nodejs \
npm \
nvidia-cuda-dev \ # Cuda, NvFBC
nvidia-cuda-toolkit # Cuda, NvFBC
nvidia-cuda-dev \ # CUDA, NvFBC
nvidia-cuda-toolkit # CUDA, NvFBC
CUDA
----
If the version of CUDA available from your distro is not adequate, manually install CUDA.
.. Tip:: The version of CUDA you use will determine compatibility with various GPU generations.
See `CUDA compatibility <https://docs.nvidia.com/deploy/cuda-compatibility/index.html>`_ for more info.
Select the appropriate run file based on your desired CUDA version and architecture according to
`CUDA Toolkit Archive <https://developer.nvidia.com/cuda-toolkit-archive>`_.
.. code-block:: bash
wget https://developer.download.nvidia.com/compute/cuda/11.4.2/local_installers/cuda_11.4.2_470.57.02_linux.run \
--progress=bar:force:noscroll -q --show-progress -O ./cuda.run
chmod a+x ./cuda.run
./cuda.run --silent --toolkit --toolkitpath=/usr --no-opengl-libs --no-man-page --no-drm
rm ./cuda.run
npm dependencies
----------------


@ -9,9 +9,11 @@ arch=('x86_64' 'i686')
url=@PROJECT_HOMEPAGE_URL@
license=('GPL3')
depends=('avahi' 'boost-libs' 'curl' 'libevdev' 'libpulse' 'libva' 'libvdpau' 'libx11' 'libxcb' 'libxfixes' 'libxrandr' 'libxtst' 'numactl' 'openssl' 'opus' 'udev')
depends=('avahi' 'boost-libs' 'curl' 'libevdev' 'libmfx' 'libpulse' 'libva' 'libvdpau' 'libx11' 'libxcb' 'libxfixes' 'libxrandr' 'libxtst' 'numactl' 'openssl' 'opus' 'udev')
makedepends=('boost' 'cmake' 'git' 'make' 'nodejs' 'npm')
optdepends=('cuda' 'libcap' 'libdrm')
optdepends=('cuda: NvFBC capture support'
'libcap'
'libdrm')
provides=(@SUNSHINE_AUR_PROVIDES@)
conflicts=(@SUNSHINE_AUR_CONFLICTS@)


@ -40,6 +40,12 @@ modules:
- type: archive
url: http://archive.ubuntu.com/ubuntu/pool/main/b/boost1.74/boost1.74_1.74.0.orig.tar.xz
sha256: 2467be4af625b5ae4b3c93fc7af196a09eba39c11a7338cd9e8b356fa44d2f45
- type: archive
url: http://archive.ubuntu.com/ubuntu/pool/main/b/boost1.74/boost1.74_1.74.0-17ubuntu1.debian.tar.xz
sha256: 22e623d98c84eb3fec57e19ea371157a5bc8225ba4c5907f7e5155072317a31d
- type: shell
commands:
- for n in $(cat patches/series); do if [[ $n != "#"* ]]; then patch -Np1 -i "patches/$n" -d .; fi; done
- name: avahi
disabled: false
@ -74,6 +80,13 @@ modules:
- type: archive
url: http://archive.ubuntu.com/ubuntu/pool/main/a/avahi/avahi_0.8.orig.tar.gz
sha256: 060309d7a333d38d951bc27598c677af1796934dbd98e1024e7ad8de798fedda
- type: archive
url: http://archive.ubuntu.com/ubuntu/pool/main/a/avahi/avahi_0.8-6ubuntu1.debian.tar.xz
sha256: ebf1dfe5e853b6bc6843e3bd784cb6af632041f305abd0e5415114f80c1dcea4
- type: shell
commands:
- for n in $(cat patches/series); do if [[ $n != "#"* ]]; then patch -Np1 -i "patches/$n" -d .; fi; done
- autoreconf -ivf
- name: libevdev
disabled: false
@ -87,6 +100,59 @@ modules:
- type: archive
url: http://archive.ubuntu.com/ubuntu/pool/main/libe/libevdev/libevdev_1.13.0+dfsg.orig.tar.xz
sha256: a882e13ef1dd6bd227318080cabf60fe5af3c06471259d3acfc9dbfb202351a7
- type: archive
url: http://archive.ubuntu.com/ubuntu/pool/main/libe/libevdev/libevdev_1.13.0+dfsg-1.debian.tar.xz
sha256: d33c56acbbfff2dc540e45c57a38d92210b5e7fd0947ac47fbe48183468aad74
- type: shell
commands:
- for n in $(cat patches/series); do if [[ $n != "#"* ]]; then patch -Np1 -i "patches/$n" -d .; fi; done
- name: libdrm
# libdrm-intel is needed for libmfx-dev
disabled: false
buildsystem: meson
only-arches:
- x86_64
sources:
- type: archive
url: http://archive.ubuntu.com/ubuntu/pool/main/libd/libdrm/libdrm_2.4.110.orig.tar.xz
sha256: eecee4c4b47ed6d6ce1a9be3d6d92102548ea35e442282216d47d05293cf9737
- type: archive
url: http://archive.ubuntu.com/ubuntu/pool/main/libd/libdrm/libdrm_2.4.110-1ubuntu1.debian.tar.xz
sha256: 464b9553861f39beddfaee6b8924734b02a0febfae3968e4ca1360f2972bba8b
- type: shell
commands:
- for n in $(cat patches/series); do if [[ $n != "#"* ]]; then patch -Np1 -i "patches/$n" -d .; fi; done
- name: libmfx-dev
disabled: false
buildsystem: cmake
config-opts:
- -DENABLE_OPENCL=ON
- -DENABLE_X11_DRI3=ON
- -DENABLE_WAYLAND=ON
- -DENABLE_ITT=OFF
- -DENABLE_TEXTLOG=OFF
- -DENABLE_STAT=OFF
- -DBUILD_ALL=OFF
- -DBUILD_RUNTIME=ON
- -DBUILD_SAMPLES=OFF
- -DBUILD_TESTS=OFF
- -DBUILD_TOOLS=OFF
- -DUSE_SYSTEM_GTEST=OFF
- -DMFX_ENABLE_KERNELS=ON
only-arches:
- x86_64
sources:
- type: archive
url: http://archive.ubuntu.com/ubuntu/pool/universe/i/intel-mediasdk/intel-mediasdk_22.3.0.orig.tar.gz
sha256: e1e74229f409e969b70c2b35b1955068de3d40db85ecc42bd6ff501468bc76d7
- type: archive
url: http://archive.ubuntu.com/ubuntu/pool/universe/i/intel-mediasdk/intel-mediasdk_22.3.0-1.debian.tar.xz
sha256: 024d98d2f63443d2765a90cfe997d104e7b897694889f199ca8fb4d9ffdcf1dc
- type: shell
commands:
- for n in $(cat patches/series); do if [[ $n != "#"* ]]; then patch -Np1 -i "patches/$n" -d .; fi; done
- name: cuda
disabled: false
@ -100,20 +166,20 @@ modules:
- chmod u+x ./cuda.run
- ./cuda.run --silent --toolkit --toolkitpath=$FLATPAK_DEST/cuda --no-opengl-libs --no-man-page --no-drm
--tmpdir=$FLATPAK_BUILDER_BUILDDIR
- rm -r $FLATPAK_DEST/cuda/nsight-systems-2021.3.2
- rm -r $FLATPAK_DEST/cuda/nsight-systems-*
- rm ./cuda.run
sources:
- type: file
only-arches:
- x86_64
url: https://developer.download.nvidia.com/compute/cuda/11.4.2/local_installers/cuda_11.4.2_470.57.02_linux.run
sha256: bbd87ca0e913f837454a796367473513cddef555082e4d86ed9a38659cc81f0a
url: https://developer.download.nvidia.com/compute/cuda/11.8.0/local_installers/cuda_11.8.0_520.61.05_linux.run
sha256: 9223c4af3aebe4a7bbed9abd9b163b03a1b34b855fbc2b4a0d1b706ac09a5a16
dest-filename: cuda.run
- type: file
only-arches:
- aarch64
url: https://developer.download.nvidia.com/compute/cuda/11.4.2/local_installers/cuda_11.4.2_470.57.02_linux_sbsa.run # yamllint disable-line rule:line-length
sha256: f2c4a52e06329606c8dfb7c5ea3f4cb4c0b28f9d3fdffeeb734fcc98daf580d8
url: https://developer.download.nvidia.com/compute/cuda/11.8.0/local_installers/cuda_11.8.0_520.61.05_linux_sbsa.run # yamllint disable-line rule:line-length
sha256: e6e9a8d31163c9776b5e313fd7590877c5684e1ecddee741154f95704d4ed27c
dest-filename: cuda.run
- name: numactl
@ -124,6 +190,12 @@ modules:
- type: archive
url: http://archive.ubuntu.com/ubuntu/pool/main/n/numactl/numactl_2.0.14.orig.tar.gz
sha256: 1ee27abd07ff6ba140aaf9bc6379b37825e54496e01d6f7343330cf1a4487035
- type: archive
url: http://archive.ubuntu.com/ubuntu/pool/main/n/numactl/numactl_2.0.14-3ubuntu2.debian.tar.xz
sha256: 49089e5be5367f6367f8b0389d1d523944432607783b53f0605705792e1015ee
- type: shell
commands:
- for n in $(cat patches/series); do if [[ $n != "#"* ]]; then patch -Np1 -i "patches/$n" -d .; fi; done
cleanup:
- "/bin"