Mirror of https://github.com/LizardByte/Sunshine.git (synced 2025-03-14 01:27:36 +00:00)

ci(tests): add test framework (#1603)

commit 89e8b9628c (parent 934f81182a)
@@ -4,8 +4,10 @@
# do not ignore .git, needed for versioning
!/.git

# do not ignore .rstcheck.cfg, needed to test building docs
!/.rstcheck.cfg

# ignore repo directories and files
docs/
gh-pages-template/
scripts/
tools/
.github/workflows/CI.yml (vendored, 298 changed lines)
@@ -169,12 +169,8 @@ jobs:

      - name: Checkout
        uses: actions/checkout@v4

      - name: Checkout Flathub Shared Modules
        uses: actions/checkout@v4
        with:
          repository: flathub/shared-modules
          path: build/shared-modules
          submodules: recursive

      - name: Setup Dependencies Linux Flatpak
        run: |
@@ -185,8 +181,10 @@ jobs:
            cmake \
            flatpak \
            qemu-user-static

          sudo su $(whoami) -c "flatpak --user remote-add --if-not-exists flathub \
            https://flathub.org/repo/flathub.flatpakrepo"

          sudo su $(whoami) -c "flatpak --user install -y flathub \
            org.flatpak.Builder \
            org.freedesktop.Platform/${{ matrix.arch }}/${PLATFORM_VERSION} \
@@ -318,6 +316,7 @@ jobs:
          sudo rm /root/cuda.run

      - name: Setup Dependencies Linux
        timeout-minutes: 5
        run: |
          # allow newer gcc
          sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
@@ -353,7 +352,8 @@ jobs:
            libxcb1-dev \
            libxfixes-dev \
            libxrandr-dev \
            libxtst-dev
            libxtst-dev \
            python3

          # clean apt cache
          sudo apt-get clean
@@ -368,6 +368,12 @@ jobs:
            --slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-10 \
            --slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-10

      - name: Setup python
        id: python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'

      - name: Build Linux
        env:
          BRANCH: ${{ github.head_ref || github.ref_name }}
@@ -463,6 +469,50 @@ jobs:
          name: sunshine-linux-${{ matrix.type }}-${{ matrix.dist }}
          path: artifacts/

      - name: Install test deps
        run: |
          sudo apt-get update -y
          sudo apt-get install -y \
            doxygen \
            graphviz \
            python3-venv \
            x11-xserver-utils \
            xvfb

          # clean apt cache
          sudo apt-get clean
          sudo rm -rf /var/lib/apt/lists/*

      - name: Run tests
        id: test
        working-directory: build/tests
        run: |
          export DISPLAY=:1
          Xvfb ${DISPLAY} -screen 0 1024x768x24 &

          ./test_sunshine --gtest_color=yes

      - name: Generate gcov report
        # any except canceled or skipped
        if: always() && (steps.test.outcome == 'success' || steps.test.outcome == 'failure')
        id: test_report
        working-directory: build
        run: |
          ${{ steps.python.outputs.python-path }} -m pip install gcovr
          ${{ steps.python.outputs.python-path }} -m gcovr -r .. \
            --exclude ../tests/ \
            --exclude ../third-party/ \
            --xml-pretty \
            -o coverage.xml

      - name: Upload coverage
        # any except canceled or skipped
        if: always() && (steps.test_report.outcome == 'success')
        uses: codecov/codecov-action@v3
        with:
          files: ./build/coverage.xml
          flags: ${{ runner.os }}

      - name: Create/Update GitHub Release
        if: ${{ needs.setup_release.outputs.create_release == 'true' }}
        uses: ncipollo/release-action@v1
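Editor's note: the Linux test flow above boils down to starting a headless X server, running the Google Test binary, then converting the gcov data into an XML report for Codecov. A rough local equivalent, assuming a coverage-instrumented build already exists in ./build, might look like this (a sketch, not the workflow itself):

    # Start a virtual display so the capture/input tests have an X server to talk to.
    export DISPLAY=:1
    Xvfb "${DISPLAY}" -screen 0 1024x768x24 &
    xvfb_pid=$!

    # Run the test suite from the build tree.
    pushd build/tests
    ./test_sunshine --gtest_color=yes
    popd

    # Produce the same coverage.xml the workflow uploads to Codecov.
    pushd build
    python3 -m pip install gcovr
    python3 -m gcovr -r .. --exclude ../tests/ --exclude ../third-party/ --xml-pretty -o coverage.xml
    popd

    kill "${xvfb_pid}"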
@@ -605,6 +655,12 @@ jobs:
          # install dependencies using homebrew
          brew install cmake

      - name: Setup python
        id: python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'

      - name: Configure Portfile
        run: |
          # variables for Portfile
@@ -662,6 +718,7 @@ jobs:
      - name: Build port
        env:
          subportlist: ${{ steps.subportlist.outputs.subportlist }}
        id: build
        run: |
          subport="Sunshine"
@@ -683,6 +740,13 @@ jobs:
            "$subport"
          echo "::endgroup::"

      - name: Build Logs
        if: always()
        run: |
          logfile="/opt/local/var/macports/logs/_Users_runner_work_Sunshine_Sunshine_ports_multimedia_Sunshine/Sunshine/main.log"
          cat "$logfile"
          sudo mv "${logfile}" "${logfile}.bak"

      - name: Upload Artifacts
        if: ${{ matrix.release }}
        uses: actions/upload-artifact@v4
@@ -690,6 +754,86 @@ jobs:
          name: sunshine-macports
          path: artifacts/

      - name: Fix screen capture permissions
        if: ${{ matrix.os_version != 12 }} # macOS-12 is okay
        # can be removed if the following is fixed in the runner image
        # https://github.com/actions/runner-images/issues/9529
        # https://github.com/actions/runner-images/pull/9530
        run: |
          # https://apple.stackexchange.com/questions/362865/macos-list-apps-authorized-for-full-disk-access

          # permissions for screen capture
          values="'kTCCServiceScreenCapture','/opt/off/opt/runner/provisioner/provisioner',1,2,4,1,NULL,NULL,0,'UNUSED',NULL,0,1687786159"
          if [[ "${{ matrix.os_version }}" == "14" ]]; then
            # TCC access table in Sonoma has extra 4 columns: pid, pid_version, boot_uuid, last_reminded
            values="${values},NULL,NULL,'UNUSED',${values##*,}"
          fi

          # system and user databases
          dbPaths=(
            "/Library/Application Support/com.apple.TCC/TCC.db"
            "$HOME/Library/Application Support/com.apple.TCC/TCC.db"
          )

          sqlQuery="INSERT OR IGNORE INTO access VALUES($values);"

          for dbPath in "${dbPaths[@]}"; do
            echo "Column names for $dbPath"
            echo "-------------------"
            sudo sqlite3 "$dbPath" "PRAGMA table_info(access);"
            echo "Current permissions for $dbPath"
            echo "-------------------"
            sudo sqlite3 "$dbPath" "SELECT * FROM access WHERE service='kTCCServiceScreenCapture';"
            sudo sqlite3 "$dbPath" "$sqlQuery"
            echo "Updated permissions for $dbPath"
            echo "-------------------"
            sudo sqlite3 "$dbPath" "SELECT * FROM access WHERE service='kTCCServiceScreenCapture';"
          done
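Editor's note: the Sonoma branch above relies on plain bash parameter expansion; ${values##*,} strips everything up to and including the last comma, so the row's trailing timestamp is reused for the extra column. A standalone illustration (the application path here is a placeholder):

    # Illustration only; not the runner's real TCC row.
    values="'kTCCServiceScreenCapture','/path/to/app',1,2,4,1,NULL,NULL,0,'UNUSED',NULL,0,1687786159"
    echo "${values##*,}"
    # prints: 1687786159
    # The appended ",NULL,NULL,'UNUSED',${values##*,}" therefore fills the four
    # extra Sonoma columns (pid, pid_version, boot_uuid, last_reminded).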
      - name: Run tests
        if: always()
        id: test
        timeout-minutes: 10
        run: |
          sudo port test "Sunshine"

      - name: Test Logs
        if: always()
        run: |
          logfile="/opt/local/var/macports/logs/_Users_runner_work_Sunshine_Sunshine_ports_multimedia_Sunshine/Sunshine/main.log"
          cat "$logfile"

      - name: Generate gcov report
        # any except canceled or skipped
        if: always() && (steps.test.outcome == 'success' || steps.test.outcome == 'failure')
        id: test_report
        working-directory:
          /opt/local/var/macports/build/_Users_runner_work_Sunshine_Sunshine_ports_multimedia_Sunshine/Sunshine/work
        run: |
          base_dir=$(pwd)
          build_dir=${base_dir}/build

          # get the directory name that starts with Sunshine-*
          dir=$(ls -d Sunshine-*)

          cd ${build_dir}
          ${{ steps.python.outputs.python-path }} -m pip install gcovr
          sudo ${{ steps.python.outputs.python-path }} -m gcovr -r ../${dir} \
            --exclude ../${dir}/tests/ \
            --exclude ../${dir}/third-party/ \
            --gcov-object-directory $(pwd) \
            --verbose \
            --xml-pretty \
            -o ${{ github.workspace }}/build/coverage.xml

      - name: Upload coverage
        # any except canceled or skipped
        if: always() && (steps.test_report.outcome == 'success')
        uses: codecov/codecov-action@v3
        with:
          files: ./build/coverage.xml
          flags: ${{ runner.os }}-${{ matrix.os_version }}

      - name: Create/Update GitHub Release
        if: ${{ needs.setup_release.outputs.create_release == 'true' && matrix.release }}
        uses: ncipollo/release-action@v1
@@ -715,6 +859,97 @@ jobs:
        with:
          submodules: recursive

      - name: Prepare tests
        id: prepare-tests
        if: false # todo: DirectX11 is not available, so even software encoder fails
        run: |
          # function to download and extract a zip file
          function DownloadAndExtract {
            param (
              [string]$Uri,
              [string]$OutFile
            )

            $maxRetries = 5
            $retryCount = 0
            $success = $false

            while (-not $success -and $retryCount -lt $maxRetries) {
              $retryCount++
              Write-Host "Downloading $Uri to $OutFile, attempt $retryCount of $maxRetries"
              try {
                Invoke-WebRequest -Uri $Uri -OutFile $OutFile
                $success = $true
              } catch {
                Write-Host "Attempt $retryCount of $maxRetries failed with error: $($_.Exception.Message). Retrying..."
                Start-Sleep -Seconds 5
              }
            }

            if (-not $success) {
              Write-Host "Failed to download the file after $maxRetries attempts."
              exit 1
            }

            # use .NET to get the base name of the file
            $baseName = (Get-Item $OutFile).BaseName

            # Extract the zip file
            Expand-Archive -Path $OutFile -DestinationPath $baseName
          }

          # virtual display driver
          DownloadAndExtract `
            -Uri "https://www.amyuni.com/downloads/usbmmidd_v2.zip" `
            -OutFile "usbmmidd_v2.zip"

          # install
          Set-Location -Path usbmmidd_v2/usbmmidd_v2
          ./deviceinstaller64 install usbmmidd.inf usbmmidd

          # create the virtual display
          ./deviceinstaller64 enableidd 1

          # install devcon
          choco install devcon.portable

          # disable Hyper-V Video
          # https://stackoverflow.com/a/59490940
          C:\ProgramData\chocolatey\lib\devcon.portable\devcon64.exe `
            disable "VMBUS\{da0a7802-e377-4aac-8e77-0558eb1073f8}"

          # move up a directory
          Set-Location -Path ../..

          # multi monitor tool
          DownloadAndExtract `
            -Uri "http://www.nirsoft.net/utils/multimonitortool-x64.zip" `
            -OutFile "multimonitortool.zip"

          # enable the virtual display
          # http://www.nirsoft.net/utils/multi_monitor_tool.html
          Set-Location -Path multimonitortool

          # Original Hyper-V is \\.\DISPLAY1, it will recreate itself as \\.\DISPLAY6 (or something higher than 2)
          # USB Mobile Monitor Virtual Display is \\.\DISPLAY2

          # these don't seem to work if not using runAs
          # todo: do they work if not using runAs?
          Start-Process powershell -Verb runAs -ArgumentList '-Command ./MultiMonitorTool.exe /enable \\.\DISPLAY2'
          Start-Process powershell -Verb runAs -ArgumentList '-Command ./MultiMonitorTool.exe /SetPrimary \\.\DISPLAY2'

          # wait a few seconds
          Start-Sleep -s 5

          # list monitors
          ./MultiMonitorTool.exe /stext monitor_list.txt

          # wait a few seconds
          Start-Sleep -s 5

          # print the monitor list
          Get-Content -Path monitor_list.txt

      - name: Setup Dependencies Windows
        uses: msys2/setup-msys2@v2
        with:
@@ -722,12 +957,14 @@ jobs:
          install: >-
            base-devel
            diffutils
            doxygen
            git
            make
            mingw-w64-x86_64-binutils
            mingw-w64-x86_64-boost
            mingw-w64-x86_64-cmake
            mingw-w64-x86_64-curl
            mingw-w64-x86_64-graphviz
            mingw-w64-x86_64-miniupnpc
            mingw-w64-x86_64-nlohmann-json
            mingw-w64-x86_64-nodejs
@@ -740,6 +977,24 @@ jobs:
            wget
            yasm

      - name: Setup python
        # use this instead of msys2 python due to known issues using wheels, https://www.msys2.org/docs/python/
        id: setup-python
        uses: actions/setup-python@v5
        with:
          python-version: '3.11'

      - name: Python Path
        id: python-path
        shell: msys2 {0}
        run: |
          # replace backslashes with double backslashes
          python_path=$(echo "${{ steps.setup-python.outputs.python-path }}" | sed 's/\\/\\\\/g')

          # step output
          echo "python-path=${python_path}"
          echo "python-path=${python_path}" >> $GITHUB_OUTPUT
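Editor's note: the sed call above simply doubles every backslash so the Windows path survives being re-parsed later (it is handed to CMake as TESTS_PYTHON_EXECUTABLE in the build step below). A standalone illustration with a made-up path:

    # Illustration only; the path is a placeholder, not the runner's real one.
    win_path='C:\hostedtoolcache\windows\Python\3.11.8\x64\python.exe'
    escaped=$(echo "${win_path}" | sed 's/\\/\\\\/g')
    echo "${escaped}"
    # prints: C:\\hostedtoolcache\\windows\\Python\\3.11.8\\x64\\python.exe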
      - name: Build Windows
        shell: msys2 {0}
        env:
@@ -753,6 +1008,8 @@ jobs:
            -DBUILD_WERROR=ON \
            -DCMAKE_BUILD_TYPE=RelWithDebInfo \
            -DSUNSHINE_ASSETS_DIR=assets \
            -DTESTS_PYTHON_EXECUTABLE='${{ steps.python-path.outputs.python-path }}' \
            -DTESTS_SOFTWARE_ENCODER_UNAVAILABLE='skip' \
            -G "MinGW Makefiles" \
            ..
          mingw32-make -j$(nproc)
@@ -771,6 +1028,35 @@ jobs:
          mv ./cpack_artifacts/Sunshine.exe ../artifacts/sunshine-windows-installer.exe
          mv ./cpack_artifacts/Sunshine.zip ../artifacts/sunshine-windows-portable.zip

      - name: Run tests
        id: test
        shell: msys2 {0}
        working-directory: build/tests
        run: |
          ./test_sunshine.exe --gtest_color=yes

      - name: Generate gcov report
        # any except canceled or skipped
        if: always() && (steps.test.outcome == 'success' || steps.test.outcome == 'failure')
        id: test_report
        shell: msys2 {0}
        working-directory: build
        run: |
          ${{ steps.python-path.outputs.python-path }} -m pip install gcovr
          ${{ steps.python-path.outputs.python-path }} -m gcovr -r .. \
            --exclude ../tests/ \
            --exclude ../third-party/ \
            --xml-pretty \
            -o coverage.xml

      - name: Upload coverage
        # any except canceled or skipped
        if: always() && (steps.test_report.outcome == 'success')
        uses: codecov/codecov-action@v3
        with:
          files: ./build/coverage.xml
          flags: ${{ runner.os }}

      - name: Package Windows Debug Info
        working-directory: build
        run: |
.gitmodules (vendored, 12 changed lines)
@@ -1,7 +1,19 @@
[submodule "packaging/linux/flatpak/deps/org.flatpak.Builder.BaseApp"]
    path = packaging/linux/flatpak/deps/org.flatpak.Builder.BaseApp
    url = https://github.com/flathub/org.flatpak.Builder.BaseApp
    branch = branch/23.08
[submodule "packaging/linux/flatpak/deps/shared-modules"]
    path = packaging/linux/flatpak/deps/shared-modules
    url = https://github.com/flathub/shared-modules
    branch = master
[submodule "third-party/build-deps"]
    path = third-party/build-deps
    url = https://github.com/LizardByte/build-deps.git
    branch = dist
[submodule "third-party/googletest"]
    path = third-party/googletest
    url = https://github.com/google/googletest/
    branch = v1.14.x
[submodule "third-party/moonlight-common-c"]
    path = third-party/moonlight-common-c
    url = https://github.com/moonlight-stream/moonlight-common-c.git
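Editor's note: Google Test and the flatpak dependencies now arrive as submodules, so a fresh checkout needs them fetched before the tests can be built, for example:

    # Fetch the new submodules after pulling this change.
    git submodule update --init --recursive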
@@ -1,5 +1,6 @@
cmake_minimum_required(VERSION 3.18)
# `CMAKE_CUDA_ARCHITECTURES` requires 3.18
# set_source_files_properties requires 3.18
# todo - set this conditionally

# todo - set version to 0.0.0 once confident in automated versioning
@@ -96,6 +96,10 @@ Integrations
   :alt: Read the Docs
   :target: http://sunshinestream.readthedocs.io/

.. image:: https://img.shields.io/codecov/c/gh/LizardByte/Sunshine?token=SMGXQ5NVMJ&style=for-the-badge&logo=codecov&label=codecov
   :alt: Codecov
   :target: https://codecov.io/gh/LizardByte/Sunshine

Support
-------
@@ -110,11 +110,6 @@ set(SUNSHINE_TARGET_FILES
        "${CMAKE_SOURCE_DIR}/src/stat_trackers.cpp"
        ${PLATFORM_TARGET_FILES})

set_source_files_properties("${CMAKE_SOURCE_DIR}/src/upnp.cpp" PROPERTIES COMPILE_FLAGS -Wno-pedantic)

set_source_files_properties("${CMAKE_SOURCE_DIR}/third-party/nanors/rs.c"
        PROPERTIES COMPILE_FLAGS "-include deps/obl/autoshim.h -ftree-vectorize")

if(NOT SUNSHINE_ASSETS_DIR_DEF)
    set(SUNSHINE_ASSETS_DIR_DEF "${SUNSHINE_ASSETS_DIR}")
endif()
@@ -134,15 +129,6 @@ include_directories(
        ${PLATFORM_INCLUDE_DIRS}
)

string(TOUPPER "x${CMAKE_BUILD_TYPE}" BUILD_TYPE)
if("${BUILD_TYPE}" STREQUAL "XDEBUG")
    if(WIN32)
        set_source_files_properties("${CMAKE_SOURCE_DIR}/src/nvhttp.cpp" PROPERTIES COMPILE_FLAGS -O2)
    endif()
else()
    add_definitions(-DNDEBUG)
endif()

list(APPEND SUNSHINE_EXTERNAL_LIBRARIES
        ${MINIUPNP_LIBRARIES}
        ${CMAKE_THREAD_LIBS_INIT}
@@ -29,16 +29,6 @@ file(GLOB NVPREFS_FILES CONFIGURE_DEPENDS

# vigem
include_directories(SYSTEM "${CMAKE_SOURCE_DIR}/third-party/ViGEmClient/include")
set_source_files_properties("${CMAKE_SOURCE_DIR}/third-party/ViGEmClient/src/ViGEmClient.cpp"
        PROPERTIES COMPILE_DEFINITIONS "UNICODE=1;ERROR_INVALID_DEVICE_OBJECT_PARAMETER=650")
set(VIGEM_COMPILE_FLAGS "")
string(APPEND VIGEM_COMPILE_FLAGS "-Wno-unknown-pragmas ")
string(APPEND VIGEM_COMPILE_FLAGS "-Wno-misleading-indentation ")
string(APPEND VIGEM_COMPILE_FLAGS "-Wno-class-memaccess ")
string(APPEND VIGEM_COMPILE_FLAGS "-Wno-unused-function ")
string(APPEND VIGEM_COMPILE_FLAGS "-Wno-unused-variable ")
set_source_files_properties("${CMAKE_SOURCE_DIR}/third-party/ViGEmClient/src/ViGEmClient.cpp"
        PROPERTIES COMPILE_FLAGS ${VIGEM_COMPILE_FLAGS})

# sunshine icon
if(NOT DEFINED SUNSHINE_ICON_PATH)
@@ -1,3 +1,10 @@
option(BUILD_TESTS "Build tests" ON)
option(TESTS_ENABLE_PYTHON_TESTS "Enable Python tests" ON)

# DirectX11 is not available in GitHub runners, so even software encoding fails
set(TESTS_SOFTWARE_ENCODER_UNAVAILABLE "fail"
        CACHE STRING "How to handle unavailable software encoders in tests. 'fail/skip'")

option(BUILD_WERROR "Enable -Werror flag." OFF)

# if this option is set, the build will exit after configuring special package configuration files
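Editor's note: these new cache variables are what the CI jobs flip per platform (the Windows job, for instance, passes TESTS_SOFTWARE_ENCODER_UNAVAILABLE='skip'). A hedged example of overriding them at configure time, run from a fresh build directory:

    # Sketch: build with tests enabled, but skip (rather than fail) the tests
    # that need a working software encoder.
    cmake -DBUILD_TESTS=ON \
          -DTESTS_ENABLE_PYTHON_TESTS=ON \
          -DTESTS_SOFTWARE_ENCODER_UNAVAILABLE=skip \
          ..
    make -j "$(nproc)"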
|
@ -35,6 +35,7 @@ elseif (UNIX)
|
||||
# configure the flatpak manifest
|
||||
if(${SUNSHINE_CONFIGURE_FLATPAK_MAN})
|
||||
configure_file(packaging/linux/flatpak/dev.lizardbyte.sunshine.yml dev.lizardbyte.sunshine.yml @ONLY)
|
||||
file(COPY packaging/linux/flatpak/deps/ DESTINATION ${CMAKE_BINARY_DIR})
|
||||
endif()
|
||||
endif()
|
||||
|
||||
|
@ -53,3 +53,45 @@ add_custom_target(web-ui ALL
|
||||
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
|
||||
COMMENT "Installing NPM Dependencies and Building the Web UI"
|
||||
COMMAND bash -c \"npm install && SUNSHINE_BUILD_HOMEBREW=${NPM_BUILD_HOMEBREW} SUNSHINE_SOURCE_ASSETS_DIR=${NPM_SOURCE_ASSETS_DIR} SUNSHINE_ASSETS_DIR=${NPM_ASSETS_DIR} npm run build\") # cmake-lint: disable=C0301
|
||||
|
||||
# tests
|
||||
if(BUILD_TESTS)
|
||||
add_subdirectory(tests)
|
||||
endif()
|
||||
|
||||
# custom compile flags, must be after adding tests
|
||||
|
||||
# src/upnp
|
||||
set_source_files_properties("${CMAKE_SOURCE_DIR}/src/upnp.cpp"
|
||||
DIRECTORY "${CMAKE_SOURCE_DIR}" "${CMAKE_SOURCE_DIR}/tests"
|
||||
PROPERTIES COMPILE_FLAGS -Wno-pedantic)
|
||||
|
||||
# third-party/nanors
|
||||
set_source_files_properties("${CMAKE_SOURCE_DIR}/third-party/nanors/rs.c"
|
||||
DIRECTORY "${CMAKE_SOURCE_DIR}" "${CMAKE_SOURCE_DIR}/tests"
|
||||
PROPERTIES COMPILE_FLAGS "-include deps/obl/autoshim.h -ftree-vectorize")
|
||||
|
||||
# third-party/ViGEmClient
|
||||
set(VIGEM_COMPILE_FLAGS "")
|
||||
string(APPEND VIGEM_COMPILE_FLAGS "-Wno-unknown-pragmas ")
|
||||
string(APPEND VIGEM_COMPILE_FLAGS "-Wno-misleading-indentation ")
|
||||
string(APPEND VIGEM_COMPILE_FLAGS "-Wno-class-memaccess ")
|
||||
string(APPEND VIGEM_COMPILE_FLAGS "-Wno-unused-function ")
|
||||
string(APPEND VIGEM_COMPILE_FLAGS "-Wno-unused-variable ")
|
||||
set_source_files_properties("${CMAKE_SOURCE_DIR}/third-party/ViGEmClient/src/ViGEmClient.cpp"
|
||||
DIRECTORY "${CMAKE_SOURCE_DIR}" "${CMAKE_SOURCE_DIR}/tests"
|
||||
PROPERTIES
|
||||
COMPILE_DEFINITIONS "UNICODE=1;ERROR_INVALID_DEVICE_OBJECT_PARAMETER=650"
|
||||
COMPILE_FLAGS ${VIGEM_COMPILE_FLAGS})
|
||||
|
||||
# src/nvhttp
|
||||
string(TOUPPER "x${CMAKE_BUILD_TYPE}" BUILD_TYPE)
|
||||
if("${BUILD_TYPE}" STREQUAL "XDEBUG")
|
||||
if(WIN32)
|
||||
set_source_files_properties("${CMAKE_SOURCE_DIR}/src/nvhttp.cpp"
|
||||
DIRECTORY "${CMAKE_SOURCE_DIR}" "${CMAKE_SOURCE_DIR}/tests"
|
||||
PROPERTIES COMPILE_FLAGS -O2)
|
||||
endif()
|
||||
else()
|
||||
add_definitions(-DNDEBUG)
|
||||
endif()
|
||||
|
codecov.yml (new file, 15 lines)
@@ -0,0 +1,15 @@
---
codecov:
  branch: master

coverage:
  status:
    project:
      default:
        target: auto
        threshold: 10%

comment:
  layout: "diff, flags, files"
  behavior: default
  require_changes: false # if true: only post the comment if coverage changes
@@ -43,7 +43,8 @@ pacman -Syu --disable-download-timeout --needed --noconfirm \
  cmake \
  cuda \
  git \
  namcap
  namcap \
  xorg-server-xvfb
_DEPS

# Setup builder user
@@ -84,6 +85,8 @@ RUN mv /build/sunshine/build/sunshine.install .
RUN <<_PKGBUILD
#!/bin/bash
set -e
export DISPLAY=:1
Xvfb ${DISPLAY} -screen 0 1024x768x24 &
namcap -i PKGBUILD
makepkg -si --noconfirm
rm -f /build/sunshine/pkg/sunshine-debug*.pkg.tar.zst
@ -14,6 +14,8 @@ FROM toolchain-base as toolchain
|
||||
ARG TARGETPLATFORM
|
||||
RUN echo "target_platform: ${TARGETPLATFORM}"
|
||||
|
||||
ENV DISPLAY=:0
|
||||
|
||||
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
|
||||
# install dependencies
|
||||
RUN <<_DEPS
|
||||
@ -24,10 +26,12 @@ apt-get install -y --no-install-recommends \
|
||||
build-essential \
|
||||
cmake=3.22.* \
|
||||
ca-certificates \
|
||||
doxygen \
|
||||
gcc=4:11.2.* \
|
||||
g++=4:11.2.* \
|
||||
gdb \
|
||||
git \
|
||||
graphviz \
|
||||
libayatana-appindicator3-dev \
|
||||
libavdevice-dev \
|
||||
libboost-filesystem-dev=1.74.* \
|
||||
@ -54,8 +58,12 @@ apt-get install -y --no-install-recommends \
|
||||
libxfixes-dev \
|
||||
libxrandr-dev \
|
||||
libxtst-dev \
|
||||
python3.10 \
|
||||
python3.10-venv \
|
||||
udev \
|
||||
wget
|
||||
wget \
|
||||
x11-xserver-utils \
|
||||
xvfb
|
||||
if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then
|
||||
apt-get install -y --no-install-recommends \
|
||||
libmfx-dev
|
||||
@ -98,3 +106,28 @@ chmod a+x ./cuda.run
|
||||
./cuda.run --silent --toolkit --toolkitpath=/usr/local --no-opengl-libs --no-man-page --no-drm
|
||||
rm ./cuda.run
|
||||
_INSTALL_CUDA
|
||||
|
||||
WORKDIR /
|
||||
# Write a shell script that starts Xvfb and then runs a shell
|
||||
RUN <<_ENTRYPOINT
|
||||
#!/bin/bash
|
||||
set -e
|
||||
cat <<EOF > /entrypoint.sh
|
||||
#!/bin/bash
|
||||
Xvfb ${DISPLAY} -screen 0 1024x768x24 &
|
||||
if [ "\$#" -eq 0 ]; then
|
||||
exec "/bin/bash"
|
||||
else
|
||||
exec "\$@"
|
||||
fi
|
||||
EOF
|
||||
_ENTRYPOINT
|
||||
|
||||
# Make the script executable
|
||||
RUN chmod +x /entrypoint.sh
|
||||
|
||||
# Note about CLion
|
||||
RUN echo "ATTENTION: CLion will override the entrypoint, you can disable this in the toolchain settings"
|
||||
|
||||
# Use the shell script as the entrypoint
|
||||
ENTRYPOINT ["/entrypoint.sh"]
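Editor's note: the entrypoint written above brings up Xvfb on ${DISPLAY} before handing control to whatever command the container is asked to run, so a test binary or IDE always finds an X server. A hedged usage sketch (the image tag is illustrative, not one published by the project):

    # Build the toolchain image and verify that the entrypoint started an X server
    # before the requested command ran.
    docker build -t sunshine-toolchain:local .
    docker run --rm sunshine-toolchain:local bash -c 'sleep 1; echo "DISPLAY=${DISPLAY}"; xset q >/dev/null && echo "Xvfb is up"'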
|
||||
|
@ -32,7 +32,9 @@ apt-get update -y
|
||||
apt-get install -y --no-install-recommends \
|
||||
build-essential \
|
||||
cmake=3.25.* \
|
||||
doxygen \
|
||||
git \
|
||||
graphviz \
|
||||
libavdevice-dev \
|
||||
libayatana-appindicator3-dev \
|
||||
libboost-filesystem-dev=1.74.* \
|
||||
@ -61,8 +63,12 @@ apt-get install -y --no-install-recommends \
|
||||
libxtst-dev \
|
||||
nodejs \
|
||||
npm \
|
||||
python3.11 \
|
||||
python3.11-venv \
|
||||
udev \
|
||||
wget
|
||||
wget \
|
||||
x11-xserver-utils \
|
||||
xvfb
|
||||
if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then
|
||||
apt-get install -y --no-install-recommends \
|
||||
libmfx-dev
|
||||
@ -120,6 +126,17 @@ make -j "$(nproc)"
|
||||
cpack -G DEB
|
||||
_MAKE
|
||||
|
||||
# run tests
|
||||
WORKDIR /build/sunshine/build/tests
|
||||
# hadolint ignore=SC1091
|
||||
RUN <<_TEST
|
||||
#!/bin/bash
|
||||
set -e
|
||||
export DISPLAY=:1
|
||||
Xvfb ${DISPLAY} -screen 0 1024x768x24 &
|
||||
./test_sunshine --gtest_color=yes
|
||||
_TEST
|
||||
|
||||
FROM scratch AS artifacts
|
||||
ARG BASE
|
||||
ARG TAG
|
||||
|
@ -33,7 +33,9 @@ apt-get install -y --no-install-recommends \
|
||||
build-essential \
|
||||
ca-certificates \
|
||||
cmake=3.18.* \
|
||||
doxygen \
|
||||
git \
|
||||
graphviz \
|
||||
libavdevice-dev \
|
||||
libayatana-appindicator3-dev \
|
||||
libboost-filesystem-dev=1.74.* \
|
||||
@ -60,8 +62,12 @@ apt-get install -y --no-install-recommends \
|
||||
libxfixes-dev \
|
||||
libxrandr-dev \
|
||||
libxtst-dev \
|
||||
python3.9 \
|
||||
python3.9-venv \
|
||||
udev \
|
||||
wget
|
||||
wget \
|
||||
x11-xserver-utils \
|
||||
xvfb
|
||||
if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then
|
||||
apt-get install -y --no-install-recommends \
|
||||
libmfx-dev
|
||||
@ -134,6 +140,17 @@ make -j "$(nproc)"
|
||||
cpack -G DEB
|
||||
_MAKE
|
||||
|
||||
# run tests
|
||||
WORKDIR /build/sunshine/build/tests
|
||||
# hadolint ignore=SC1091
|
||||
RUN <<_TEST
|
||||
#!/bin/bash
|
||||
set -e
|
||||
export DISPLAY=:1
|
||||
Xvfb ${DISPLAY} -screen 0 1024x768x24 &
|
||||
./test_sunshine --gtest_color=yes
|
||||
_TEST
|
||||
|
||||
FROM scratch AS artifacts
|
||||
ARG BASE
|
||||
ARG TAG
|
||||
|
@ -32,9 +32,11 @@ dnf -y group install "Development Tools"
|
||||
dnf -y install \
|
||||
boost-devel-1.78.0* \
|
||||
cmake-3.27.* \
|
||||
doxygen \
|
||||
gcc-13.2.* \
|
||||
gcc-c++-13.2.* \
|
||||
git \
|
||||
graphviz \
|
||||
libappindicator-gtk3-devel \
|
||||
libcap-devel \
|
||||
libcurl-devel \
|
||||
@ -58,9 +60,11 @@ dnf -y install \
|
||||
openssl-devel \
|
||||
opus-devel \
|
||||
pulseaudio-libs-devel \
|
||||
python3.10 \
|
||||
rpm-build \
|
||||
wget \
|
||||
which
|
||||
which \
|
||||
xorg-x11-server-Xvfb
|
||||
if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then
|
||||
dnf -y install intel-mediasdk-devel
|
||||
fi
|
||||
@ -117,6 +121,17 @@ make -j "$(nproc)"
|
||||
cpack -G RPM
|
||||
_MAKE
|
||||
|
||||
# run tests
|
||||
WORKDIR /build/sunshine/build/tests
|
||||
# hadolint ignore=SC1091
|
||||
RUN <<_TEST
|
||||
#!/bin/bash
|
||||
set -e
|
||||
export DISPLAY=:1
|
||||
Xvfb ${DISPLAY} -screen 0 1024x768x24 &
|
||||
./test_sunshine --gtest_color=yes
|
||||
_TEST
|
||||
|
||||
FROM scratch AS artifacts
|
||||
ARG BASE
|
||||
ARG TAG
|
||||
|
@ -32,9 +32,11 @@ dnf -y group install "Development Tools"
|
||||
dnf -y install \
|
||||
boost-devel-1.81.0* \
|
||||
cmake-3.27.* \
|
||||
doxygen \
|
||||
gcc-13.2.* \
|
||||
gcc-c++-13.2.* \
|
||||
git \
|
||||
graphviz \
|
||||
libappindicator-gtk3-devel \
|
||||
libcap-devel \
|
||||
libcurl-devel \
|
||||
@ -58,9 +60,11 @@ dnf -y install \
|
||||
openssl-devel \
|
||||
opus-devel \
|
||||
pulseaudio-libs-devel \
|
||||
python3.11 \
|
||||
rpm-build \
|
||||
wget \
|
||||
which
|
||||
which \
|
||||
xorg-x11-server-Xvfb
|
||||
if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then
|
||||
dnf -y install intel-mediasdk-devel
|
||||
fi
|
||||
@ -124,6 +128,17 @@ make -j "$(nproc)"
|
||||
cpack -G RPM
|
||||
_MAKE
|
||||
|
||||
# run tests
|
||||
WORKDIR /build/sunshine/build/tests
|
||||
# hadolint ignore=SC1091
|
||||
RUN <<_TEST
|
||||
#!/bin/bash
|
||||
set -e
|
||||
export DISPLAY=:1
|
||||
Xvfb ${DISPLAY} -screen 0 1024x768x24 &
|
||||
./test_sunshine --gtest_color=yes
|
||||
_TEST
|
||||
|
||||
FROM scratch AS artifacts
|
||||
ARG BASE
|
||||
ARG TAG
|
||||
|
@ -32,9 +32,11 @@ apt-get update -y
|
||||
apt-get install -y --no-install-recommends \
|
||||
build-essential \
|
||||
ca-certificates \
|
||||
doxygen \
|
||||
gcc-10=10.5.* \
|
||||
g++-10=10.5.* \
|
||||
git \
|
||||
graphviz \
|
||||
libayatana-appindicator3-dev \
|
||||
libavdevice-dev \
|
||||
libboost-filesystem-dev=1.71.* \
|
||||
@ -61,8 +63,12 @@ apt-get install -y --no-install-recommends \
|
||||
libxfixes-dev \
|
||||
libxrandr-dev \
|
||||
libxtst-dev \
|
||||
python3.9 \
|
||||
python3.9-venv \
|
||||
udev \
|
||||
wget
|
||||
wget \
|
||||
x11-xserver-utils \
|
||||
xvfb
|
||||
if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then
|
||||
apt-get install -y --no-install-recommends \
|
||||
libmfx-dev
|
||||
@ -170,6 +176,17 @@ make -j "$(nproc)"
|
||||
cpack -G DEB
|
||||
_MAKE
|
||||
|
||||
# run tests
|
||||
WORKDIR /build/sunshine/build/tests
|
||||
# hadolint ignore=SC1091
|
||||
RUN <<_TEST
|
||||
#!/bin/bash
|
||||
set -e
|
||||
export DISPLAY=:1
|
||||
Xvfb ${DISPLAY} -screen 0 1024x768x24 &
|
||||
./test_sunshine --gtest_color=yes
|
||||
_TEST
|
||||
|
||||
FROM scratch AS artifacts
|
||||
ARG BASE
|
||||
ARG TAG
|
||||
|
@ -33,7 +33,9 @@ apt-get install -y --no-install-recommends \
|
||||
build-essential \
|
||||
cmake=3.22.* \
|
||||
ca-certificates \
|
||||
doxygen \
|
||||
git \
|
||||
graphviz \
|
||||
libayatana-appindicator3-dev \
|
||||
libavdevice-dev \
|
||||
libboost-filesystem-dev=1.74.* \
|
||||
@ -60,8 +62,12 @@ apt-get install -y --no-install-recommends \
|
||||
libxfixes-dev \
|
||||
libxrandr-dev \
|
||||
libxtst-dev \
|
||||
python3.10 \
|
||||
python3.10-venv \
|
||||
udev \
|
||||
wget
|
||||
wget \
|
||||
x11-xserver-utils \
|
||||
xvfb
|
||||
if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then
|
||||
apt-get install -y --no-install-recommends \
|
||||
libmfx-dev
|
||||
@ -135,6 +141,17 @@ make -j "$(nproc)"
|
||||
cpack -G DEB
|
||||
_MAKE
|
||||
|
||||
# run tests
|
||||
WORKDIR /build/sunshine/build/tests
|
||||
# hadolint ignore=SC1091
|
||||
RUN <<_TEST
|
||||
#!/bin/bash
|
||||
set -e
|
||||
export DISPLAY=:1
|
||||
Xvfb ${DISPLAY} -screen 0 1024x768x24 &
|
||||
./test_sunshine --gtest_color=yes
|
||||
_TEST
|
||||
|
||||
FROM scratch AS artifacts
|
||||
ARG BASE
|
||||
ARG TAG
|
||||
|
docs/Doxyfile (247 changed lines)
@ -1,4 +1,4 @@
|
||||
# Doxyfile 1.9.6
|
||||
# Doxyfile 1.10.0
|
||||
|
||||
# This file describes the settings to be used by the documentation system
|
||||
# doxygen (www.doxygen.org) for a project.
|
||||
@ -42,7 +42,7 @@ DOXYFILE_ENCODING = UTF-8
|
||||
# title of most generated pages and in a few other places.
|
||||
# The default value is: My Project.
|
||||
|
||||
PROJECT_NAME = "Sunshine"
|
||||
PROJECT_NAME = Sunshine
|
||||
|
||||
# The PROJECT_NUMBER tag can be used to enter a project or revision number. This
|
||||
# could be handy for archiving the generated documentation or if some version
|
||||
@ -63,6 +63,12 @@ PROJECT_BRIEF = "Sunshine is a Gamestream host for Moonlight."
|
||||
|
||||
PROJECT_LOGO = ../sunshine.png
|
||||
|
||||
# With the PROJECT_ICON tag one can specify an icon that is included in the tabs
|
||||
# when the HTML document is shown. Doxygen will copy the logo to the output
|
||||
# directory.
|
||||
|
||||
PROJECT_ICON =
|
||||
|
||||
# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path
|
||||
# into which the generated documentation will be written. If a relative path is
|
||||
# entered, it will be relative to the location where doxygen was started. If
|
||||
@ -365,6 +371,17 @@ MARKDOWN_SUPPORT = YES
|
||||
|
||||
TOC_INCLUDE_HEADINGS = 5
|
||||
|
||||
# The MARKDOWN_ID_STYLE tag can be used to specify the algorithm used to
|
||||
# generate identifiers for the Markdown headings. Note: Every identifier is
|
||||
# unique.
|
||||
# Possible values are: DOXYGEN use a fixed 'autotoc_md' string followed by a
|
||||
# sequence number starting at 0 and GITHUB use the lower case version of title
|
||||
# with any whitespace replaced by '-' and punctuation characters removed.
|
||||
# The default value is: DOXYGEN.
|
||||
# This tag requires that the tag MARKDOWN_SUPPORT is set to YES.
|
||||
|
||||
MARKDOWN_ID_STYLE = DOXYGEN
|
||||
|
||||
# When enabled doxygen tries to link words that correspond to documented
|
||||
# classes, or namespaces to their corresponding documentation. Such a link can
|
||||
# be prevented in individual cases by putting a % sign in front of the word or
|
||||
@ -489,6 +506,14 @@ LOOKUP_CACHE_SIZE = 0
|
||||
|
||||
NUM_PROC_THREADS = 0
|
||||
|
||||
# If the TIMESTAMP tag is set different from NO then each generated page will
|
||||
# contain the date or date and time when the page was generated. Setting this to
|
||||
# NO can help when comparing the output of multiple runs.
|
||||
# Possible values are: YES, NO, DATETIME and DATE.
|
||||
# The default value is: NO.
|
||||
|
||||
TIMESTAMP = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Build related configuration options
|
||||
#---------------------------------------------------------------------------
|
||||
@ -874,7 +899,14 @@ WARN_IF_UNDOC_ENUM_VAL = NO
|
||||
# a warning is encountered. If the WARN_AS_ERROR tag is set to FAIL_ON_WARNINGS
|
||||
# then doxygen will continue running as if WARN_AS_ERROR tag is set to NO, but
|
||||
# at the end of the doxygen process doxygen will return with a non-zero status.
|
||||
# Possible values are: NO, YES and FAIL_ON_WARNINGS.
|
||||
# If the WARN_AS_ERROR tag is set to FAIL_ON_WARNINGS_PRINT then doxygen behaves
|
||||
# like FAIL_ON_WARNINGS but in case no WARN_LOGFILE is defined doxygen will not
|
||||
# write the warning messages in between other messages but write them at the end
|
||||
# of a run, in case a WARN_LOGFILE is defined the warning messages will be
|
||||
# besides being in the defined file also be shown at the end of a run, unless
|
||||
# the WARN_LOGFILE is defined as - i.e. standard output (stdout) in that case
|
||||
# the behavior will remain as with the setting FAIL_ON_WARNINGS.
|
||||
# Possible values are: NO, YES, FAIL_ON_WARNINGS and FAIL_ON_WARNINGS_PRINT.
|
||||
# The default value is: NO.
|
||||
|
||||
WARN_AS_ERROR = NO
|
||||
@ -953,12 +985,12 @@ INPUT_FILE_ENCODING =
|
||||
# Note the list of default checked file patterns might differ from the list of
|
||||
# default file extension mappings.
|
||||
#
|
||||
# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cpp,
|
||||
# *.c++, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h,
|
||||
# *.hh, *.hxx, *.hpp, *.h++, *.l, *.cs, *.d, *.php, *.php4, *.php5, *.phtml,
|
||||
# *.inc, *.m, *.markdown, *.md, *.mm, *.dox (to be provided as doxygen C
|
||||
# comment), *.py, *.pyw, *.f90, *.f95, *.f03, *.f08, *.f18, *.f, *.for, *.vhd,
|
||||
# *.vhdl, *.ucf, *.qsf and *.ice.
|
||||
# If left blank the following patterns are tested:*.c, *.cc, *.cxx, *.cxxm,
|
||||
# *.cpp, *.cppm, *.ccm, *.c++, *.c++m, *.java, *.ii, *.ixx, *.ipp, *.i++, *.inl,
|
||||
# *.idl, *.ddl, *.odl, *.h, *.hh, *.hxx, *.hpp, *.h++, *.ixx, *.l, *.cs, *.d,
|
||||
# *.php, *.php4, *.php5, *.phtml, *.inc, *.m, *.markdown, *.md, *.mm, *.dox (to
|
||||
# be provided as doxygen C comment), *.py, *.pyw, *.f90, *.f95, *.f03, *.f08,
|
||||
# *.f18, *.f, *.for, *.vhd, *.vhdl, *.ucf, *.qsf and *.ice.
|
||||
|
||||
FILE_PATTERNS = *.c \
|
||||
*.cc \
|
||||
@ -1043,9 +1075,6 @@ EXCLUDE_PATTERNS =
|
||||
# output. The symbol name can be a fully qualified name, a word, or if the
|
||||
# wildcard * is used, a substring. Examples: ANamespace, AClass,
|
||||
# ANamespace::AClass, ANamespace::*Test
|
||||
#
|
||||
# Note that the wildcards are matched against the file with absolute path, so to
|
||||
# exclude all test directories use the pattern */test/*
|
||||
|
||||
EXCLUDE_SYMBOLS =
|
||||
|
||||
@ -1159,7 +1188,8 @@ FORTRAN_COMMENT_AFTER = 72
|
||||
SOURCE_BROWSER = NO
|
||||
|
||||
# Setting the INLINE_SOURCES tag to YES will include the body of functions,
|
||||
# classes and enums directly into the documentation.
|
||||
# multi-line macros, enums or list initialized variables directly into the
|
||||
# documentation.
|
||||
# The default value is: NO.
|
||||
|
||||
INLINE_SOURCES = NO
|
||||
@ -1428,15 +1458,6 @@ HTML_COLORSTYLE_SAT = 100
|
||||
|
||||
HTML_COLORSTYLE_GAMMA = 80
|
||||
|
||||
# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
|
||||
# page will contain the date and time when the page was generated. Setting this
|
||||
# to YES can help to show when doxygen was last run and thus if the
|
||||
# documentation is up to date.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
HTML_TIMESTAMP = NO
|
||||
|
||||
# If the HTML_DYNAMIC_MENUS tag is set to YES then the generated HTML
|
||||
# documentation will contain a main index with vertical navigation menus that
|
||||
# are dynamically created via JavaScript. If disabled, the navigation index will
|
||||
@ -1456,6 +1477,33 @@ HTML_DYNAMIC_MENUS = YES
|
||||
|
||||
HTML_DYNAMIC_SECTIONS = NO
|
||||
|
||||
# If the HTML_CODE_FOLDING tag is set to YES then classes and functions can be
|
||||
# dynamically folded and expanded in the generated HTML source code.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
HTML_CODE_FOLDING = YES
|
||||
|
||||
# If the HTML_COPY_CLIPBOARD tag is set to YES then doxygen will show an icon in
|
||||
# the top right corner of code and text fragments that allows the user to copy
|
||||
# its content to the clipboard. Note this only works if supported by the browser
|
||||
# and the web page is served via a secure context (see:
|
||||
# https://www.w3.org/TR/secure-contexts/), i.e. using the https: or file:
|
||||
# protocol.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
HTML_COPY_CLIPBOARD = YES
|
||||
|
||||
# Doxygen stores a couple of settings persistently in the browser (via e.g.
|
||||
# cookies). By default these settings apply to all HTML pages generated by
|
||||
# doxygen across all projects. The HTML_PROJECT_COOKIE tag can be used to store
|
||||
# the settings under a project specific key, such that the user preferences will
|
||||
# be stored separately.
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
HTML_PROJECT_COOKIE =
|
||||
|
||||
# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries
|
||||
# shown in the various tree structured indices initially; the user can expand
|
||||
# and collapse entries dynamically later on. Doxygen will expand the tree to
|
||||
@ -1586,6 +1634,16 @@ BINARY_TOC = NO
|
||||
|
||||
TOC_EXPAND = NO
|
||||
|
||||
# The SITEMAP_URL tag is used to specify the full URL of the place where the
|
||||
# generated documentation will be placed on the server by the user during the
|
||||
# deployment of the documentation. The generated sitemap is called sitemap.xml
|
||||
# and placed on the directory specified by HTML_OUTPUT. In case no SITEMAP_URL
|
||||
# is specified no sitemap is generated. For information about the sitemap
|
||||
# protocol see https://www.sitemaps.org
|
||||
# This tag requires that the tag GENERATE_HTML is set to YES.
|
||||
|
||||
SITEMAP_URL =
|
||||
|
||||
# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
|
||||
# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that
|
||||
# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help
|
||||
@ -2074,9 +2132,16 @@ PDF_HYPERLINKS = YES
|
||||
|
||||
USE_PDFLATEX = YES
|
||||
|
||||
# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \batchmode
|
||||
# command to the generated LaTeX files. This will instruct LaTeX to keep running
|
||||
# if errors occur, instead of asking the user for help.
|
||||
# The LATEX_BATCHMODE tag signals the behavior of LaTeX in case of an error.
|
||||
# Possible values are: NO same as ERROR_STOP, YES same as BATCH, BATCH In batch
|
||||
# mode nothing is printed on the terminal, errors are scrolled as if <return> is
|
||||
# hit at every error; missing files that TeX tries to input or request from
|
||||
# keyboard input (\read on a not open input stream) cause the job to abort,
|
||||
# NON_STOP In nonstop mode the diagnostic message will appear on the terminal,
|
||||
# but there is no possibility of user interaction just like in batch mode,
|
||||
# SCROLL In scroll mode, TeX will stop only for missing files to input or if
|
||||
# keyboard input is necessary and ERROR_STOP In errorstop mode, TeX will stop at
|
||||
# each error, asking for user intervention.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_LATEX is set to YES.
|
||||
|
||||
@ -2097,14 +2162,6 @@ LATEX_HIDE_INDICES = NO
|
||||
|
||||
LATEX_BIB_STYLE = plain
|
||||
|
||||
# If the LATEX_TIMESTAMP tag is set to YES then the footer of each generated
|
||||
# page will contain the date and time when the page was generated. Setting this
|
||||
# to NO can help when comparing the output of multiple runs.
|
||||
# The default value is: NO.
|
||||
# This tag requires that the tag GENERATE_LATEX is set to YES.
|
||||
|
||||
LATEX_TIMESTAMP = NO
|
||||
|
||||
# The LATEX_EMOJI_DIRECTORY tag is used to specify the (relative or absolute)
|
||||
# path from which the emoji images will be read. If a relative path is entered,
|
||||
# it will be relative to the LATEX_OUTPUT directory. If left blank the
|
||||
@ -2270,13 +2327,39 @@ DOCBOOK_OUTPUT = doxydocbook
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_AUTOGEN_DEF tag is set to YES, doxygen will generate an
|
||||
# AutoGen Definitions (see http://autogen.sourceforge.net/) file that captures
|
||||
# AutoGen Definitions (see https://autogen.sourceforge.net/) file that captures
|
||||
# the structure of the code including all documentation. Note that this feature
|
||||
# is still experimental and incomplete at the moment.
|
||||
# The default value is: NO.
|
||||
|
||||
GENERATE_AUTOGEN_DEF = NO
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to Sqlite3 output
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# If the GENERATE_SQLITE3 tag is set to YES doxygen will generate a Sqlite3
|
||||
# database with symbols found by doxygen stored in tables.
|
||||
# The default value is: NO.
|
||||
|
||||
GENERATE_SQLITE3 = NO
|
||||
|
||||
# The SQLITE3_OUTPUT tag is used to specify where the Sqlite3 database will be
|
||||
# put. If a relative path is entered the value of OUTPUT_DIRECTORY will be put
|
||||
# in front of it.
|
||||
# The default directory is: sqlite3.
|
||||
# This tag requires that the tag GENERATE_SQLITE3 is set to YES.
|
||||
|
||||
SQLITE3_OUTPUT = sqlite3
|
||||
|
||||
# The SQLITE3_RECREATE_DB tag is set to YES, the existing doxygen_sqlite3.db
|
||||
# database file will be recreated with each doxygen run. If set to NO, doxygen
|
||||
# will warn if a database file is already found and not modify it.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag GENERATE_SQLITE3 is set to YES.
|
||||
|
||||
SQLITE3_RECREATE_DB = YES
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the Perl module output
|
||||
#---------------------------------------------------------------------------
|
||||
@ -2419,15 +2502,15 @@ TAGFILES =
|
||||
|
||||
GENERATE_TAGFILE =
|
||||
|
||||
# If the ALLEXTERNALS tag is set to YES, all external class will be listed in
|
||||
# the class index. If set to NO, only the inherited external classes will be
|
||||
# listed.
|
||||
# If the ALLEXTERNALS tag is set to YES, all external classes and namespaces
|
||||
# will be listed in the class and namespace index. If set to NO, only the
|
||||
# inherited external classes will be listed.
|
||||
# The default value is: NO.
|
||||
|
||||
ALLEXTERNALS = NO
|
||||
|
||||
# If the EXTERNAL_GROUPS tag is set to YES, all external groups will be listed
|
||||
# in the modules index. If set to NO, only the current project's groups will be
|
||||
# in the topic index. If set to NO, only the current project's groups will be
|
||||
# listed.
|
||||
# The default value is: YES.
|
||||
|
||||
@ -2441,16 +2524,9 @@ EXTERNAL_GROUPS = YES
|
||||
EXTERNAL_PAGES = YES
|
||||
|
||||
#---------------------------------------------------------------------------
|
||||
# Configuration options related to the dot tool
|
||||
# Configuration options related to diagram generator tools
|
||||
#---------------------------------------------------------------------------
|
||||
|
||||
# You can include diagrams made with dia in doxygen documentation. Doxygen will
|
||||
# then run dia to produce the diagram and insert it in the documentation. The
|
||||
# DIA_PATH tag allows you to specify the directory where the dia binary resides.
|
||||
# If left empty dia is assumed to be found in the default search path.
|
||||
|
||||
DIA_PATH =
|
||||
|
||||
# If set to YES the inheritance and collaboration graphs will hide inheritance
|
||||
# and usage relations if the target is undocumented or is not a class.
|
||||
# The default value is: YES.
|
||||
@ -2459,7 +2535,7 @@ HIDE_UNDOC_RELATIONS = YES
|
||||
|
||||
# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
|
||||
# available from the path. This tool is part of Graphviz (see:
|
||||
# http://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent
|
||||
# https://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent
|
||||
# Bell Labs. The other options in this section have no effect if this option is
|
||||
# set to NO
|
||||
# The default value is: NO.
|
||||
@ -2512,13 +2588,19 @@ DOT_NODE_ATTR = "shape=box,height=0.2,width=0.4"
|
||||
|
||||
DOT_FONTPATH =
|
||||
|
||||
# If the CLASS_GRAPH tag is set to YES (or GRAPH) then doxygen will generate a
|
||||
# graph for each documented class showing the direct and indirect inheritance
|
||||
# relations. In case HAVE_DOT is set as well dot will be used to draw the graph,
|
||||
# otherwise the built-in generator will be used. If the CLASS_GRAPH tag is set
|
||||
# to TEXT the direct and indirect inheritance relations will be shown as texts /
|
||||
# links.
|
||||
# Possible values are: NO, YES, TEXT and GRAPH.
|
||||
# If the CLASS_GRAPH tag is set to YES or GRAPH or BUILTIN then doxygen will
|
||||
# generate a graph for each documented class showing the direct and indirect
|
||||
# inheritance relations. In case the CLASS_GRAPH tag is set to YES or GRAPH and
|
||||
# HAVE_DOT is enabled as well, then dot will be used to draw the graph. In case
|
||||
# the CLASS_GRAPH tag is set to YES and HAVE_DOT is disabled or if the
|
||||
# CLASS_GRAPH tag is set to BUILTIN, then the built-in generator will be used.
|
||||
# If the CLASS_GRAPH tag is set to TEXT the direct and indirect inheritance
|
||||
# relations will be shown as texts / links. Explicit enabling an inheritance
|
||||
# graph or choosing a different representation for an inheritance graph of a
|
||||
# specific class, can be accomplished by means of the command \inheritancegraph.
|
||||
# Disabling an inheritance graph can be accomplished by means of the command
|
||||
# \hideinheritancegraph.
|
||||
# Possible values are: NO, YES, TEXT, GRAPH and BUILTIN.
|
||||
# The default value is: YES.
|
||||
|
||||
CLASS_GRAPH = YES
|
||||
@ -2526,15 +2608,21 @@ CLASS_GRAPH = YES
|
||||
# If the COLLABORATION_GRAPH tag is set to YES then doxygen will generate a
|
||||
# graph for each documented class showing the direct and indirect implementation
|
||||
# dependencies (inheritance, containment, and class references variables) of the
|
||||
# class with other documented classes.
|
||||
# class with other documented classes. Explicit enabling a collaboration graph,
|
||||
# when COLLABORATION_GRAPH is set to NO, can be accomplished by means of the
|
||||
# command \collaborationgraph. Disabling a collaboration graph can be
|
||||
# accomplished by means of the command \hidecollaborationgraph.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
COLLABORATION_GRAPH = YES
|
||||
|
||||
# If the GROUP_GRAPHS tag is set to YES then doxygen will generate a graph for
|
||||
# groups, showing the direct groups dependencies. See also the chapter Grouping
|
||||
# in the manual.
|
||||
# groups, showing the direct groups dependencies. Explicit enabling a group
|
||||
# dependency graph, when GROUP_GRAPHS is set to NO, can be accomplished by means
|
||||
# of the command \groupgraph. Disabling a directory graph can be accomplished by
|
||||
# means of the command \hidegroupgraph. See also the chapter Grouping in the
|
||||
# manual.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
@ -2576,8 +2664,8 @@ DOT_UML_DETAILS = NO
|
||||
|
||||
# The DOT_WRAP_THRESHOLD tag can be used to set the maximum number of characters
|
||||
# to display on a single line. If the actual line length exceeds this threshold
|
||||
# significantly it will wrapped across multiple lines. Some heuristics are apply
|
||||
# to avoid ugly line breaks.
|
||||
# significantly it will be wrapped across multiple lines. Some heuristics are
|
||||
# applied to avoid ugly line breaks.
|
||||
# Minimum value: 0, maximum value: 1000, default value: 17.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
@ -2594,7 +2682,9 @@ TEMPLATE_RELATIONS = NO
|
||||
# If the INCLUDE_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are set to
|
||||
# YES then doxygen will generate a graph for each documented file showing the
|
||||
# direct and indirect include dependencies of the file with other documented
|
||||
# files.
|
||||
# files. Explicit enabling an include graph, when INCLUDE_GRAPH is is set to NO,
|
||||
# can be accomplished by means of the command \includegraph. Disabling an
|
||||
# include graph can be accomplished by means of the command \hideincludegraph.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
@ -2603,7 +2693,10 @@ INCLUDE_GRAPH = YES
|
||||
# If the INCLUDED_BY_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are
|
||||
# set to YES then doxygen will generate a graph for each documented file showing
|
||||
# the direct and indirect include dependencies of the file with other documented
|
||||
# files.
|
||||
# files. Explicit enabling an included by graph, when INCLUDED_BY_GRAPH is set
|
||||
# to NO, can be accomplished by means of the command \includedbygraph. Disabling
|
||||
# an included by graph can be accomplished by means of the command
|
||||
# \hideincludedbygraph.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
@ -2643,7 +2736,10 @@ GRAPHICAL_HIERARCHY = YES
|
||||
# If the DIRECTORY_GRAPH tag is set to YES then doxygen will show the
|
||||
# dependencies a directory has on other directories in a graphical way. The
|
||||
# dependency relations are determined by the #include relations between the
|
||||
# files in the directories.
|
||||
# files in the directories. Explicit enabling a directory graph, when
|
||||
# DIRECTORY_GRAPH is set to NO, can be accomplished by means of the command
|
||||
# \directorygraph. Disabling a directory graph can be accomplished by means of
|
||||
# the command \hidedirectorygraph.
|
||||
# The default value is: YES.
|
||||
# This tag requires that the tag HAVE_DOT is set to YES.
|
||||
|
||||
@ -2659,7 +2755,7 @@ DIR_GRAPH_MAX_DEPTH = 1
|
||||
# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
|
||||
# generated by dot. For an explanation of the image formats see the section
|
||||
# output formats in the documentation of the dot tool (Graphviz (see:
|
||||
# http://www.graphviz.org/)).
|
||||
# https://www.graphviz.org/)).
|
||||
# Note: If you choose svg you need to set HTML_FILE_EXTENSION to xhtml in order
|
||||
# to make the SVG files visible in IE 9+ (other browsers do not have this
|
||||
# requirement).
|
||||
@ -2696,11 +2792,12 @@ DOT_PATH =
|
||||
|
||||
DOTFILE_DIRS =
|
||||
|
||||
# The MSCFILE_DIRS tag can be used to specify one or more directories that
|
||||
# contain msc files that are included in the documentation (see the \mscfile
|
||||
# command).
|
||||
# You can include diagrams made with dia in doxygen documentation. Doxygen will
|
||||
# then run dia to produce the diagram and insert it in the documentation. The
|
||||
# DIA_PATH tag allows you to specify the directory where the dia binary resides.
|
||||
# If left empty dia is assumed to be found in the default search path.
|
||||
|
||||
MSCFILE_DIRS =
|
||||
DIA_PATH =
|
||||
|
||||
# The DIAFILE_DIRS tag can be used to specify one or more directories that
|
||||
# contain dia files that are included in the documentation (see the \diafile
|
||||
@ -2777,3 +2874,19 @@ GENERATE_LEGEND = YES
|
||||
# The default value is: YES.
|
||||
|
||||
DOT_CLEANUP = YES
|
||||
|
||||
# You can define message sequence charts within doxygen comments using the \msc
|
||||
# command. If the MSCGEN_TOOL tag is left empty (the default), then doxygen will
|
||||
# use a built-in version of mscgen tool to produce the charts. Alternatively,
|
||||
# the MSCGEN_TOOL tag can also specify the name an external tool. For instance,
|
||||
# specifying prog as the value, doxygen will call the tool as prog -T
|
||||
# <outfile_format> -o <outputfile> <inputfile>. The external tool should support
|
||||
# output file formats "png", "eps", "svg", and "ismap".
|
||||
|
||||
MSCGEN_TOOL =
|
||||
|
||||
# The MSCFILE_DIRS tag can be used to specify one or more directories that
|
||||
# contain msc files that are included in the documentation (see the \mscfile
|
||||
# command).
|
||||
|
||||
MSCFILE_DIRS =
|
||||
|
@@ -3,7 +3,7 @@

# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?=
SPHINXOPTS ?= -W --keep-going
SPHINXBUILD ?= sphinx-build
SOURCEDIR = source
BUILDDIR = build
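Editor's note: with -W --keep-going as the default SPHINXOPTS, any Sphinx warning now fails the docs build, but all warnings are still reported in one pass. Building locally is unchanged (illustrative invocation):

    # Build the HTML docs; warnings are treated as errors but are all collected
    # before the build exits non-zero.
    cd docs
    make html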
|
@ -9,6 +9,7 @@ if "%SPHINXBUILD%" == "" (
|
||||
)
|
||||
set SOURCEDIR=source
|
||||
set BUILDDIR=build
|
||||
set "SPHINXOPTS=-W --keep-going"
|
||||
|
||||
%SPHINXBUILD% >NUL 2>NUL
|
||||
if errorlevel 9009 (
|
||||
@ -25,11 +26,11 @@ if errorlevel 9009 (
|
||||
|
||||
if "%1" == "" goto help
|
||||
|
||||
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
|
||||
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% || exit /b %ERRORLEVEL%
|
||||
goto end
|
||||
|
||||
:help
|
||||
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
|
||||
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% || exit /b %ERRORLEVEL%
|
||||
|
||||
:end
|
||||
popd
|
||||
|
@@ -12,14 +12,14 @@ MacPorts
Install Requirements
   .. code-block:: bash

      sudo port install avahi boost180 cmake curl libopus miniupnpc npm9 pkgconfig
      sudo port install avahi boost180 cmake curl doxygen graphviz libopus miniupnpc npm9 pkgconfig python311 py311-pip

Homebrew
""""""""
Install Requirements
   .. code-block:: bash

      brew install boost cmake miniupnpc node opus pkg-config
      brew install boost cmake doxygen graphviz miniupnpc node opus pkg-config python@3.11

If there are issues with an SSL header that is not found:
   .. tab:: Intel
@@ -45,7 +45,7 @@ Build
   .. code-block:: bash

      cmake ..
      make -j ${nproc}
      make -j $(sysctl -n hw.ncpu)

      cpack -G DragNDrop # optionally, create a macOS dmg package
||||
|
@ -18,6 +18,7 @@ Install dependencies:
    base-devel \
    cmake \
    diffutils \
    doxygen \
    gcc \
    git \
    make \
@ -25,13 +26,17 @@ Install dependencies:
    mingw-w64-x86_64-boost \
    mingw-w64-x86_64-cmake \
    mingw-w64-x86_64-curl \
    mingw-w64-x86_64-graphviz \
    mingw-w64-x86_64-miniupnpc \
    mingw-w64-x86_64-nlohmann-json \
    mingw-w64-x86_64-nodejs \
    mingw-w64-x86_64-onevpl \
    mingw-w64-x86_64-openssl \
    mingw-w64-x86_64-opus \
    mingw-w64-x86_64-toolchain
    mingw-w64-x86_64-rust \
    mingw-w64-x86_64-toolchain \
    python \
    python-pip

Build
-----
@ -7,7 +7,6 @@
# standard imports
from datetime import datetime
import os
import re
import subprocess

@ -27,16 +26,8 @@ project_copyright = f'{datetime.now ().year}, {project}'
author = 'ReenigneArcher'

# The full version, including alpha/beta/rc tags
with open(os.path.join(root_dir, 'CMakeLists.txt'), 'r') as f:
    version = re.search(r"project\(Sunshine VERSION ((\d+)\.(\d+)\.(\d+))", str(f.read())).group(1)
"""
To use cmake method for obtaining version instead of regex,
1. Within CMakeLists.txt add the following line without backticks:
``configure_file(docs/source/conf.py.in "${CMAKE_CURRENT_SOURCE_DIR}/docs/source/conf.py" @ONLY)``
2. Rename this file to ``conf.py.in``
3. Uncomment the next line
"""
# version = '@PROJECT_VERSION@' # use this for cmake configure_file method
# https://docs.readthedocs.io/en/stable/reference/environment-variables.html#envvar-READTHEDOCS_VERSION
version = os.getenv('READTHEDOCS_VERSION', 'dirty')

# -- General configuration ---------------------------------------------------

@ -105,6 +96,17 @@ doxy_proc = subprocess.run('doxygen --version', shell=True, cwd=source_dir, capt
doxy_version = doxy_proc.stdout.decode('utf-8').strip()
print('doxygen version: ' + doxy_version)

# create build directories, as doxygen fails to create it in macports and docker
directories = [
    os.path.join(source_dir, 'build'),
    os.path.join(source_dir, 'build', 'doxyxml'),
]
for d in directories:
    os.makedirs(
        name=d,
        exist_ok=True,
    )

# run doxygen
doxy_proc = subprocess.run('doxygen Doxyfile', shell=True, cwd=source_dir)
if doxy_proc.returncode != 0:
@ -59,5 +59,81 @@ Format inplace with rstfmt

Unit Testing
------------
.. todo:: Sunshine does not currently have any unit tests. If you would like to help us improve please get in contact
    with us, or make a PR with suggested changes.
Sunshine uses `Google Test <https://github.com/google/googletest>`__ for unit testing. Google Test is included in the
repo as a submodule. The test sources are located in the `./tests` directory.

The tests need to be compiled into an executable, and then run. The tests are built using the normal build process, but
can be disabled by setting the `BUILD_TESTS` CMake option to `OFF`.

To run the tests, execute the following command from the build directory:

.. tab:: Linux

    .. code-block:: bash

        pushd tests
        ./test_sunshine
        popd

.. tab:: macOS

    .. code-block:: bash

        pushd tests
        ./test_sunshine
        popd

.. tab:: Windows

    .. code-block:: bash

        pushd tests
        test_sunshine.exe
        popd

To see all available options, run the tests with the `--help` option.

.. tab:: Linux

    .. code-block:: bash

        pushd tests
        ./test_sunshine --help
        popd

.. tab:: macOS

    .. code-block:: bash

        pushd tests
        ./test_sunshine --help
        popd

.. tab:: Windows

    .. code-block:: bash

        pushd tests
        test_sunshine.exe --help
        popd

Some tests rely on Python to run. CMake will search for Python and enable the docs tests if it is found, otherwise
cmake will fail. You can manually disable the tests by setting the `TESTS_ENABLE_PYTHON_TESTS` CMake option to
`OFF`.

.. tip::

    See the googletest `FAQ <https://google.github.io/googletest/faq.html>`__ for more information on how to use
    Google Test.

We use `gcovr <https://www.gcovr.com/>`__ to generate code coverage reports,
and `Codecov <https://about.codecov.io/>`__ to analyze the reports for all PRs and commits.

Codecov will fail a PR if the total coverage is reduced too much, or if not enough of the diff is covered by tests.
In some cases, the code cannot be covered when running the tests inside of GitHub runners. For example, any test that
needs access to the GPU will not be able to run. In these cases, the coverage can be omitted by adding comments to the
code. See the `gcovr documentation <https://gcovr.com/en/stable/guide/exclusion-markers.html#exclusion-markers>`__ for
more information.
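
For example, a hypothetical GPU-only helper could be wrapped in gcovr's exclusion comments (a sketch only; the
function name and body below are illustrative, not taken from the Sunshine sources):

.. code-block:: cpp

    int init_gpu_encoder();  // assumed helper, for illustration only

    int encode_on_gpu() {
      // GCOVR_EXCL_START - this path needs a physical GPU, so it cannot run on the CI runners
      return init_gpu_encoder();
      // GCOVR_EXCL_STOP
    }

Single lines can be excluded the same way with a trailing ``GCOVR_EXCL_LINE`` comment.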

Even if your changes cannot be covered in the CI, we still encourage you to write the tests for them. This will allow
maintainers to run the tests locally.
@ -7,7 +7,7 @@ pkgrel=1
pkgdesc="@PROJECT_DESCRIPTION@"
arch=('x86_64' 'aarch64')
url=@PROJECT_HOMEPAGE_URL@
license=('GPL3')
license=('GPL-3.0-only')
install=sunshine.install

depends=('avahi'
@ -31,16 +31,21 @@ depends=('avahi'
         'numactl'
         'openssl'
         'opus'
         'python'
         'udev')
checkdepends=('doxygen'
              'graphviz')
makedepends=('boost'
             'cmake'
             'gcc12'
             'git'
             'make'
             'nodejs'
             'npm')
optdepends=('cuda: Nvidia GPU encoding support'
            'libva-mesa-driver: AMD GPU encoding support'
            'intel-media-driver: Intel GPU encoding support')
            'intel-media-driver: Intel GPU encoding support'
            'xorg-server-xvfb: Virtual X server for headless testing')

provides=('sunshine')

@ -57,6 +62,9 @@ build() {
  export BUILD_VERSION="@GITHUB_BUILD_VERSION@"
  export COMMIT="@GITHUB_COMMIT@"

  export CC=gcc-12
  export CXX=g++-12

  export CFLAGS="${CFLAGS/-Werror=format-security/}"
  export CXXFLAGS="${CXXFLAGS/-Werror=format-security/}"

@ -72,6 +80,14 @@ build() {
  make -C build
}

check() {
  export CC=gcc-12
  export CXX=g++-12

  cd "${srcdir}/build/tests"
  ./test_sunshine --gtest_color=yes
}

package() {
  make -C build install DESTDIR="$pkgdir"
}
@ -0,0 +1 @@
Subproject commit 644487fc314a069d06219b754b04c39a807ca8d9

1 packaging/linux/flatpak/deps/shared-modules Submodule
@ -0,0 +1 @@
Subproject commit d0229951ac23967c4f5697bd7b5c1bd7e641b8c3
@ -34,6 +34,8 @@ build-options:
  prepend-ld-library-path: /usr/lib/sdk/vala/lib

modules:
  - "org.flatpak.Builder.BaseApp/xvfb.json"

  - name: boost
    disabled: false
    buildsystem: simple
@ -341,6 +343,7 @@ modules:
      - -DSUNSHINE_ENABLE_DRM=ON
      - -DSUNSHINE_ENABLE_CUDA=ON
      - -DSUNSHINE_BUILD_FLATPAK=ON
      - -DTESTS_ENABLE_PYTHON_TESTS=OFF
    sources:
      - type: git
        url: "@GITHUB_CLONE_URL@"
@ -358,3 +361,7 @@ modules:
        's%/app/bin/sunshine%flatpak run dev.lizardbyte.sunshine\nExecStop=flatpak kill dev.lizardbyte.sunshine%g'
        /app/share/sunshine/systemd/user/sunshine.service
      - install -D $FLATPAK_BUILDER_BUILDDIR/packaging/linux/flatpak/scripts/* /app/bin
    run-tests: true
    test-rule: "" # empty to disable
    test-commands:
      - xvfb-run tests/test_sunshine --gtest_color=yes
@ -31,13 +31,19 @@ post-fetch {
    system -W ${worksrcpath} "${git.cmd} submodule update --init --recursive"
}

# https://guide.macports.org/chunked/reference.dependencies.html
depends_build-append port:npm9 \
                     port:pkgconfig

depends_lib port:avahi \
            port:curl \
            port:libopus \
            port:miniupnpc
            port:miniupnpc \
            port:python311 \
            port:py311-pip

depends_test port:doxygen \
             port:graphviz

boost.version 1.81

@ -62,3 +68,9 @@ notes-append "Run @PROJECT_NAME@ by executing 'sunshine <path to user config>',
notes-append "The config file will be created if it doesn't exist."
notes-append "It is recommended to set a location for the apps file in the config."
notes-append "See our documentation at 'https://docs.lizardbyte.dev/projects/sunshine/en/v@PROJECT_VERSION@/' for further info."

test.run yes
test.dir ${build.dir}/tests
test.target ""
test.cmd ./test_sunshine
test.args --gtest_color=yes
@ -11,26 +11,28 @@ class @PROJECT_NAME@ < Formula

  depends_on "boost" => :build
  depends_on "cmake" => :build
  depends_on "node" => :build
  depends_on "pkg-config" => :build
  depends_on "curl"
  depends_on "miniupnpc"
  depends_on "node"
  depends_on "openssl"
  depends_on "opus"

  def install
    args = %W[
      -DBUIld_WERROR=ON
      -DBUILD_WERROR=ON
      -DCMAKE_INSTALL_PREFIX=#{prefix}
      -DOPENSSL_ROOT_DIR=#{Formula["openssl"].opt_prefix}
      -DSUNSHINE_ASSETS_DIR=sunshine/assets
      -DSUNSHINE_BUILD_HOMEBREW=ON
      -DTESTS_ENABLE_PYTHON_TESTS=OFF
    ]
    system "cmake", "-S", ".", "-B", "build", *std_cmake_args, *args

    cd "build" do
      system "make", "-j"
      system "make", "install"
      bin.install "tests/test_sunshine"
    end
  end

@ -54,9 +56,10 @@ class @PROJECT_NAME@ < Formula

  test do
    # test that the binary runs at all
    output = shell_output("#{bin}/sunshine --version").strip
    puts output
    system "#{bin}/sunshine", "--version"

    # TODO: add unit tests
    # run the test suite
    # cannot build tests with python tests because homebrew destroys the source directory
    system "#{bin}/test_sunshine", "--gtest_color=yes"
  end
end
@ -5,6 +5,7 @@ import subprocess
# variables
directories = [
    'src',
    'tests',
    'tools',
    os.path.join('third-party', 'glad'),
    os.path.join('third-party', 'nvfbc'),
@ -22,7 +22,9 @@ extern "C" {
#define fourcc_mod_code(vendor, val) ((((uint64_t) vendor) << 56) | ((val) &0x00ffffffffffffffULL))
#define DRM_FORMAT_MOD_INVALID fourcc_mod_code(0, ((1ULL << 56) - 1))

#define SUNSHINE_SHADERS_DIR SUNSHINE_ASSETS_DIR "/shaders/opengl"
#if !defined(SUNSHINE_SHADERS_DIR) // for testing this needs to be defined in cmake as we don't do an install
  #define SUNSHINE_SHADERS_DIR SUNSHINE_ASSETS_DIR "/shaders/opengl"
#endif

using namespace std::literals;
namespace gl {
@ -25,7 +25,9 @@ extern "C" {

#include <boost/algorithm/string/predicate.hpp>

#define SUNSHINE_SHADERS_DIR SUNSHINE_ASSETS_DIR "/shaders/directx"
#if !defined(SUNSHINE_SHADERS_DIR) // for testing this needs to be defined in cmake as we don't do an install
  #define SUNSHINE_SHADERS_DIR SUNSHINE_ASSETS_DIR "/shaders/directx"
#endif
namespace platf {
  using namespace std::literals;
}
120 tests/CMakeLists.txt Normal file
@ -0,0 +1,120 @@
cmake_minimum_required(VERSION 3.13)
# https://github.com/google/oss-policies-info/blob/main/foundational-cxx-support-matrix.md#foundational-c-support

project(test_sunshine)

set(PYTHON_PREFERRED_VERSION 3.11)
set(PYTHON_MINIMUM_VERSION 3.9)

include_directories("${CMAKE_SOURCE_DIR}")

enable_testing()

# Add GoogleTest directory to the project
set(GTEST_SOURCE_DIR "${CMAKE_SOURCE_DIR}/third-party/googletest")
set(INSTALL_GTEST OFF)
set(INSTALL_GMOCK OFF)
add_subdirectory("${GTEST_SOURCE_DIR}" "${CMAKE_CURRENT_BINARY_DIR}/googletest")
include_directories("${GTEST_SOURCE_DIR}/googletest/include" "${GTEST_SOURCE_DIR}")

# coverage
# https://gcovr.com/en/stable/guide/compiling.html#compiler-options
set(CMAKE_CXX_FLAGS "-fprofile-arcs -ftest-coverage -O1")
set(CMAKE_C_FLAGS "-fprofile-arcs -ftest-coverage -O1")

# if windows
if (WIN32)
    # For Windows: Prevent overriding the parent project's compiler/linker settings
    set(gtest_force_shared_crt ON CACHE BOOL "" FORCE) # cmake-lint: disable=C0103
endif ()

# modify SUNSHINE_DEFINITIONS
if (WIN32)
    list(APPEND
            SUNSHINE_DEFINITIONS SUNSHINE_SHADERS_DIR="${CMAKE_SOURCE_DIR}/src_assets/windows/assets/shaders/directx")
elseif (NOT APPLE)
    list(APPEND SUNSHINE_DEFINITIONS SUNSHINE_SHADERS_DIR="${CMAKE_SOURCE_DIR}/src_assets/linux/assets/shaders/opengl")
endif ()

set(TEST_DEFINITIONS) # list will be appended as needed

# IF option TESTS_ENABLE_PYTHON_TESTS is ON, then we need to find python
if (TESTS_ENABLE_PYTHON_TESTS)
    if (NOT DEFINED TESTS_PYTHON_EXECUTABLE)
        # python is required for doc tests

        # https://github.com/actions/setup-python/issues/121#issuecomment-777748504
        if (POLICY CMP0094) # https://cmake.org/cmake/help/latest/policy/CMP0094.html
            cmake_policy(SET CMP0094 NEW) # FindPython should return the first matching Python
        endif ()

        # needed on GitHub Actions CI: actions/setup-python does not touch registry/frameworks on Windows/macOS
        # this mirrors PythonInterp behavior which did not consult registry/frameworks first
        if (NOT DEFINED Python_FIND_REGISTRY)
            set(Python_FIND_REGISTRY "LAST") # cmake-lint: disable=C0103
        endif ()
        if (NOT DEFINED Python_FIND_FRAMEWORK)
            set(Python_FIND_FRAMEWORK "LAST") # cmake-lint: disable=C0103
        endif ()

        # first, try to find preferred version of python
        find_package(Python ${PYTHON_PREFERRED_VERSION} EXACT COMPONENTS Interpreter)
        if (Python_FOUND)
            message(STATUS
                    "Preferred Python ${PYTHON_PREFERRED_VERSION} found, tests dependent on Python will be enabled")
        else ()
            # fallback to minimum version
            find_package(Python ${PYTHON_MINIMUM_VERSION} COMPONENTS Interpreter)
        endif ()
        if (Python_FOUND)
            message(STATUS "Python found, tests dependent on Python will be enabled")
            list(APPEND TEST_DEFINITIONS TESTS_ENABLE_VENV_TESTS=1)
            list(APPEND TEST_DEFINITIONS TESTS_PYTHON_EXECUTABLE="${Python_EXECUTABLE}")
        else ()
            message(SEND_ERROR "Python not found, tests dependent on Python will be disabled")
            list(APPEND TEST_DEFINITIONS TESTS_ENABLE_VENV_TESTS=0)
        endif ()
    else()
        message(STATUS "Python executable is set to ${TESTS_PYTHON_EXECUTABLE}")
        list(APPEND TEST_DEFINITIONS TESTS_ENABLE_VENV_TESTS=1)
        list(APPEND TEST_DEFINITIONS TESTS_PYTHON_EXECUTABLE="${TESTS_PYTHON_EXECUTABLE}")
    endif()
else ()
    message(STATUS "Python tests are disabled by 'TESTS_ENABLE_PYTHON_TESTS' option")
    list(APPEND TEST_DEFINITIONS TESTS_ENABLE_VENV_TESTS=0)
endif ()

list(APPEND TEST_DEFINITIONS TESTS_SOURCE_DIR="${CMAKE_SOURCE_DIR}") # add source directory to TEST_DEFINITIONS
list(APPEND TEST_DEFINITIONS TESTS_DOCS_DIR="${CMAKE_SOURCE_DIR}/docs") # add docs directory to TEST_DEFINITIONS

# make sure TESTS_SOFTWARE_ENCODER_UNAVAILABLE is set to "fail" or "skip"
if (NOT (TESTS_SOFTWARE_ENCODER_UNAVAILABLE STREQUAL "fail" OR TESTS_SOFTWARE_ENCODER_UNAVAILABLE STREQUAL "skip"))
    set(TESTS_SOFTWARE_ENCODER_UNAVAILABLE "fail")
endif ()
list(APPEND TEST_DEFINITIONS TESTS_SOFTWARE_ENCODER_UNAVAILABLE="${TESTS_SOFTWARE_ENCODER_UNAVAILABLE}") # fail/skip

file(GLOB_RECURSE TEST_SOURCES
        ${CMAKE_SOURCE_DIR}/tests/conftest.cpp
        ${CMAKE_SOURCE_DIR}/tests/utils.cpp
        ${CMAKE_SOURCE_DIR}/tests/test_*.cpp)

set(SUNSHINE_SOURCES
        ${SUNSHINE_TARGET_FILES})

# remove main.cpp from the list of sources
list(REMOVE_ITEM SUNSHINE_SOURCES ${CMAKE_SOURCE_DIR}/src/main.cpp)

add_executable(${PROJECT_NAME}
        ${TEST_SOURCES}
        ${SUNSHINE_SOURCES})
set_target_properties(${PROJECT_NAME} PROPERTIES CXX_STANDARD 17)
target_link_libraries(${PROJECT_NAME}
        ${SUNSHINE_EXTERNAL_LIBRARIES}
        gtest
        gtest_main # if we use this we don't need our own main function
        ${PLATFORM_LIBRARIES})
target_compile_definitions(${PROJECT_NAME} PUBLIC ${SUNSHINE_DEFINITIONS} ${TEST_DEFINITIONS})
target_compile_options(${PROJECT_NAME} PRIVATE $<$<COMPILE_LANGUAGE:CXX>:${SUNSHINE_COMPILE_OPTIONS}>;$<$<COMPILE_LANGUAGE:CUDA>:${SUNSHINE_COMPILE_OPTIONS_CUDA};-std=c++17>) # cmake-lint: disable=C0301
target_link_options(${PROJECT_NAME} PRIVATE)

add_test(NAME ${PROJECT_NAME} COMMAND sunshine_test)
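
The entries appended to TEST_DEFINITIONS above are handed to target_compile_definitions, so each one becomes a
preprocessor macro inside the test binary. A minimal sketch of how such a macro can be consumed from a test
(hypothetical example, not part of this commit):

    #include <gtest/gtest.h>

    TEST(BuildConfig, PythonTestsToggle) {
    // TESTS_ENABLE_VENV_TESTS is defined to 0 or 1 by tests/CMakeLists.txt above
    #if defined TESTS_ENABLE_VENV_TESTS && TESTS_ENABLE_VENV_TESTS == 1
      // TESTS_PYTHON_EXECUTABLE carries the interpreter path found by FindPython
      EXPECT_STRNE(TESTS_PYTHON_EXECUTABLE, "");
    #else
      GTEST_SKIP() << "Python-dependent tests are disabled at configure time";
    #endif
    }
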
36 tests/ci/test_docs.cpp Normal file
@ -0,0 +1,36 @@
#include <tests/conftest.cpp>

class DocsTests: public DocsTestFixture, public ::testing::WithParamInterface<std::tuple<const char *, const char *>> {};
INSTANTIATE_TEST_SUITE_P(
  DocFormats,
  DocsTests,
  ::testing::Values(
    std::make_tuple("html", "index.html"),
    std::make_tuple("epub", "Sunshine.epub")));
TEST_P(DocsTests, MakeDocs) {
  auto params = GetParam();
  std::string format = std::get<0>(params);
  std::string expected_filename = std::get<1>(params);

  std::filesystem::path expected_file = std::filesystem::current_path() / "build" / format / expected_filename;

  std::string command = "make " + format;
  int status = BaseTest::exec(command.c_str());
  EXPECT_EQ(status, 0);

  EXPECT_TRUE(std::filesystem::exists(expected_file));
}

class DocsRstTests: public DocsPythonVenvTest, public ::testing::WithParamInterface<std::filesystem::path> {};
INSTANTIATE_TEST_SUITE_P(
  RstFiles,
  DocsRstTests,
  ::testing::Values(
    std::filesystem::path(TESTS_DOCS_DIR),
    std::filesystem::path(TESTS_SOURCE_DIR) / "README.rst"));
TEST_P(DocsRstTests, RstCheckDocs) {
  std::filesystem::path docs_dir = GetParam();
  std::string command = "rstcheck -r " + docs_dir.string();
  int status = BaseTest::exec(command.c_str());
  EXPECT_EQ(status, 0);
}
385 tests/conftest.cpp Normal file
@ -0,0 +1,385 @@
#include <filesystem>
#include <gtest/gtest.h>

#include <boost/log/core.hpp>
#include <boost/log/expressions.hpp>
#include <boost/log/sinks/sync_frontend.hpp>
#include <boost/log/sinks/text_ostream_backend.hpp>
#include <boost/log/trivial.hpp>
#include <boost/shared_ptr.hpp>

#include <src/globals.h>
#include <src/platform/common.h>

#include <tests/utils.h>

namespace logging = boost::log;
namespace sinks = logging::sinks;

// Undefine the original TEST macro
#undef TEST

// Redefine TEST to use our BaseTest class, to automatically use our BaseTest fixture
#define TEST(test_case_name, test_name) \
  GTEST_TEST_(test_case_name, test_name, ::BaseTest, \
    ::testing::internal::GetTypeId<::BaseTest>())

/**
 * @brief Base class for tests.
 *
 * This class provides a base test fixture for all tests.
 *
 * ``cout``, ``stderr``, and ``stdout`` are redirected to a buffer, and the buffer is printed if the test fails.
 *
 * @todo Retain the color of the original output.
 */
class BaseTest: public ::testing::Test {
protected:
  // https://stackoverflow.com/a/58369622/11214013

  // we can possibly use some internal googletest functions to capture stdout and stderr, but I have not tested this
  // https://stackoverflow.com/a/33186201/11214013

  // Add a member variable to store the sink
  boost::shared_ptr<sinks::synchronous_sink<sinks::text_ostream_backend>> test_sink;

  BaseTest():
      sbuf { nullptr }, pipe_stdout { nullptr }, pipe_stderr { nullptr } {
    // intentionally empty
  }

  ~BaseTest() override = default;

  void
  SetUp() override {
    // todo: only run this one time, instead of every time a test is run
    // see: https://stackoverflow.com/questions/2435277/googletest-accessing-the-environment-from-a-test
    // get command line args from the test executable
    testArgs = ::testing::internal::GetArgvs();

    // then get the directory of the test executable
    // std::string path = ::testing::internal::GetArgvs()[0];
    testBinary = testArgs[0];

    // get the directory of the test executable
    testBinaryDir = std::filesystem::path(testBinary).parent_path();

    // If testBinaryDir is empty or `.` then set it to the current directory
    // maybe some better options here: https://stackoverflow.com/questions/875249/how-to-get-current-directory
    if (testBinaryDir.empty() || testBinaryDir.string() == ".") {
      testBinaryDir = std::filesystem::current_path();
    }

    // Create a sink that writes to our stringstream (BOOST_LOG)
    typedef sinks::synchronous_sink<sinks::text_ostream_backend> test_text_sink;
    test_sink = boost::make_shared<test_text_sink>();

    // Set the stringstream as the target of the sink (BOOST_LOG)
    boost::shared_ptr<std::ostream> stream(&boost_log_buffer, [](std::ostream *) {});
    test_sink->locked_backend()->add_stream(stream);

    // Register the sink in the logging core (BOOST_LOG)
    logging::core::get()->add_sink(test_sink);

    sbuf = std::cout.rdbuf(); // save cout buffer (std::cout)
    std::cout.rdbuf(cout_buffer.rdbuf()); // redirect cout to buffer (std::cout)

    // todo: do this only once
    // setup a mail object
    mail::man = std::make_shared<safe::mail_raw_t>();
  }

  void
  TearDown() override {
    std::cout.rdbuf(sbuf); // restore cout buffer

    // get test info
    const ::testing::TestInfo *const test_info = ::testing::UnitTest::GetInstance()->current_test_info();

    if (test_info->result()->Failed()) {
      std::cout << std::endl
                << "Test failed: " << test_info->name() << std::endl
                << std::endl
                << "Captured boost log:" << std::endl
                << boost_log_buffer.str() << std::endl
                << "Captured cout:" << std::endl
                << cout_buffer.str() << std::endl
                << "Captured stdout:" << std::endl
                << stdout_buffer.str() << std::endl
                << "Captured stderr:" << std::endl
                << stderr_buffer.str() << std::endl;
    }

    sbuf = nullptr; // clear sbuf
    if (pipe_stdout) {
      pclose(pipe_stdout);
      pipe_stdout = nullptr;
    }
    if (pipe_stderr) {
      pclose(pipe_stderr);
      pipe_stderr = nullptr;
    }

    // Remove the sink from the logging core (BOOST_LOG)
    logging::core::get()->remove_sink(test_sink);
    test_sink.reset();
  }

  // functions and variables
  std::vector<std::string> testArgs; // CLI arguments used
  std::filesystem::path testBinary; // full path of this binary
  std::filesystem::path testBinaryDir; // full directory of this binary
  std::stringstream boost_log_buffer; // declare boost_log_buffer
  std::stringstream cout_buffer; // declare cout_buffer
  std::stringstream stdout_buffer; // declare stdout_buffer
  std::stringstream stderr_buffer; // declare stderr_buffer
  std::streambuf *sbuf;
  FILE *pipe_stdout;
  FILE *pipe_stderr;

  int
  exec(const char *cmd) {
    std::array<char, 128> buffer {};
    pipe_stdout = popen((std::string(cmd) + " 2>&1").c_str(), "r");
    pipe_stderr = popen((std::string(cmd) + " 2>&1").c_str(), "r");
    if (!pipe_stdout || !pipe_stderr) {
      throw std::runtime_error("popen() failed!");
    }
    while (fgets(buffer.data(), buffer.size(), pipe_stdout) != nullptr) {
      stdout_buffer << buffer.data();
    }
    while (fgets(buffer.data(), buffer.size(), pipe_stderr) != nullptr) {
      stderr_buffer << buffer.data();
    }
    int returnCode = pclose(pipe_stdout);
    pipe_stdout = nullptr;
    if (returnCode != 0) {
      std::cout << "Error: " << stderr_buffer.str() << std::endl
                << "Return code: " << returnCode << std::endl;
    }
    return returnCode;
  }
};

class PlatformInitBase: public virtual BaseTest {
protected:
  void
  SetUp() override {
    std::cout << "PlatformInitTest:: starting Fixture SetUp" << std::endl;

    // initialize the platform
    auto deinit_guard = platf::init();
    if (!deinit_guard) {
      FAIL() << "Platform failed to initialize";
    }

    std::cout << "PlatformInitTest:: finished Fixture SetUp" << std::endl;
  }

  void
  TearDown() override {
    std::cout << "PlatformInitTest:: starting Fixture TearDown" << std::endl;
    std::cout << "PlatformInitTest:: finished Fixture TearDown" << std::endl;
  }
};

class DocsPythonVenvBase: public virtual BaseTest {
protected:
  void
  SetUp() override {
#if defined TESTS_ENABLE_VENV_TESTS && TESTS_ENABLE_VENV_TESTS == 0
    GTEST_SKIP_("TESTS_ENABLE_VENV_TESTS is disabled by CMake");
#else
    std::cout << "DocsPythonVenvTest:: starting Fixture SetUp" << std::endl;

    std::string pythonBinDirArray[] = { "bin", "Scripts" };
    std::filesystem::path pythonPath = "python";
    std::string binPath;
    std::string command;
    int exit_code;

    std::filesystem::path venvPath = ".venv";
    std::filesystem::path fullVenvPath = BaseTest::testBinaryDir / venvPath;

    // check for existence of venv, and create it if necessary
    std::cout << "DocsPythonVenvTest:: checking for venv" << std::endl;
    if (!std::filesystem::exists(fullVenvPath)) {
      std::cout << "DocsPythonVenvTest:: venv not found" << std::endl;

      // create the venv
      command = "\"" TESTS_PYTHON_EXECUTABLE "\" -m venv " + fullVenvPath.string();
      std::cout << "DocsPythonVenvTest:: trying to create venv with command: " << command << std::endl;
      exit_code = BaseTest::exec(command.c_str());
      if (exit_code != 0) {
        if (!std::filesystem::exists(fullVenvPath)) {
          FAIL() << "Command failed: " << command << " with exit code: " << exit_code;
        }
        else {
          // venv command will randomly complain that some files already exist...
          std::cout << "DocsPythonVenvTest:: exit code (" << exit_code << ") indicates venv creation failed, but venv exists" << std::endl;
        }
      }
    }

    // determine if bin directory is `bin` (Unix) or `Scripts` (Windows)
    // cannot assume `Scripts` on Windows, as it could be `bin` if using MSYS2, cygwin, etc.
    std::cout << "DocsPythonVenvTest:: checking structure of venv" << std::endl;
    for (const std::string &binDir : pythonBinDirArray) {
      // check if bin directory exists
      if (std::filesystem::exists(fullVenvPath / binDir)) {
        binPath = binDir;
        std::cout << "DocsPythonVenvTest:: found binPath: " << binPath << std::endl;
        break;
      }
    }

    if (binPath.empty()) {
      FAIL() << "Python venv not found";
    }

    // set fullPythonPath and fullPythonBinPath
    fullPythonPath = fullVenvPath / binPath / pythonPath;
    fullPythonBinPath = fullVenvPath / binPath;

    std::cout << "DocsPythonVenvTest:: fullPythonPath: " << fullPythonPath << std::endl;
    std::cout << "DocsPythonVenvTest:: fullPythonBinPath: " << fullPythonBinPath << std::endl;

    std::filesystem::path requirements_path = std::filesystem::path(TESTS_DOCS_DIR) / "requirements.txt";

    // array of commands to run
    std::string CommandArray[] = {
      "\"" + fullPythonPath.string() + "\" -m pip install -r " + requirements_path.string(),
    };

    for (const std::string &_command : CommandArray) {
      std::cout << "DocsPythonVenvTest:: running command: " << _command << std::endl;
      exit_code = BaseTest::exec(_command.c_str());
      if (exit_code != 0) {
        FAIL() << "Command failed: " << command << " with exit code: " << exit_code;
      }
    }

    // Save the original PATH
    originalEnvPath = std::getenv("PATH") ? std::getenv("PATH") : "";
    std::cout << "DocsPythonVenvTest:: originalEnvPath: " << originalEnvPath << std::endl;

    // Set the temporary PATH
    std::string tempPath;
    std::string envPathSep;

#ifdef _WIN32
    envPathSep = ";";
#else
    envPathSep = ":";
#endif
    tempPath = fullPythonBinPath.string() + envPathSep + originalEnvPath;
    std::cout << "DocsPythonVenvTest:: tempPath: " << tempPath << std::endl;
    setEnv("PATH", tempPath);

    std::cout << "DocsPythonVenvTest:: finished Fixture SetUp" << std::endl;
#endif
  }

  void
  TearDown() override {
    std::cout << "DocsPythonVenvTest:: starting Fixture TearDown" << std::endl;

    // Restore the original PATH
    if (!originalEnvPath.empty()) {
      std::cout << "DocsPythonVenvTest:: restoring originalEnvPath: " << originalEnvPath << std::endl;
      setEnv("PATH", originalEnvPath);
    }

    std::cout << "DocsPythonVenvTest:: finished Fixture TearDown" << std::endl;
  }

  // functions and variables
  std::filesystem::path fullPythonPath;
  std::filesystem::path fullPythonBinPath;
  std::string originalEnvPath;
};

class DocsPythonVenvTest: public virtual BaseTest, public DocsPythonVenvBase {
protected:
  void
  SetUp() override {
    BaseTest::SetUp();
    DocsPythonVenvBase::SetUp();
  }

  void
  TearDown() override {
    DocsPythonVenvBase::TearDown();
    BaseTest::TearDown();
  }
};

class DocsWorkingDirectoryBase: public virtual BaseTest {
protected:
  void
  SetUp() override {
#if defined TESTS_ENABLE_VENV_TESTS && TESTS_ENABLE_VENV_TESTS == 1
    std::cout << "DocsWorkingDirectoryTest:: starting Fixture SetUp" << std::endl;

    temp_dir = TESTS_DOCS_DIR;
    std::cout << "DocsWorkingDirectoryTest:: temp_dir: " << temp_dir << std::endl;

    // change directory to `docs`
    original_dir = std::filesystem::current_path(); // save original directory
    std::cout << "DocsWorkingDirectoryTest:: original_dir: " << original_dir << std::endl;
    std::filesystem::current_path(temp_dir);
    std::cout << "DocsWorkingDirectoryTest:: working directory set to: " << std::filesystem::current_path() << std::endl;

    std::cout << "DocsWorkingDirectoryTest:: finished Fixture SetUp" << std::endl;
#endif
  }

  void
  TearDown() override {
#if defined TESTS_ENABLE_VENV_TESTS && TESTS_ENABLE_VENV_TESTS == 1
    std::cout << "DocsWorkingDirectoryTest:: starting Fixture TearDown" << std::endl;

    // change directory back to original
    std::filesystem::current_path(original_dir);
    std::cout << "DocsWorkingDirectoryTest:: working directory set to: " << std::filesystem::current_path() << std::endl;

    std::cout << "DocsWorkingDirectoryTest:: finished Fixture TearDown" << std::endl;
#endif
  }

  // functions and variables
  std::filesystem::path original_dir;
  std::filesystem::path temp_dir;
};

class DocsWorkingDirectoryTest: public virtual BaseTest, public DocsWorkingDirectoryBase {
protected:
  void
  SetUp() override {
    BaseTest::SetUp();
    DocsWorkingDirectoryBase::SetUp();
  }

  void
  TearDown() override {
    DocsWorkingDirectoryBase::TearDown();
    BaseTest::TearDown();
  }
};

class DocsTestFixture: public virtual BaseTest, public DocsPythonVenvBase, public DocsWorkingDirectoryBase {
protected:
  void
  SetUp() override {
    BaseTest::SetUp();
    DocsPythonVenvBase::SetUp();
    DocsWorkingDirectoryBase::SetUp();
  }

  void
  TearDown() override {
    DocsWorkingDirectoryBase::TearDown();
    DocsPythonVenvBase::TearDown();
    BaseTest::TearDown();
  }
};
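
Because conftest.cpp redefines the TEST macro so that every test derives from BaseTest, an ordinary-looking test
automatically gets the captured log/stdout/stderr buffers and the exec() helper shown above. A hypothetical usage
example (for illustration only, not part of this commit):

    #include <tests/conftest.cpp>

    TEST(ConftestUsageExample, ExecCapturesOutput) {
      // exec() is inherited from BaseTest; the child process output lands in
      // stdout_buffer/stderr_buffer and is only printed if the test fails
      int status = BaseTest::exec("echo hello");
      EXPECT_EQ(status, 0);
    }

The fixtures above, such as DocsPythonVenvBase, use the same helper to drive external commands like pip.
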
19 tests/unit/test_file_handler.cpp Normal file
@ -0,0 +1,19 @@
/**
 * @file tests/test_file_handler.cpp
 * @brief Test src/file_handler.*.
 */
#include <src/file_handler.h>

#include <tests/conftest.cpp>

TEST(FileHandlerTests, WriteFileTest) {
  EXPECT_EQ(file_handler::write_file("write_file_test.txt", "test"), 0);
}

TEST(FileHandlerTests, ReadFileTest) {
  // read file from WriteFileTest
  EXPECT_EQ(file_handler::read_file("write_file_test.txt"), "test\n"); // sunshine adds a newline

  // read missing file
  EXPECT_EQ(file_handler::read_file("non-existing-file.txt"), "");
}
68 tests/unit/test_video.cpp Normal file
@ -0,0 +1,68 @@
/**
 * @file tests/test_video.cpp
 * @brief Test src/video.*.
 */
#include <src/video.h>

#include <tests/conftest.cpp>

class EncoderTest: public virtual BaseTest, public PlatformInitBase, public ::testing::WithParamInterface<std::tuple<std::basic_string_view<char>, video::encoder_t *>> {
protected:
  void
  SetUp() override {
    BaseTest::SetUp();
    PlatformInitBase::SetUp();

    std::string_view p_name = std::get<0>(GetParam());
    std::cout << "EncoderTest(" << p_name << "):: starting Fixture SetUp" << std::endl;

    std::cout << "EncoderTest(" << p_name << "):: validating encoder" << std::endl;
    video::encoder_t *encoder = std::get<1>(GetParam());
    bool isEncoderValid;
    isEncoderValid = video::validate_encoder(*encoder, false);

    // todo: av logging is not redirected to boost so it will be visible whether the test passes or fails
    // move this code to logging
    // https://github.com/LizardByte/Sunshine/blob/5606840c8983b714a0e442c42d887a49807715e1/src/main.cpp#L118

    if (!isEncoderValid) {
      // if encoder is software fail, otherwise skip
      if (encoder == &video::software && std::string(TESTS_SOFTWARE_ENCODER_UNAVAILABLE) == "fail") {
        FAIL() << "EncoderTest(" << p_name << "):: software encoder not available";
      }
      else {
        GTEST_SKIP_((std::string("EncoderTest(") + std::string(p_name) + "):: encoder not available").c_str());
      }
    }
    else {
      std::cout << "EncoderTest(" << p_name << "):: encoder available" << std::endl;
    }
  }

  void
  TearDown() override {
    PlatformInitBase::TearDown();
    BaseTest::TearDown();
  }
};
INSTANTIATE_TEST_SUITE_P(
  EncoderVariants,
  EncoderTest,
  ::testing::Values(
// todo: all encoders crash on windows, probably due to platf not being initialized (which also crashes)
#if !defined(__APPLE__)
    std::make_tuple(video::nvenc.name, &video::nvenc),
#endif
#ifdef _WIN32
    std::make_tuple(video::amdvce.name, &video::amdvce), std::make_tuple(video::quicksync.name, &video::quicksync),
#endif
#ifdef __linux__
    std::make_tuple(video::vaapi.name, &video::vaapi),
#endif
#ifdef __APPLE__
    std::make_tuple(video::videotoolbox.name, &video::videotoolbox),
#endif
    std::make_tuple(video::software.name, &video::software)));
TEST_P(EncoderTest, ValidateEncoder) {
  // todo:: test something besides fixture setup
}
21 tests/utils.cpp Normal file
@ -0,0 +1,21 @@
/**
 * @file utils.cpp
 * @brief Utility functions
 */

#include "utils.h"

/**
 * @brief Set an environment variable.
 * @param name Name of the environment variable
 * @param value Value of the environment variable
 * @return 0 on success, non-zero error code on failure
 */
int
setEnv(const std::string &name, const std::string &value) {
#ifdef _WIN32
  return _putenv_s(name.c_str(), value.c_str());
#else
  return setenv(name.c_str(), value.c_str(), 1);
#endif
}
11 tests/utils.h Normal file
@ -0,0 +1,11 @@
/**
 * @file utils.h
 * @brief Reusable functions for tests.
 */

#pragma once

#include <string>

int
setEnv(const std::string &name, const std::string &value);
1 third-party/googletest vendored Submodule
@ -0,0 +1 @@
Subproject commit f8d7d77c06936315286eb55f8de22cd23c188571