Compare commits

..

47 Commits

Author SHA1 Message Date
loki
1102ac9f3b Merge branch 'nvenc' 2020-04-23 16:12:21 +02:00
loki
12115a8a75 Merge branch 'master' of github.com:loki-47-6F-64/sunshine 2020-04-23 16:11:56 +02:00
loki
7ee59669da Removed unnecessary header include 2020-04-23 15:49:47 +02:00
loki
22418cb613 Moved linux specific files to folder platform/linux 2020-04-23 15:48:05 +02:00
loki
4bccec1c39 Refactor platorm Windows 2020-04-23 15:41:40 +02:00
loki
8b1a288ef3 Merge branch 'nvenc' of https://github.com/loki-47-6F-64/sunshine into nvenc 2020-04-23 11:14:35 +02:00
loki
fa489531b0 Don't access video device ctx merely for setting cursor invisible 2020-04-23 00:23:40 +03:00
loki
2e52402e27 Correctly truncate cursor image 2020-04-23 00:09:27 +03:00
loki
519f7a8bf1 convert pointer shape monochrome to color 2020-04-22 22:55:33 +03:00
loki
17e9b803db Display cursor type color with nvenc 2020-04-22 00:07:26 +03:00
loki
48b4dbd81c Merge branch 'master' into nvenc 2020-04-19 21:39:32 +02:00
loki
70bf11ec27 Increase accuracy of fps for nvenc and proper pixel format 2020-04-19 00:10:47 +03:00
loki-47-6F-64
8ea9d34729 Add small description for Sunshine 2020-04-18 12:27:26 +02:00
loki-47-6F-64
8c4519a2d0 Merge pull request #15 from Doomsdayrs/master
README update
2020-04-18 12:25:03 +02:00
Doomsdayrs
ea266b979f Update README.md
- added assets
- made a list
2020-04-17 22:46:07 -04:00
loki
2f978b3159 update pre-compiled 2020-04-17 21:57:27 +02:00
loki
d81ba12aa8 Merge branch 'nvenc' of github.com:loki-47-6F-64/sunshine into nvenc 2020-04-17 19:19:12 +02:00
loki
dd13131fe6 Fix video freezing when resizing display with 2 or more sessions 2020-04-17 19:18:55 +02:00
loki
5a4055f313 Pair and connect with Moonlight-iOS 2020-04-17 18:42:55 +02:00
loki
87f3ab0181 Fix nvenc 2020-04-17 12:28:23 +02:00
loki
c7d6e959e0 Fix stream not closing properly when exiting app 2020-04-16 15:35:12 +02:00
loki
0b1a69a067 Ensure it compiles on Linux again 2020-04-15 21:07:00 +02:00
loki
525e8b3c6d Refactor video.cpp 2020-04-15 19:16:20 +02:00
loki
ad7f93c3cb Switch between nvenc and software encoding 2020-04-14 00:59:43 +03:00
loki
c7a72553c4 Configure settings nvenc 2020-04-14 00:15:24 +03:00
loki
679f74e53c Fix multicasting for nvenc 2020-04-12 02:33:17 +03:00
loki
7edaa0cce0 Encode with nvenc smoothly 2020-04-10 15:39:50 +03:00
loki
c21038af88 Encode video with nvenc 2020-04-08 02:15:08 +03:00
loki
65f44cc885 Fix encoder flags not set properly 2020-04-07 18:57:59 +03:00
loki
ceb784c648 Test capabilities of the encoders 2020-04-07 14:54:56 +03:00
loki
8e3df43caf Pass both nvenc and software in validation 2020-04-07 00:34:52 +03:00
loki
afbca0f6cd initialize nvenc 2020-04-06 23:15:03 +03:00
Doomsdayrs
f4e99a1bd6 Update README.md 2020-04-05 14:46:00 -04:00
Doomsdayrs
ed6b919a4d Update README.md 2020-04-05 14:34:11 -04:00
Doomsdayrs
2d2fd77bc1 Update README.md
- Cleaned up credits
2020-04-05 02:11:26 -04:00
Doomsdayrs
15f347c69c Update README.md
- Super simple table of contents
2020-04-05 02:10:20 -04:00
Doomsdayrs
9fdaf2117f Update README.md
- Added links, Moved some parts around for appeal
2020-04-05 02:08:39 -04:00
Doomsdayrs
631be974ee Update README.md
- Initial rework to be legible in a modern fashion
2020-04-05 02:03:40 -04:00
doomsdayrs
bd3d1d6988 Readme, but now MD 2020-04-05 01:49:08 -04:00
loki
f2636b163e General structure complete 2020-04-02 20:13:44 +02:00
loki
df5daa045a Add a timestamp in front of the log 2020-04-01 18:01:13 +02:00
loki
4de547228c Fix mouse format unsupported and incorrect version string 2020-04-01 14:33:05 +02:00
loki
3d595ce927 Re-add capturing mouse for windows 2020-03-31 23:48:07 +02:00
loki
456d33cf77 Add abillity to supply options for specific encoders 2020-03-31 23:46:41 +02:00
loki
3ceb9b37a0 Reinitialize the video encoder in addition to the capturing device 2020-03-31 21:18:33 +02:00
loki
94181fd047 Prevent unnecessary copies of entire frames on Windows 2020-03-27 21:57:29 +01:00
loki
55705af922 Prepare for hardware encoders 2020-03-25 10:51:32 +01:00
27 changed files with 3275 additions and 1247 deletions

View File

@@ -57,9 +57,12 @@ if(WIN32)
include_directories(
ViGEmClient/include)
set(PLATFORM_TARGET_FILES
sunshine/platform/windows.cpp
sunshine/platform/windows_dxgi.cpp
sunshine/platform/windows_wasapi.cpp
sunshine/platform/windows/input.cpp
sunshine/platform/windows/display.h
sunshine/platform/windows/display_base.cpp
sunshine/platform/windows/display_vram.cpp
sunshine/platform/windows/display_ram.cpp
sunshine/platform/windows/audio.cpp
ViGEmClient/src/ViGEmClient.cpp
ViGEmClient/include/ViGEm/Client.h
ViGEmClient/include/ViGEm/Common.h
@@ -82,8 +85,8 @@ else()
find_package(X11 REQUIRED)
set(PLATFORM_TARGET_FILES
sunshine/platform/linux.cpp
sunshine/platform/linux_evdev.cpp)
sunshine/platform/linux/display.cpp
sunshine/platform/linux/input.cpp)
set(PLATFORM_LIBRARIES
Xfixes
@@ -148,6 +151,7 @@ set(SUNSHINE_TARGET_FILES
sunshine/thread_pool.h
sunshine/thread_safe.h
sunshine/sync.h
sunshine/round_robin.h
${PLATFORM_TARGET_FILES})
include_directories(

185
README.md Normal file
View File

@@ -0,0 +1,185 @@
# Introduction
Sunshine is a Gamestream host for Moonlight
- [Building](README.md#building)
- [Credits](README.md#credits)
# Building
- [Linux](README.md#linux)
- [Windows](README.md#windows-10)
## Linux
### Requirements:
Ubuntu 19.10:
sudo apt install cmake libssl-dev libavdevice-dev libboost-thread-dev libboost-filesystem-dev libboost-log-dev libpulse-dev libopus-dev libxtst-dev libx11-dev libxfixes-dev libevdev-dev libxcb1-dev libxcb-shm0-dev libxcb-xfixes0-dev
### Compilation:
- `git clone https://github.com/loki-47-6F-64/sunshine.git --recurse-submodules`
- `cd sunshine && mkdir build && cd build`
- `cmake ..`
- `make`: it is recommended to pass the `-j N` flag with this command, `N` being the number of CPU cores your PC has
### Setup:
Sunshine needs access to uinput to create mouse and gamepad events:
- Add your user to the group `input`: `usermod -a -G input username`
- Create the file `/etc/udev/rules.d/85-sunshine-input.rules` with the following contents:
  - `KERNEL=="uinput", GROUP="input", MODE="0660"`
- assets/sunshine.conf is an example configuration file. Modify it as you see fit and use it by running: `sunshine path/to/sunshine.conf`
- path/to/build/dir/sunshine.service is used to start sunshine in the background:
- `cp sunshine.service $HOME/.config/systemd/user/`
- Modify $HOME/.config/systemd/user/sunshine.service to point to the sunshine executable
- `systemctl --user start sunshine`
- assets/apps.json is an [example](README.md#application-list) of a list of applications that are started just before running a stream
### Troubleshooting:
* If you get "Could not create Sunshine Gamepad: Permission Denied", ensure you are part of the group `input`:
  * Run `groups` to verify
* If Sunshine sends audio from the microphone instead of the speaker, try the following steps:
  * `pacmd list-sources | grep "name:"`
  * Copy the name to the configuration option `audio_sink`
  * Restart Sunshine
## Windows 10
### Requirements:
MSYS2 : mingw-w64-x86_64-openssl mingw-w64-x86_64-cmake mingw-w64-x86_64-toolchain mingw-w64-x86_64-ffmpeg mingw-w64-x86_64-boost
### Compilation:
- `git clone https://github.com/loki-47-6F-64/sunshine.git --recurse-submodules`
- `cd sunshine && mkdir build && cd build`
- `cmake -G"Unix Makefiles" ..`
- `make`
### Setup:
- **OPTIONAL** Gamepad support: Download and run 'ViGEmBus_Setup_1.16.116.exe' from https://github.com/ViGEm/ViGEmBus/releases
### Static build
#### Requirements:
MSYS2 : mingw-w64-x86_64-openssl mingw-w64-x86_64-cmake mingw-w64-x86_64-toolchain mingw-w64-x86_64-ffmpeg mingw-w64-x86_64-boost git-lfs
#### Compilation:
- `git lfs install`
- `git clone https://github.com/loki-47-6F-64/sunshine.git --recurse-submodules`
- `cd sunshine && mkdir build && cd build`
- `cmake -DSUNSHINE_STANDALONE=ON -DSUNSHINE_ASSETS_DIR=assets -G"Unix Makefiles" ..`
- `make`
# Common
## Usage:
- run "sunshine path/to/sunshine.conf"
- In Moonlight: Add PC manually
- When Moonlight request you insert the correct pin on sunshine, either:
- `wget xxx.xxx.xxx.xxx:47989/pin/####`
- Type in the URL bar of your browser: `xxx.xxx.xxx.xxx:47989/pin/####`
- The x's are the IP of your instance, `####` is the pin
- Click on one of the Applications listed
- Have fun :)
## Note:
- The Windows key is not passed through by Moonlight, therefore Sunshine maps the Right-Alt key to the Windows key
- If you set Video Bitrate to 0.5Mb/s:
  - Sunshine will use CRF or QP to control the quality of the stream (see the example configuration file for more details)
  - This is less CPU intensive and has lower average bandwidth requirements than manually setting the bitrate to an acceptable quality
  - However, it produces higher peak bitrates, forcing Sunshine to drop entire frames when streaming at 1080p because of their size
  - When this happens, the video portion of the stream appears to be frozen
  - This is rare enough that using it for the desktop environment is tolerable (in my opinion), however for gaming not so much
## Credits:
- [Simple-Web-Server](https://gitlab.com/eidheim/Simple-Web-Server)
- [Moonlight](https://github.com/moonlight-stream)
- [Looking-Glass](https://github.com/gnif/LookingGlass) (For showing me how to properly capture frames on Windows, saving me a lot of time :)
## Application List:
- You can use environment variables in place of values (a minimal sketch of the substitution rules follows the examples below)
  - $(HOME) will be replaced by the value of $HOME
  - $$ will be replaced by $, so $$(HOME) will be replaced by the literal string $(HOME)
- env: Adds or overwrites Environment variables for the commands/applications run by Sunshine.
- "Variable name":"Variable value"
- apps: The list of applications
- Example:
```json
{
"name":"An App",
"cmd":"command to open app",
"prep-cmd":[
{
"do":"somecommand",
"undo":"undothatcommand"
}
]
}
```
- name: Self explanatory
- output <optional>: The file where the output of the command is stored
- If it is not specified, the output is ignored
- prep-cmd: A list of commands to be run before/after the application
- If any of the prep-commands fail, starting the application is aborted
- do: Run before the application
- If it fails, all 'undo' commands of the previously succeeded 'do' commands are run
- undo <optional>: Run after the application has terminated
- This should not fail considering it is supposed to undo the 'do' commands.
- If it fails, Sunshine is terminated
- cmd <optional>: The main application
- If not specified, a process is started that sleeps indefinitely
1. When an application is started, if there is an application already running, it will be terminated.
2. When the application has been shutdown, the stream shuts down as well.
3. In addition to the apps listed, one app "Desktop" is hardcoded into Sunshine. It does not start an application, instead it simply starts a stream.
Linux
```json
{
"env":{
"DISPLAY":":0",
"DRI_PRIME":"1",
"XAUTHORITY":"$(HOME)/.Xauthority",
"PATH":"$(PATH):$(HOME)/.local/bin"
},
"apps":[
{
"name":"Low Res Desktop",
"prep-cmd":[
{ "do":"xrandr --output HDMI-1 --mode 1920x1080", "undo":"xrandr --output HDMI-1 --mode 1920x1200" }
]
},
{
"name":"Steam BigPicture",
"output":"steam.txt",
"cmd":"steam -bigpicture",
"prep-cmd":[]
}
]
}
```
Windows
```json
{
"env":{
"PATH":"$(PATH);C:\\Program Files (x86)\\Steam"
},
"apps":[
{
"name":"Steam BigPicture",
"output":"steam.txt",
"prep-cmd":[
{"do":"steam \"steam://open/bigpicture\""}
]
}
]
}
```
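The environment-variable substitution described above can be sketched in a few lines of C++. This is not the code Sunshine ships (its actual parser lives in the process-handling source and may differ in details); it is only a minimal, hypothetical illustration of the `$(NAME)` and `$$` rules:
```cpp
#include <cstddef>
#include <cstdlib>
#include <iostream>
#include <string>

// Expand $(NAME) using the process environment; "$$" escapes a literal '$'.
std::string expand_env(const std::string &in) {
  std::string out;
  for(std::size_t i = 0; i < in.size();) {
    if(in[i] == '$' && i + 1 < in.size() && in[i + 1] == '$') {
      out += '$';                    // "$$" --> "$", so "$$(HOME)" stays "$(HOME)"
      i += 2;
      continue;
    }
    if(in[i] == '$' && i + 1 < in.size() && in[i + 1] == '(') {
      auto end = in.find(')', i + 2);
      if(end != std::string::npos) {
        auto name = in.substr(i + 2, end - i - 2);
        if(auto val = std::getenv(name.c_str())) {
          out += val;                // $(HOME) --> value of $HOME
        }
        i = end + 1;
        continue;
      }
    }
    out += in[i++];
  }
  return out;
}

int main() {
  std::cout << expand_env("$(HOME)/.local/bin") << '\n';  // e.g. /home/user/.local/bin
  std::cout << expand_env("$$(HOME) is literal") << '\n'; // $(HOME) is literal
}
```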

View File

@@ -1,117 +0,0 @@
######### Linux ##############
Requirements:
Ubuntu 19.10: cmake libssl-dev libavdevice-dev libboost-thread-dev libboost-filesystem-dev libboost-log-dev libpulse-dev libopus-dev libxtst-dev libx11-dev libxfixes-dev libevdev-dev libxcb1-dev libxcb-shm0-dev libxcb-xfixes0-dev
Compilation:
* git clone https://github.com/loki-47-6F-64/sunshine.git --recurse-submodules
* cd sunshine && mkdir build && cd build
* cmake ..
* make
Setup:
* sunshine needs access to uinput to create mouse and gamepad events:
* Add user to group 'input': "usermod -a -G input username"
* Create a file: "/etc/udev/rules.d/85-sunshine-input.rules"
* The contents of the file is as follows:
KERNEL=="uinput", GROUP="input", mode="0660"
* assets/sunshine.conf is an example configuration file. Modify it as you see fit and use it by running: "sunshine path/to/sunshine.conf"
* path/to/build/dir/sunshine.service is used to start sunshine in the background:
* cp sunshine.service $HOME/.config/systemd/user/
* Modify $HOME/.config/systemd/user/sunshine.conf to point to the sunshine executable
* systemctl --user start sunshine
* assets/apps.json is an example of a list of applications that are started just before running a stream:
* See below for a detailed explanation
Troubleshooting:
* If you get "Could not create Sunshine Gamepad: Permission Denied", ensure you are part of the group "input":
* groups
* If Sunshine sends audio from the microphone instead of the speaker, try the following steps:
* pacmd list-sources | grep "name:"
* Copy the name to the configuration option "audio_sink"
* restart sunshine
######### Windows 10 ############
Requirements:
MSYS2 : mingw-w64-x86_64-openssl mingw-w64-x86_64-cmake mingw-w64-x86_64-toolchain mingw-w64-x86_64-ffmpeg mingw-w64-x86_64-boost
Compilation:
* git clone https://github.com/loki-47-6F-64/sunshine.git --recurse-submodules
* cd sunshine && mkdir build && cd build
* cmake -G"Unix Makefiles" ..
* make
Setup:
* <optional> Gamepad support: Download and run 'ViGEmBus_Setup_1.16.116.exe' from [https://github.com/ViGEm/ViGEmBus/releases]
== Static build ==
Requirements:
MSYS2 : mingw-w64-x86_64-openssl mingw-w64-x86_64-cmake mingw-w64-x86_64-toolchain mingw-w64-x86_64-ffmpeg mingw-w64-x86_64-boost git-lfs
Compilation:
* git lfs install
* git clone https://github.com/loki-47-6F-64/sunshine.git --recurse-submodules
* cd sunshine && mkdir build && cd build
* cmake -DSUNSHINE_STANDALONE=ON -DSUNSHINE_ASSETS_DIR=assets -G"Unix Makefiles" ..
* make
######### Common #############
Usage:
* run "sunshine path/to/sunshine.conf"
* In Moonlight: Add PC manually
* When Moonlight requests that you enter the correct pin on sunshine:
wget xxx.xxx.xxx.xxx:47989/pin/xxxx -- where the first few x's are substituted by the ip of Sunshine and the final 4 x's are substituted by the pin
or
Type in the URL bar of your browser: xxx.xxx.xxx.xxx:47989/pin/xxxx -- where the first few x's are substituted by the ip of Sunshine and the final 4 x's are substituted by the pin
* Click on one of the Applications listed
* Have fun :)
Note:
* The Windows key is not passed through by Moonlight, therefore Sunshine maps Right-Alt key to the Windows key
* If you set Video Bitrate to 0.5Mb/s:
* Sunshine will use CRF or QP to control the quality of the stream. (See example configuration file for more details)
* This is less CPU intensive and it has lower average bandwidth requirements compared to manually setting bitrate to acceptable quality
* However, it has higher peak bitrates, forcing Sunshine to drop entire frames when streaming 1080P due to their size.
* When this happens, the video portion of the stream appears to be frozen.
* This is rare enough that using this for the desktop environment is tolerable (in my opinion), however for gaming not so much.
Credits:
* Simple-Web-Server [https://gitlab.com/eidheim/Simple-Web-Server]
* Moonlight [https://github.com/moonlight-stream]
* Looking-Glass [https://github.com/gnif/LookingGlass] (For showing me how to properly capture frames on Windows, saving me a lot of time :)
Application List:
* You can use Environment variables in place of values
* $(HOME) will be replaced by the value of $HOME
* $$ will be replaced by $ --> $$(HOME) will be replaced by $(HOME)
* env: Adds or overwrites Environment variables for the commands/applications run by Sunshine.
* "Variable name":"Variable value"
* apps: The list of applications
* name: Self explanatory
* output <optional>: The file where the output of the command is stored
* If it is not specified, the output is ignored
* prep-cmd: A list of commands to be run before/after the application
* If any of the prep-commands fail, starting the application is aborted
* do: Run before the application
* If it fails, all 'undo' commands of the previously succeeded 'do' commands are run
* undo <optional>: Run after the application has terminated
* This should not fail considering it is supposed to undo the 'do' commands.
* If it fails, Sunshine is terminated
* cmd <optional>: The main application
* If not specified, a process is started that sleeps indefinitely
When an application is started, if there is an application already running, it will be terminated.
When the application has been shutdown, the stream shuts down as well.
In addition to the apps listed, one app "Desktop" is hardcoded into Sunshine. It does not start an application, instead it simply starts a stream.

View File

@@ -105,20 +105,64 @@
# Increasing the value slightly reduces encoding efficiency, but the tradeoff is usually
# worth it to gain the use of more CPU cores for encoding. The ideal value is the lowest
# value that can reliably encode at your desired streaming settings on your hardware.
# min_threads = 2
# min_threads = 1
# Allows the client to request HEVC Main or HEVC Main10 video streams.
# HEVC is more CPU-intensive to encode, so enabling this may reduce performance.
# If set to 0 (default), Sunshine will not advertise support for HEVC
# If set to 1, Sunshine will advertise support for HEVC Main profile
# If set to 2, Sunshine will advertise support for HEVC Main and Main10 (HDR) profiles
# hevc_mode = 2
# HEVC is more CPU-intensive to encode, so enabling this may reduce performance when using software encoding.
# If set to 0 (default), Sunshine will specify support for HEVC based on encoder
# If set to 1, Sunshine will not advertise support for HEVC
# If set to 2, Sunshine will advertise support for HEVC Main profile
# If set to 3, Sunshine will advertise support for HEVC Main and Main10 (HDR) profiles
# hevc_mode = 0
# Force a specific encoder, otherwise Sunshine will use the first encoder that is available
# supported encoders:
# nvenc
# software
#
# encoder = nvenc
##################################### Software #####################################
# See x264 --fullhelp for the different presets
# preset = superfast
# tune = zerolatency
# sw_preset = superfast
# sw_tune = zerolatency
#
##################################### NVENC #####################################
###### presets ###########
# default
# hp -- high performance
# hq -- high quality
# slow -- hq 2 passes
# medium -- hq 1 pass
# fast -- hp 1 pass
# bd
# ll -- low latency
# llhq
# llhp
# lossless
# losslesshp
##########################
# nv_preset = llhq
#
####### rate control #####
# auto -- let ffmpeg decide rate control
# constqp -- constant QP mode
# vbr -- variable bitrate
# cbr -- constant bitrate
# cbr_hq -- cbr high quality
# cbr_ld_hq -- cbr low delay high quality
# vbr_hq -- vbr high quality
##########################
# nv_rc = auto
###### h264 entropy ######
# auto -- let ffmpeg nvenc decide the entropy encoding
# cabac
# cavlc
##########################
# nv_coder = auto
##############################################
# Some configurable parameters are merely toggles for specific features
# The first occurrence turns it on, the second occurrence turns it off, the third occurrence turns it on again, and so on

View File

@@ -15,17 +15,97 @@
#define APPS_JSON_PATH SUNSHINE_ASSETS_DIR "/" APPS_JSON
namespace config {
using namespace std::literals;
namespace nv {
enum preset_e : int {
_default = 0,
slow,
medium,
fast,
hp,
hq,
bd,
ll_default,
llhq,
llhp,
lossless_default, // lossless presets must be the last ones
lossless_hp,
};
enum rc_e : int {
constqp = 0x0, /**< Constant QP mode */
vbr = 0x1, /**< Variable bitrate mode */
cbr = 0x2, /**< Constant bitrate mode */
cbr_ld_hq = 0x8, /**< low-delay CBR, high quality */
cbr_hq = 0x10, /**< CBR, high quality (slower) */
vbr_hq = 0x20 /**< VBR, high quality (slower) */
};
enum coder_e : int {
_auto = 0,
cabac,
cavlc
};
std::optional<preset_e> preset_from_view(const std::string_view &preset) {
#define _CONVERT_(x) if(preset == #x##sv) return x
_CONVERT_(slow);
_CONVERT_(medium);
_CONVERT_(fast);
_CONVERT_(hp);
_CONVERT_(bd);
_CONVERT_(ll_default);
_CONVERT_(llhq);
_CONVERT_(llhp);
_CONVERT_(lossless_default);
_CONVERT_(lossless_hp);
if(preset == "default"sv) return _default;
#undef _CONVERT_
return std::nullopt;
}
std::optional<rc_e> rc_from_view(const std::string_view &rc) {
#define _CONVERT_(x) if(rc == #x##sv) return x
_CONVERT_(constqp);
_CONVERT_(vbr);
_CONVERT_(cbr);
_CONVERT_(cbr_hq);
_CONVERT_(vbr_hq);
_CONVERT_(cbr_ld_hq);
#undef _CONVERT_
return std::nullopt;
}
int coder_from_view(const std::string_view &coder) {
if(coder == "auto"sv) return _auto;
if(coder == "cabac"sv || coder == "ac"sv) return cabac;
if(coder == "cavlc"sv || coder == "vlc"sv) return cavlc;
return -1;
}
}
video_t video {
0, // crf
28, // qp
2, // min_threads
0, // hevc_mode
"superfast"s, // preset
"zerolatency"s, // tune
1, // min_threads
{
"superfast"s, // preset
"zerolatency"s, // tune
}, // software
{
nv::llhq,
std::nullopt,
-1
}, // nv
{}, // encoder
{}, // adapter_name
{} // output_name
{} // output_name
};
audio_t audio {};
@@ -138,6 +218,37 @@ void int_f(std::unordered_map<std::string, std::string> &vars, const std::string
vars.erase(it);
}
void int_f(std::unordered_map<std::string, std::string> &vars, const std::string &name, std::optional<int> &input) {
auto it = vars.find(name);
if(it == std::end(vars)) {
return;
}
auto &val = it->second;
input = util::from_chars(&val[0], &val[0] + val.size());
vars.erase(it);
}
template<class F>
void int_f(std::unordered_map<std::string, std::string> &vars, const std::string &name, int &input, F &&f) {
std::string tmp;
string_f(vars, name, tmp);
if(!tmp.empty()) {
input = f(tmp);
}
}
template<class F>
void int_f(std::unordered_map<std::string, std::string> &vars, const std::string &name, std::optional<int> &input, F &&f) {
std::string tmp;
string_f(vars, name, tmp);
if(!tmp.empty()) {
input = f(tmp);
}
}
void int_between_f(std::unordered_map<std::string, std::string> &vars, const std::string &name, int &input, const std::pair<int, int> &range) {
int temp = input;
@@ -149,6 +260,28 @@ void int_between_f(std::unordered_map<std::string, std::string> &vars, const std
}
}
bool to_bool(std::string &boolean) {
std::transform(std::begin(boolean), std::end(boolean), std::begin(boolean), [](char ch) { return (char)std::tolower(ch); }); // lower-case the string in place
return
boolean == "true"sv ||
boolean == "yes"sv ||
boolean == "enable"sv ||
(std::find(std::begin(boolean), std::end(boolean), '1') != std::end(boolean));
}
void bool_f(std::unordered_map<std::string, std::string> &vars, const std::string &name, int &input) {
std::string tmp;
string_restricted_f(vars, name, tmp, {
"enable"sv, "dis"
});
if(tmp.empty()) {
return;
}
input = to_bool(tmp) ? 1 : 0;
}
void print_help(const char *name) {
std::cout <<
"Usage: "sv << name << " [options] [/path/to/configuration_file]"sv << std::endl <<
@@ -190,10 +323,14 @@ void apply_config(std::unordered_map<std::string, std::string> &&vars) {
int_f(vars, "qp", video.qp);
int_f(vars, "min_threads", video.min_threads);
int_between_f(vars, "hevc_mode", video.hevc_mode, {
0, 2
0, 3
});
string_f(vars, "preset", video.preset);
string_f(vars, "tune", video.tune);
string_f(vars, "sw_preset", video.sw.preset);
string_f(vars, "sw_tune", video.sw.tune);
int_f(vars, "nv_preset", video.nv.preset, nv::preset_from_view);
int_f(vars, "nv_rc", video.nv.preset, nv::rc_from_view);
int_f(vars, "nv_coder", video.nv.coder, nv::coder_from_view);
string_f(vars, "encoder", video.encoder);
string_f(vars, "adapter_name", video.adapter_name);
string_f(vars, "output_name", video.output_name);
@@ -327,8 +464,8 @@ int parse(int argc, char *argv[]) {
std::istreambuf_iterator<char>()
});
for(auto &var : cmd_vars) {
vars.emplace(std::move(var));
for(auto &[name,value] : cmd_vars) {
vars.insert_or_assign(std::move(name), std::move(value));
}
apply_config(std::move(vars));

View File

@@ -4,6 +4,7 @@
#include <chrono>
#include <string>
#include <bitset>
#include <optional>
namespace config {
struct video_t {
@@ -11,12 +12,21 @@ struct video_t {
int crf; // higher == more compression and less quality
int qp; // higher == more compression and less quality, ignored if crf != 0
int min_threads; // Minimum number of threads/slices for CPU encoding
int hevc_mode;
std::string preset;
std::string tune;
int min_threads; // Minimum number of threads/slices for CPU encoding
struct {
std::string preset;
std::string tune;
} sw;
struct {
std::optional<int> preset;
std::optional<int> rc;
int coder;
} nv;
std::string encoder;
std::string adapter_name;
std::string output_name;
};

View File

@@ -5,7 +5,6 @@
#include "process.h"
#include <thread>
#include <filesystem>
#include <iostream>
#include <csignal>
@@ -13,7 +12,9 @@
#include <boost/log/sinks.hpp>
#include <boost/log/expressions.hpp>
#include <boost/log/sources/severity_logger.hpp>
#include <boost/log/attributes/clock.hpp>
#include "video.h"
#include "input.h"
#include "nvhttp.h"
#include "rtsp.h"
@@ -23,6 +24,7 @@
#include "platform/common.h"
extern "C" {
#include <rs.h>
#include <libavutil/log.h>
}
using namespace std::literals;
@@ -68,13 +70,19 @@ int main(int argc, char *argv[]) {
return 0;
}
if(config::sunshine.min_log_level >= 2) {
av_log_set_level(AV_LOG_QUIET);
}
sink = boost::make_shared<text_sink>();
boost::shared_ptr<std::ostream> stream { &std::cout, NoDelete {} };
sink->locked_backend()->add_stream(stream);
sink->set_filter(severity >= config::sunshine.min_log_level);
sink->set_formatter([severity="Severity"s](const bl::record_view &view, bl::formatting_ostream &os) {
sink->set_formatter([message="Message"s, severity="Severity"s](const bl::record_view &view, bl::formatting_ostream &os) {
constexpr int DATE_BUFFER_SIZE = 21 +2 +1; // Full string plus ": \0"
auto log_level = view.attribute_values()[severity].extract<int>().get();
std::string_view log_type;
@@ -99,7 +107,11 @@ int main(int argc, char *argv[]) {
break;
};
os << log_type << view.attribute_values()["Message"].extract<std::string>();
char _date[DATE_BUFFER_SIZE];
std::time_t t = std::time(nullptr);
strftime(_date, DATE_BUFFER_SIZE, "[%Y:%m:%d:%H:%M:%S]: ", std::localtime(&t));
os << _date << log_type << view.attribute_values()[message].extract<std::string>();
});
bl::core::get()->add_sink(sink);
@@ -128,6 +140,9 @@ int main(int argc, char *argv[]) {
auto deinit_guard = platf::init();
input::init();
reed_solomon_init();
if(video::init()) {
return 2;
}
task_pool.start(1);

View File

@@ -33,7 +33,7 @@ constexpr auto PORT_HTTP = 47989;
constexpr auto PORT_HTTPS = 47984;
constexpr auto VERSION = "7.1.400.0";
constexpr auto GFE_VERSION = "2.0.0.1";
constexpr auto GFE_VERSION = "3.12.0.1";
namespace fs = std::filesystem;
namespace pt = boost::property_tree;
@@ -168,7 +168,15 @@ void update_id_client(const std::string &uniqueID, std::string &&cert, op_e op)
}
void getservercert(pair_session_t &sess, pt::ptree &tree, const std::string &pin) {
auto salt = util::from_hex<std::array<uint8_t, 16>>(sess.async_insert_pin.salt, true);
if(sess.async_insert_pin.salt.size() < 32) {
tree.put("root.paired", 0);
tree.put("root.<xmlattr>.status_code", 400);
return;
}
std::string_view salt_view { sess.async_insert_pin.salt.data(), 32 };
auto salt = util::from_hex<std::array<uint8_t, 16>>(salt_view, true);
auto key = crypto::gen_aes_key(*salt, pin);
sess.cipher_key = std::make_unique<crypto::aes_t>(key);
@@ -464,13 +472,13 @@ void serverinfo(std::shared_ptr<typename SimpleWeb::ServerBase<T>::Response> res
tree.put("root.GfeVersion", GFE_VERSION);
tree.put("root.uniqueid", unique_id);
tree.put("root.mac", platf::get_mac_address(request->local_endpoint_address()));
tree.put("root.MaxLumaPixelsHEVC", config::video.hevc_mode > 0 ? "1869449984" : "0");
tree.put("root.MaxLumaPixelsHEVC", config::video.hevc_mode > 1 ? "1869449984" : "0");
tree.put("root.LocalIP", request->local_endpoint_address());
if(config::video.hevc_mode == 2) {
if(config::video.hevc_mode == 3) {
tree.put("root.ServerCodecModeSupport", "3843");
}
else if(config::video.hevc_mode == 1) {
else if(config::video.hevc_mode == 2) {
tree.put("root.ServerCodecModeSupport", "259");
}
else {
@@ -484,7 +492,7 @@ void serverinfo(std::shared_ptr<typename SimpleWeb::ServerBase<T>::Response> res
auto current_appid = proc::proc.running();
tree.put("root.PairStatus", pair_status);
tree.put("root.currentgame", current_appid >= 0 ? current_appid + 1 : 0);
tree.put("root.state", "_SERVER_BUSY");
tree.put("root.state", current_appid >= 0 ? "_SERVER_BUSY" : "_SERVER_FREE");
std::ostringstream data;
@@ -522,7 +530,7 @@ void applist(resp_https_t response, req_https_t request) {
for(auto &proc : proc::proc.get_apps()) {
pt::ptree app;
app.put("IsHdrSupported"s, config::video.hevc_mode == 2 ? 1 : 0);
app.put("IsHdrSupported"s, config::video.hevc_mode == 3 ? 1 : 0);
app.put("AppTitle"s, proc.name);
app.put("ID"s, ++x);
@@ -541,8 +549,6 @@ void launch(resp_https_t response, req_https_t request) {
response->write(data.str());
});
BOOST_LOG(fatal) << stream::session_count();
if(stream::session_count() == config::stream.channels) {
tree.put("root.resume", 0);
tree.put("root.<xmlattr>.status_code", 503);
@@ -601,7 +607,6 @@ void resume(resp_https_t response, req_https_t request) {
// It is possible that due a race condition that this if-statement gives a false negative,
// that is automatically resolved in rtsp_server_t
if(stream::session_count() == config::stream.channels) {
BOOST_LOG(fatal) << stream::session_count();
tree.put("root.resume", 0);
tree.put("root.<xmlattr>.status_code", 503);

View File

@@ -6,6 +6,7 @@
#define SUNSHINE_COMMON_H
#include <string>
#include <mutex>
#include "sunshine/utility.h"
struct sockaddr;
@@ -28,6 +29,20 @@ constexpr std::uint16_t B = 0x2000;
constexpr std::uint16_t X = 0x4000;
constexpr std::uint16_t Y = 0x8000;
enum class dev_type_e {
none,
dxgi,
unknown
};
enum class pix_fmt_e {
yuv420p,
yuv420p10,
nv12,
p010,
unknown
};
struct gamepad_state_t {
std::uint16_t buttonFlags;
std::uint8_t lt;
@@ -58,6 +73,19 @@ public:
virtual ~img_t() = default;
};
struct hwdevice_t {
void *data {};
platf::img_t *img {};
virtual int convert(platf::img_t &img) {
return -1;
}
virtual void set_colorspace(std::uint32_t colorspace, std::uint32_t color_range) {};
virtual ~hwdevice_t() = default;
};
enum class capture_e : int {
ok,
reinit,
@@ -67,10 +95,18 @@ enum class capture_e : int {
class display_t {
public:
virtual capture_e snapshot(img_t *img, bool cursor) = 0;
virtual std::unique_ptr<img_t> alloc_img() = 0;
virtual capture_e snapshot(img_t *img, std::chrono::milliseconds timeout, bool cursor) = 0;
virtual std::shared_ptr<img_t> alloc_img() = 0;
virtual int dummy_img(img_t *img) = 0;
virtual std::shared_ptr<hwdevice_t> make_hwdevice(int width, int height, pix_fmt_e pix_fmt) {
return std::make_shared<hwdevice_t>();
}
virtual ~display_t() = default;
int width, height;
};
class mic_t {
@@ -91,7 +127,7 @@ std::string from_sockaddr(const sockaddr *const);
std::pair<std::uint16_t, std::string> from_sockaddr_ex(const sockaddr *const);
std::unique_ptr<mic_t> microphone(std::uint32_t sample_rate);
std::unique_ptr<display_t> display();
std::shared_ptr<display_t> display(dev_type_e hwdevice_type);
input_t input();
void move_mouse(input_t &input, int deltaX, int deltaY);
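Taken together, the revised `display_t`/`hwdevice_t` interface is intended to be driven roughly as follows. This is a hedged sketch rather than code from this changeset: the `platf::` names are the ones declared in the header above, while the loop structure, the 500ms timeout, and the `capture_loop` function itself are assumptions about typical usage.
```cpp
#include <chrono>
#include <memory>

#include "sunshine/platform/common.h"

using namespace std::literals;

// Illustrative only: drive the new snapshot(img, timeout, cursor) API.
int capture_loop() {
  // dev_type_e::none requests a CPU-backed display; dev_type_e::dxgi would ask
  // for a D3D11-backed one suitable for make_hwdevice() on Windows.
  auto disp = platf::display(platf::dev_type_e::none);
  if(!disp) {
    return -1;
  }

  auto img = disp->alloc_img();
  while(true) {
    switch(disp->snapshot(img.get(), 500ms, true)) {
      case platf::capture_e::ok:
        // hand img off to the encoder here
        break;
      case platf::capture_e::timeout:
        // nothing changed on screen within the timeout; just poll again
        continue;
      case platf::capture_e::reinit:
        // display mode changed; recreate the display and its images
        disp = platf::display(platf::dev_type_e::none);
        if(!disp) {
          return -1;
        }
        img = disp->alloc_img();
        break;
      default:
        return -1; // capture_e::error
    }
  }
}
```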

View File

@@ -2,7 +2,7 @@
// Created by loki on 6/21/19.
//
#include "common.h"
#include "sunshine/platform/common.h"
#include <fstream>
#include <bitset>
@@ -145,14 +145,22 @@ struct x11_attr_t : public display_t {
xwindow = DefaultRootWindow(xdisplay.get());
refresh();
width = xattr.width;
height = xattr.height;
}
void refresh() {
XGetWindowAttributes(xdisplay.get(), xwindow, &xattr);
}
capture_e snapshot(img_t *img_out_base, bool cursor) override {
capture_e snapshot(img_t *img_out_base, std::chrono::milliseconds timeout, bool cursor) override {
refresh();
if(width != xattr.width || height != xattr.height) {
return capture_e::reinit;
}
XImage *img { XGetImage(
xdisplay.get(),
xwindow,
@@ -176,8 +184,13 @@ struct x11_attr_t : public display_t {
return capture_e::ok;
}
std::unique_ptr<img_t> alloc_img() override {
return std::make_unique<x11_img_t>();
std::shared_ptr<img_t> alloc_img() override {
return std::make_shared<x11_img_t>();
}
int dummy_img(img_t *img) override {
snapshot(img, 0s, true);
return 0;
}
xdisplay_t xdisplay;
@@ -210,8 +223,8 @@ struct shm_attr_t : public x11_attr_t {
while(!task_pool.cancel(refresh_task_id));
}
capture_e snapshot(img_t *img, bool cursor) override {
if(display->width_in_pixels != xattr.width || display->height_in_pixels != xattr.height) {
capture_e snapshot(img_t *img, std::chrono::milliseconds timeout, bool cursor) override {
if(width != xattr.width || height != xattr.height) {
return capture_e::reinit;
}
@@ -219,7 +232,7 @@ struct shm_attr_t : public x11_attr_t {
xcb.get(),
display->root,
0, 0,
display->width_in_pixels, display->height_in_pixels,
width, height,
~0,
XCB_IMAGE_FORMAT_Z_PIXMAP,
seg,
@@ -232,16 +245,6 @@ struct shm_attr_t : public x11_attr_t {
return capture_e::reinit;
}
if(img->width != display->width_in_pixels || img->height != display->height_in_pixels) {
delete[] img->data;
img->data = new std::uint8_t[frame_size()];
img->width = display->width_in_pixels;
img->height = display->height_in_pixels;
img->pixel_pitch = 4;
img->row_pitch = img->width * img->pixel_pitch;
}
std::copy_n((std::uint8_t*)data.data, frame_size(), img->data);
if(cursor) {
@@ -251,8 +254,19 @@ struct shm_attr_t : public x11_attr_t {
return capture_e::ok;
}
std::unique_ptr<img_t> alloc_img() override {
return std::make_unique<shm_img_t>();
std::shared_ptr<img_t> alloc_img() override {
auto img = std::make_shared<shm_img_t>();
img->width = width;
img->height = height;
img->pixel_pitch = 4;
img->row_pitch = img->pixel_pitch * width;
img->data = new std::uint8_t[height * img->row_pitch];
return img;
}
int dummy_img(platf::img_t *img) override {
return 0;
}
int init() {
@@ -287,11 +301,14 @@ struct shm_attr_t : public x11_attr_t {
return -1;
}
width = display->width_in_pixels;
height = display->height_in_pixels;
return 0;
}
std::uint32_t frame_size() {
return display->height_in_pixels * display->width_in_pixels * 4;
return width * height * 4;
}
};
@@ -315,8 +332,8 @@ struct mic_attr_t : public mic_t {
}
};
std::unique_ptr<display_t> shm_display() {
auto shm = std::make_unique<shm_attr_t>();
std::shared_ptr<display_t> shm_display() {
auto shm = std::make_shared<shm_attr_t>();
if(shm->init()) {
return nullptr;
@@ -325,11 +342,15 @@ std::unique_ptr<display_t> shm_display() {
return shm;
}
std::unique_ptr<display_t> display() {
std::shared_ptr<display_t> display(platf::dev_type_e hwdevice_type) {
if(hwdevice_type != platf::dev_type_e::none) {
return nullptr;
}
auto shm_disp = shm_display();
if(!shm_disp) {
return std::unique_ptr<display_t> { new x11_attr_t {} };
return std::make_shared<x11_attr_t>();
}
return shm_disp;

View File

@@ -9,7 +9,7 @@
#include <cstring>
#include <filesystem>
#include "common.h"
#include "sunshine/platform/common.h"
#include "sunshine/main.h"
#include "sunshine/utility.h"

View File

@@ -12,7 +12,7 @@
#include "sunshine/config.h"
#include "sunshine/main.h"
#include "common.h"
#include "sunshine/platform/common.h"
const CLSID CLSID_MMDeviceEnumerator = __uuidof(MMDeviceEnumerator);
const IID IID_IMMDeviceEnumerator = __uuidof(IMMDeviceEnumerator);

View File

@@ -0,0 +1,116 @@
//
// Created by loki on 4/23/20.
//
#ifndef SUNSHINE_DISPLAY_H
#define SUNSHINE_DISPLAY_H
#include <dxgi.h>
#include <d3d11.h>
#include <d3d11_4.h>
#include <d3dcommon.h>
#include <dxgi1_2.h>
#include "sunshine/utility.h"
#include "sunshine/platform/common.h"
namespace platf::dxgi {
extern const char *format_str[];
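// Deleter used by util::safe_ptr below: DXGI/D3D11 interfaces are COM objects, freed by calling Release() rather than delete.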
template<class T>
void Release(T *dxgi) {
dxgi->Release();
}
using factory1_t = util::safe_ptr<IDXGIFactory1, Release<IDXGIFactory1>>;
using dxgi_t = util::safe_ptr<IDXGIDevice, Release<IDXGIDevice>>;
using dxgi1_t = util::safe_ptr<IDXGIDevice1, Release<IDXGIDevice1>>;
using device_t = util::safe_ptr<ID3D11Device, Release<ID3D11Device>>;
using device_ctx_t = util::safe_ptr<ID3D11DeviceContext, Release<ID3D11DeviceContext>>;
using adapter_t = util::safe_ptr<IDXGIAdapter1, Release<IDXGIAdapter1>>;
using output_t = util::safe_ptr<IDXGIOutput, Release<IDXGIOutput>>;
using output1_t = util::safe_ptr<IDXGIOutput1, Release<IDXGIOutput1>>;
using dup_t = util::safe_ptr<IDXGIOutputDuplication, Release<IDXGIOutputDuplication>>;
using texture2d_t = util::safe_ptr<ID3D11Texture2D, Release<ID3D11Texture2D>>;
using resource_t = util::safe_ptr<IDXGIResource, Release<IDXGIResource>>;
using multithread_t = util::safe_ptr<ID3D11Multithread, Release<ID3D11Multithread>>;
namespace video {
using device_t = util::safe_ptr<ID3D11VideoDevice, Release<ID3D11VideoDevice>>;
using ctx_t = util::safe_ptr<ID3D11VideoContext, Release<ID3D11VideoContext>>;
using processor_t = util::safe_ptr<ID3D11VideoProcessor, Release<ID3D11VideoProcessor>>;
using processor_out_t = util::safe_ptr<ID3D11VideoProcessorOutputView, Release<ID3D11VideoProcessorOutputView>>;
using processor_in_t = util::safe_ptr<ID3D11VideoProcessorInputView, Release<ID3D11VideoProcessorInputView>>;
using processor_enum_t = util::safe_ptr<ID3D11VideoProcessorEnumerator, Release<ID3D11VideoProcessorEnumerator>>;
}
class hwdevice_t;
struct cursor_t {
std::vector<std::uint8_t> img_data;
DXGI_OUTDUPL_POINTER_SHAPE_INFO shape_info;
int x, y;
bool visible;
};
struct gpu_cursor_t {
texture2d_t texture;
LONG width, height;
};
class duplication_t {
public:
dup_t dup;
bool has_frame {};
capture_e next_frame(DXGI_OUTDUPL_FRAME_INFO &frame_info, std::chrono::milliseconds timeout, resource_t::pointer *res_p);
capture_e reset(dup_t::pointer dup_p = dup_t::pointer());
capture_e release_frame();
~duplication_t();
};
class display_base_t : public display_t {
public:
int init();
factory1_t factory;
adapter_t adapter;
output_t output;
device_t device;
device_ctx_t device_ctx;
duplication_t dup;
DXGI_FORMAT format;
D3D_FEATURE_LEVEL feature_level;
};
class display_ram_t : public display_base_t {
public:
capture_e snapshot(img_t *img, std::chrono::milliseconds timeout, bool cursor_visible) override;
std::shared_ptr<img_t> alloc_img() override;
int dummy_img(img_t *img) override;
int init();
cursor_t cursor;
D3D11_MAPPED_SUBRESOURCE img_info;
texture2d_t texture;
};
class display_vram_t : public display_base_t, public std::enable_shared_from_this<display_vram_t> {
public:
capture_e snapshot(img_t *img, std::chrono::milliseconds timeout, bool cursor_visible) override;
std::shared_ptr<img_t> alloc_img() override;
int dummy_img(img_t *img_base) override;
std::shared_ptr<platf::hwdevice_t> make_hwdevice(int width, int height, pix_fmt_e pix_fmt) override;
gpu_cursor_t cursor;
std::vector<hwdevice_t*> hwdevices;
};
}
#endif

View File

@@ -0,0 +1,420 @@
//
// Created by loki on 1/12/20.
//
#include <codecvt>
#include "sunshine/config.h"
#include "sunshine/main.h"
#include "sunshine/platform/common.h"
#include "display.h"
namespace platf {
using namespace std::literals;
}
namespace platf::dxgi {
capture_e duplication_t::next_frame(DXGI_OUTDUPL_FRAME_INFO &frame_info, std::chrono::milliseconds timeout, resource_t::pointer *res_p) {
auto capture_status = release_frame();
if(capture_status != capture_e::ok) {
return capture_status;
}
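// DXGI requires the previous frame to be released (done above) before AcquireNextFrame, which waits up to `timeout` milliseconds for a new desktop frame.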
auto status = dup->AcquireNextFrame(timeout.count(), &frame_info, res_p);
switch(status) {
case S_OK:
has_frame = true;
return capture_e::ok;
case DXGI_ERROR_WAIT_TIMEOUT:
return capture_e::timeout;
case WAIT_ABANDONED:
case DXGI_ERROR_ACCESS_LOST:
case DXGI_ERROR_ACCESS_DENIED:
return capture_e::reinit;
default:
BOOST_LOG(error) << "Couldn't acquire next frame [0x"sv << util::hex(status).to_string_view();
return capture_e::error;
}
}
capture_e duplication_t::reset(dup_t::pointer dup_p) {
auto capture_status = release_frame();
dup.reset(dup_p);
return capture_status;
}
capture_e duplication_t::release_frame() {
if(!has_frame) {
return capture_e::ok;
}
auto status = dup->ReleaseFrame();
switch (status) {
case S_OK:
has_frame = false;
return capture_e::ok;
case DXGI_ERROR_WAIT_TIMEOUT:
return capture_e::timeout;
case WAIT_ABANDONED:
case DXGI_ERROR_ACCESS_LOST:
case DXGI_ERROR_ACCESS_DENIED:
has_frame = false;
return capture_e::reinit;
default:
BOOST_LOG(error) << "Couldn't release frame [0x"sv << util::hex(status).to_string_view();
return capture_e::error;
}
}
duplication_t::~duplication_t() {
release_frame();
}
int display_base_t::init() {
/* Uncomment when use of IDXGIOutput5 is implemented
std::call_once(windows_cpp_once_flag, []() {
DECLARE_HANDLE(DPI_AWARENESS_CONTEXT);
const auto DPI_AWARENESS_CONTEXT_PER_MONITOR_AWARE_V2 = ((DPI_AWARENESS_CONTEXT)-4);
typedef BOOL (*User32_SetProcessDpiAwarenessContext)(DPI_AWARENESS_CONTEXT value);
auto user32 = LoadLibraryA("user32.dll");
auto f = (User32_SetProcessDpiAwarenessContext)GetProcAddress(user32, "SetProcessDpiAwarenessContext");
if(f) {
f(DPI_AWARENESS_CONTEXT_PER_MONITOR_AWARE_V2);
}
FreeLibrary(user32);
});
*/
dxgi::factory1_t::pointer factory_p {};
dxgi::adapter_t::pointer adapter_p {};
dxgi::output_t::pointer output_p {};
dxgi::device_t::pointer device_p {};
dxgi::device_ctx_t::pointer device_ctx_p {};
HRESULT status;
status = CreateDXGIFactory1(IID_IDXGIFactory1, (void**)&factory_p);
factory.reset(factory_p);
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to create DXGIFactory1 [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>, wchar_t> converter;
auto adapter_name = converter.from_bytes(config::video.adapter_name);
auto output_name = converter.from_bytes(config::video.output_name);
for(int x = 0; factory_p->EnumAdapters1(x, &adapter_p) != DXGI_ERROR_NOT_FOUND; ++x) {
dxgi::adapter_t adapter_tmp { adapter_p };
DXGI_ADAPTER_DESC1 adapter_desc;
adapter_tmp->GetDesc1(&adapter_desc);
if(!adapter_name.empty() && adapter_desc.Description != adapter_name) {
continue;
}
for(int y = 0; adapter_tmp->EnumOutputs(y, &output_p) != DXGI_ERROR_NOT_FOUND; ++y) {
dxgi::output_t output_tmp {output_p };
DXGI_OUTPUT_DESC desc;
output_tmp->GetDesc(&desc);
if(!output_name.empty() && desc.DeviceName != output_name) {
continue;
}
if(desc.AttachedToDesktop) {
output = std::move(output_tmp);
width = desc.DesktopCoordinates.right - desc.DesktopCoordinates.left;
height = desc.DesktopCoordinates.bottom - desc.DesktopCoordinates.top;
}
}
if(output) {
adapter = std::move(adapter_tmp);
break;
}
}
if(!output) {
BOOST_LOG(error) << "Failed to locate an output device"sv;
return -1;
}
D3D_FEATURE_LEVEL featureLevels[] {
D3D_FEATURE_LEVEL_12_1,
D3D_FEATURE_LEVEL_12_0,
D3D_FEATURE_LEVEL_11_1,
D3D_FEATURE_LEVEL_11_0,
D3D_FEATURE_LEVEL_10_1,
D3D_FEATURE_LEVEL_10_0,
D3D_FEATURE_LEVEL_9_3,
D3D_FEATURE_LEVEL_9_2,
D3D_FEATURE_LEVEL_9_1
};
status = adapter->QueryInterface(IID_IDXGIAdapter, (void**)&adapter_p);
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to query IDXGIAdapter interface"sv;
return -1;
}
status = D3D11CreateDevice(
adapter_p,
D3D_DRIVER_TYPE_UNKNOWN,
nullptr,
D3D11_CREATE_DEVICE_VIDEO_SUPPORT,
featureLevels, sizeof(featureLevels) / sizeof(D3D_FEATURE_LEVEL),
D3D11_SDK_VERSION,
&device_p,
&feature_level,
&device_ctx_p);
adapter_p->Release();
device.reset(device_p);
device_ctx.reset(device_ctx_p);
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to create D3D11 device [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
DXGI_ADAPTER_DESC adapter_desc;
adapter->GetDesc(&adapter_desc);
auto description = converter.to_bytes(adapter_desc.Description);
BOOST_LOG(info)
<< std::endl
<< "Device Description : " << description << std::endl
<< "Device Vendor ID : 0x"sv << util::hex(adapter_desc.VendorId).to_string_view() << std::endl
<< "Device Device ID : 0x"sv << util::hex(adapter_desc.DeviceId).to_string_view() << std::endl
<< "Device Video Mem : "sv << adapter_desc.DedicatedVideoMemory / 1048576 << " MiB"sv << std::endl
<< "Device Sys Mem : "sv << adapter_desc.DedicatedSystemMemory / 1048576 << " MiB"sv << std::endl
<< "Share Sys Mem : "sv << adapter_desc.SharedSystemMemory / 1048576 << " MiB"sv << std::endl
<< "Feature Level : 0x"sv << util::hex(feature_level).to_string_view() << std::endl
<< "Capture size : "sv << width << 'x' << height;
// Bump up thread priority
{
dxgi::dxgi_t::pointer dxgi_p {};
status = device->QueryInterface(IID_IDXGIDevice, (void**)&dxgi_p);
dxgi::dxgi_t dxgi { dxgi_p };
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to query DXGI interface from device [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
dxgi->SetGPUThreadPriority(7);
}
// Try to reduce latency
{
dxgi::dxgi1_t::pointer dxgi_p {};
status = device->QueryInterface(IID_IDXGIDevice, (void**)&dxgi_p);
dxgi::dxgi1_t dxgi { dxgi_p };
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to query DXGI interface from device [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
dxgi->SetMaximumFrameLatency(1);
}
//FIXME: Duplicate output on RX580 in combination with DOOM (2016) --> BSOD
//TODO: Use IDXGIOutput5 for improved performance
{
dxgi::output1_t::pointer output1_p {};
status = output->QueryInterface(IID_IDXGIOutput1, (void**)&output1_p);
dxgi::output1_t output1 {output1_p };
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to query IDXGIOutput1 from the output"sv;
return -1;
}
// We try this twice, in case we still get an error on reinitialization
for(int x = 0; x < 2; ++x) {
dxgi::dup_t::pointer dup_p {};
status = output1->DuplicateOutput((IUnknown*)device.get(), &dup_p);
if(SUCCEEDED(status)) {
dup.reset(dup_p);
break;
}
std::this_thread::sleep_for(200ms);
}
if(FAILED(status)) {
BOOST_LOG(error) << "DuplicateOutput Failed [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
}
DXGI_OUTDUPL_DESC dup_desc;
dup.dup->GetDesc(&dup_desc);
format = dup_desc.ModeDesc.Format;
BOOST_LOG(debug) << "Source format ["sv << format_str[dup_desc.ModeDesc.Format] << ']';
return 0;
}
const char *format_str[] = {
"DXGI_FORMAT_UNKNOWN",
"DXGI_FORMAT_R32G32B32A32_TYPELESS",
"DXGI_FORMAT_R32G32B32A32_FLOAT",
"DXGI_FORMAT_R32G32B32A32_UINT",
"DXGI_FORMAT_R32G32B32A32_SINT",
"DXGI_FORMAT_R32G32B32_TYPELESS",
"DXGI_FORMAT_R32G32B32_FLOAT",
"DXGI_FORMAT_R32G32B32_UINT",
"DXGI_FORMAT_R32G32B32_SINT",
"DXGI_FORMAT_R16G16B16A16_TYPELESS",
"DXGI_FORMAT_R16G16B16A16_FLOAT",
"DXGI_FORMAT_R16G16B16A16_UNORM",
"DXGI_FORMAT_R16G16B16A16_UINT",
"DXGI_FORMAT_R16G16B16A16_SNORM",
"DXGI_FORMAT_R16G16B16A16_SINT",
"DXGI_FORMAT_R32G32_TYPELESS",
"DXGI_FORMAT_R32G32_FLOAT",
"DXGI_FORMAT_R32G32_UINT",
"DXGI_FORMAT_R32G32_SINT",
"DXGI_FORMAT_R32G8X24_TYPELESS",
"DXGI_FORMAT_D32_FLOAT_S8X24_UINT",
"DXGI_FORMAT_R32_FLOAT_X8X24_TYPELESS",
"DXGI_FORMAT_X32_TYPELESS_G8X24_UINT",
"DXGI_FORMAT_R10G10B10A2_TYPELESS",
"DXGI_FORMAT_R10G10B10A2_UNORM",
"DXGI_FORMAT_R10G10B10A2_UINT",
"DXGI_FORMAT_R11G11B10_FLOAT",
"DXGI_FORMAT_R8G8B8A8_TYPELESS",
"DXGI_FORMAT_R8G8B8A8_UNORM",
"DXGI_FORMAT_R8G8B8A8_UNORM_SRGB",
"DXGI_FORMAT_R8G8B8A8_UINT",
"DXGI_FORMAT_R8G8B8A8_SNORM",
"DXGI_FORMAT_R8G8B8A8_SINT",
"DXGI_FORMAT_R16G16_TYPELESS",
"DXGI_FORMAT_R16G16_FLOAT",
"DXGI_FORMAT_R16G16_UNORM",
"DXGI_FORMAT_R16G16_UINT",
"DXGI_FORMAT_R16G16_SNORM",
"DXGI_FORMAT_R16G16_SINT",
"DXGI_FORMAT_R32_TYPELESS",
"DXGI_FORMAT_D32_FLOAT",
"DXGI_FORMAT_R32_FLOAT",
"DXGI_FORMAT_R32_UINT",
"DXGI_FORMAT_R32_SINT",
"DXGI_FORMAT_R24G8_TYPELESS",
"DXGI_FORMAT_D24_UNORM_S8_UINT",
"DXGI_FORMAT_R24_UNORM_X8_TYPELESS",
"DXGI_FORMAT_X24_TYPELESS_G8_UINT",
"DXGI_FORMAT_R8G8_TYPELESS",
"DXGI_FORMAT_R8G8_UNORM",
"DXGI_FORMAT_R8G8_UINT",
"DXGI_FORMAT_R8G8_SNORM",
"DXGI_FORMAT_R8G8_SINT",
"DXGI_FORMAT_R16_TYPELESS",
"DXGI_FORMAT_R16_FLOAT",
"DXGI_FORMAT_D16_UNORM",
"DXGI_FORMAT_R16_UNORM",
"DXGI_FORMAT_R16_UINT",
"DXGI_FORMAT_R16_SNORM",
"DXGI_FORMAT_R16_SINT",
"DXGI_FORMAT_R8_TYPELESS",
"DXGI_FORMAT_R8_UNORM",
"DXGI_FORMAT_R8_UINT",
"DXGI_FORMAT_R8_SNORM",
"DXGI_FORMAT_R8_SINT",
"DXGI_FORMAT_A8_UNORM",
"DXGI_FORMAT_R1_UNORM",
"DXGI_FORMAT_R9G9B9E5_SHAREDEXP",
"DXGI_FORMAT_R8G8_B8G8_UNORM",
"DXGI_FORMAT_G8R8_G8B8_UNORM",
"DXGI_FORMAT_BC1_TYPELESS",
"DXGI_FORMAT_BC1_UNORM",
"DXGI_FORMAT_BC1_UNORM_SRGB",
"DXGI_FORMAT_BC2_TYPELESS",
"DXGI_FORMAT_BC2_UNORM",
"DXGI_FORMAT_BC2_UNORM_SRGB",
"DXGI_FORMAT_BC3_TYPELESS",
"DXGI_FORMAT_BC3_UNORM",
"DXGI_FORMAT_BC3_UNORM_SRGB",
"DXGI_FORMAT_BC4_TYPELESS",
"DXGI_FORMAT_BC4_UNORM",
"DXGI_FORMAT_BC4_SNORM",
"DXGI_FORMAT_BC5_TYPELESS",
"DXGI_FORMAT_BC5_UNORM",
"DXGI_FORMAT_BC5_SNORM",
"DXGI_FORMAT_B5G6R5_UNORM",
"DXGI_FORMAT_B5G5R5A1_UNORM",
"DXGI_FORMAT_B8G8R8A8_UNORM",
"DXGI_FORMAT_B8G8R8X8_UNORM",
"DXGI_FORMAT_R10G10B10_XR_BIAS_A2_UNORM",
"DXGI_FORMAT_B8G8R8A8_TYPELESS",
"DXGI_FORMAT_B8G8R8A8_UNORM_SRGB",
"DXGI_FORMAT_B8G8R8X8_TYPELESS",
"DXGI_FORMAT_B8G8R8X8_UNORM_SRGB",
"DXGI_FORMAT_BC6H_TYPELESS",
"DXGI_FORMAT_BC6H_UF16",
"DXGI_FORMAT_BC6H_SF16",
"DXGI_FORMAT_BC7_TYPELESS",
"DXGI_FORMAT_BC7_UNORM",
"DXGI_FORMAT_BC7_UNORM_SRGB",
"DXGI_FORMAT_AYUV",
"DXGI_FORMAT_Y410",
"DXGI_FORMAT_Y416",
"DXGI_FORMAT_NV12",
"DXGI_FORMAT_P010",
"DXGI_FORMAT_P016",
"DXGI_FORMAT_420_OPAQUE",
"DXGI_FORMAT_YUY2",
"DXGI_FORMAT_Y210",
"DXGI_FORMAT_Y216",
"DXGI_FORMAT_NV11",
"DXGI_FORMAT_AI44",
"DXGI_FORMAT_IA44",
"DXGI_FORMAT_P8",
"DXGI_FORMAT_A8P8",
"DXGI_FORMAT_B4G4R4A4_UNORM",
NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
"DXGI_FORMAT_P208",
"DXGI_FORMAT_V208",
"DXGI_FORMAT_V408"
};
}
namespace platf {
std::shared_ptr<display_t> display(dev_type_e hwdevice_type) {
if(hwdevice_type == dev_type_e::dxgi) {
auto disp = std::make_shared<dxgi::display_vram_t>();
if(!disp->init()) {
return disp;
}
}
else if(hwdevice_type == dev_type_e::none) {
auto disp = std::make_shared<dxgi::display_ram_t>();
if(!disp->init()) {
return disp;
}
}
return nullptr;
}
}

View File

@@ -0,0 +1,301 @@
#include "sunshine/main.h"
#include "display.h"
namespace platf {
using namespace std::literals;
}
namespace platf::dxgi {
struct img_t : public ::platf::img_t {
~img_t() override {
delete[] data;
data = nullptr;
}
};
void blend_cursor_monochrome(const cursor_t &cursor, img_t &img) {
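// A monochrome cursor shape stores a 1-bpp AND mask followed by a 1-bpp XOR mask of the same size, so the visible cursor height is half of shape_info.Height.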
int height = cursor.shape_info.Height / 2;
int width = cursor.shape_info.Width;
int pitch = cursor.shape_info.Pitch;
// img cursor.{x,y} < 0, skip parts of the cursor.img_data
auto cursor_skip_y = -std::min(0, cursor.y);
auto cursor_skip_x = -std::min(0, cursor.x);
// img cursor.{x,y} > img.{x,y}, truncate parts of the cursor.img_data
auto cursor_truncate_y = std::max(0, cursor.y - img.height);
auto cursor_truncate_x = std::max(0, cursor.x - img.width);
auto cursor_width = width - cursor_skip_x - cursor_truncate_x;
auto cursor_height = height - cursor_skip_y - cursor_truncate_y;
if(cursor_height > height || cursor_width > width) {
return;
}
auto img_skip_y = std::max(0, cursor.y);
auto img_skip_x = std::max(0, cursor.x);
auto cursor_img_data = cursor.img_data.data() + cursor_skip_y * pitch;
int delta_height = std::min(cursor_height - cursor_truncate_y, std::max(0, img.height - img_skip_y));
int delta_width = std::min(cursor_width - cursor_truncate_x, std::max(0, img.width - img_skip_x));
auto pixels_per_byte = width / pitch;
auto bytes_per_row = delta_width / pixels_per_byte;
auto img_data = (int*)img.data;
for(int i = 0; i < delta_height; ++i) {
auto and_mask = &cursor_img_data[i * pitch];
auto xor_mask = &cursor_img_data[(i + height) * pitch];
auto img_pixel_p = &img_data[(i + img_skip_y) * (img.row_pitch / img.pixel_pitch) + img_skip_x];
auto skip_x = cursor_skip_x;
for(int x = 0; x < bytes_per_row; ++x) {
for(auto bit = 0u; bit < 8; ++bit) {
if(skip_x > 0) {
--skip_x;
continue;
}
int and_ = *and_mask & (1 << (7 - bit)) ? -1 : 0;
int xor_ = *xor_mask & (1 << (7 - bit)) ? -1 : 0;
*img_pixel_p &= and_;
*img_pixel_p ^= xor_;
++img_pixel_p;
}
++and_mask;
++xor_mask;
}
}
}
void apply_color_alpha(int *img_pixel_p, int cursor_pixel) {
auto colors_out = (std::uint8_t*)&cursor_pixel;
auto colors_in = (std::uint8_t*)img_pixel_p;
//TODO: When use of IDXGIOutput5 is implemented, support different color formats
auto alpha = colors_out[3];
if(alpha == 255) {
*img_pixel_p = cursor_pixel;
}
else {
colors_in[0] = colors_out[0] + (colors_in[0] * (255 - alpha) + 255/2) / 255;
colors_in[1] = colors_out[1] + (colors_in[1] * (255 - alpha) + 255/2) / 255;
colors_in[2] = colors_out[2] + (colors_in[2] * (255 - alpha) + 255/2) / 255;
}
}
void apply_color_masked(int *img_pixel_p, int cursor_pixel) {
//TODO: When use of IDXGIOutput5 is implemented, support different color formats
auto alpha = ((std::uint8_t*)&cursor_pixel)[3];
if(alpha == 0xFF) {
*img_pixel_p ^= cursor_pixel;
}
else {
*img_pixel_p = cursor_pixel;
}
}
void blend_cursor_color(const cursor_t &cursor, img_t &img, const bool masked) {
int height = cursor.shape_info.Height;
int width = cursor.shape_info.Width;
int pitch = cursor.shape_info.Pitch;
// img cursor.y < 0, skip parts of the cursor.img_data
auto cursor_skip_y = -std::min(0, cursor.y);
auto cursor_skip_x = -std::min(0, cursor.x);
// img cursor.{x,y} > img.{x,y}, truncate parts of the cursor.img_data
auto cursor_truncate_y = std::max(0, cursor.y - img.height);
auto cursor_truncate_x = std::max(0, cursor.x - img.width);
auto img_skip_y = std::max(0, cursor.y);
auto img_skip_x = std::max(0, cursor.x);
auto cursor_width = width - cursor_skip_x - cursor_truncate_x;
auto cursor_height = height - cursor_skip_y - cursor_truncate_y;
if(cursor_height > height || cursor_width > width) {
return;
}
auto cursor_img_data = (int*)&cursor.img_data[cursor_skip_y * pitch];
int delta_height = std::min(cursor_height - cursor_truncate_y, std::max(0, img.height - img_skip_y));
int delta_width = std::min(cursor_width - cursor_truncate_x, std::max(0, img.width - img_skip_x));
auto img_data = (int*)img.data;
for(int i = 0; i < delta_height; ++i) {
auto cursor_begin = &cursor_img_data[i * cursor.shape_info.Width + cursor_skip_x];
auto cursor_end = &cursor_begin[delta_width];
auto img_pixel_p = &img_data[(i + img_skip_y) * (img.row_pitch / img.pixel_pitch) + img_skip_x];
std::for_each(cursor_begin, cursor_end, [&](int cursor_pixel) {
if(masked) {
apply_color_masked(img_pixel_p, cursor_pixel);
}
else {
apply_color_alpha(img_pixel_p, cursor_pixel);
}
++img_pixel_p;
});
}
}
void blend_cursor(const cursor_t &cursor, img_t &img) {
switch(cursor.shape_info.Type) {
case DXGI_OUTDUPL_POINTER_SHAPE_TYPE_COLOR:
blend_cursor_color(cursor, img, false);
break;
case DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MONOCHROME:
blend_cursor_monochrome(cursor, img);
break;
case DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MASKED_COLOR:
blend_cursor_color(cursor, img, true);
break;
default:
BOOST_LOG(warning) << "Unsupported cursor format ["sv << cursor.shape_info.Type << ']';
}
}
capture_e display_ram_t::snapshot(::platf::img_t *img_base, std::chrono::milliseconds timeout, bool cursor_visible) {
auto img = (img_t*)img_base;
HRESULT status;
DXGI_OUTDUPL_FRAME_INFO frame_info;
resource_t::pointer res_p {};
auto capture_status = dup.next_frame(frame_info, timeout, &res_p);
resource_t res{res_p};
if (capture_status != capture_e::ok) {
return capture_status;
}
if(frame_info.PointerShapeBufferSize > 0) {
auto &img_data = cursor.img_data;
img_data.resize(frame_info.PointerShapeBufferSize);
UINT dummy;
status = dup.dup->GetFramePointerShape(img_data.size(), img_data.data(), &dummy, &cursor.shape_info);
if (FAILED(status)) {
BOOST_LOG(error) << "Failed to get new pointer shape [0x"sv << util::hex(status).to_string_view() << ']';
return capture_e::error;
}
}
if(frame_info.LastMouseUpdateTime.QuadPart) {
cursor.x = frame_info.PointerPosition.Position.x;
cursor.y = frame_info.PointerPosition.Position.y;
cursor.visible = frame_info.PointerPosition.Visible;
}
// If frame has been updated
if (frame_info.LastPresentTime.QuadPart != 0) {
{
texture2d_t::pointer src_p {};
status = res->QueryInterface(IID_ID3D11Texture2D, (void **)&src_p);
texture2d_t src{src_p};
if (FAILED(status)) {
BOOST_LOG(error) << "Couldn't query interface [0x"sv << util::hex(status).to_string_view() << ']';
return capture_e::error;
}
//Copy from GPU to CPU
device_ctx->CopyResource(texture.get(), src.get());
}
if(img_info.pData) {
device_ctx->Unmap(texture.get(), 0);
img_info.pData = nullptr;
}
status = device_ctx->Map(texture.get(), 0, D3D11_MAP_READ, 0, &img_info);
if (FAILED(status)) {
BOOST_LOG(error) << "Failed to map texture [0x"sv << util::hex(status).to_string_view() << ']';
return capture_e::error;
}
}
const bool mouse_update =
(frame_info.LastMouseUpdateTime.QuadPart || frame_info.PointerShapeBufferSize > 0) &&
(cursor_visible && cursor.visible);
const bool update_flag = frame_info.LastPresentTime.QuadPart != 0 || mouse_update;
if(!update_flag) {
return capture_e::timeout;
}
std::copy_n((std::uint8_t*)img_info.pData, height * img_info.RowPitch, (std::uint8_t*)img->data);
if(cursor_visible && cursor.visible) {
blend_cursor(cursor, *img);
}
return capture_e::ok;
}
std::shared_ptr<platf::img_t> display_ram_t::alloc_img() {
auto img = std::make_shared<img_t>();
img->pixel_pitch = 4;
img->row_pitch = img->pixel_pitch * width;
img->width = width;
img->height = height;
img->data = new std::uint8_t[img->row_pitch * height];
return img;
}
int display_ram_t::dummy_img(platf::img_t *img) {
return 0;
}
int display_ram_t::init() {
if(display_base_t::init()) {
return -1;
}
D3D11_TEXTURE2D_DESC t {};
t.Width = width;
t.Height = height;
t.MipLevels = 1;
t.ArraySize = 1;
t.SampleDesc.Count = 1;
t.Usage = D3D11_USAGE_STAGING;
t.Format = format;
t.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
dxgi::texture2d_t::pointer tex_p {};
auto status = device->CreateTexture2D(&t, nullptr, &tex_p);
texture.reset(tex_p);
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to create texture [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
// map the texture simply to get the pitch and stride
status = device_ctx->Map(texture.get(), 0, D3D11_MAP_READ, 0, &img_info);
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to map the texture [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
return 0;
}
}
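Editor's note: the display_ram_t path above copies each duplicated desktop frame into a D3D11_USAGE_STAGING texture and then maps it for CPU access. A minimal, self-contained sketch of that read-back pattern follows; the helper name copy_frame_to_cpu and the use of std::vector are illustrative only and are not part of this changeset (snapshot() itself keeps the texture mapped between frames instead of unmapping every time).

#include <d3d11.h>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <vector>

// Copies a GPU texture into CPU memory through a STAGING texture.
// `staging` must have been created with D3D11_USAGE_STAGING and
// D3D11_CPU_ACCESS_READ, matching `src` in size and format.
bool copy_frame_to_cpu(ID3D11DeviceContext *ctx, ID3D11Texture2D *staging,
                       ID3D11Texture2D *src, std::vector<std::uint8_t> &out,
                       int height) {
  ctx->CopyResource(staging, src); // GPU-side copy into the mappable staging texture

  D3D11_MAPPED_SUBRESOURCE mapped {};
  if(FAILED(ctx->Map(staging, 0, D3D11_MAP_READ, 0, &mapped))) {
    return false;
  }

  // RowPitch may be larger than width * bytes-per-pixel, so size by RowPitch.
  out.resize(static_cast<std::size_t>(height) * mapped.RowPitch);
  std::memcpy(out.data(), mapped.pData, out.size());

  ctx->Unmap(staging, 0);
  return true;
}

The intermediate staging copy is needed because the frame handed out by desktop duplication lives in GPU-only memory and cannot be mapped directly.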

View File

@@ -0,0 +1,503 @@
#include "sunshine/main.h"
#include "display.h"
namespace platf {
using namespace std::literals;
}
namespace platf::dxgi {
struct img_d3d_t : public platf::img_t {
std::shared_ptr<platf::display_t> display;
texture2d_t texture;
~img_d3d_t() override = default;
};
util::buffer_t<std::uint8_t> make_cursor_image(util::buffer_t<std::uint8_t> &&img_data, DXGI_OUTDUPL_POINTER_SHAPE_INFO shape_info) {
switch(shape_info.Type) {
case DXGI_OUTDUPL_POINTER_SHAPE_TYPE_COLOR:
case DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MASKED_COLOR:
return std::move(img_data);
default:
break;
}
shape_info.Height /= 2;
util::buffer_t<std::uint8_t> cursor_img { shape_info.Width * shape_info.Height * 4 };
auto bytes = shape_info.Pitch * shape_info.Height;
auto pixel_begin = (std::uint32_t*)std::begin(cursor_img);
auto pixel_data = pixel_begin;
auto and_mask = std::begin(img_data);
auto xor_mask = std::begin(img_data) + bytes;
for(auto x = 0; x < bytes; ++x) {
for(auto c = 7; c >= 0; --c) {
auto bit = 1 << c;
auto color_type = ((*and_mask & bit) ? 1 : 0) + ((*xor_mask & bit) ? 2 : 0);
constexpr std::uint32_t black = 0xFF000000;
constexpr std::uint32_t white = 0xFFFFFFFF;
constexpr std::uint32_t transparent = 0;
switch(color_type) {
case 0: //black
*pixel_data = black;
break;
case 2: //white
*pixel_data = white;
break;
case 1: //transparent
{
*pixel_data = transparent;
break;
}
case 3: //inverse
{
auto top_p = pixel_data - shape_info.Width;
auto left_p = pixel_data - 1;
auto right_p = pixel_data + 1;
auto bottom_p = pixel_data + shape_info.Width;
// Get the x coordinate of the pixel
auto column = (pixel_data - pixel_begin) % shape_info.Width;
if(top_p >= pixel_begin && *top_p == transparent) {
*top_p = black;
}
if(column != 0 && left_p >= pixel_begin && *left_p == transparent) {
*left_p = black;
}
if(bottom_p < (std::uint32_t*)std::end(cursor_img)) {
*bottom_p = black;
}
if(column != shape_info.Width - 1) {
*right_p = black;
}
*pixel_data = white;
}
}
++pixel_data;
}
++and_mask;
++xor_mask;
}
return cursor_img;
}
class hwdevice_t : public platf::hwdevice_t {
public:
hwdevice_t(std::vector<hwdevice_t*> *hwdevices_p) : hwdevices_p { hwdevices_p } {}
hwdevice_t() = delete;
void set_cursor_pos(LONG rel_x, LONG rel_y, bool visible) {
cursor_visible = visible;
if(!visible) {
return;
}
LONG x = ((double)rel_x) * out_width / (double)in_width;
LONG y = ((double)rel_y) * out_height / (double)in_height;
// Ensure it's within bounds
auto left_out = std::min<LONG>(out_width, std::max<LONG>(0, x));
auto top_out = std::min<LONG>(out_height, std::max<LONG>(0, y));
auto right_out = std::max<LONG>(0, std::min<LONG>(out_width, x + cursor_scaled_width));
auto bottom_out = std::max<LONG>(0, std::min<LONG>(out_height, y + cursor_scaled_height));
auto left_in = std::max<LONG>(0, -rel_x);
auto top_in = std::max<LONG>(0, -rel_y);
auto right_in = std::min<LONG>(in_width - rel_x, cursor_width);
auto bottom_in = std::min<LONG>(in_height - rel_y, cursor_height);
RECT rect_in { left_in, top_in, right_in, bottom_in };
RECT rect_out { left_out, top_out, right_out, bottom_out };
ctx->VideoProcessorSetStreamSourceRect(processor.get(), 1, TRUE, &rect_in);
ctx->VideoProcessorSetStreamDestRect(processor.get(), 1, TRUE, &rect_out);
}
int set_cursor_texture(texture2d_t::pointer texture, LONG width, LONG height) {
D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC input_desc = { 0, (D3D11_VPIV_DIMENSION)D3D11_VPIV_DIMENSION_TEXTURE2D, { 0, 0 } };
video::processor_in_t::pointer processor_in_p;
auto status = device->CreateVideoProcessorInputView(texture, processor_e.get(), &input_desc, &processor_in_p);
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to create cursor VideoProcessorInputView [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
cursor_in.reset(processor_in_p);
cursor_width = width;
cursor_height = height;
cursor_scaled_width = ((double)width) / in_width * out_width;
cursor_scaled_height = ((double)height) / in_height * out_height;
return 0;
}
int convert(platf::img_t &img_base) override {
auto &img = (img_d3d_t&)img_base;
auto it = texture_to_processor_in.find(img.texture.get());
if(it == std::end(texture_to_processor_in)) {
D3D11_VIDEO_PROCESSOR_INPUT_VIEW_DESC input_desc = { 0, (D3D11_VPIV_DIMENSION)D3D11_VPIV_DIMENSION_TEXTURE2D, { 0, 0 } };
video::processor_in_t::pointer processor_in_p;
auto status = device->CreateVideoProcessorInputView(img.texture.get(), processor_e.get(), &input_desc, &processor_in_p);
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to create VideoProcessorInputView [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
it = texture_to_processor_in.emplace(img.texture.get(), processor_in_p).first;
}
auto &processor_in = it->second;
D3D11_VIDEO_PROCESSOR_STREAM stream[] {
{ TRUE, 0, 0, 0, 0, nullptr, processor_in.get(), nullptr },
{ TRUE, 0, 0, 0, 0, nullptr, cursor_in.get(), nullptr }
};
auto status = ctx->VideoProcessorBlt(processor.get(), processor_out.get(), 0, cursor_visible ? 2 : 1, stream);
if(FAILED(status)) {
BOOST_LOG(error) << "Failed size and color conversion [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
return 0;
}
void set_colorspace(std::uint32_t colorspace, std::uint32_t color_range) override {
colorspace |= (color_range >> 4);
ctx->VideoProcessorSetOutputColorSpace(processor.get(), (D3D11_VIDEO_PROCESSOR_COLOR_SPACE*)&colorspace);
}
int init(
std::shared_ptr<platf::display_t> display, device_t::pointer device_p, device_ctx_t::pointer device_ctx_p,
int in_width, int in_height, int out_width, int out_height,
pix_fmt_e pix_fmt
) {
HRESULT status;
cursor_visible = false;
platf::hwdevice_t::img = &img;
this->out_width = out_width;
this->out_height = out_height;
this->in_width = in_width;
this->in_height = in_height;
video::device_t::pointer vdevice_p;
status = device_p->QueryInterface(IID_ID3D11VideoDevice, (void**)&vdevice_p);
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to query ID3D11VideoDevice interface [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
device.reset(vdevice_p);
video::ctx_t::pointer ctx_p;
status = device_ctx_p->QueryInterface(IID_ID3D11VideoContext, (void**)&ctx_p);
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to query ID3D11VideoContext interface [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
ctx.reset(ctx_p);
D3D11_VIDEO_PROCESSOR_CONTENT_DESC contentDesc {
D3D11_VIDEO_FRAME_FORMAT_PROGRESSIVE,
{ 1, 1 }, (UINT)in_width, (UINT)in_height,
{ 1, 1 }, (UINT)out_width, (UINT)out_height,
D3D11_VIDEO_USAGE_OPTIMAL_QUALITY
};
video::processor_enum_t::pointer vp_e_p;
status = device->CreateVideoProcessorEnumerator(&contentDesc, &vp_e_p);
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to create video processor enumerator [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
processor_e.reset(vp_e_p);
video::processor_t::pointer processor_p;
status = device->CreateVideoProcessor(processor_e.get(), 0, &processor_p);
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to create video processor [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
processor.reset(processor_p);
D3D11_TEXTURE2D_DESC t {};
t.Width = out_width;
t.Height = out_height;
t.MipLevels = 1;
t.ArraySize = 1;
t.SampleDesc.Count = 1;
t.Usage = D3D11_USAGE_DEFAULT;
t.Format = pix_fmt == pix_fmt_e::nv12 ? DXGI_FORMAT_NV12 : DXGI_FORMAT_P010;
t.BindFlags = D3D11_BIND_RENDER_TARGET | D3D11_BIND_VIDEO_ENCODER;
dxgi::texture2d_t::pointer tex_p {};
status = device_p->CreateTexture2D(&t, nullptr, &tex_p);
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to create video output texture [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
img.texture.reset(tex_p);
img.display = std::move(display);
img.width = out_width;
img.height = out_height;
img.data = (std::uint8_t*)tex_p;
img.row_pitch = out_width;
img.pixel_pitch = 1;
D3D11_VIDEO_PROCESSOR_OUTPUT_VIEW_DESC output_desc { D3D11_VPOV_DIMENSION_TEXTURE2D, 0 };
video::processor_out_t::pointer processor_out_p;
status = device->CreateVideoProcessorOutputView(img.texture.get(), processor_e.get(), &output_desc, &processor_out_p);
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to create VideoProcessorOutputView [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
processor_out.reset(processor_out_p);
// Tell the video processor to enable alpha for the cursor stream (index 1)
ctx->VideoProcessorSetStreamAlpha(processor.get(), 1, TRUE, 1.0f);
device_p->AddRef();
data = device_p;
return 0;
}
~hwdevice_t() override {
if(data) {
((ID3D11Device*)data)->Release();
}
auto it = std::find(std::begin(*hwdevices_p), std::end(*hwdevices_p), this);
if(it != std::end(*hwdevices_p)) {
hwdevices_p->erase(it);
}
}
img_d3d_t img;
video::device_t device;
video::ctx_t ctx;
video::processor_enum_t processor_e;
video::processor_t processor;
video::processor_out_t processor_out;
std::unordered_map<texture2d_t::pointer, video::processor_in_t> texture_to_processor_in;
video::processor_in_t cursor_in;
bool cursor_visible;
LONG cursor_width, cursor_height;
LONG cursor_scaled_width, cursor_scaled_height;
LONG in_width, in_height;
double out_width, out_height;
std::vector<hwdevice_t*> *hwdevices_p;
};
capture_e display_vram_t::snapshot(platf::img_t *img_base, std::chrono::milliseconds timeout, bool cursor_visible) {
auto img = (img_d3d_t*)img_base;
HRESULT status;
DXGI_OUTDUPL_FRAME_INFO frame_info;
resource_t::pointer res_p {};
auto capture_status = dup.next_frame(frame_info, timeout, &res_p);
resource_t res{res_p};
if (capture_status != capture_e::ok) {
return capture_status;
}
const bool update_flag =
frame_info.AccumulatedFrames != 0 || frame_info.LastPresentTime.QuadPart != 0 ||
frame_info.LastMouseUpdateTime.QuadPart != 0 || frame_info.PointerShapeBufferSize > 0;
if(!update_flag) {
return capture_e::timeout;
}
if(frame_info.PointerShapeBufferSize > 0) {
DXGI_OUTDUPL_POINTER_SHAPE_INFO shape_info {};
util::buffer_t<std::uint8_t> img_data { frame_info.PointerShapeBufferSize };
UINT dummy;
status = dup.dup->GetFramePointerShape(img_data.size(), std::begin(img_data), &dummy, &shape_info);
if (FAILED(status)) {
BOOST_LOG(error) << "Failed to get new pointer shape [0x"sv << util::hex(status).to_string_view() << ']';
return capture_e::error;
}
auto cursor_img = make_cursor_image(std::move(img_data), shape_info);
D3D11_SUBRESOURCE_DATA data {
std::begin(cursor_img),
4 * shape_info.Width,
0
};
// Create texture for cursor
D3D11_TEXTURE2D_DESC t {};
t.Width = shape_info.Width;
t.Height = cursor_img.size() / data.SysMemPitch;
t.MipLevels = 1;
t.ArraySize = 1;
t.SampleDesc.Count = 1;
t.Usage = D3D11_USAGE_DEFAULT;
t.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
t.BindFlags = D3D11_BIND_RENDER_TARGET;
dxgi::texture2d_t::pointer tex_p {};
auto status = device->CreateTexture2D(&t, &data, &tex_p);
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to create dummy texture [0x"sv << util::hex(status).to_string_view() << ']';
return capture_e::error;
}
texture2d_t texture { tex_p };
for(auto *hwdevice : hwdevices) {
if(hwdevice->set_cursor_texture(tex_p, t.Width, t.Height)) {
return capture_e::error;
}
}
cursor.texture = std::move(texture);
cursor.width = t.Width;
cursor.height = t.Height;
}
if(frame_info.LastMouseUpdateTime.QuadPart) {
for(auto *hwdevice : hwdevices) {
hwdevice->set_cursor_pos(frame_info.PointerPosition.Position.x, frame_info.PointerPosition.Position.y, frame_info.PointerPosition.Visible && cursor_visible);
}
}
texture2d_t::pointer src_p {};
status = res->QueryInterface(IID_ID3D11Texture2D, (void **)&src_p);
if (FAILED(status)) {
BOOST_LOG(error) << "Couldn't query interface [0x"sv << util::hex(status).to_string_view() << ']';
return capture_e::error;
}
texture2d_t src { src_p };
device_ctx->CopyResource(img->texture.get(), src.get());
return capture_e::ok;
}
std::shared_ptr<platf::img_t> display_vram_t::alloc_img() {
auto img = std::make_shared<img_d3d_t>();
D3D11_TEXTURE2D_DESC t {};
t.Width = width;
t.Height = height;
t.MipLevels = 1;
t.ArraySize = 1;
t.SampleDesc.Count = 1;
t.Usage = D3D11_USAGE_DEFAULT;
t.Format = format;
t.BindFlags = D3D11_BIND_RENDER_TARGET;
dxgi::texture2d_t::pointer tex_p {};
auto status = device->CreateTexture2D(&t, nullptr, &tex_p);
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to create img buf texture [0x"sv << util::hex(status).to_string_view() << ']';
return nullptr;
}
img->data = (std::uint8_t*)tex_p;
img->row_pitch = 0;
img->pixel_pitch = 4;
img->width = 0;
img->height = 0;
img->texture.reset(tex_p);
img->display = shared_from_this();
return img;
}
int display_vram_t::dummy_img(platf::img_t *img_base) {
auto img = (img_d3d_t*)img_base;
img->row_pitch = width * 4;
auto dummy_data = std::make_unique<int[]>(width * height);
D3D11_SUBRESOURCE_DATA data {
dummy_data.get(),
(UINT)img->row_pitch,
0
};
D3D11_TEXTURE2D_DESC t {};
t.Width = width;
t.Height = height;
t.MipLevels = 1;
t.ArraySize = 1;
t.SampleDesc.Count = 1;
t.Usage = D3D11_USAGE_DEFAULT;
t.Format = format;
t.BindFlags = D3D11_BIND_RENDER_TARGET;
dxgi::texture2d_t::pointer tex_p {};
auto status = device->CreateTexture2D(&t, &data, &tex_p);
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to create dummy texture [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
img->data = (std::uint8_t*)tex_p;
img->texture.reset(tex_p);
img->height = height;
img->width = width;
img->pixel_pitch = 4;
return 0;
}
std::shared_ptr<platf::hwdevice_t> display_vram_t::make_hwdevice(int width, int height, pix_fmt_e pix_fmt) {
if(pix_fmt != platf::pix_fmt_e::nv12 && pix_fmt != platf::pix_fmt_e::p010) {
BOOST_LOG(error) << "display_vram_t doesn't support pixel format ["sv << (int)pix_fmt << ']';
return nullptr;
}
auto hwdevice = std::make_shared<hwdevice_t>(&hwdevices);
auto ret = hwdevice->init(
shared_from_this(),
device.get(),
device_ctx.get(),
this->width, this->height,
width, height,
pix_fmt);
if(ret) {
return nullptr;
}
if(cursor.texture && hwdevice->set_cursor_texture(cursor.texture.get(), cursor.width, cursor.height)) {
return nullptr;
}
hwdevices.emplace_back(hwdevice.get());
return hwdevice;
}
}
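Editor's note: make_cursor_image above expands a DXGI monochrome pointer shape, which stores a 1-bpp AND mask followed by a 1-bpp XOR mask in the top and bottom halves of the buffer, into a 32-bit ARGB image. Its per-pixel decoding boils down to the sketch below; decode_mono_cursor_pixel is a made-up name used only for illustration, and the "inverse" case is the one the function approximates with a white pixel plus a black outline on transparent neighbours.

// AND XOR -> result
//  0   0     opaque black
//  0   1     opaque white
//  1   0     transparent (desktop shows through)
//  1   1     "inverse" (approximated: white pixel, black outline)
#include <cstdint>

constexpr std::uint32_t kBlack       = 0xFF000000;
constexpr std::uint32_t kWhite       = 0xFFFFFFFF;
constexpr std::uint32_t kTransparent = 0x00000000;

std::uint32_t decode_mono_cursor_pixel(bool and_bit, bool xor_bit) {
  int color_type = (and_bit ? 1 : 0) + (xor_bit ? 2 : 0);
  switch(color_type) {
  case 0:  return kBlack;       // AND=0, XOR=0
  case 2:  return kWhite;       // AND=0, XOR=1
  case 1:  return kTransparent; // AND=1, XOR=0
  default: return kWhite;       // AND=1, XOR=1: inverse, approximated as white
  }
}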

View File

@@ -10,7 +10,7 @@
#include <ViGEm/Client.h>
#include "sunshine/main.h"
#include "common.h"
#include "sunshine/platform/common.h"
namespace platf {
using namespace std::literals;
@@ -249,9 +249,7 @@ void keyboard(input_t &input, uint16_t modcode, bool release) {
auto key_state = GetAsyncKeyState(modcode);
bool key_state_down = (key_state & KEY_STATE_DOWN) != 0;
if(key_state_down != release) {
BOOST_LOG(warning) << "Key state of vkey ["sv << util::hex(modcode).to_string_view() << "] does not match the desired state ["sv << (release ? "on]"sv : "off]"sv);
return;
BOOST_LOG(debug) << "Key state of vkey ["sv << util::hex(modcode).to_string_view() << "] does not match the desired state ["sv << (release ? "on]"sv : "off]"sv);
}
INPUT i {};
@@ -333,6 +331,10 @@ void gamepad(input_t &input, int nr, const gamepad_state_t &gamepad_state) {
}
}
int thread_priority() {
return SetThreadPriority(GetCurrentThread(), THREAD_PRIORITY_HIGHEST) ? 0 : 1;
}
void freeInput(void *p) {
auto vigem = (vigem_t*)p;

View File

@@ -1,741 +0,0 @@
//
// Created by loki on 1/12/20.
//
#include <dxgi.h>
#include <d3d11.h>
#include <d3dcommon.h>
#include <dxgi1_2.h>
#include <codecvt>
#include <sunshine/config.h>
#include "sunshine/main.h"
#include "common.h"
namespace platf {
using namespace std::literals;
}
namespace platf::dxgi {
template<class T>
void Release(T *dxgi) {
dxgi->Release();
}
using factory1_t = util::safe_ptr<IDXGIFactory1, Release<IDXGIFactory1>>;
using dxgi_t = util::safe_ptr<IDXGIDevice, Release<IDXGIDevice>>;
using dxgi1_t = util::safe_ptr<IDXGIDevice1, Release<IDXGIDevice1>>;
using device_t = util::safe_ptr<ID3D11Device, Release<ID3D11Device>>;
using device_ctx_t = util::safe_ptr<ID3D11DeviceContext, Release<ID3D11DeviceContext>>;
using adapter_t = util::safe_ptr<IDXGIAdapter1, Release<IDXGIAdapter1>>;
using output_t = util::safe_ptr<IDXGIOutput, Release<IDXGIOutput>>;
using output1_t = util::safe_ptr<IDXGIOutput1, Release<IDXGIOutput1>>;
using dup_t = util::safe_ptr<IDXGIOutputDuplication, Release<IDXGIOutputDuplication>>;
using texture2d_t = util::safe_ptr<ID3D11Texture2D, Release<ID3D11Texture2D>>;
using resource_t = util::safe_ptr<IDXGIResource, Release<IDXGIResource>>;
extern const char *format_str[];
class duplication_t {
public:
dup_t dup;
bool has_frame {};
capture_e next_frame(DXGI_OUTDUPL_FRAME_INFO &frame_info, resource_t::pointer *res_p) {
auto capture_status = release_frame();
if(capture_status != capture_e::ok) {
return capture_status;
}
auto status = dup->AcquireNextFrame(1000, &frame_info, res_p);
switch(status) {
case S_OK:
has_frame = true;
return capture_e::ok;
case DXGI_ERROR_WAIT_TIMEOUT:
return capture_e::timeout;
case WAIT_ABANDONED:
case DXGI_ERROR_ACCESS_LOST:
case DXGI_ERROR_ACCESS_DENIED:
return capture_e::reinit;
default:
BOOST_LOG(error) << "Couldn't acquire next frame [0x"sv << util::hex(status).to_string_view();
return capture_e::error;
}
}
capture_e reset(dup_t::pointer dup_p = dup_t::pointer()) {
auto capture_status = release_frame();
dup.reset(dup_p);
return capture_status;
}
capture_e release_frame() {
if(!has_frame) {
return capture_e::ok;
}
auto status = dup->ReleaseFrame();
switch (status) {
case S_OK:
has_frame = false;
return capture_e::ok;
case DXGI_ERROR_WAIT_TIMEOUT:
return capture_e::timeout;
case WAIT_ABANDONED:
case DXGI_ERROR_ACCESS_LOST:
case DXGI_ERROR_ACCESS_DENIED:
has_frame = false;
return capture_e::reinit;
default:
BOOST_LOG(error) << "Couldn't release frame [0x"sv << util::hex(status).to_string_view();
return capture_e::error;
}
}
~duplication_t() {
release_frame();
}
};
class display_t;
struct img_t : public ::platf::img_t {
~img_t() override {
delete[] data;
data = nullptr;
}
};
struct cursor_t {
std::vector<std::uint8_t> img_data;
DXGI_OUTDUPL_POINTER_SHAPE_INFO shape_info;
int x, y;
bool visible;
};
void blend_cursor_monochrome(const cursor_t &cursor, img_t &img) {
int height = cursor.shape_info.Height / 2;
int width = cursor.shape_info.Width;
int pitch = cursor.shape_info.Pitch;
// img cursor.{x,y} < 0, skip parts of the cursor.img_data
auto cursor_skip_y = -std::min(0, cursor.y);
auto cursor_skip_x = -std::min(0, cursor.x);
// img cursor.{x,y} > img.{x,y}, truncate parts of the cursor.img_data
auto cursor_truncate_y = std::max(0, cursor.y - img.height);
auto cursor_truncate_x = std::max(0, cursor.x - img.width);
auto cursor_width = width - cursor_skip_x - cursor_truncate_x;
auto cursor_height = height - cursor_skip_y - cursor_truncate_y;
if(cursor_height > height || cursor_width > width) {
return;
}
auto img_skip_y = std::max(0, cursor.y);
auto img_skip_x = std::max(0, cursor.x);
auto cursor_img_data = cursor.img_data.data() + cursor_skip_y * pitch;
int delta_height = std::min(cursor_height - cursor_truncate_y, std::max(0, img.height - img_skip_y));
int delta_width = std::min(cursor_width - cursor_truncate_x, std::max(0, img.width - img_skip_x));
auto pixels_per_byte = width / pitch;
auto bytes_per_row = delta_width / pixels_per_byte;
auto img_data = (int*)img.data;
for(int i = 0; i < delta_height; ++i) {
auto and_mask = &cursor_img_data[i * pitch];
auto xor_mask = &cursor_img_data[(i + height) * pitch];
auto img_pixel_p = &img_data[(i + img_skip_y) * (img.row_pitch / img.pixel_pitch) + img_skip_x];
auto skip_x = cursor_skip_x;
for(int x = 0; x < bytes_per_row; ++x) {
for(auto bit = 0u; bit < 8; ++bit) {
if(skip_x > 0) {
--skip_x;
continue;
}
int and_ = *and_mask & (1 << (7 - bit)) ? -1 : 0;
int xor_ = *xor_mask & (1 << (7 - bit)) ? -1 : 0;
*img_pixel_p &= and_;
*img_pixel_p ^= xor_;
++img_pixel_p;
}
++and_mask;
++xor_mask;
}
}
}
void blend_cursor_color(const cursor_t &cursor, img_t &img) {
int height = cursor.shape_info.Height;
int width = cursor.shape_info.Width;
int pitch = cursor.shape_info.Pitch;
// img cursor.y < 0, skip parts of the cursor.img_data
auto cursor_skip_y = -std::min(0, cursor.y);
auto cursor_skip_x = -std::min(0, cursor.x);
// img cursor.{x,y} > img.{x,y}, truncate parts of the cursor.img_data
auto cursor_truncate_y = std::max(0, cursor.y - img.height);
auto cursor_truncate_x = std::max(0, cursor.x - img.width);
auto img_skip_y = std::max(0, cursor.y);
auto img_skip_x = std::max(0, cursor.x);
auto cursor_width = width - cursor_skip_x - cursor_truncate_x;
auto cursor_height = height - cursor_skip_y - cursor_truncate_y;
if(cursor_height > height || cursor_width > width) {
return;
}
auto cursor_img_data = (int*)&cursor.img_data[cursor_skip_y * pitch];
int delta_height = std::min(cursor_height - cursor_truncate_y, std::max(0, img.height - img_skip_y));
int delta_width = std::min(cursor_width - cursor_truncate_x, std::max(0, img.width - img_skip_x));
auto img_data = (int*)img.data;
for(int i = 0; i < delta_height; ++i) {
auto cursor_begin = &cursor_img_data[i * cursor.shape_info.Width + cursor_skip_x];
auto cursor_end = &cursor_begin[delta_width];
auto img_pixel_p = &img_data[(i + img_skip_y) * (img.row_pitch / img.pixel_pitch) + img_skip_x];
std::for_each(cursor_begin, cursor_end, [&](int cursor_pixel) {
auto colors_out = (std::uint8_t*)&cursor_pixel;
auto colors_in = (std::uint8_t*)img_pixel_p;
//TODO: When use of IDXGIOutput5 is implemented, support different color formats
auto alpha = colors_out[3];
if(alpha == 255) {
*img_pixel_p = cursor_pixel;
}
else {
colors_in[0] = colors_out[0] + (colors_in[0] * (255 - alpha) + 255/2) / 255;
colors_in[1] = colors_out[1] + (colors_in[1] * (255 - alpha) + 255/2) / 255;
colors_in[2] = colors_out[2] + (colors_in[2] * (255 - alpha) + 255/2) / 255;
}
++img_pixel_p;
});
}
}
void blend_cursor(const cursor_t &cursor, img_t &img) {
switch(cursor.shape_info.Type) {
case DXGI_OUTDUPL_POINTER_SHAPE_TYPE_COLOR:
blend_cursor_color(cursor, img);
break;
case DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MONOCHROME:
blend_cursor_monochrome(cursor, img);
break;
case DXGI_OUTDUPL_POINTER_SHAPE_TYPE_MASKED_COLOR:
default:
BOOST_LOG(warning) << "Unsupported cursor format ["sv << cursor.shape_info.Type << ']';
}
}
class display_t : public ::platf::display_t {
public:
capture_e snapshot(::platf::img_t *img_base, bool cursor_visible) override {
auto img = (img_t *) img_base;
HRESULT status;
DXGI_OUTDUPL_FRAME_INFO frame_info;
resource_t::pointer res_p {};
auto capture_status = dup.next_frame(frame_info, &res_p);
resource_t res{res_p};
if (capture_status != capture_e::ok) {
return capture_status;
}
if (frame_info.PointerShapeBufferSize > 0) {
auto &img_data = cursor.img_data;
img_data.resize(frame_info.PointerShapeBufferSize);
UINT dummy;
status = dup.dup->GetFramePointerShape(img_data.size(), img_data.data(), &dummy, &cursor.shape_info);
if (FAILED(status)) {
BOOST_LOG(error) << "Failed to get new pointer shape [0x"sv << util::hex(status).to_string_view() << ']';
return capture_e::error;
}
}
if(frame_info.LastMouseUpdateTime.QuadPart) {
cursor.x = frame_info.PointerPosition.Position.x;
cursor.y = frame_info.PointerPosition.Position.y;
cursor.visible = frame_info.PointerPosition.Visible;
}
// If frame has been updated
if (frame_info.LastPresentTime.QuadPart != 0) {
{
texture2d_t::pointer src_p {};
status = res->QueryInterface(IID_ID3D11Texture2D, (void **)&src_p);
texture2d_t src{src_p};
if (FAILED(status)) {
BOOST_LOG(error) << "Couldn't query interface [0x"sv << util::hex(status).to_string_view() << ']';
return capture_e::error;
}
//Copy from GPU to CPU
device_ctx->CopyResource(texture.get(), src.get());
}
if(current_img.pData) {
device_ctx->Unmap(texture.get(), 0);
current_img.pData = nullptr;
}
status = device_ctx->Map(texture.get(), 0, D3D11_MAP_READ, 0, &current_img);
if (FAILED(status)) {
BOOST_LOG(error) << "Failed to map texture [0x"sv << util::hex(status).to_string_view() << ']';
return capture_e::error;
}
}
const bool update_flag =
frame_info.LastMouseUpdateTime.QuadPart ||
frame_info.LastPresentTime.QuadPart != 0 ||
frame_info.PointerShapeBufferSize > 0;
if(!update_flag) {
return capture_e::timeout;
}
if(img->width != width || img->height != height) {
delete[] img->data;
img->data = new std::uint8_t[height * current_img.RowPitch];
img->width = width;
img->height = height;
img->row_pitch = current_img.RowPitch;
}
std::copy_n((std::uint8_t*)current_img.pData, height * current_img.RowPitch, (std::uint8_t*)img->data);
if(cursor_visible && cursor.visible) {
blend_cursor(cursor, *img);
}
return capture_e::ok;
}
std::unique_ptr<::platf::img_t> alloc_img() override {
auto img = std::make_unique<img_t>();
img->data = nullptr;
img->height = 0;
img->width = 0;
img->row_pitch = 0;
img->pixel_pitch = 4;
return img;
}
int init() {
/* Uncomment when use of IDXGIOutput5 is implemented
std::call_once(windows_cpp_once_flag, []() {
DECLARE_HANDLE(DPI_AWARENESS_CONTEXT);
const auto DPI_AWARENESS_CONTEXT_PER_MONITOR_AWARE_V2 = ((DPI_AWARENESS_CONTEXT)-4);
typedef BOOL (*User32_SetProcessDpiAwarenessContext)(DPI_AWARENESS_CONTEXT value);
auto user32 = LoadLibraryA("user32.dll");
auto f = (User32_SetProcessDpiAwarenessContext)GetProcAddress(user32, "SetProcessDpiAwarenessContext");
if(f) {
f(DPI_AWARENESS_CONTEXT_PER_MONITOR_AWARE_V2);
}
FreeLibrary(user32);
});
*/
current_img.pData = nullptr; // current_img is not yet mapped
dxgi::factory1_t::pointer factory_p {};
dxgi::adapter_t::pointer adapter_p {};
dxgi::output_t::pointer output_p {};
dxgi::device_t::pointer device_p {};
dxgi::device_ctx_t::pointer device_ctx_p {};
HRESULT status;
status = CreateDXGIFactory1(IID_IDXGIFactory1, (void**)&factory_p);
factory.reset(factory_p);
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to create DXGIFactory1 [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
std::wstring_convert<std::codecvt_utf8_utf16<wchar_t>, wchar_t> converter;
auto adapter_name = converter.from_bytes(config::video.adapter_name);
auto output_name = converter.from_bytes(config::video.output_name);
for(int x = 0; factory_p->EnumAdapters1(x, &adapter_p) != DXGI_ERROR_NOT_FOUND; ++x) {
dxgi::adapter_t adapter_tmp { adapter_p };
DXGI_ADAPTER_DESC1 adapter_desc;
adapter_tmp->GetDesc1(&adapter_desc);
if(!adapter_name.empty() && adapter_desc.Description != adapter_name) {
continue;
}
for(int y = 0; adapter_tmp->EnumOutputs(y, &output_p) != DXGI_ERROR_NOT_FOUND; ++y) {
dxgi::output_t output_tmp {output_p };
DXGI_OUTPUT_DESC desc;
output_tmp->GetDesc(&desc);
if(!output_name.empty() && desc.DeviceName != output_name) {
continue;
}
if(desc.AttachedToDesktop) {
output = std::move(output_tmp);
width = desc.DesktopCoordinates.right - desc.DesktopCoordinates.left;
height = desc.DesktopCoordinates.bottom - desc.DesktopCoordinates.top;
}
}
if(output) {
adapter = std::move(adapter_tmp);
break;
}
}
if(!output) {
BOOST_LOG(error) << "Failed to locate an output device"sv;
return -1;
}
D3D_FEATURE_LEVEL featureLevels[] {
D3D_FEATURE_LEVEL_12_1,
D3D_FEATURE_LEVEL_12_0,
D3D_FEATURE_LEVEL_11_1,
D3D_FEATURE_LEVEL_11_0,
D3D_FEATURE_LEVEL_10_1,
D3D_FEATURE_LEVEL_10_0,
D3D_FEATURE_LEVEL_9_3,
D3D_FEATURE_LEVEL_9_2,
D3D_FEATURE_LEVEL_9_1
};
status = adapter->QueryInterface(IID_IDXGIAdapter, (void**)&adapter_p);
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to query IDXGIAdapter interface"sv;
return -1;
}
status = D3D11CreateDevice(
adapter_p,
D3D_DRIVER_TYPE_UNKNOWN,
nullptr,
D3D11_CREATE_DEVICE_VIDEO_SUPPORT,
featureLevels, sizeof(featureLevels) / sizeof(D3D_FEATURE_LEVEL),
D3D11_SDK_VERSION,
&device_p,
&feature_level,
&device_ctx_p);
adapter_p->Release();
device.reset(device_p);
device_ctx.reset(device_ctx_p);
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to create D3D11 device [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
DXGI_ADAPTER_DESC adapter_desc;
adapter->GetDesc(&adapter_desc);
auto description = converter.to_bytes(adapter_desc.Description);
BOOST_LOG(info) << std::endl
<< "Device Description : " << description << std::endl
<< "Device Vendor ID : 0x"sv << util::hex(adapter_desc.VendorId).to_string_view() << std::endl
<< "Device Device ID : 0x"sv << util::hex(adapter_desc.DeviceId).to_string_view() << std::endl
<< "Device Video Mem : "sv << adapter_desc.DedicatedVideoMemory / 1048576 << " MiB"sv << std::endl
<< "Device Sys Mem : "sv << adapter_desc.DedicatedSystemMemory / 1048576 << " MiB"sv << std::endl
<< "Share Sys Mem : "sv << adapter_desc.SharedSystemMemory / 1048576 << " MiB"sv << std::endl
<< "Feature Level : 0x"sv << util::hex(feature_level).to_string_view() << std::endl
<< "Capture size : "sv << width << 'x' << height;
// Bump up thread priority
{
dxgi::dxgi_t::pointer dxgi_p {};
status = device->QueryInterface(IID_IDXGIDevice, (void**)&dxgi_p);
dxgi::dxgi_t dxgi { dxgi_p };
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to query DXGI interface from device [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
dxgi->SetGPUThreadPriority(7);
}
// Try to reduce latency
{
dxgi::dxgi1_t::pointer dxgi_p {};
status = device->QueryInterface(IID_IDXGIDevice, (void**)&dxgi_p);
dxgi::dxgi1_t dxgi { dxgi_p };
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to query DXGI interface from device [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
dxgi->SetMaximumFrameLatency(1);
}
//FIXME: Duplicate output on RX580 in combination with DOOM (2016) --> BSOD
//TODO: Use IDXGIOutput5 for improved performance
{
dxgi::output1_t::pointer output1_p {};
status = output->QueryInterface(IID_IDXGIOutput1, (void**)&output1_p);
dxgi::output1_t output1 {output1_p };
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to query IDXGIOutput1 from the output"sv;
return -1;
}
// We try this twice, in case we still get an error on reinitialization
for(int x = 0; x < 2; ++x) {
dxgi::dup_t::pointer dup_p {};
status = output1->DuplicateOutput((IUnknown*)device.get(), &dup_p);
if(SUCCEEDED(status)) {
dup.reset(dup_p);
break;
}
std::this_thread::sleep_for(200ms);
}
if(FAILED(status)) {
BOOST_LOG(error) << "DuplicateOutput Failed [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
}
DXGI_OUTDUPL_DESC dup_desc;
dup.dup->GetDesc(&dup_desc);
format = dup_desc.ModeDesc.Format;
BOOST_LOG(debug) << "Source format ["sv << format_str[dup_desc.ModeDesc.Format] << ']';
D3D11_TEXTURE2D_DESC t {};
t.Width = width;
t.Height = height;
t.MipLevels = 1;
t.ArraySize = 1;
t.SampleDesc.Count = 1;
t.Usage = D3D11_USAGE_STAGING;
t.Format = format;
t.CPUAccessFlags = D3D11_CPU_ACCESS_READ;
dxgi::texture2d_t::pointer tex_p {};
status = device->CreateTexture2D(&t, nullptr, &tex_p);
texture.reset(tex_p);
if(FAILED(status)) {
BOOST_LOG(error) << "Failed to create texture [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
// map the texture simply to get the pitch and stride
status = device_ctx->Map(texture.get(), 0, D3D11_MAP_READ, 0, &current_img);
if(FAILED(status)) {
BOOST_LOG(error) << "Error: Failed to map the texture [0x"sv << util::hex(status).to_string_view() << ']';
return -1;
}
return 0;
}
~display_t() override {
if(current_img.pData) {
device_ctx->Unmap(texture.get(), 0);
current_img.pData = nullptr;
}
}
factory1_t factory;
adapter_t adapter;
output_t output;
device_t device;
device_ctx_t device_ctx;
duplication_t dup;
cursor_t cursor;
texture2d_t texture;
int width, height;
DXGI_FORMAT format;
D3D_FEATURE_LEVEL feature_level;
D3D11_MAPPED_SUBRESOURCE current_img;
};
const char *format_str[] = {
"DXGI_FORMAT_UNKNOWN",
"DXGI_FORMAT_R32G32B32A32_TYPELESS",
"DXGI_FORMAT_R32G32B32A32_FLOAT",
"DXGI_FORMAT_R32G32B32A32_UINT",
"DXGI_FORMAT_R32G32B32A32_SINT",
"DXGI_FORMAT_R32G32B32_TYPELESS",
"DXGI_FORMAT_R32G32B32_FLOAT",
"DXGI_FORMAT_R32G32B32_UINT",
"DXGI_FORMAT_R32G32B32_SINT",
"DXGI_FORMAT_R16G16B16A16_TYPELESS",
"DXGI_FORMAT_R16G16B16A16_FLOAT",
"DXGI_FORMAT_R16G16B16A16_UNORM",
"DXGI_FORMAT_R16G16B16A16_UINT",
"DXGI_FORMAT_R16G16B16A16_SNORM",
"DXGI_FORMAT_R16G16B16A16_SINT",
"DXGI_FORMAT_R32G32_TYPELESS",
"DXGI_FORMAT_R32G32_FLOAT",
"DXGI_FORMAT_R32G32_UINT",
"DXGI_FORMAT_R32G32_SINT",
"DXGI_FORMAT_R32G8X24_TYPELESS",
"DXGI_FORMAT_D32_FLOAT_S8X24_UINT",
"DXGI_FORMAT_R32_FLOAT_X8X24_TYPELESS",
"DXGI_FORMAT_X32_TYPELESS_G8X24_UINT",
"DXGI_FORMAT_R10G10B10A2_TYPELESS",
"DXGI_FORMAT_R10G10B10A2_UNORM",
"DXGI_FORMAT_R10G10B10A2_UINT",
"DXGI_FORMAT_R11G11B10_FLOAT",
"DXGI_FORMAT_R8G8B8A8_TYPELESS",
"DXGI_FORMAT_R8G8B8A8_UNORM",
"DXGI_FORMAT_R8G8B8A8_UNORM_SRGB",
"DXGI_FORMAT_R8G8B8A8_UINT",
"DXGI_FORMAT_R8G8B8A8_SNORM",
"DXGI_FORMAT_R8G8B8A8_SINT",
"DXGI_FORMAT_R16G16_TYPELESS",
"DXGI_FORMAT_R16G16_FLOAT",
"DXGI_FORMAT_R16G16_UNORM",
"DXGI_FORMAT_R16G16_UINT",
"DXGI_FORMAT_R16G16_SNORM",
"DXGI_FORMAT_R16G16_SINT",
"DXGI_FORMAT_R32_TYPELESS",
"DXGI_FORMAT_D32_FLOAT",
"DXGI_FORMAT_R32_FLOAT",
"DXGI_FORMAT_R32_UINT",
"DXGI_FORMAT_R32_SINT",
"DXGI_FORMAT_R24G8_TYPELESS",
"DXGI_FORMAT_D24_UNORM_S8_UINT",
"DXGI_FORMAT_R24_UNORM_X8_TYPELESS",
"DXGI_FORMAT_X24_TYPELESS_G8_UINT",
"DXGI_FORMAT_R8G8_TYPELESS",
"DXGI_FORMAT_R8G8_UNORM",
"DXGI_FORMAT_R8G8_UINT",
"DXGI_FORMAT_R8G8_SNORM",
"DXGI_FORMAT_R8G8_SINT",
"DXGI_FORMAT_R16_TYPELESS",
"DXGI_FORMAT_R16_FLOAT",
"DXGI_FORMAT_D16_UNORM",
"DXGI_FORMAT_R16_UNORM",
"DXGI_FORMAT_R16_UINT",
"DXGI_FORMAT_R16_SNORM",
"DXGI_FORMAT_R16_SINT",
"DXGI_FORMAT_R8_TYPELESS",
"DXGI_FORMAT_R8_UNORM",
"DXGI_FORMAT_R8_UINT",
"DXGI_FORMAT_R8_SNORM",
"DXGI_FORMAT_R8_SINT",
"DXGI_FORMAT_A8_UNORM",
"DXGI_FORMAT_R1_UNORM",
"DXGI_FORMAT_R9G9B9E5_SHAREDEXP",
"DXGI_FORMAT_R8G8_B8G8_UNORM",
"DXGI_FORMAT_G8R8_G8B8_UNORM",
"DXGI_FORMAT_BC1_TYPELESS",
"DXGI_FORMAT_BC1_UNORM",
"DXGI_FORMAT_BC1_UNORM_SRGB",
"DXGI_FORMAT_BC2_TYPELESS",
"DXGI_FORMAT_BC2_UNORM",
"DXGI_FORMAT_BC2_UNORM_SRGB",
"DXGI_FORMAT_BC3_TYPELESS",
"DXGI_FORMAT_BC3_UNORM",
"DXGI_FORMAT_BC3_UNORM_SRGB",
"DXGI_FORMAT_BC4_TYPELESS",
"DXGI_FORMAT_BC4_UNORM",
"DXGI_FORMAT_BC4_SNORM",
"DXGI_FORMAT_BC5_TYPELESS",
"DXGI_FORMAT_BC5_UNORM",
"DXGI_FORMAT_BC5_SNORM",
"DXGI_FORMAT_B5G6R5_UNORM",
"DXGI_FORMAT_B5G5R5A1_UNORM",
"DXGI_FORMAT_B8G8R8A8_UNORM",
"DXGI_FORMAT_B8G8R8X8_UNORM",
"DXGI_FORMAT_R10G10B10_XR_BIAS_A2_UNORM",
"DXGI_FORMAT_B8G8R8A8_TYPELESS",
"DXGI_FORMAT_B8G8R8A8_UNORM_SRGB",
"DXGI_FORMAT_B8G8R8X8_TYPELESS",
"DXGI_FORMAT_B8G8R8X8_UNORM_SRGB",
"DXGI_FORMAT_BC6H_TYPELESS",
"DXGI_FORMAT_BC6H_UF16",
"DXGI_FORMAT_BC6H_SF16",
"DXGI_FORMAT_BC7_TYPELESS",
"DXGI_FORMAT_BC7_UNORM",
"DXGI_FORMAT_BC7_UNORM_SRGB",
"DXGI_FORMAT_AYUV",
"DXGI_FORMAT_Y410",
"DXGI_FORMAT_Y416",
"DXGI_FORMAT_NV12",
"DXGI_FORMAT_P010",
"DXGI_FORMAT_P016",
"DXGI_FORMAT_420_OPAQUE",
"DXGI_FORMAT_YUY2",
"DXGI_FORMAT_Y210",
"DXGI_FORMAT_Y216",
"DXGI_FORMAT_NV11",
"DXGI_FORMAT_AI44",
"DXGI_FORMAT_IA44",
"DXGI_FORMAT_P8",
"DXGI_FORMAT_A8P8",
"DXGI_FORMAT_B4G4R4A4_UNORM",
NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL,
"DXGI_FORMAT_P208",
"DXGI_FORMAT_V208",
"DXGI_FORMAT_V408"
};
}
namespace platf {
std::unique_ptr<display_t> display() {
auto disp = std::make_unique<dxgi::display_t>();
if (disp->init()) {
return nullptr;
}
return disp;
}
}

149 sunshine/round_robin.h Normal file
View File

@@ -0,0 +1,149 @@
#ifndef KITTY_UTIL_ITERATOR_H
#define KITTY_UTIL_ITERATOR_H
#include <iterator>
namespace util {
template<class V, class T>
class it_wrap_t : public std::iterator<std::random_access_iterator_tag, V> {
public:
typedef T iterator;
typedef typename std::iterator<std::random_access_iterator_tag, V>::value_type class_t;
typedef class_t& reference;
typedef class_t* pointer;
typedef std::ptrdiff_t diff_t;
iterator operator += (diff_t step) {
while(step-- > 0) {
++_this();
}
return _this();
}
iterator operator -= (diff_t step) {
while(step-- > 0) {
--_this();
}
return _this();
}
iterator operator +(diff_t step) {
iterator new_ = _this();
return new_ += step;
}
iterator operator -(diff_t step) {
iterator new_ = _this();
return new_ -= step;
}
diff_t operator -(iterator first) {
diff_t step = 0;
while(first != _this()) {
++step;
++first;
}
return step;
}
iterator operator++() { _this().inc(); return _this(); }
iterator operator--() { _this().dec(); return _this(); }
iterator operator++(int) {
iterator new_ = _this();
++_this();
return new_;
}
iterator operator--(int) {
iterator new_ = _this();
--_this();
return new_;
}
reference operator*() { return *_this().get(); }
const reference operator*() const { return *_this().get(); }
pointer operator->() { return &*_this(); }
const pointer operator->() const { return &*_this(); }
bool operator != (const iterator &other) const {
return !(_this() == other);
}
bool operator < (const iterator &other) const {
return !(_this() >= other);
}
bool operator >= (const iterator &other) const {
return _this() == other || _this() > other;
}
bool operator <= (const iterator &other) const {
return _this() == other || _this() < other;
}
bool operator == (const iterator &other) const { return _this().eq(other); };
bool operator > (const iterator &other) const { return _this().gt(other); }
private:
iterator &_this() { return *static_cast<iterator*>(this); }
const iterator &_this() const { return *static_cast<const iterator*>(this); }
};
template<class V, class It>
class round_robin_t : public it_wrap_t<V, round_robin_t<V, It>> {
public:
using iterator = It;
using pointer = V*;
round_robin_t(iterator begin, iterator end) : _begin(begin), _end(end), _pos(begin) {}
void inc() {
++_pos;
if(_pos == _end) {
_pos = _begin;
}
}
void dec() {
if(_pos == _begin) {
_pos = _end;
}
--_pos;
}
bool eq(const round_robin_t &other) const {
return *_pos == *other._pos;
}
pointer get() const {
return &*_pos;
}
private:
It _begin;
It _end;
It _pos;
};
template<class V, class It>
round_robin_t<V, It> make_round_robin(It begin, It end) {
return round_robin_t<V, It>(begin, end);
}
}
#endif
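Editor's note: a minimal usage sketch for the new util::make_round_robin helper, assuming sunshine/round_robin.h is on the include path; the session values are placeholders. The iterator has no end position to reach, it simply wraps back to the first element after the last.

#include <iostream>
#include <vector>
#include "sunshine/round_robin.h"

int main() {
  std::vector<int> sessions { 1, 2, 3 };

  // Wrapping iterator over the vector: incrementing past the last element
  // returns to the first.
  auto robin = util::make_round_robin<int>(std::begin(sessions), std::end(sessions));

  for(int x = 0; x < 7; ++x) {
    std::cout << *robin << ' '; // prints: 1 2 3 1 2 3 1
    ++robin;
  }
  std::cout << '\n';
}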

View File

@@ -97,7 +97,7 @@ public:
std::vector<char> full_payload;
auto old_msg = std::move(_queue_packet);
TUPLE_2D_REF(_, old_packet, old_msg);
auto &old_packet = old_msg.second;
std::string_view new_payload{(char *) packet->data, packet->dataLength};
std::string_view old_payload{(char *) old_packet->data, old_packet->dataLength};
@@ -274,7 +274,7 @@ void cmd_describe(rtsp_server_t *server, net::peer_t peer, msg_t&& req) {
option.content = const_cast<char*>(seqn_str.c_str());
std::string_view payload;
if(config::video.hevc_mode == 0) {
if(config::video.hevc_mode == 1) {
payload = "surround-params=NONE"sv;
}
else {
@@ -404,7 +404,7 @@ void cmd_announce(rtsp_server_t *server, net::peer_t peer, msg_t &&req) {
return;
}
if(config.monitor.videoFormat != 0 && config::video.hevc_mode == 0) {
if(config.monitor.videoFormat != 0 && config::video.hevc_mode == 1) {
BOOST_LOG(warning) << "HEVC is disabled, yet the client requested HEVC"sv;
respond(server->host(), peer, &option, 400, "BAD REQUEST", req->sequenceNumber, {});

View File

@@ -494,8 +494,11 @@ void controlBroadcastThread(safe::signal_t *shutdown_event, control_server_t *se
server->send(std::string_view {(char*)payload.data(), payload.size()});
shutdown_event->raise(true);
continue;
auto lg = server->_map_addr_session.lock();
for(auto pos = std::begin(*server->_map_addr_session); pos != std::end(*server->_map_addr_session); ++pos) {
auto session = pos->second.second;
session->shutdown_event.raise(true);
}
}
server->iterate(500ms);
@@ -614,8 +617,6 @@ void videoBroadcastThread(safe::signal_t *shutdown_event, udp::socket &sock, vid
frame_new = "\000\000\000\001("sv;
}
assert(std::search(std::begin(payload), std::end(payload), std::begin(hevc_i_frame), std::end(hevc_i_frame)) ==
std::end(payload));
payload_new = replace(payload, frame_old, frame_new);
payload = {(char *) payload_new.data(), payload_new.size()};
}

View File

@@ -46,6 +46,15 @@ public:
return *this;
}
template<class V>
sync_t &operator=(V &&val) {
auto lg = lock();
raw = val;
return *this;
}
sync_t &operator=(const value_t &val) noexcept {
auto lg = lock();

View File

@@ -33,7 +33,7 @@ public:
// pop and view should not be used interchangeably
status_t pop() {
std::unique_lock ul{_lock};
std::unique_lock ul{ _lock };
if (!_continue) {
return util::false_v<status_t>;
@@ -55,7 +55,7 @@ public:
// pop and view should not be used interchangeably
template<class Rep, class Period>
status_t pop(std::chrono::duration<Rep, Period> delay) {
std::unique_lock ul{_lock};
std::unique_lock ul{ _lock };
if (!_continue) {
return util::false_v<status_t>;
@@ -74,7 +74,7 @@ public:
// pop and view should not be used interchangeably
const status_t &view() {
std::unique_lock ul{_lock};
std::unique_lock ul{ _lock };
if (!_continue) {
return util::false_v<status_t>;
@@ -98,7 +98,7 @@ public:
}
void stop() {
std::lock_guard lg{_lock};
std::lock_guard lg{ _lock };
_continue = false;
@@ -106,7 +106,7 @@ public:
}
void reset() {
std::lock_guard lg{_lock};
std::lock_guard lg{ _lock };
_continue = true;
@@ -118,8 +118,8 @@ public:
}
private:
bool _continue{true};
status_t _status;
bool _continue { true };
status_t _status { util::false_v<status_t> };
std::condition_variable _cv;
std::mutex _lock;
@@ -170,7 +170,7 @@ public:
}
status_t pop() {
std::unique_lock ul{_lock};
std::unique_lock ul{ _lock };
if (!_continue) {
return util::false_v<status_t>;
@@ -191,11 +191,12 @@ public:
}
std::vector<T> &unsafe() {
std::lock_guard { _lock };
return _queue;
}
void stop() {
std::lock_guard lg{_lock};
std::lock_guard lg{ _lock };
_continue = false;
@@ -208,7 +209,7 @@ public:
private:
bool _continue{true};
bool _continue{ true };
std::mutex _lock;
std::condition_variable _cv;
@@ -274,9 +275,8 @@ public:
void release() {
std::lock_guard lg { owner->_lock };
auto c = owner->_count.fetch_sub(1, std::memory_order_acquire);
if(c - 1 == 0) {
if(!--owner->_count) {
owner->_destruct(*get());
(*this)->~element_type();
}
@@ -296,10 +296,9 @@ public:
template<class FC, class FD>
shared_t(FC && fc, FD &&fd) : _construct { std::forward<FC>(fc) }, _destruct { std::forward<FD>(fd) } {}
[[nodiscard]] ptr_t ref() {
auto c = _count.fetch_add(1, std::memory_order_acquire);
if(!c) {
std::lock_guard lg { _lock };
std::lock_guard lg { _lock };
if(!_count++) {
new(_object_buf.data()) element_type;
if(_construct(*reinterpret_cast<element_type*>(_object_buf.data()))) {
return ptr_t { nullptr };
@@ -314,7 +313,7 @@ private:
std::array<std::uint8_t, sizeof(element_type)> _object_buf;
std::atomic<std::uint32_t> _count;
std::uint32_t _count;
std::mutex _lock;
};

View File

@@ -107,6 +107,9 @@ using optional_t = either_t<
std::is_pointer_v<T>),
T, std::optional<T>>;
template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;
template<class T>
class FailGuard {
public:
@@ -388,69 +391,6 @@ void c_free(T *p) {
template<class T>
using c_ptr = safe_ptr<T, c_free<T>>;
template<class T>
class FakeContainer {
typedef T pointer;
pointer _begin;
pointer _end;
public:
FakeContainer(pointer begin, pointer end) : _begin(begin), _end(end) {}
pointer begin() { return _begin; }
pointer end() { return _end; }
const pointer begin() const { return _begin; }
const pointer end() const { return _end; }
const pointer cbegin() const { return _begin; }
const pointer cend() const { return _end; }
pointer data() { return begin(); }
const pointer data() const { return cbegin(); }
std::size_t size() const { return std::distance(begin(), end()); }
};
template<class T>
FakeContainer<T> toContainer(T begin, T end) {
return { begin, end };
}
template<class T>
FakeContainer<T> toContainer(T begin, std::size_t end) {
return { begin, begin + end };
}
template<class T>
FakeContainer<T*> toContainer(T * const begin) {
T *end = begin;
auto default_val = T();
while(*end != default_val) {
++end;
}
return toContainer(begin, end);
}
template<class T, class H>
struct _init_helper;
template<template<class...> class T, class H, class... Args>
struct _init_helper<T<Args...>, H> {
using type = T<Args...>;
static type move(Args&&... args, H&&) {
return std::make_tuple(std::move(args)...);
}
static type copy(const Args&... args, const H&) {
return std::make_tuple(args...);
}
};
inline std::int64_t from_chars(const char *begin, const char *end) {
std::int64_t res {};
std::int64_t mul = 1;
@@ -496,6 +436,80 @@ public:
}
};
template<class T>
class wrap_ptr {
public:
using element_type = T;
using pointer = element_type*;
using reference = element_type&;
wrap_ptr() : _own_ptr { false }, _p { nullptr } {}
wrap_ptr(pointer p) : _own_ptr { false }, _p { p } {}
wrap_ptr(std::unique_ptr<element_type> &&uniq_p) : _own_ptr { true }, _p { uniq_p.release() } {}
wrap_ptr(wrap_ptr &&other) : _own_ptr { other._own_ptr }, _p { other._p } {
other._own_ptr = false;
}
wrap_ptr &operator=(wrap_ptr &&other) noexcept {
if(_own_ptr) {
delete _p;
}
_p = other._p;
_own_ptr = other._own_ptr;
other._own_ptr = false;
return *this;
}
template<class V>
wrap_ptr &operator=(std::unique_ptr<V> &&uniq_ptr) {
static_assert(std::is_base_of_v<element_type, V>, "element_type must be base class of V");
_own_ptr = true;
_p = uniq_ptr.release();
return *this;
}
wrap_ptr &operator=(pointer p) {
if(_own_ptr) {
delete _p;
}
_p = p;
_own_ptr = false;
return *this;
}
~wrap_ptr() {
if(_own_ptr) {
delete _p;
}
_own_ptr = false;
}
const reference operator*() const {
return *_p;
}
reference operator*() {
return *_p;
}
const pointer operator->() const {
return _p;
}
pointer operator->() {
return _p;
}
private:
bool _own_ptr;
pointer _p;
};
template<class T>
class buffer_t {
public:

File diff suppressed because it is too large

View File

@@ -56,6 +56,8 @@ void capture(
idr_event_t idr_events,
config_t config,
void *channel_data);
int init();
}
#endif //SUNSHINE_VIDEO_H