Mirror of https://github.com/LizardByte/Sunshine.git (synced 2025-08-10 00:52:16 +00:00)

Compare commits (1219 commits)
[Commit listing: 1219 commits are compared; the source view shows only abbreviated commit SHAs, from 446825b73d at the top through 30496c79ab at the bottom. Author, date, and message columns are not present.]
.clang-format (new file, 73 lines)
@@ -0,0 +1,73 @@
---
# This file is centrally managed in https://github.com/<organization>/.github/
# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in
# the above-mentioned repo.

# Generated from CLion C/C++ Code Style settings
BasedOnStyle: LLVM
AccessModifierOffset: -2
AlignAfterOpenBracket: DontAlign
AlignConsecutiveAssignments: Consecutive
AlignOperands: Align
AllowAllArgumentsOnNextLine: false
AllowAllConstructorInitializersOnNextLine: false
AllowAllParametersOfDeclarationOnNextLine: false
AllowShortBlocksOnASingleLine: Always
AllowShortCaseLabelsOnASingleLine: false
AllowShortFunctionsOnASingleLine: All
AllowShortIfStatementsOnASingleLine: WithoutElse
AllowShortLambdasOnASingleLine: All
AllowShortLoopsOnASingleLine: true
AlwaysBreakAfterReturnType: None
AlwaysBreakTemplateDeclarations: Yes
BreakBeforeBraces: Custom
BraceWrapping:
  AfterCaseLabel: false
  AfterClass: false
  AfterControlStatement: Never
  AfterEnum: false
  AfterFunction: false
  AfterNamespace: false
  AfterObjCDeclaration: false
  AfterUnion: false
  BeforeCatch: true
  BeforeElse: true
  IndentBraces: false
  SplitEmptyFunction: false
  SplitEmptyRecord: true
BreakBeforeBinaryOperators: None
BreakBeforeTernaryOperators: false
BreakConstructorInitializers: BeforeColon
BreakInheritanceList: BeforeColon
ColumnLimit: 0
CompactNamespaces: false
ContinuationIndentWidth: 2
IndentCaseLabels: false
IndentPPDirectives: None
IndentWidth: 2
KeepEmptyLinesAtTheStartOfBlocks: true
MaxEmptyLinesToKeep: 2
NamespaceIndentation: None
ObjCSpaceAfterProperty: false
ObjCSpaceBeforeProtocolList: true
PointerAlignment: Right
ReflowComments: false
SpaceAfterCStyleCast: false
SpaceAfterLogicalNot: false
SpaceAfterTemplateKeyword: false
SpaceBeforeAssignmentOperators: true
SpaceBeforeCpp11BracedList: true
SpaceBeforeCtorInitializerColon: true
SpaceBeforeInheritanceColon: true
SpaceBeforeParens: Never
SpaceBeforeRangeBasedForLoopColon: true
SpaceInEmptyParentheses: false
SpacesBeforeTrailingComments: 1
SpacesInAngles: Never
SpacesInCStyleCastParentheses: false
SpacesInContainerLiterals: false
SpacesInParentheses: false
SpacesInSquareBrackets: false
TabWidth: 2
Cpp11BracedListStyle: false
UseTab: Never
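Since this style file is managed centrally and applied repo-wide, contributors can apply or verify it locally before pushing. A minimal sketch, assuming clang-format 10 or newer is installed and run from the repository root (the src/ path is illustrative):

```bash
# Reformat C++ sources in place using the repo's .clang-format (--style=file).
find src \( -name '*.cpp' -o -name '*.h' \) -print0 |
  xargs -0 clang-format --style=file -i

# CI-style check: exit non-zero if any file would be reformatted.
find src \( -name '*.cpp' -o -name '*.h' \) -print0 |
  xargs -0 clang-format --style=file --dry-run --Werror
```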
.dockerignore (new file, 18 lines)
@@ -0,0 +1,18 @@
# ignore git files
.git*

# ignore hidden files
.*

# ignore repo directories and files
docs/
scripts/
tools/
crowdin.yml

# ignore dev directories
build/
venv/

# ignore artifacts
artifacts/
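One way to confirm what these ignore rules leave in the Docker build context is to copy the context into a throwaway image and list it. A rough sketch, assuming Docker with BuildKit; the inline Dockerfile is illustrative and not part of the repository:

```bash
# Build with the Dockerfile on stdin and the current directory as context;
# anything matched by .dockerignore will be missing from /ctx in the output.
printf 'FROM busybox\nCOPY . /ctx\nRUN find /ctx -maxdepth 2 | sort\n' |
  docker build --no-cache --progress=plain -f - .
```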
.flake8 (new file, 7 lines)
@@ -0,0 +1,7 @@
[flake8]
filename =
    *.py,
    *.pys
max-line-length = 120
extend-exclude =
    venv/
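flake8 picks this configuration up automatically when invoked from the repository root, so a local run matches what would be enforced in CI (120-column limit, venv/ excluded, *.py and *.pys files). A quick sketch, assuming flake8 is installed via pip:

```bash
# Lint the repository's Python scripts with the settings above.
python3 -m pip install --user flake8
python3 -m flake8 . --count --statistics
```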
.github/ISSUE_TEMPLATE/bug-report.yml (vendored, new file, 150 lines)
@@ -0,0 +1,150 @@
---
name: Bug Report
description: Create a bug report to help us improve.
body:
  - type: markdown
    attributes:
      value: >
        **THIS IS NOT THE PLACE TO ASK FOR SUPPORT!**
        Please use our [Support Center](https://app.lizardbyte.dev/support) for support issues.
        Non actionable bug reports will be locked and closed!
  - type: checkboxes
    attributes:
      label: Is there an existing issue for this?
      description: Please search to see if an issue already exists for the bug you encountered.
      options:
        - label: I have searched the existing issues
          required: true
  - type: checkboxes
    attributes:
      label: Is your issue described in the documentation?
      description: Please read our [documentation](https://docs.lizardbyte.dev/projects/sunshine)
      options:
        - label: I have read the documentation
          required: true
  - type: checkboxes
    attributes:
      label: Is your issue present in the nightly release?
      description: Please test the [nightly](https://github.com/LizardByte/Sunshine/releases/tag/nightly-dev) release
      options:
        - label: This issue is present in the nightly release
          required: true
  - type: textarea
    id: description
    attributes:
      label: Describe the Bug
      description: A clear and concise description of the bug.
    validations:
      required: true
  - type: textarea
    id: expected
    attributes:
      label: Expected Behavior
      description: A clear and concise description of what you expected to happen.
  - type: textarea
    id: additional
    attributes:
      label: Additional Context
      description: Add any other context about the bug here.
  - type: dropdown
    id: os
    attributes:
      label: Host Operating System
      description: What version operating system are you running the software on?
      options:
        - Docker
        - Linux
        - macOS
        - Windows
        - other, n/a
  - type: input
    id: os-version
    attributes:
      label: Operating System Version
      description: Provide the version of the operating system. Additionally a build number would be helpful.
    validations:
      required: true
  - type: dropdown
    id: os-architecture
    attributes:
      label: Architecture
      options:
        - 32 bit
        - 64 bit
        - arm
        - other, n/a
  - type: input
    id: version
    attributes:
      label: Sunshine commit or version
      placeholder: eg. 0.16.0
    validations:
      required: true
  - type: dropdown
    id: package_type
    attributes:
      label: Package
      description: The package you installed
      options:
        - Linux - AppImage
        - Linux - AUR
        - Linux - 20.04-deb
        - Linux - 22.04-deb
        - Linux - Docker
        - Linux - flatpak
        - Linux - rpm
        - macOS - dmg
        - macOS - Portfile
        - macOS - pkg
        - Windows - installer
        - Windows - portable
        - other (not listed)
        - other (self built)
        - other (fork of this repo)
  - type: dropdown
    id: graphics_type
    attributes:
      label: GPU Type
      description: The type of the installed graphics card.
      options:
        - AMD
        - Intel
        - Nvidia
        - none (software encoding)
  - type: input
    id: graphics_model
    attributes:
      label: GPU Model
      description: The model of the installed graphics card.
      placeholder: e.g. GeForce RTX 2080 SUPER
    validations:
      required: true
  - type: input
    id: graphics_driver
    attributes:
      label: GPU Driver/Mesa Version
      description: The driver/mesa version of the installed graphics card.
      placeholder: e.g. 497.29
    validations:
      required: true
  - type: input
    id: capture_method
    attributes:
      label: Capture Method (Linux Only)
      description: If on Linux, the capture method being used.
      placeholder: e.g. PipeWire/KVM/X11/KMS
    validations:
      required: false
  - type: textarea
    id: logs
    attributes:
      label: Relevant log output
      description: |
        Please copy and paste any relevant log output. This will be automatically formatted into code,
        so no need for backticks.
      render: Shell
  - type: markdown
    attributes:
      value: |
        Make sure to close your issue when it's solved! If you found the solution yourself please comment
        so that others benefit from it.
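The issue form is plain YAML, so it can be sanity-checked locally before pushing. A sketch assuming PyYAML and yamllint are installed; neither tool is prescribed by the template itself:

```bash
# Confirm the issue form parses as YAML and passes basic lint rules.
python3 -c "import yaml; yaml.safe_load(open('.github/ISSUE_TEMPLATE/bug-report.yml'))"
yamllint .github/ISSUE_TEMPLATE/
```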
.github/ISSUE_TEMPLATE/config.yml (vendored, new file, 13 lines)
@@ -0,0 +1,13 @@
---
# This action is centrally managed in https://github.com/<organization>/.github/
# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in
# the above-mentioned repo.

blank_issues_enabled: false
contact_links:
  - name: Support Center
    url: https://app.lizardbyte.dev/support
    about: Official LizardByte support
  - name: Feature request
    url: https://app.lizardbyte.dev/feedback
    about: Share your suggestions or ideas to help us improve
.github/dependabot.yml (vendored, new file, 54 lines)
@@ -0,0 +1,54 @@
---
# This action is centrally managed in https://github.com/<organization>/.github/
# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in
# the above-mentioned repo.

version: 2
updates:
  - package-ecosystem: "docker"
    directory: "/"
    schedule:
      interval: "daily"
      time: "08:00"
    target-branch: "nightly"
    open-pull-requests-limit: 10

  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "daily"
      time: "08:30"
    target-branch: "nightly"
    open-pull-requests-limit: 10

  - package-ecosystem: "npm"
    directory: "/"
    schedule:
      interval: "daily"
      time: "09:00"
    target-branch: "nightly"
    open-pull-requests-limit: 10

  - package-ecosystem: "nuget"
    directory: "/"
    schedule:
      interval: "daily"
      time: "09:30"
    target-branch: "nightly"
    open-pull-requests-limit: 10

  - package-ecosystem: "pip"
    directory: "/"
    schedule:
      interval: "daily"
      time: "10:00"
    target-branch: "nightly"
    open-pull-requests-limit: 10

  - package-ecosystem: "gitsubmodule"
    directory: "/"
    schedule:
      interval: "daily"
      time: "10:30"
    target-branch: "nightly"
    open-pull-requests-limit: 10
.github/label-actions.yml (vendored, new file, 49 lines)
@@ -0,0 +1,49 @@
---
# This action is centrally managed in https://github.com/<organization>/.github/
# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in
# the above-mentioned repo.

# Configuration for Label Actions - https://github.com/dessant/label-actions

added:
  comment: >
    This feature has been added and will be available in the next release.
fixed:
  comment: >
    This issue has been fixed and will be available in the next release.
invalid:duplicate:
  comment: >
    :wave: @{issue-author}, this appears to be a duplicate of a pre-existing issue.
  close: true
  lock: true
  unlabel: 'status:awaiting-triage'

-invalid:duplicate:
  reopen: true
  unlock: true

invalid:support:
  comment: >
    :wave: @{issue-author}, we use the issue tracker exclusively for bug reports.
    However, this issue appears to be a support request. Please use our
    [Support Center](https://app.lizardbyte.dev/support) for support issues. Thanks.
  close: true
  lock: true
  lock-reason: 'off-topic'
  unlabel: 'status:awaiting-triage'

-invalid:support:
  reopen: true
  unlock: true

invalid:template-incomplete:
  issues:
    comment: >
      :wave: @{issue-author}, please edit your issue to complete the template with
      all the required info. Your issue will be automatically closed in 5 days if
      the template is not completed. Thanks.
  prs:
    comment: >
      :wave: @{issue-author}, please edit your PR to complete the template with
      all the required info. Your PR will be automatically closed in 5 days if
      the template is not completed. Thanks.
.github/pr_release_template.md (vendored, new file, 28 lines)
@@ -0,0 +1,28 @@
## Description
<!--- Please include a summary of the changes. --->
This PR was created automatically.


### Screenshot
<!--- Include screenshots if the changes are UI-related. --->


### Issues Fixed or Closed
<!--- Close issue example: `- Closes #1` --->
<!--- Fix bug issue example: `- Fixes #2` --->
<!--- Resolve issue example: `- Resolves #3` --->


## Type of Change
- [ ] Bug fix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
- [ ] Dependency update (updates to dependencies)
- [ ] Documentation update (changes to documentation)
- [ ] Repository update (changes to repository files, e.g. `.github/...`)

## Branch Updates
- [x] I want maintainers to keep my branch updated

## Changelog Summary
<!--- Summarize all the changes in a bulleted list. --->
.github/workflows/CI.yml (vendored, new file, 954 lines)
@@ -0,0 +1,954 @@
---
name: CI

on:
  pull_request:
    branches: [master, nightly]
    types: [opened, synchronize, reopened]
  push:
    branches: [master, nightly]
  workflow_dispatch:

concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: true

jobs:
  github_env:
    name: GitHub Env Debug
    runs-on: ubuntu-latest

    steps:
      - name: Dump github context
        run: echo "$GITHUB_CONTEXT"
        shell: bash
        env:
          GITHUB_CONTEXT: ${{ toJson(github) }}

  check_changelog:
    name: Check Changelog
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        if: ${{ github.ref == 'refs/heads/master' || github.base_ref == 'master' }}
        uses: actions/checkout@v3

      - name: Verify Changelog
        id: verify_changelog
        if: ${{ github.ref == 'refs/heads/master' || github.base_ref == 'master' }}
        # base_ref for pull request check, ref for push
        uses: LizardByte/.github/actions/verify_changelog@master
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
    outputs:
      next_version: ${{ steps.verify_changelog.outputs.changelog_parser_version }}
      next_version_bare: ${{ steps.verify_changelog.outputs.changelog_parser_version_bare }}
      last_version: ${{ steps.verify_changelog.outputs.latest_release_tag_name }}
      release_body: ${{ steps.verify_changelog.outputs.changelog_parser_description }}

  check_versions:
    name: Check Versions
    runs-on: ubuntu-latest
    needs: check_changelog
    if: ${{ github.ref == 'refs/heads/master' || github.base_ref == 'master' }}
    # base_ref for pull request check, ref for push
    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Check CMakeLists.txt Version
        run: |
          version=$(grep -o -E '^project\(Sunshine VERSION [0-9]+\.[0-9]+\.[0-9]+' CMakeLists.txt | \
            grep -o -E '[0-9]+\.[0-9]+\.[0-9]+')
          echo "cmakelists_version=${version}" >> $GITHUB_ENV

      - name: Compare CMakeList.txt Version
        if: ${{ env.cmakelists_version != needs.check_changelog.outputs.next_version_bare }}
        run: |
          echo CMakeLists version: "$cmakelists_version"
          echo Changelog version: "${{ needs.check_changelog.outputs.next_version_bare }}"
          echo Within 'CMakeLists.txt' change "project(Sunshine [VERSION $cmakelists_version]" to \
            "project(Sunshine [VERSION ${{ needs.check_changelog.outputs.next_version_bare }}]"
          exit 1

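The check_versions job extracts the version with a two-stage grep: the first pass anchors on the project() declaration, the second reduces the match to the bare version number. A standalone sketch of the same pipeline against a sample file (the project() line and version number are illustrative):

```bash
# Mirror of the CMakeLists.txt version extraction used in check_versions.
printf 'project(Sunshine VERSION 0.0.0)\n' > /tmp/CMakeLists.txt
version=$(grep -o -E '^project\(Sunshine VERSION [0-9]+\.[0-9]+\.[0-9]+' /tmp/CMakeLists.txt | \
  grep -o -E '[0-9]+\.[0-9]+\.[0-9]+')
echo "cmakelists_version=${version}"  # prints cmakelists_version=0.0.0
```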
  setup_release:
    name: Setup Release
    needs: check_changelog
    runs-on: ubuntu-latest
    steps:
      - name: Set release details
        id: release_details
        env:
          RELEASE_BODY: ${{ needs.check_changelog.outputs.release_body }}
        run: |
          # determine to create a release or not
          if [[ $GITHUB_EVENT_NAME == "push" ]]; then
            RELEASE=true
          else
            RELEASE=false
          fi

          # set the release tag
          COMMIT=${{ github.sha }}
          if [[ $GITHUB_REF == refs/heads/master ]]; then
            TAG="${{ needs.check_changelog.outputs.next_version }}"
            RELEASE_NAME="${{ needs.check_changelog.outputs.next_version }}"
            RELEASE_BODY="$RELEASE_BODY"
            PRE_RELEASE="false"
          elif [[ $GITHUB_REF == refs/heads/nightly ]]; then
            TAG="nightly-dev"
            RELEASE_NAME="nightly"
            RELEASE_BODY="automated nightly release - $(date -u +'%Y-%m-%dT%H:%M:%SZ') - ${COMMIT}"
            PRE_RELEASE="true"
          fi

          echo "create_release=${RELEASE}" >> $GITHUB_OUTPUT
          echo "release_tag=${TAG}" >> $GITHUB_OUTPUT
          echo "release_commit=${COMMIT}" >> $GITHUB_OUTPUT
          echo "release_name=${RELEASE_NAME}" >> $GITHUB_OUTPUT
          echo "pre_release=${PRE_RELEASE}" >> $GITHUB_OUTPUT

          # this is stupid but works for multiline strings
          echo "RELEASE_BODY<<EOF" >> $GITHUB_ENV
          echo "$RELEASE_BODY" >> $GITHUB_ENV
          echo "EOF" >> $GITHUB_ENV

    outputs:
      create_release: ${{ steps.release_details.outputs.create_release }}
      release_tag: ${{ steps.release_details.outputs.release_tag }}
      release_commit: ${{ steps.release_details.outputs.release_commit }}
      release_name: ${{ steps.release_details.outputs.release_name }}
      release_body: ${{ env.RELEASE_BODY }}
      pre_release: ${{ steps.release_details.outputs.pre_release }}

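Two shell patterns in setup_release are easy to miss: single-line values are appended to $GITHUB_OUTPUT as plain key=value pairs, while the multiline release body uses delimiter (heredoc-style) syntax when written to $GITHUB_ENV. A standalone sketch with the target files redirected so it runs outside a runner:

```bash
# Outside a runner, point the files somewhere writable to try the pattern.
GITHUB_OUTPUT=/tmp/github_output
GITHUB_ENV=/tmp/github_env

# Single-line value: plain key=value.
echo "release_tag=nightly-dev" >> "$GITHUB_OUTPUT"

# Multiline value: delimiter syntax, as used for RELEASE_BODY above.
RELEASE_BODY=$'release notes line one\nrelease notes line two'
{
  echo "RELEASE_BODY<<EOF"
  echo "$RELEASE_BODY"
  echo "EOF"
} >> "$GITHUB_ENV"

cat "$GITHUB_OUTPUT" "$GITHUB_ENV"
```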
  setup_flatpak_matrix:
    name: Setup Flatpak Matrix
    runs-on: ubuntu-latest
    steps:
      - name: Set release details
        id: flatpak_matrix
        # https://www.cynkra.com/blog/2020-12-23-dynamic-gha
        run: |
          # determine which architectures to build
          if [[ $GITHUB_EVENT_NAME == "push" ]]; then
            matrix=$((
              echo '{ "arch" : ["x86_64", "aarch64"] }'
            ) | jq -c .)
          else
            matrix=$((
              echo '{ "arch" : ["x86_64"] }'
            ) | jq -c .)
          fi

          echo $matrix
          echo $matrix | jq .
          echo "matrix=$matrix" >> $GITHUB_OUTPUT

    outputs:
      matrix: ${{ steps.flatpak_matrix.outputs.matrix }}

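The matrix job emits a compact JSON object that fromJson() later expands into matrix.arch values for the Flatpak builds. The same jq round-trip can be exercised locally, assuming jq is installed:

```bash
# Compact the architecture list exactly as setup_flatpak_matrix does,
# then pretty-print it to confirm the JSON is valid.
matrix=$(echo '{ "arch" : ["x86_64", "aarch64"] }' | jq -c .)
echo "$matrix"         # {"arch":["x86_64","aarch64"]}
echo "$matrix" | jq .  # pretty-printed form
echo "matrix=$matrix"  # the line appended to $GITHUB_OUTPUT in CI
```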
  build_linux_aur:
    name: Linux AUR
    runs-on: ubuntu-latest
    needs: setup_release

    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Setup Dependencies Linux AUR
        run: |
          sudo apt-get update -y
          sudo apt-get install -y \
            cmake

      - name: Configure PKGBUILD files
        id: prepare
        run: |
          # variables for manifest
          aur_publish=false
          aur_pkg=sunshine-dev
          sub_version=""
          conflicts="'sunshine'"
          provides="'sunshine'"

          branch=${GITHUB_HEAD_REF}

          # check the branch variable
          if [ -z "$branch" ]; then
            echo "This is a PUSH event"
            commit=${{ github.sha }}
            clone_url=${{ github.event.repository.clone_url }}

            if [[ ${{ github.ref == 'refs/heads/master' }} == true ]]; then
              echo "This is a main release event"
              aur_publish=true
              aur_pkg=sunshine
              conflicts=""
              provides=""
            elif [[ ${{ github.ref == 'refs/heads/nightly' }} == true ]]; then
              echo "This is a nightly release event"
              sub_version=".r${commit}"
            fi
          else
            echo "This is a PR event"
            commit=${{ github.event.pull_request.head.sha }}
            clone_url=${{ github.event.pull_request.head.repo.clone_url }}

            sub_version=".r${commit}"
          fi
          echo "Commit: ${commit}"
          echo "Clone URL: ${clone_url}"

          echo "aur_publish=${aur_publish}" >> $GITHUB_OUTPUT
          echo "aur_pkg=${aur_pkg}" >> $GITHUB_OUTPUT

          mkdir -p artifacts
          mkdir -p build

          cd build
          cmake -DSUNSHINE_CONFIGURE_AUR=ON \
            -DSUNSHINE_AUR_PKG=${aur_pkg} \
            -DSUNSHINE_SUB_VERSION=${sub_version} \
            -DSUNSHINE_AUR_CONFLICTS=${conflicts} \
            -DSUNSHINE_AUR_PROVIDES=${provides} \
            -DGITHUB_CLONE_URL=${clone_url} \
            -DGITHUB_COMMIT=${commit} \
            -DSUNSHINE_CONFIGURE_ONLY=ON \
            ..
          cd ..

          mv ./build/PKGBUILD ./artifacts/

      - name: Validate package
        uses: LizardByte/archlinux-package-action@master
        with:
          path: artifacts
          flags: '--syncdeps --noconfirm'
          namcap: true
          srcinfo: true
          aur: true  # workaround mirror problem

      - name: Upload Artifacts
        if: ${{ always() }}
        uses: actions/upload-artifact@v3
        with:
          name: sunshine-linux-aur
          path: artifacts/

      - name: Publish AUR package
        if: ${{ steps.prepare.outputs.aur_publish == 'true' }}
        uses: KSXGitHub/github-actions-deploy-aur@v2.6.0
        with:
          pkgname: ${{ steps.prepare.outputs.aur_pkg }}
          pkgbuild: ./artifacts/PKGBUILD
          assets: |
            ./artifacts/*
          commit_username: ${{ secrets.AUR_USERNAME }}
          commit_email: ${{ secrets.AUR_EMAIL }}
          ssh_private_key: ${{ secrets.AUR_SSH_PRIVATE_KEY }}
          commit_message: Automatic update from GitHub ${{ github.repository }} per ${{ github.ref }}
          allow_empty_commits: false

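The job itself only generates the PKGBUILD through CMake; building and linting are delegated to archlinux-package-action. On an Arch Linux host (or container) the generated file can also be exercised by hand. A sketch, assuming base-devel and namcap are installed and the PKGBUILD from the job sits in ./artifacts:

```bash
# Manually validate the generated PKGBUILD on an Arch system.
cd artifacts
makepkg --printsrcinfo > .SRCINFO   # regenerate .SRCINFO from the PKGBUILD
makepkg --syncdeps --noconfirm      # build the package, pulling dependencies
namcap PKGBUILD *.pkg.tar.zst       # lint the PKGBUILD and the built package
```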
  build_linux_flatpak:
    name: Linux Flatpak
    runs-on: ubuntu-22.04
    needs: [setup_release, setup_flatpak_matrix]
    strategy:
      fail-fast: false  # false to test all, true to fail entire job if any fail
      matrix: ${{fromJson(needs.setup_flatpak_matrix.outputs.matrix)}}

    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Setup Dependencies Linux Flatpak
        run: |
          PLATFORM_VERSION=21.08

          sudo apt-get update -y
          sudo apt-get install -y \
            cmake \
            flatpak \
            qemu-user-static
          sudo su $(whoami) -c "flatpak --user remote-add --if-not-exists flathub \
            https://flathub.org/repo/flathub.flatpakrepo"
          sudo su $(whoami) -c "flatpak --user install -y flathub \
            org.flatpak.Builder \
            org.freedesktop.Platform/${{ matrix.arch }}/${PLATFORM_VERSION} \
            org.freedesktop.Sdk/${{ matrix.arch }}/${PLATFORM_VERSION} \
            org.freedesktop.Sdk.Extension.node18/${{ matrix.arch }}/${PLATFORM_VERSION} \
            "

      - name: Cache Flatpak build
        uses: actions/cache@v3
        with:
          path: ./build/.flatpak-builder
          key: flatpak-${{ matrix.arch }}-${{ github.sha }}
          restore-keys: |
            flatpak-${{ matrix.arch }}-

      - name: Configure Flatpak Manifest
        run: |
          # variables for manifest
          branch=${GITHUB_HEAD_REF}

          # check the branch variable
          if [ -z "$branch" ]
          then
            echo "This is a PUSH event"
            branch=${{ github.ref_name }}
            commit=${{ github.sha }}
            clone_url=${{ github.event.repository.clone_url }}
          else
            echo "This is a PR event"
            commit=${{ github.event.pull_request.head.sha }}
            clone_url=${{ github.event.pull_request.head.repo.clone_url }}
          fi
          echo "Branch: ${branch}"
          echo "Commit: ${commit}"
          echo "Clone URL: ${clone_url}"

          mkdir -p build
          mkdir -p artifacts

          cd build
          cmake -DGITHUB_CLONE_URL=${clone_url} \
            -DGITHUB_BRANCH=${branch} \
            -DGITHUB_COMMIT=${commit} \
            -DSUNSHINE_CONFIGURE_FLATPAK_MAN=ON \
            -DSUNSHINE_CONFIGURE_ONLY=ON \
            ..

      - name: Build Linux Flatpak
        working-directory: build
        run: |
          sudo su $(whoami) -c 'flatpak run org.flatpak.Builder --arch=${{ matrix.arch }} --repo=repo --force-clean \
            --stop-at=cuda build-sunshine dev.lizardbyte.sunshine.yml'
          cp -r .flatpak-builder copy-of-flatpak-builder
          sudo su $(whoami) -c 'flatpak run org.flatpak.Builder --arch=${{ matrix.arch }} --repo=repo --force-clean \
            build-sunshine dev.lizardbyte.sunshine.yml'
          rm -rf .flatpak-builder
          mv copy-of-flatpak-builder .flatpak-builder
          sudo su $(whoami) -c 'flatpak build-bundle --arch=${{ matrix.arch }} ./repo \
            ../artifacts/sunshine_${{ matrix.arch }}.flatpak dev.lizardbyte.sunshine'
          sudo su $(whoami) -c 'flatpak build-bundle --runtime --arch=${{ matrix.arch }} ./repo \
            ../artifacts/sunshine_debug_${{ matrix.arch }}.flatpak dev.lizardbyte.sunshine.Debug'

      - name: Upload Artifacts
        uses: actions/upload-artifact@v3
        with:
          name: sunshine-linux-flatpak-${{ matrix.arch }}
          path: artifacts/

      - name: Create/Update GitHub Release
        if: ${{ needs.setup_release.outputs.create_release == 'true' }}
        uses: ncipollo/release-action@v1
        with:
          name: ${{ needs.setup_release.outputs.release_name }}
          tag: ${{ needs.setup_release.outputs.release_tag }}
          commit: ${{ needs.setup_release.outputs.release_commit }}
          artifacts: "*artifacts/*"
          token: ${{ secrets.GH_BOT_TOKEN }}
          allowUpdates: true
          body: ${{ needs.setup_release.outputs.release_body }}
          discussionCategory: announcements
          prerelease: ${{ needs.setup_release.outputs.pre_release }}

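flatpak build-bundle produces a single-file .flatpak, so the uploaded artifact can be installed directly for testing. A sketch, assuming the x86_64 bundle was downloaded to the current directory and Flathub is available for the runtime dependencies:

```bash
# Install and run the single-file bundle produced by the job above.
flatpak --user remote-add --if-not-exists flathub https://flathub.org/repo/flathub.flatpakrepo
flatpak --user install -y ./sunshine_x86_64.flatpak
flatpak run dev.lizardbyte.sunshine
```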
build_linux:
|
||||
name: Linux
|
||||
runs-on: ubuntu-${{ matrix.dist }}
|
||||
needs: [check_changelog, setup_release]
|
||||
strategy:
|
||||
fail-fast: false # false to test all, true to fail entire job if any fail
|
||||
matrix:
|
||||
include: # package these differently
|
||||
- type: appimage
|
||||
EXTRA_ARGS: '-DSUNSHINE_CONFIGURE_APPIMAGE=ON'
|
||||
dist: 20.04
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
|
||||
- name: Setup Dependencies Linux
|
||||
run: |
|
||||
sudo add-apt-repository ppa:ubuntu-toolchain-r/test -y
|
||||
|
||||
if [[ ${{ matrix.dist }} == "18.04" ]]; then
|
||||
# Ubuntu 18.04 packages
|
||||
sudo add-apt-repository ppa:savoury1/boost-defaults-1.71 -y
|
||||
|
||||
sudo apt-get update -y
|
||||
sudo apt-get install -y \
|
||||
libboost-filesystem1.71-dev \
|
||||
libboost-log1.71-dev \
|
||||
libboost-regex1.71-dev \
|
||||
libboost-thread1.71-dev \
|
||||
libboost-program-options1.71-dev
|
||||
|
||||
# Install cmake
|
||||
wget https://cmake.org/files/v3.22/cmake-3.22.2-linux-x86_64.sh
|
||||
chmod +x cmake-3.22.2-linux-x86_64.sh
|
||||
mkdir /opt/cmake
|
||||
./cmake-3.22.2-linux-x86_64.sh --prefix=/opt/cmake --skip-license
|
||||
ln --force --symbolic /opt/cmake/bin/cmake /usr/local/bin/cmake
|
||||
cmake --version
|
||||
|
||||
# install newer tar from focal... appimagelint fails on 18.04 without this
|
||||
echo "original tar version"
|
||||
tar --version
|
||||
wget -O tar.deb http://security.ubuntu.com/ubuntu/pool/main/t/tar/tar_1.30+dfsg-7ubuntu0.20.04.2_amd64.deb
|
||||
sudo apt-get -y install -f ./tar.deb
|
||||
echo "new tar version"
|
||||
tar --version
|
||||
else
|
||||
# Ubuntu 20.04+ packages
|
||||
sudo apt-get update -y
|
||||
sudo apt-get install -y \
|
||||
cmake \
|
||||
libboost-filesystem-dev \
|
||||
libboost-log-dev \
|
||||
libboost-thread-dev \
|
||||
libboost-program-options-dev
|
||||
fi
|
||||
|
||||
sudo apt-get install -y \
|
||||
build-essential \
|
||||
gcc-10 \
|
||||
g++-10 \
|
||||
libavdevice-dev \
|
||||
libcap-dev \
|
||||
libcurl4-openssl-dev \
|
||||
libdrm-dev \
|
||||
libevdev-dev \
|
||||
libmfx-dev \
|
||||
libnuma-dev \
|
||||
libopus-dev \
|
||||
libpulse-dev \
|
||||
libssl-dev \
|
||||
libva-dev \
|
||||
libvdpau-dev \
|
||||
libwayland-dev \
|
||||
libx11-dev \
|
||||
libxcb-shm0-dev \
|
||||
libxcb-xfixes0-dev \
|
||||
libxcb1-dev \
|
||||
libxfixes-dev \
|
||||
libxrandr-dev \
|
||||
libxtst-dev \
|
||||
wget
|
||||
|
||||
# clean apt cache
|
||||
sudo apt-get clean
|
||||
sudo rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Update gcc alias
|
||||
# https://stackoverflow.com/a/70653945/11214013
|
||||
sudo update-alternatives --install \
|
||||
/usr/bin/gcc gcc /usr/bin/gcc-10 100 \
|
||||
--slave /usr/bin/g++ g++ /usr/bin/g++-10 \
|
||||
--slave /usr/bin/gcov gcov /usr/bin/gcov-10 \
|
||||
--slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-10 \
|
||||
--slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-10
|
||||
|
||||
# Install CUDA
|
||||
sudo wget \
|
||||
https://developer.download.nvidia.com/compute/cuda/11.8.0/local_installers/cuda_11.8.0_520.61.05_linux.run \
|
||||
--progress=bar:force:noscroll -q --show-progress -O /root/cuda.run
|
||||
sudo chmod a+x /root/cuda.run
|
||||
sudo /root/cuda.run --silent --toolkit --toolkitpath=/usr --no-opengl-libs --no-man-page --no-drm
|
||||
sudo rm /root/cuda.run
|
||||
|
||||
- name: Build Linux
|
||||
run: |
|
||||
mkdir -p build
|
||||
mkdir -p artifacts
|
||||
|
||||
npm install
|
||||
|
||||
cd build
|
||||
cmake -DCMAKE_BUILD_TYPE=Release \
|
||||
-DCMAKE_INSTALL_PREFIX=/usr \
|
||||
-DSUNSHINE_ASSETS_DIR=share/sunshine \
|
||||
-DSUNSHINE_EXECUTABLE_PATH=/usr/bin/sunshine \
|
||||
-DSUNSHINE_ENABLE_WAYLAND=ON \
|
||||
-DSUNSHINE_ENABLE_X11=ON \
|
||||
-DSUNSHINE_ENABLE_DRM=ON \
|
||||
-DSUNSHINE_ENABLE_CUDA=ON \
|
||||
${{ matrix.EXTRA_ARGS }} \
|
||||
..
|
||||
make -j ${nproc}
|
||||
|
||||
- name: Package Linux - CPACK
|
||||
if: ${{ matrix.type == 'cpack' }}
|
||||
working-directory: build
|
||||
run: |
|
||||
cpack -G DEB
|
||||
mv ./cpack_artifacts/Sunshine.deb ../artifacts/sunshine-${{ matrix.dist }}.deb
|
||||
|
||||
if [[ ${{ matrix.dist }} == "20.04" ]]; then
|
||||
cpack -G RPM
|
||||
mv ./cpack_artifacts/Sunshine.rpm ../artifacts/sunshine.rpm
|
||||
fi
|
||||
|
||||
- name: Set AppImage Version
|
||||
if: ${{ matrix.type == 'appimage' && ( needs.check_changelog.outputs.next_version_bare != needs.check_changelog.outputs.last_version ) }} # yamllint disable-line rule:line-length
|
||||
run: |
|
||||
version=${{ needs.check_changelog.outputs.next_version_bare }}
|
||||
echo "VERSION=${version}" >> $GITHUB_ENV
|
||||
|
||||
- name: Package Linux - AppImage
|
||||
if: ${{ matrix.type == 'appimage' }}
|
||||
working-directory: build
|
||||
run: |
|
||||
# install sunshine to the DESTDIR
|
||||
make install DESTDIR=AppDir
|
||||
|
||||
# custom AppRun file
|
||||
cp -f ../packaging/linux/AppImage/AppRun ./AppDir/
|
||||
chmod +x ./AppDir/AppRun
|
||||
|
||||
# variables
|
||||
DESKTOP_FILE="${DESKTOP_FILE:-sunshine.desktop}"
|
||||
ICON_FILE="${ICON_FILE:-sunshine.png}"
|
||||
|
||||
# AppImage
|
||||
# https://docs.appimage.org/packaging-guide/index.html
|
||||
wget https://github.com/linuxdeploy/linuxdeploy/releases/download/continuous/linuxdeploy-x86_64.AppImage
|
||||
chmod +x linuxdeploy-x86_64.AppImage
|
||||
|
||||
./linuxdeploy-x86_64.AppImage \
|
||||
--appdir ./AppDir \
|
||||
--executable ./sunshine \
|
||||
--icon-file "../$ICON_FILE" \
|
||||
--desktop-file "./$DESKTOP_FILE" \
|
||||
--library /usr/lib/x86_64-linux-gnu/libpango-1.0.so.0 \
|
||||
--library /usr/lib/x86_64-linux-gnu/libpangocairo-1.0.so.0 \
|
||||
--library /usr/lib/x86_64-linux-gnu/libpangoft2-1.0.so.0 \
|
||||
--output appimage
|
||||
|
||||
# move
|
||||
mv Sunshine*.AppImage ../artifacts/sunshine.AppImage
|
||||
|
||||
# permissions
|
||||
chmod +x ../artifacts/sunshine.AppImage
|
||||
|
      - name: Verify AppImage
        if: ${{ matrix.type == 'appimage' }}
        run: |
          wget https://github.com/TheAssassin/appimagelint/releases/download/continuous/appimagelint-x86_64.AppImage
          chmod +x appimagelint-x86_64.AppImage

          # rm -rf ~/.cache/appimagelint/

          ./appimagelint-x86_64.AppImage ./artifacts/sunshine.AppImage

      - name: Upload Artifacts
        uses: actions/upload-artifact@v3
        with:
          name: sunshine-linux-${{ matrix.type }}-${{ matrix.dist }}
          path: artifacts/

      - name: Create/Update GitHub Release
        if: ${{ needs.setup_release.outputs.create_release == 'true' }}
        uses: ncipollo/release-action@v1
        with:
          name: ${{ needs.setup_release.outputs.release_name }}
          tag: ${{ needs.setup_release.outputs.release_tag }}
          commit: ${{ needs.setup_release.outputs.release_commit }}
          artifacts: "*artifacts/*"
          token: ${{ secrets.GH_BOT_TOKEN }}
          allowUpdates: true
          body: ${{ needs.setup_release.outputs.release_body }}
          discussionCategory: announcements
          prerelease: ${{ needs.setup_release.outputs.pre_release }}
  build_mac:
    name: MacOS
    runs-on: macos-11
    needs: setup_release

    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          submodules: recursive

      - name: Setup Dependencies MacOS
        run: |
          # install dependencies using homebrew
          brew install boost cmake curl node opus

          # fix openssl header not found
          ln -sf /usr/local/opt/openssl/include/openssl /usr/local/include/openssl

      - name: Build MacOS
        run: |
          npm install

          mkdir build
          cd build
          cmake -DCMAKE_BUILD_TYPE=Release \
            -DCMAKE_INSTALL_PREFIX=/usr \
            -DSUNSHINE_ASSETS_DIR=local/sunshine/assets \
            -DSUNSHINE_EXECUTABLE_PATH=/usr/bin/sunshine \
            ..
          make -j ${nproc}

      - name: Package MacOS
        run: |
          mkdir -p artifacts
          cd build

          # package
          cpack -G DragNDrop
          mv ./cpack_artifacts/Sunshine.dmg ../artifacts/sunshine-macos-experimental-dragndrop.dmg

          cpack -G Bundle
          mv ./cpack_artifacts/Sunshine.dmg ../artifacts/sunshine-macos-experimental-bundle.dmg

          cpack -G ZIP
          mv ./cpack_artifacts/Sunshine.zip ../artifacts/sunshine-macos-experimental-archive.zip

      - name: Upload Artifacts
        uses: actions/upload-artifact@v3
        with:
          name: sunshine-macos
          path: artifacts/

      # this step can be removed after packages are fixed
      - name: Delete experimental packages
        if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/master' }}
        working-directory: artifacts
        run: |
          rm -f ./sunshine-macos-experimental-dragndrop.dmg
          rm -f ./sunshine-macos-experimental-bundle.dmg
          rm -f ./sunshine-macos-experimental-archive.zip

      # # no artifacts to release currently
      # - name: Create/Update GitHub Release
      #   if: ${{ needs.setup_release.outputs.create_release == 'true' }}
      #   uses: ncipollo/release-action@v1
      #   with:
      #     name: ${{ needs.setup_release.outputs.release_name }}
      #     tag: ${{ needs.setup_release.outputs.release_tag }}
      #     commit: ${{ needs.setup_release.outputs.release_commit }}
      #     artifacts: "*artifacts/*"
      #     token: ${{ secrets.GH_BOT_TOKEN }}
      #     allowUpdates: true
      #     body: ${{ needs.setup_release.outputs.release_body }}
      #     discussionCategory: announcements
      #     prerelease: ${{ needs.setup_release.outputs.pre_release }}
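The symlink above works around Homebrew's keg-only OpenSSL, whose headers are not linked into /usr/local/include by default. If the hard-coded path ever drifts, the prefix can be queried instead of assumed (a sketch, not what the workflow does):

    ln -sf "$(brew --prefix openssl)/include/openssl" /usr/local/include/openssl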
  build_mac_port:
    name: Macports
    needs: setup_release
    runs-on: macos-11

    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Checkout ports
        uses: actions/checkout@v3
        with:
          repository: macports/macports-ports
          fetch-depth: 64
          path: ports

      - name: Checkout mpbb
        uses: actions/checkout@v3
        with:
          repository: macports/mpbb
          path: mpbb

      - name: Setup Dependencies Macports
        run: |
          # install dependencies using homebrew
          brew install cmake

      - name: Configure Portfile
        run: |
          # variables for Portfile
          branch=${GITHUB_HEAD_REF}

          # check the branch variable
          if [ -z "$branch" ]
          then
            echo "This is a PUSH event"
            commit=${{ github.sha }}
            clone_url=${{ github.event.repository.clone_url }}
          else
            echo "This is a PR event"
            commit=${{ github.event.pull_request.head.sha }}
            clone_url=${{ github.event.pull_request.head.repo.clone_url }}
          fi
          echo "Commit: ${commit}"
          echo "Clone URL: ${clone_url}"

          mkdir build
          cd build
          cmake -DGITHUB_COMMIT=${commit} \
            -DGITHUB_CLONE_URL=${clone_url} \
            -DSUNSHINE_CONFIGURE_PORTFILE=ON \
            -DSUNSHINE_CONFIGURE_ONLY=ON \
            ..
          cd ..

          # copy Portfile to artifacts
          mkdir -p artifacts
          cp -f ./build/Portfile ./artifacts/

          # copy Portfile to ports
          mkdir -p ./ports/multimedia/Sunshine
          cp -f ./build/Portfile ./ports/multimedia/Sunshine/Portfile

          # testing
          cat ./artifacts/Portfile

      - name: Bootstrap MacPorts
        run: |
          . ports/.github/workflows/bootstrap.sh

          # Add getopt, mpbb and the MacPorts paths to $PATH for the subsequent steps.
          echo "/opt/mports/bin" >> $GITHUB_PATH
          echo "${PWD}/mpbb" >> $GITHUB_PATH
          echo "/opt/local/bin" >> $GITHUB_PATH
          echo "/opt/local/sbin" >> $GITHUB_PATH

      - name: Determine list of subports
        id: subportlist
        run: |
          set -eu
          port=Sunshine
          subportlist=""

          echo "Listing subports for Sunshine"
          new_subports=$(mpbb \
            --work-dir /tmp/mpbb \
            list-subports \
            --archive-site= \
            --archive-site-private= \
            --include-deps=no \
            "$port" \
            | tr '\n' ' ')
          for subport in $new_subports; do
            echo "$subport"
            subportlist="$subportlist $subport"
          done
          echo "subportlist=${subportlist}" >> $GITHUB_OUTPUT

      - name: Run port lint for all subports
        run: |
          set -eu
          fail=0
          for subport in $subportlist; do
            echo "::group::${subport}"
            path=$(port file "$subport")
            messagetype="warning"
            if ! messages=$(port -q lint "$subport" 2>&1); then
              messagetype="error"
              fail=1
            fi
            if [ -n "$messages" ]; then
              echo "$messages"
              # See https://github.com/actions/toolkit/issues/193#issuecomment-605394935
              encoded_messages="port lint ${subport}:%0A"
              encoded_messages+="$(echo "${messages}" | sed -E 's/$/%0A/g' | tr -d '\n')"
              echo "::${messagetype} file=${path#${PWD}/ports/},line=1,col=1::${encoded_messages}"
            fi
            echo "::endgroup::"
          done
          exit "$fail"
        env:
          subportlist: ${{ steps.subportlist.outputs.subportlist }}

      - name: Build subports
        run: |
          set -eu
          fail=0
          for subport in $subportlist; do
            workdir="/tmp/mpbb/$subport"
            mkdir -p "$workdir/logs"
            touch "$workdir/logs/dependencies-progress.txt"
            echo "::group::Cleaning up between ports"
            sudo mpbb --work-dir "$workdir" cleanup
            echo "::endgroup::"
            echo "::group::Installing dependencies for ${subport}"
            sudo mpbb \
              --work-dir "$workdir" \
              install-dependencies \
              "$subport" >"$workdir/logs/install-dependencies.log" 2>&1 &
            deps_pid=$!
            tail -f "$workdir/logs/dependencies-progress.txt" 2>/dev/null &
            tail_pid=$!
            set +e
            wait "$deps_pid"
            deps_exit=$?
            set -e
            kill "$tail_pid" || true
            if [ "$deps_exit" -ne 0 ]; then
              echo "::endgroup::"
              echo "::error::Failed to install dependencies for ${subport}"
              fail=1
              continue
            fi
            echo "::endgroup::"
            echo "::group::Installing ${subport}"
            set +e
            sudo mpbb \
              --work-dir "$workdir" \
              install-port \
              --source \
              "$subport"
            install_exit=$?
            set -e
            if [ "$install_exit" -ne 0 ]; then
              echo "::endgroup::"
              echo "::error::Failed to install ${subport}"
              fail=1
              continue
            fi
            echo "::endgroup::"
          done
          exit "$fail"
        env:
          subportlist: ${{ steps.subportlist.outputs.subportlist }}

      - name: Package
        run: |
          # create packages
          sudo port pkg sunshine
          sudo port dmg sunshine

          work=$(port work sunshine)
          echo "Sunshine port work directory: ${work}"

          # move components out of port work directory
          sudo mv ${work}/Sunshine*component.pkg /tmp/

          # copy artifacts
          sudo mv ${work}/Sunshine*.pkg ./artifacts/sunshine.pkg
          sudo mv ${work}/Sunshine*.dmg ./artifacts/sunshine.dmg

          # move components back
          # sudo mv /tmp/Sunshine*component.pkg ${work}/

      - name: Upload Artifacts
        uses: actions/upload-artifact@v3
        with:
          name: sunshine-macports
          path: artifacts/

      - name: Create/Update GitHub Release
        if: ${{ needs.setup_release.outputs.create_release == 'true' }}
        uses: ncipollo/release-action@v1
        with:
          name: ${{ needs.setup_release.outputs.release_name }}
          tag: ${{ needs.setup_release.outputs.release_tag }}
          commit: ${{ needs.setup_release.outputs.release_commit }}
          artifacts: "*artifacts/*"
          token: ${{ secrets.GH_BOT_TOKEN }}
          allowUpdates: true
          body: ${{ needs.setup_release.outputs.release_body }}
          discussionCategory: announcements
          prerelease: ${{ needs.setup_release.outputs.pre_release }}
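For local debugging of the generated Portfile, roughly the same checks can be run without mpbb, assuming a MacPorts installation and the Portfile copied into a local ports tree (a sketch only; the port name "sunshine" mirrors the packaging step above):

    port -q lint sunshine          # the same lint the workflow surfaces as annotations
    sudo port -v install sunshine  # builds from source, like `mpbb install-port --source`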
  build_win:
    name: Windows
    runs-on: windows-2019
    needs: setup_release

    steps:
      - name: Checkout
        uses: actions/checkout@v3
        with:
          submodules: recursive

      - name: Setup Dependencies Windows
        uses: msys2/setup-msys2@v2
        with:
          update: true
          install: >-
            base-devel
            diffutils
            git
            make
            mingw-w64-x86_64-binutils
            mingw-w64-x86_64-boost
            mingw-w64-x86_64-cmake
            mingw-w64-x86_64-curl
            mingw-w64-x86_64-libmfx
            mingw-w64-x86_64-nsis
            mingw-w64-x86_64-openssl
            mingw-w64-x86_64-opus
            mingw-w64-x86_64-toolchain
            nasm
            wget
            yasm

      - name: Install npm packages
        run: |
          npm install

      - name: Build Windows
        shell: msys2 {0}
        run: |
          mkdir build
          cd build
          cmake -DCMAKE_BUILD_TYPE=Release \
            -DSUNSHINE_ASSETS_DIR=assets \
            -G "MinGW Makefiles" \
            ..
          mingw32-make -j$(nproc)

      - name: Package Windows
        shell: msys2 {0}
        run: |
          mkdir -p artifacts
          cd build

          # package
          cpack -G NSIS
          cpack -G ZIP

          # move
          mv ./cpack_artifacts/Sunshine.exe ../artifacts/sunshine-windows.exe
          mv ./cpack_artifacts/Sunshine.zip ../artifacts/sunshine-windows.zip

      - name: Upload Artifacts
        uses: actions/upload-artifact@v3
        with:
          name: sunshine-windows
          path: artifacts/

      - name: Create/Update GitHub Release
        if: ${{ needs.setup_release.outputs.create_release == 'true' }}
        uses: ncipollo/release-action@v1
        with:
          name: ${{ needs.setup_release.outputs.release_name }}
          tag: ${{ needs.setup_release.outputs.release_tag }}
          commit: ${{ needs.setup_release.outputs.release_commit }}
          artifacts: "*artifacts/*"
          token: ${{ secrets.GH_BOT_TOKEN }}
          allowUpdates: true
          body: ${{ needs.setup_release.outputs.release_body }}
          discussionCategory: announcements
          prerelease: ${{ needs.setup_release.outputs.pre_release }}

  release-winget:
    name: Release to WinGet
    needs: [setup_release, build_win]
    if: ${{ needs.setup_release.outputs.create_release == 'true' && github.ref == 'refs/heads/master' }}
    runs-on: windows-latest  # the required action can only be run on Windows
    steps:
      - name: Release to WinGet
        uses: vedantmgoyal2009/winget-releaser@v2
        with:
          identifier: LizardByte.Sunshine
          release-tag: ${{ needs.setup_release.outputs.release_tag }}
          installers-regex: '\.exe$'  # only .exe files
          token: ${{ secrets.GH_BOT_TOKEN }}
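The Windows build runs inside the MINGW64 environment that msys2/setup-msys2 provides, so it can be reproduced on a local machine from an MSYS2 MINGW64 shell with the same package set (an illustrative subset of the list above, not a required step):

    pacman -S --needed base-devel mingw-w64-x86_64-toolchain mingw-w64-x86_64-cmake mingw-w64-x86_64-boost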
.github/workflows/auto-create-pr.yml (vendored, new file, 35 lines)
@@ -0,0 +1,35 @@
---
# This action is centrally managed in https://github.com/<organization>/.github/
# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in
# the above-mentioned repo.

# This workflow creates a PR automatically when anything is merged/pushed into the `nightly` branch. The PR is created
# against the `master` (default) branch.

name: Auto create PR

on:
  push:
    branches:
      - 'nightly'

jobs:
  create_pr:
    if: startsWith(github.repository, 'LizardByte/')
    runs-on: ubuntu-latest

    steps:
      - name: Checkout
        uses: actions/checkout@v3

      - name: Create Pull Request
        uses: repo-sync/pull-request@v2
        with:
          source_branch: ""  # should be "nightly" as it's the triggering branch
          destination_branch: "master"
          pr_title: "Pulling ${{ github.ref_name }} into master"
          pr_template: ".github/pr_release_template.md"
          pr_assignee: "${{ secrets.GH_BOT_NAME }}"
          pr_draft: true
          pr_allow_empty: false
          github_token: ${{ secrets.GH_BOT_TOKEN }}
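For reference, the same draft PR could be opened by hand with the GitHub CLI; this is only an illustrative equivalent, not something the workflow calls:

    gh pr create --base master --head nightly --title "Pulling nightly into master" --draft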
.github/workflows/automerge.yml (vendored, new file, 64 lines)
@@ -0,0 +1,64 @@
|
||||
---
|
||||
# This action is centrally managed in https://github.com/<organization>/.github/
|
||||
# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in
|
||||
# the above-mentioned repo.
|
||||
|
||||
# This workflow will, first, automatically approve PRs created by @LizardByte-bot. Then it will automerge relevant PRs.
|
||||
|
||||
name: Automerge PR
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
types:
|
||||
- opened
|
||||
- synchronize
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
autoapprove:
|
||||
if: >-
|
||||
contains(fromJson('["LizardByte-bot"]'), github.event.pull_request.user.login) &&
|
||||
contains(fromJson('["LizardByte-bot"]'), github.actor) &&
|
||||
startsWith(github.repository, 'LizardByte/')
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Autoapproving
|
||||
uses: hmarr/auto-approve-action@v3
|
||||
with:
|
||||
github-token: "${{ secrets.GITHUB_TOKEN }}"
|
||||
|
||||
- name: Label autoapproved
|
||||
uses: actions/github-script@v6
|
||||
with:
|
||||
github-token: ${{ secrets.GH_BOT_TOKEN }}
|
||||
script: |
|
||||
github.rest.issues.addLabels({
|
||||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
labels: ['autoapproved', 'autoupdate']
|
||||
})
|
||||
|
||||
automerge:
|
||||
if: startsWith(github.repository, 'LizardByte/')
|
||||
needs: [autoapprove]
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Automerging
|
||||
uses: pascalgn/automerge-action@v0.15.5
|
||||
env:
|
||||
BASE_BRANCHES: nightly
|
||||
GITHUB_TOKEN: ${{ secrets.GH_BOT_TOKEN }}
|
||||
GITHUB_LOGIN: ${{ secrets.GH_BOT_NAME }}
|
||||
MERGE_LABELS: "!dependencies"
|
||||
MERGE_METHOD: "squash"
|
||||
MERGE_COMMIT_MESSAGE: "{pullRequest.title} (#{pullRequest.number})"
|
||||
MERGE_DELETE_BRANCH: true
|
||||
MERGE_ERROR_FAIL: true
|
||||
MERGE_FILTER_AUTHOR: ${{ secrets.GH_BOT_NAME }}
|
||||
MERGE_RETRIES: "240" # 1 hour
|
||||
MERGE_RETRY_SLEEP: "15000" # 15 seconds
|
.github/workflows/autoupdate-labeler.yml (vendored, new file, 72 lines)
@@ -0,0 +1,72 @@
|
||||
---
|
||||
# This action is centrally managed in https://github.com/<organization>/.github/
|
||||
# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in
|
||||
# the above-mentioned repo.
|
||||
|
||||
# Label PRs with `autoupdate` if various conditions are met, otherwise, remove the label.
|
||||
|
||||
name: Label PR autoupdate
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types:
|
||||
- edited
|
||||
- opened
|
||||
- reopened
|
||||
- synchronize
|
||||
|
||||
jobs:
|
||||
label_pr:
|
||||
if: >-
|
||||
startsWith(github.repository, 'LizardByte/') &&
|
||||
contains(github.event.pull_request.body, fromJSON('"] I want maintainers to keep my branch updated"'))
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
steps:
|
||||
- name: Check if member
|
||||
id: org_member
|
||||
run: |
|
||||
status="true"
|
||||
gh api \
|
||||
-H "Accept: application/vnd.github+json" \
|
||||
/orgs/${{ github.repository_owner }}/members/${{ github.actor }} || status="false"
|
||||
|
||||
echo "result=${status}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Label autoupdate
|
||||
if: >-
|
||||
steps.org_member.outputs.result == 'true' &&
|
||||
contains(github.event.pull_request.labels.*.name, 'autoupdate') == false &&
|
||||
contains(github.event.pull_request.body,
|
||||
fromJSON('"\n- [x] I want maintainers to keep my branch updated"')) == true
|
||||
uses: actions/github-script@v6
|
||||
with:
|
||||
github-token: ${{ secrets.GH_BOT_TOKEN }}
|
||||
script: |
|
||||
github.rest.issues.addLabels({
|
||||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
labels: ['autoupdate']
|
||||
})
|
||||
|
||||
- name: Unlabel autoupdate
|
||||
if: >-
|
||||
contains(github.event.pull_request.labels.*.name, 'autoupdate') &&
|
||||
(
|
||||
(github.event.action == 'synchronize' && steps.org_member.outputs.result == 'false') ||
|
||||
(contains(github.event.pull_request.body,
|
||||
fromJSON('"\n- [x] I want maintainers to keep my branch updated"')) == false
|
||||
)
|
||||
)
|
||||
uses: actions/github-script@v6
|
||||
with:
|
||||
github-token: ${{ secrets.GH_BOT_TOKEN }}
|
||||
script: |
|
||||
github.rest.issues.removeLabel({
|
||||
issue_number: context.issue.number,
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
name: ['autoupdate']
|
||||
})
|
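The membership probe in the "Check if member" step works because the `GET /orgs/{org}/members/{username}` endpoint returns HTTP 204 for visible members and 404 otherwise, so a failing `gh api` call flips the flag to "false". Illustrative call with a hypothetical user:

    gh api -H "Accept: application/vnd.github+json" /orgs/LizardByte/members/some-user || echo "not a member (or membership not visible)"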
.github/workflows/autoupdate.yml (vendored, new file, 51 lines)
@@ -0,0 +1,51 @@
|
||||
---
|
||||
# This action is centrally managed in https://github.com/<organization>/.github/
|
||||
# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in
|
||||
# the above-mentioned repo.
|
||||
|
||||
# This workflow is designed to work with the following workflows:
|
||||
# - automerge
|
||||
# - autoupdate-labeler
|
||||
|
||||
# It uses an action that auto-updates pull requests branches, when changes are pushed to their destination branch.
|
||||
# Auto-updating to the latest destination branch works only in the context of upstream repo and not forks.
|
||||
# Dependabot PRs are updated by an action that comments `@dependabot rebase` on dependabot PRs. (disabled)
|
||||
|
||||
name: autoupdate
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- 'nightly'
|
||||
|
||||
jobs:
|
||||
autoupdate:
|
||||
name: Autoupdate autoapproved PR created in the upstream
|
||||
if: startsWith(github.repository, 'LizardByte/')
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Update
|
||||
uses: docker://chinthakagodawita/autoupdate-action:v1
|
||||
env:
|
||||
EXCLUDED_LABELS: "central_dependency,dependencies"
|
||||
GITHUB_TOKEN: '${{ secrets.GH_BOT_TOKEN }}'
|
||||
PR_FILTER: "labelled"
|
||||
PR_LABELS: "autoupdate"
|
||||
PR_READY_STATE: "all"
|
||||
MERGE_CONFLICT_ACTION: "fail"
|
||||
|
||||
# Disabled due to:
|
||||
# - no major version tag, resulting in constant nagging to update this action
|
||||
# - additionally, the code is sketchy, 16k+ lines of code?
|
||||
# https://github.com/bbeesley/gha-auto-dependabot-rebase/blob/main/dist/main.cjs
|
||||
#
|
||||
# dependabot-rebase:
|
||||
# name: Dependabot Rebase
|
||||
# if: >-
|
||||
# startsWith(github.repository, 'LizardByte/')
|
||||
# runs-on: ubuntu-latest
|
||||
# steps:
|
||||
# - name: rebase
|
||||
# uses: "bbeesley/gha-auto-dependabot-rebase@v1.3.18"
|
||||
# env:
|
||||
# GITHUB_TOKEN: ${{ secrets.GH_BOT_TOKEN }}
|
.github/workflows/ci-docker.yml (vendored, new file, 390 lines)
@@ -0,0 +1,390 @@
|
||||
---
|
||||
# This action is centrally managed in https://github.com/<organization>/.github/
|
||||
# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in
|
||||
# the above-mentioned repo.
|
||||
|
||||
# This workflow is intended to work with all our organization Docker projects. A readme named `DOCKER_README.md`
|
||||
# will be used to update the description on Docker hub.
|
||||
|
||||
# custom comments in dockerfiles:
|
||||
|
||||
# `# platforms: `
|
||||
# Comma separated list of platforms, i.e. `# platforms: linux/386,linux/amd64`. Docker platforms can alternatively
|
||||
# be listed in a file named `.docker_platforms`.
|
||||
# `# platforms_pr: `
|
||||
# Comma separated list of platforms to run for PR events, i.e. `# platforms_pr: linux/amd64`. This will take
|
||||
# precedence over the `# platforms: ` directive.
|
||||
# `# artifacts: `
|
||||
# `true` to build in two steps, stopping at `artifacts` build stage and extracting the image from there to the
|
||||
# GitHub runner.
|
||||
|
||||
name: CI Docker
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [master, nightly]
|
||||
types: [opened, synchronize, reopened]
|
||||
push:
|
||||
branches: [master, nightly]
|
||||
workflow_dispatch:
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
check_dockerfiles:
|
||||
name: Check Dockerfiles
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Find dockerfiles
|
||||
id: find
|
||||
run: |
|
||||
dockerfiles=$(find . -type f -iname "Dockerfile" -o -iname "*.dockerfile")
|
||||
|
||||
echo "found dockerfiles: ${dockerfiles}"
|
||||
|
||||
# do not quote to keep this as a single line
|
||||
echo dockerfiles=${dockerfiles} >> $GITHUB_OUTPUT
|
||||
|
||||
MATRIX_COMBINATIONS=""
|
||||
for FILE in ${dockerfiles}; do
|
||||
# extract tag from file name
|
||||
tag=$(echo $FILE | sed -r -z -e 's/(\.\/)*.*\/(Dockerfile)/None/gm')
|
||||
if [[ $tag == "None" ]]; then
|
||||
MATRIX_COMBINATIONS="$MATRIX_COMBINATIONS {\"dockerfile\": \"$FILE\"},"
|
||||
else
|
||||
tag=$(echo $FILE | sed -r -z -e 's/(\.\/)*.*\/(.+)(\.dockerfile)/-\2/gm')
|
||||
MATRIX_COMBINATIONS="$MATRIX_COMBINATIONS {\"dockerfile\": \"$FILE\", \"tag\": \"$tag\"},"
|
||||
fi
|
||||
done
|
||||
|
||||
# removes the last character (i.e. comma)
|
||||
MATRIX_COMBINATIONS=${MATRIX_COMBINATIONS::-1}
|
||||
|
||||
# setup matrix for later jobs
|
||||
matrix=$((
|
||||
echo "{ \"include\": [$MATRIX_COMBINATIONS] }"
|
||||
) | jq -c .)
|
||||
|
||||
echo $matrix
|
||||
echo $matrix | jq .
|
||||
echo "matrix=$matrix" >> $GITHUB_OUTPUT
|
||||
|
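The loop above assembles a JSON `include` matrix by string concatenation and then normalizes it with jq; for a repo with one plain Dockerfile and one named variant, the output would look roughly like this (illustrative file names):

    echo '{ "include": [{"dockerfile": "./Dockerfile"}, {"dockerfile": "./docker/debian.dockerfile", "tag": "-debian"}] }' | jq -c .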
||||
outputs:
|
||||
dockerfiles: ${{ steps.find.outputs.dockerfiles }}
|
||||
matrix: ${{ steps.find.outputs.matrix }}
|
||||
|
||||
check_changelog:
|
||||
name: Check Changelog
|
||||
needs: [check_dockerfiles]
|
||||
if: ${{ needs.check_dockerfiles.outputs.dockerfiles }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
if: ${{ github.ref == 'refs/heads/master' || github.base_ref == 'master' }}
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Verify Changelog
|
||||
id: verify_changelog
|
||||
if: ${{ github.ref == 'refs/heads/master' || github.base_ref == 'master' }}
|
||||
# base_ref for pull request check, ref for push
|
||||
uses: LizardByte/.github/actions/verify_changelog@master
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
outputs:
|
||||
next_version: ${{ steps.verify_changelog.outputs.changelog_parser_version }}
|
||||
next_version_bare: ${{ steps.verify_changelog.outputs.changelog_parser_version_bare }}
|
||||
last_version: ${{ steps.verify_changelog.outputs.latest_release_tag_name }}
|
||||
release_body: ${{ steps.verify_changelog.outputs.changelog_parser_description }}
|
||||
|
||||
setup_release:
|
||||
name: Setup Release
|
||||
needs: check_changelog
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Set release details
|
||||
id: release_details
|
||||
env:
|
||||
RELEASE_BODY: ${{ needs.check_changelog.outputs.release_body }}
|
||||
run: |
|
||||
# determine to create a release or not
|
||||
if [[ $GITHUB_EVENT_NAME == "push" ]]; then
|
||||
RELEASE=true
|
||||
else
|
||||
RELEASE=false
|
||||
fi
|
||||
|
||||
# set the release tag
|
||||
COMMIT=${{ github.sha }}
|
||||
if [[ $GITHUB_REF == refs/heads/master ]]; then
|
||||
TAG="${{ needs.check_changelog.outputs.next_version }}"
|
||||
RELEASE_NAME="${{ needs.check_changelog.outputs.next_version }}"
|
||||
RELEASE_BODY="$RELEASE_BODY"
|
||||
PRE_RELEASE="false"
|
||||
elif [[ $GITHUB_REF == refs/heads/nightly ]]; then
|
||||
TAG="nightly-dev"
|
||||
RELEASE_NAME="nightly"
|
||||
RELEASE_BODY="automated nightly release - $(date -u +'%Y-%m-%dT%H:%M:%SZ') - ${COMMIT}"
|
||||
PRE_RELEASE="true"
|
||||
fi
|
||||
|
||||
echo "create_release=${RELEASE}" >> $GITHUB_OUTPUT
|
||||
echo "release_tag=${TAG}" >> $GITHUB_OUTPUT
|
||||
echo "release_commit=${COMMIT}" >> $GITHUB_OUTPUT
|
||||
echo "release_name=${RELEASE_NAME}" >> $GITHUB_OUTPUT
|
||||
echo "pre_release=${PRE_RELEASE}" >> $GITHUB_OUTPUT
|
||||
|
||||
# this is stupid but works for multiline strings
|
||||
echo "RELEASE_BODY<<EOF" >> $GITHUB_ENV
|
||||
echo "$RELEASE_BODY" >> $GITHUB_ENV
|
||||
echo "EOF" >> $GITHUB_ENV
|
||||
|
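The `RELEASE_BODY<<EOF ... EOF` sequence is GitHub Actions' documented delimiter syntax for multiline environment values: everything between the delimiter lines becomes the value. A minimal standalone sketch with a hypothetical variable name:

    {
      echo "NOTES<<EOF"
      printf '%s\n' "first line" "second line"
      echo "EOF"
    } >> "$GITHUB_ENV"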
||||
outputs:
|
||||
create_release: ${{ steps.release_details.outputs.create_release }}
|
||||
release_tag: ${{ steps.release_details.outputs.release_tag }}
|
||||
release_commit: ${{ steps.release_details.outputs.release_commit }}
|
||||
release_name: ${{ steps.release_details.outputs.release_name }}
|
||||
release_body: ${{ env.RELEASE_BODY }}
|
||||
pre_release: ${{ steps.release_details.outputs.pre_release }}
|
||||
|
||||
lint_dockerfile:
|
||||
needs: [check_dockerfiles]
|
||||
if: ${{ needs.check_dockerfiles.outputs.dockerfiles }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix: ${{ fromJson(needs.check_dockerfiles.outputs.matrix) }}
|
||||
name: Lint Dockerfile${{ matrix.tag }}
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Hadolint
|
||||
id: hadolint
|
||||
uses: hadolint/hadolint-action@v3.1.0
|
||||
with:
|
||||
dockerfile: ${{ matrix.dockerfile }}
|
||||
ignore: DL3008,DL3013,DL3016,DL3018,DL3028,DL3059
|
||||
output-file: ./hadolint.log
|
||||
verbose: true
|
||||
|
||||
- name: Log
|
||||
if: failure()
|
||||
run: |
|
||||
echo "Hadolint outcome: ${{ steps.hadolint.outcome }}" >> $GITHUB_STEP_SUMMARY
|
||||
cat "./hadolint.log" >> $GITHUB_STEP_SUMMARY
|
||||
|
||||
docker:
|
||||
needs: [check_dockerfiles, check_changelog, setup_release]
|
||||
if: ${{ needs.check_dockerfiles.outputs.dockerfiles }}
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
packages: write
|
||||
contents: write
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix: ${{ fromJson(needs.check_dockerfiles.outputs.matrix) }}
|
||||
name: Docker${{ matrix.tag }}
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
with:
|
||||
submodules: recursive
|
||||
|
||||
- name: Prepare
|
||||
id: prepare
|
||||
env:
|
||||
NV: ${{ needs.check_changelog.outputs.next_version }}
|
||||
run: |
|
||||
# get branch name
|
||||
BRANCH=${GITHUB_HEAD_REF}
|
||||
|
||||
if [ -z "$BRANCH" ]; then
|
||||
echo "This is a PUSH event"
|
||||
BRANCH=${{ github.ref_name }}
|
||||
fi
|
||||
|
||||
# determine to push image to dockerhub and ghcr or not
|
||||
if [[ $GITHUB_EVENT_NAME == "push" ]]; then
|
||||
PUSH=true
|
||||
else
|
||||
PUSH=false
|
||||
fi
|
||||
|
||||
# setup the tags
|
||||
REPOSITORY=${{ github.repository }}
|
||||
BASE_TAG=$(echo $REPOSITORY | tr '[:upper:]' '[:lower:]')
|
||||
COMMIT=${{ github.sha }}
|
||||
|
||||
TAGS="${BASE_TAG}:${COMMIT:0:7}${{ matrix.tag }},ghcr.io/${BASE_TAG}:${COMMIT:0:7}${{ matrix.tag }}"
|
||||
|
||||
if [[ $GITHUB_REF == refs/heads/master ]]; then
|
||||
TAGS="${TAGS},${BASE_TAG}:latest${{ matrix.tag }},ghcr.io/${BASE_TAG}:latest${{ matrix.tag }}"
|
||||
TAGS="${TAGS},${BASE_TAG}:master${{ matrix.tag }},ghcr.io/${BASE_TAG}:master${{ matrix.tag }}"
|
||||
elif [[ $GITHUB_REF == refs/heads/nightly ]]; then
|
||||
TAGS="${TAGS},${BASE_TAG}:nightly${{ matrix.tag }},ghcr.io/${BASE_TAG}:nightly${{ matrix.tag }}"
|
||||
else
|
||||
TAGS="${TAGS},${BASE_TAG}:test${{ matrix.tag }},ghcr.io/${BASE_TAG}:test${{ matrix.tag }}"
|
||||
fi
|
||||
|
||||
if [[ ${NV} != "" ]]; then
|
||||
TAGS="${TAGS},${BASE_TAG}:${NV}${{ matrix.tag }},ghcr.io/${BASE_TAG}:${NV}${{ matrix.tag }}"
|
||||
fi
|
||||
|
||||
# parse custom directives out of dockerfile
|
||||
# try to get the platforms from the dockerfile custom directive, i.e. `# platforms: xxx,yyy`
|
||||
# directives for PR event, i.e. not push event
|
||||
if [[ ${PUSH} == "false" ]]; then
|
||||
while read -r line; do
|
||||
if [[ $line == "# platforms_pr: "* && $PLATFORMS == "" ]]; then
|
||||
# echo the line and use `sed` to remove the custom directive
|
||||
PLATFORMS=$(echo -e "$line" | sed 's/# platforms_pr: //')
|
||||
elif [[ $PLATFORMS != "" ]]; then
|
||||
# break while loop once all custom "PR" event directives are found
|
||||
break
|
||||
fi
|
||||
done <"${{ matrix.dockerfile }}"
|
||||
fi
|
||||
# directives for all events... above directives will not be parsed if they were already found
|
||||
while read -r line; do
|
||||
if [[ $line == "# platforms: "* && $PLATFORMS == "" ]]; then
|
||||
# echo the line and use `sed` to remove the custom directive
|
||||
PLATFORMS=$(echo -e "$line" | sed 's/# platforms: //')
|
||||
elif [[ $line == "# artifacts: "* && $ARTIFACTS == "" ]]; then
|
||||
# echo the line and use `sed` to remove the custom directive
|
||||
ARTIFACTS=$(echo -e "$line" | sed 's/# artifacts: //')
|
||||
elif [[ $PLATFORMS != "" && $ARTIFACTS != "" ]]; then
|
||||
# break while loop once all custom directives are found
|
||||
break
|
||||
fi
|
||||
done <"${{ matrix.dockerfile }}"
|
||||
# if PLATFORMS is blank, fall back to the legacy method of reading from the `.docker_platforms` file
|
||||
if [[ $PLATFORMS == "" ]]; then
|
||||
# read the platforms from `.docker_platforms`
|
||||
PLATFORMS=$(<.docker_platforms)
|
||||
fi
|
||||
# if PLATFORMS is still blank, fall back to `linux/amd64`
|
||||
if [[ $PLATFORMS == "" ]]; then
|
||||
PLATFORMS="linux/amd64"
|
||||
fi
|
||||
|
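The custom directives the loops above look for are plain comments near the top of each Dockerfile; a quick way to see what a given file opts into (illustrative):

    grep -E '^# (platforms|platforms_pr|artifacts): ' Dockerfile
    # e.g. "# platforms: linux/amd64,linux/arm64" or "# artifacts: true"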
||||
echo "branch=${BRANCH}" >> $GITHUB_OUTPUT
|
||||
echo "build_date=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_OUTPUT
|
||||
echo "commit=${COMMIT}" >> $GITHUB_OUTPUT
|
||||
echo "artifacts=${ARTIFACTS}" >> $GITHUB_OUTPUT
|
||||
echo "platforms=${PLATFORMS}" >> $GITHUB_OUTPUT
|
||||
echo "push=${PUSH}" >> $GITHUB_OUTPUT
|
||||
echo "tags=${TAGS}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Set Up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
id: buildx
|
||||
|
||||
- name: Cache Docker Layers
|
||||
uses: actions/cache@v3
|
||||
with:
|
||||
path: /tmp/.buildx-cache
|
||||
key: Docker-buildx${{ matrix.tag }}-${{ github.sha }}
|
||||
restore-keys: |
|
||||
Docker-buildx${{ matrix.tag }}-
|
||||
|
||||
- name: Log in to Docker Hub
|
||||
if: ${{ steps.prepare.outputs.push == 'true' }} # PRs do not have access to secrets
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_HUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_HUB_ACCESS_TOKEN }}
|
||||
|
||||
- name: Log in to the Container registry
|
||||
if: ${{ steps.prepare.outputs.push == 'true' }} # PRs do not have access to secrets
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ secrets.GH_BOT_NAME }}
|
||||
password: ${{ secrets.GH_BOT_TOKEN }}
|
||||
|
||||
- name: Build artifacts
|
||||
if: ${{ steps.prepare.outputs.artifacts == 'true' }}
|
||||
id: build_artifacts
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
context: ./
|
||||
file: ${{ matrix.dockerfile }}
|
||||
target: artifacts
|
||||
outputs: type=local,dest=artifacts
|
||||
push: false
|
||||
platforms: ${{ steps.prepare.outputs.platforms }}
|
||||
build-args: |
|
||||
BRANCH=${{ steps.prepare.outputs.branch }}
|
||||
BUILD_DATE=${{ steps.prepare.outputs.build_date }}
|
||||
BUILD_VERSION=${{ needs.check_changelog.outputs.next_version }}
|
||||
COMMIT=${{ steps.prepare.outputs.commit }}
|
||||
tags: ${{ steps.prepare.outputs.tags }}
|
||||
cache-from: type=local,src=/tmp/.buildx-cache
|
||||
cache-to: type=local,dest=/tmp/.buildx-cache
|
||||
|
||||
- name: Build and push
|
||||
id: build
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
context: ./
|
||||
file: ${{ matrix.dockerfile }}
|
||||
push: ${{ steps.prepare.outputs.push }}
|
||||
platforms: ${{ steps.prepare.outputs.platforms }}
|
||||
build-args: |
|
||||
BRANCH=${{ steps.prepare.outputs.branch }}
|
||||
BUILD_DATE=${{ steps.prepare.outputs.build_date }}
|
||||
BUILD_VERSION=${{ needs.check_changelog.outputs.next_version }}
|
||||
COMMIT=${{ steps.prepare.outputs.commit }}
|
||||
tags: ${{ steps.prepare.outputs.tags }}
|
||||
cache-from: type=local,src=/tmp/.buildx-cache
|
||||
cache-to: type=local,dest=/tmp/.buildx-cache
|
||||
|
||||
- name: Arrange Artifacts
|
||||
if: ${{ steps.prepare.outputs.artifacts == 'true' }}
|
||||
working-directory: artifacts
|
||||
run: |
|
||||
# artifacts will be in sub directories named after the docker target platform, e.g. `linux_amd64`
|
||||
# so move files to the artifacts directory
|
||||
# https://unix.stackexchange.com/a/52816
|
||||
find ./ -type f -exec mv -t ./ -n '{}' +
|
||||
|
||||
- name: Upload Artifacts
|
||||
if: ${{ steps.prepare.outputs.artifacts == 'true' }}
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: Docker${{ matrix.tag }}
|
||||
path: artifacts/
|
||||
|
||||
- name: Create/Update GitHub Release
|
||||
if: ${{ needs.setup_release.outputs.create_release == 'true' && steps.prepare.outputs.artifacts == 'true' }}
|
||||
uses: ncipollo/release-action@v1
|
||||
with:
|
||||
name: ${{ needs.setup_release.outputs.release_name }}
|
||||
tag: ${{ needs.setup_release.outputs.release_tag }}
|
||||
commit: ${{ needs.setup_release.outputs.release_commit }}
|
||||
artifacts: "*artifacts/*"
|
||||
token: ${{ secrets.GH_BOT_TOKEN }}
|
||||
allowUpdates: true
|
||||
body: ${{ needs.setup_release.outputs.release_body }}
|
||||
discussionCategory: announcements
|
||||
prerelease: ${{ needs.setup_release.outputs.pre_release }}
|
||||
|
||||
- name: Update Docker Hub Description
|
||||
if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/master' }}
|
||||
uses: peter-evans/dockerhub-description@v3
|
||||
with:
|
||||
username: ${{ secrets.DOCKER_HUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKER_HUB_PASSWORD }} # token is not currently supported
|
||||
repository: ${{ env.BASE_TAG }}
|
||||
short-description: ${{ github.event.repository.description }}
|
||||
readme-filepath: ./DOCKER_README.md
|
.github/workflows/cpp-lint.yml (vendored, new file, 84 lines)
@@ -0,0 +1,84 @@
|
||||
---
|
||||
# This action is centrally managed in https://github.com/<organization>/.github/
|
||||
# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in
|
||||
# the above-mentioned repo.
|
||||
|
||||
# Lint c++ source files and cmake files.
|
||||
|
||||
name: C++ Lint
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [master, nightly]
|
||||
types: [opened, synchronize, reopened]
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
clang-format:
|
||||
name: Clang Format Lint
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Find cpp files
|
||||
id: cpp_files
|
||||
run: |
|
||||
cpp_files=$(find . -type f -iname "*.cpp" -o -iname "*.h" -o -iname "*.m" -o -iname "*.mm")
|
||||
|
||||
echo "found cpp files: ${cpp_files}"
|
||||
|
||||
# do not quote to keep this as a single line
|
||||
echo cpp_files=${cpp_files} >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Clang format lint
|
||||
if: ${{ steps.cpp_files.outputs.cpp_files }}
|
||||
uses: DoozyX/clang-format-lint-action@v0.15
|
||||
with:
|
||||
source: ${{ steps.cpp_files.outputs.cpp_files }}
|
||||
extensions: 'cpp,h,m,mm'
|
||||
clangFormatVersion: 15
|
||||
style: file
|
||||
inplace: false
|
||||
|
||||
- name: Upload Artifacts
|
||||
if: failure()
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: clang-format-fixes
|
||||
path: ${{ steps.cpp_files.outputs.cpp_files }}
|
||||
|
||||
cmake-lint:
|
||||
name: CMake Lint
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.11'
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip setuptools cmakelang
|
||||
|
||||
- name: Find cmake files
|
||||
id: cmake_files
|
||||
run: |
|
||||
cmake_files=$(find . -type f -iname "CMakeLists.txt" -o -iname "*.cmake")
|
||||
|
||||
echo "found cmake files: ${cmake_files}"
|
||||
|
||||
# do not quote to keep this as a single line
|
||||
echo cmake_files=${cmake_files} >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Test with cmake-lint
|
||||
run: |
|
||||
cmake-lint --line-width 120 --tab-size 4 ${{ steps.cmake_files.outputs.cmake_files }}
|
.github/workflows/issues-stale.yml (vendored, new file, 59 lines)
@@ -0,0 +1,59 @@
|
||||
---
|
||||
# This action is centrally managed in https://github.com/<organization>/.github/
|
||||
# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in
|
||||
# the above-mentioned repo.
|
||||
|
||||
# Manage stale issues and PRs.
|
||||
|
||||
name: Stale Issues / PRs
|
||||
|
||||
on:
|
||||
schedule:
|
||||
- cron: '00 10 * * *'
|
||||
|
||||
jobs:
|
||||
stale:
|
||||
name: Check Stale Issues / PRs
|
||||
if: startsWith(github.repository, 'LizardByte/')
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Stale
|
||||
uses: actions/stale@v7
|
||||
with:
|
||||
close-issue-message: >
|
||||
This issue was closed because it has been stalled for 10 days with no activity.
|
||||
close-pr-message: >
|
||||
This PR was closed because it has been stalled for 10 days with no activity.
|
||||
days-before-stale: 90
|
||||
days-before-close: 10
|
||||
exempt-all-assignees: true
|
||||
exempt-issue-labels: 'added,fixed'
|
||||
exempt-pr-labels: 'dependencies,l10n'
|
||||
stale-issue-label: 'stale'
|
||||
stale-issue-message: >
|
||||
This issue is stale because it has been open for 90 days with no activity.
|
||||
Comment or remove the stale label, otherwise this will be closed in 10 days.
|
||||
stale-pr-label: 'stale'
|
||||
stale-pr-message: >
|
||||
This PR is stale because it has been open for 90 days with no activity.
|
||||
Comment or remove the stale label, otherwise this will be closed in 10 days.
|
||||
repo-token: ${{ secrets.GH_BOT_TOKEN }}
|
||||
|
||||
- name: Invalid Template
|
||||
uses: actions/stale@v7
|
||||
with:
|
||||
close-issue-message: >
|
||||
This issue was closed because the template was not completed after 5 days.
|
||||
close-pr-message: >
|
||||
This PR was closed because the template was not completed after 5 days.
|
||||
days-before-stale: 0
|
||||
days-before-close: 5
|
||||
exempt-pr-labels: 'dependencies,l10n'
|
||||
only-labels: 'invalid:template-incomplete'
|
||||
stale-issue-label: 'invalid:template-incomplete'
|
||||
stale-issue-message: >
|
||||
Invalid issues template.
|
||||
stale-pr-label: 'invalid:template-incomplete'
|
||||
stale-pr-message: >
|
||||
Invalid PR template.
|
||||
repo-token: ${{ secrets.GH_BOT_TOKEN }}
|
.github/workflows/issues.yml (vendored, new file, 25 lines)
@@ -0,0 +1,25 @@
---
# This action is centrally managed in https://github.com/<organization>/.github/
# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in
# the above-mentioned repo.

# Label and un-label actions using `../label-actions.yml`.

name: Issues

on:
  issues:
    types: [labeled, unlabeled]
  discussion:
    types: [labeled, unlabeled]

jobs:
  label:
    name: Label Actions
    if: startsWith(github.repository, 'LizardByte/')
    runs-on: ubuntu-latest
    steps:
      - name: Label Actions
        uses: dessant/label-actions@v3
        with:
          github-token: ${{ secrets.GH_BOT_TOKEN }}
.github/workflows/localize.yml (vendored, new file, 98 lines)
@@ -0,0 +1,98 @@
|
||||
---
|
||||
name: localize
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [nightly]
|
||||
paths: # prevents workflow from running unless these files change
|
||||
- '.github/workflows/localize.yml'
|
||||
- 'src/**'
|
||||
- 'locale/sunshine.po'
|
||||
workflow_dispatch:
|
||||
|
||||
env:
|
||||
file: ./locale/sunshine.po
|
||||
|
||||
jobs:
|
||||
localize:
|
||||
name: Update Localization
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Install Python 3.9
|
||||
uses: actions/setup-python@v4 # https://github.com/actions/setup-python
|
||||
with:
|
||||
python-version: '3.9'
|
||||
|
||||
- name: Set up Python 3.9 Dependencies
|
||||
run: |
|
||||
cd ./scripts
|
||||
python -m pip install --upgrade pip setuptools
|
||||
python -m pip install -r requirements.txt
|
||||
|
||||
- name: Set up xgettext
|
||||
run: |
|
||||
sudo apt-get update -y && \
|
||||
sudo apt-get --reinstall install -y \
|
||||
gettext
|
||||
|
||||
- name: Update Strings
|
||||
run: |
|
||||
# first, try to remove existing file as xgettext does not remove unused translations
|
||||
if [ -f "${{ env.file }}" ];
|
||||
then
|
||||
rm ${{ env.file }}
|
||||
echo "new_file=false" >> $GITHUB_ENV
|
||||
else
|
||||
echo "new_file=true" >> $GITHUB_ENV
|
||||
fi
|
||||
|
||||
# extract the new strings
|
||||
python ./scripts/_locale.py --extract
|
||||
|
||||
- name: git diff
|
||||
if: ${{ env.new_file == 'false' }}
|
||||
run: |
|
||||
# disable the pager
|
||||
git config --global pager.diff false
|
||||
|
||||
# print the git diff
|
||||
git diff locale/sunshine.po
|
||||
|
||||
# set the variable with minimal output, replacing `\t` with ` `
|
||||
OUTPUT=$(git diff --numstat locale/sunshine.po | sed -e "s#\t# #g")
|
||||
echo "git_diff=${OUTPUT}" >> $GITHUB_ENV
|
||||
|
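`git diff --numstat` prints `<added> <deleted> <path>` per file, so when only the template's timestamp header (typically POT-Creation-Date) moves, the output is exactly `1 1 locale/sunshine.po`, which is what the follow-up "git reset" step keys on. Illustrative:

    git diff --numstat locale/sunshine.po   # -> "1 1 locale/sunshine.po" when only the date line changed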
||||
- name: git reset
|
||||
# only run if a single line changed (date/time) and file already existed
|
||||
if: ${{ env.git_diff == '1 1 locale/sunshine.po' && env.new_file == 'false' }}
|
||||
run: |
|
||||
git reset --hard
|
||||
|
||||
- name: Get current date
|
||||
id: date
|
||||
run: echo "date=$(date +'%Y-%m-%d')" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Create/Update Pull Request
|
||||
uses: peter-evans/create-pull-request@v4
|
||||
with:
|
||||
add-paths: |
|
||||
locale/*.po
|
||||
token: ${{ secrets.GH_BOT_TOKEN }} # must trigger PR tests
|
||||
commit-message: New localization template
|
||||
branch: localize/update
|
||||
delete-branch: true
|
||||
base: nightly
|
||||
title: New Babel Updates
|
||||
body: |
|
||||
Update report
|
||||
- Updated ${{ steps.date.outputs.date }}
|
||||
- Auto-generated by [create-pull-request][1]
|
||||
|
||||
[1]: https://github.com/peter-evans/create-pull-request
|
||||
labels: |
|
||||
babel
|
||||
l10n
|
.github/workflows/pull-requests.yml (vendored, new file, 32 lines)
@@ -0,0 +1,32 @@
|
||||
---
|
||||
# This action is centrally managed in https://github.com/<organization>/.github/
|
||||
# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in
|
||||
# the above-mentioned repo.
|
||||
|
||||
# Ensure PRs are made against `nightly` branch.
|
||||
|
||||
name: Pull Requests
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [opened, synchronize, edited, reopened]
|
||||
|
||||
# no concurrency for pull_request_target events
|
||||
|
||||
jobs:
|
||||
check-pull-request:
|
||||
name: Check Pull Request
|
||||
if: startsWith(github.repository, 'LizardByte/')
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: Vankka/pr-target-branch-action@v2
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
with:
|
||||
target: master
|
||||
exclude: nightly # Don't prevent going from nightly -> master
|
||||
change-to: nightly
|
||||
comment: |
|
||||
Your PR was set to `master`, PRs should be sent to `nightly`.
|
||||
The base branch of this PR has been automatically changed to `nightly`.
|
||||
Please check that there are no merge conflicts
|
.github/workflows/python-flake8.yml (vendored, new file, 38 lines)
@@ -0,0 +1,38 @@
|
||||
---
|
||||
# This action is centrally managed in https://github.com/<organization>/.github/
|
||||
# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in
|
||||
# the above-mentioned repo.
|
||||
|
||||
# Lint python files with flake8.
|
||||
|
||||
name: flake8
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [master, nightly]
|
||||
types: [opened, synchronize, reopened]
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
flake8:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4 # https://github.com/actions/setup-python
|
||||
with:
|
||||
python-version: '3.10'
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
# pin flake8 before v6.0.0 due to removal of support for type comments (required for Python 2.7 type hints)
|
||||
python -m pip install --upgrade pip setuptools "flake8<6"
|
||||
|
||||
- name: Test with flake8
|
||||
run: |
|
||||
python -m flake8 --verbose
|
.github/workflows/release-notifier-moonlight.yml (vendored, new file, 22 lines)
@@ -0,0 +1,22 @@
---
name: Release Notifications (Moonlight)

on:
  release:
    types: [published]

jobs:
  discord:
    runs-on: ubuntu-latest
    steps:
      - name: discord
        uses: sarisia/actions-status-discord@v1  # https://github.com/sarisia/actions-status-discord
        with:
          webhook: ${{ secrets.DISCORD_RELEASE_WEBHOOK_MOONLIGHT }}
          nodetail: true
          nofail: false
          username: ${{ secrets.DISCORD_USERNAME }}
          avatar_url: ${{ secrets.ORG_LOGO_URL }}
          title: ${{ github.event.repository.name }} ${{ github.ref_name }} Released
          description: ${{ github.event.release.body }}
          color: 0xFF4500
.github/workflows/release-notifier.yml (vendored, new file, 88 lines)
@@ -0,0 +1,88 @@
|
||||
---
|
||||
# This action is centrally managed in https://github.com/<organization>/.github/
|
||||
# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in
|
||||
# the above-mentioned repo.
|
||||
|
||||
# Send release notification to various platforms.
|
||||
|
||||
name: Release Notifications
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
# https://docs.github.com/en/actions/learn-github-actions/workflow-syntax-for-github-actions#onevent_nametypes
|
||||
|
||||
jobs:
|
||||
discord:
|
||||
if: startsWith(github.repository, 'LizardByte/')
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: discord
|
||||
uses: sarisia/actions-status-discord@v1 # https://github.com/sarisia/actions-status-discord
|
||||
with:
|
||||
webhook: ${{ secrets.DISCORD_RELEASE_WEBHOOK }}
|
||||
nodetail: true
|
||||
nofail: false
|
||||
username: ${{ secrets.DISCORD_USERNAME }}
|
||||
avatar_url: ${{ secrets.ORG_LOGO_URL }}
|
||||
title: ${{ github.event.repository.name }} ${{ github.ref_name }} Released
|
||||
description: ${{ github.event.release.body }}
|
||||
color: 0xFF4500
|
||||
|
||||
facebook_group:
|
||||
if: startsWith(github.repository, 'LizardByte/')
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: facebook-post-action
|
||||
uses: ReenigneArcher/facebook-post-action@v1 # https://github.com/ReenigneArcher/facebook-post-action
|
||||
with:
|
||||
page_id: ${{ secrets.FACEBOOK_GROUP_ID }}
|
||||
access_token: ${{ secrets.FACEBOOK_ACCESS_TOKEN }}
|
||||
message: |
|
||||
${{ github.event.repository.name }} ${{ github.ref_name }} Released
|
||||
${{ github.event.release.body }}
|
||||
url: ${{ github.event.release.html_url }}
|
||||
|
||||
facebook_page:
|
||||
if: startsWith(github.repository, 'LizardByte/')
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: facebook-post-action
|
||||
uses: ReenigneArcher/facebook-post-action@v1 # https://github.com/ReenigneArcher/facebook-post-action
|
||||
with:
|
||||
page_id: ${{ secrets.FACEBOOK_PAGE_ID }}
|
||||
access_token: ${{ secrets.FACEBOOK_ACCESS_TOKEN }}
|
||||
message: |
|
||||
${{ github.event.repository.name }} ${{ github.ref_name }} Released
|
||||
${{ github.event.release.body }}
|
||||
url: ${{ github.event.release.html_url }}
|
||||
|
||||
reddit:
|
||||
if: startsWith(github.repository, 'LizardByte/')
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: reddit
|
||||
uses: bluwy/release-for-reddit-action@v2 # https://github.com/bluwy/release-for-reddit-action
|
||||
with:
|
||||
username: ${{ secrets.REDDIT_USERNAME }}
|
||||
password: ${{ secrets.REDDIT_PASSWORD }}
|
||||
app-id: ${{ secrets.REDDIT_CLIENT_ID }}
|
||||
app-secret: ${{ secrets.REDDIT_CLIENT_SECRET }}
|
||||
subreddit: ${{ secrets.REDDIT_SUBREDDIT }}
|
||||
title: ${{ github.event.repository.name }} ${{ github.ref_name }} Released
|
||||
url: ${{ github.event.release.html_url }}
|
||||
flair-id: ${{ secrets.REDDIT_FLAIR_ID }}  # https://www.reddit.com/r/<subreddit>/api/link_flair.json
|
||||
comment: ${{ github.event.release.body }}
|
||||
|
||||
twitter:
|
||||
if: startsWith(github.repository, 'LizardByte/')
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: twitter
|
||||
uses: ethomson/send-tweet-action@v1 # https://github.com/ethomson/send-tweet-action
|
||||
with:
|
||||
consumer-key: ${{ secrets.TWITTER_API_KEY }}
|
||||
consumer-secret: ${{ secrets.TWITTER_API_SECRET }}
|
||||
access-token: ${{ secrets.TWITTER_ACCESS_TOKEN }}
|
||||
access-token-secret: ${{ secrets.TWITTER_ACCESS_TOKEN_SECRET }}
|
||||
status: ${{ github.event.release.html_url }}
|
.github/workflows/yaml-lint.yml (vendored, new file, 66 lines)
@@ -0,0 +1,66 @@
|
||||
---
|
||||
# This action is centrally managed in https://github.com/<organization>/.github/
|
||||
# Don't make changes to this file in this repo as they will be overwritten with changes made to the same file in
|
||||
# the above-mentioned repo.
|
||||
|
||||
# Lint yaml files.
|
||||
|
||||
name: yaml lint
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
branches: [master, nightly]
|
||||
types: [opened, synchronize, reopened]
|
||||
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
yaml-lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Find additional files
|
||||
id: find-files
|
||||
run: |
|
||||
# space separated list of files
|
||||
FILES=.clang-format
|
||||
|
||||
# empty placeholder
|
||||
FOUND=""
|
||||
|
||||
for FILE in ${FILES}; do
|
||||
if [ -f "$FILE" ]
|
||||
then
|
||||
FOUND="$FOUND $FILE"
|
||||
fi
|
||||
done
|
||||
|
||||
echo "found=${FOUND}" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: yaml lint
|
||||
id: yaml-lint
|
||||
uses: ibiqlik/action-yamllint@v3
|
||||
with:
|
||||
# https://yamllint.readthedocs.io/en/stable/configuration.html#default-configuration
|
||||
config_data: |
|
||||
extends: default
|
||||
rules:
|
||||
comments:
|
||||
level: error
|
||||
line-length:
|
||||
max: 120
|
||||
truthy:
|
||||
# GitHub uses "on" for workflow event triggers
|
||||
# .clang-format file has options of "Yes" "No" that will be caught by this, so changed to "warning"
|
||||
allowed-values: ['true', 'false', 'on']
|
||||
check-keys: true
|
||||
level: warning
|
||||
file_or_dir: . ${{ steps.find-files.outputs.found }}
|
||||
|
||||
- name: Log
|
||||
run: |
|
||||
cat "${{ steps.yaml-lint.outputs.logfile }}" >> $GITHUB_STEP_SUMMARY
|
.gitignore (vendored, 14 changed lines)
@@ -1,8 +1,18 @@
build
cmake-build-*
cmake-build*
.DS_Store

.vscode
.vs
*.swp
*.kdev4

.cache
.idea

# npm
node_modules/
package-lock.json

# Translations
*.mo
*.pot
.gitmodules (vendored, 56 changed lines)
@@ -1,12 +1,48 @@
|
||||
[submodule "moonlight-common-c"]
|
||||
path = moonlight-common-c
|
||||
[submodule "third-party/moonlight-common-c"]
|
||||
path = third-party/moonlight-common-c
|
||||
url = https://github.com/moonlight-stream/moonlight-common-c.git
|
||||
[submodule "Simple-Web-Server"]
|
||||
path = Simple-Web-Server
|
||||
url = https://github.com/loki-47-6F-64/Simple-Web-Server.git
|
||||
[submodule "ViGEmClient"]
|
||||
path = ViGEmClient
|
||||
branch = master
|
||||
[submodule "third-party/Simple-Web-Server"]
|
||||
path = third-party/Simple-Web-Server
|
||||
url = https://gitlab.com/eidheim/Simple-Web-Server.git
|
||||
branch = master
|
||||
[submodule "third-party/ViGEmClient"]
|
||||
path = third-party/ViGEmClient
|
||||
url = https://github.com/ViGEm/ViGEmClient
|
||||
[submodule "pre-compiled"]
|
||||
path = pre-compiled
|
||||
url = https://bitbucket.org/Loki-47-6F-64/pre-compiled.git
|
||||
branch = master
|
||||
[submodule "third-party/miniupnp"]
|
||||
path = third-party/miniupnp
|
||||
url = https://github.com/miniupnp/miniupnp
|
||||
branch = master
|
||||
[submodule "third-party/nv-codec-headers"]
|
||||
path = third-party/nv-codec-headers
|
||||
url = https://github.com/FFmpeg/nv-codec-headers
|
||||
branch = sdk/11.1
|
||||
[submodule "third-party/TPCircularBuffer"]
|
||||
path = third-party/TPCircularBuffer
|
||||
url = https://github.com/michaeltyson/TPCircularBuffer
|
||||
branch = master
|
||||
[submodule "third-party/ffmpeg-windows-x86_64"]
|
||||
path = third-party/ffmpeg-windows-x86_64
|
||||
url = https://github.com/LizardByte/build-deps
|
||||
branch = ffmpeg-windows-x86_64
|
||||
[submodule "third-party/ffmpeg-macos-x86_64"]
|
||||
path = third-party/ffmpeg-macos-x86_64
|
||||
url = https://github.com/LizardByte/build-deps
|
||||
branch = ffmpeg-macos-x86_64
|
||||
[submodule "third-party/ffmpeg-linux-x86_64"]
|
||||
path = third-party/ffmpeg-linux-x86_64
|
||||
url = https://github.com/LizardByte/build-deps
|
||||
branch = ffmpeg-linux-x86_64
|
||||
[submodule "third-party/ffmpeg-linux-aarch64"]
|
||||
path = third-party/ffmpeg-linux-aarch64
|
||||
url = https://github.com/LizardByte/build-deps
|
||||
branch = ffmpeg-linux-aarch64
|
||||
[submodule "third-party/ffmpeg-macos-aarch64"]
|
||||
path = third-party/ffmpeg-macos-aarch64
|
||||
url = https://github.com/LizardByte/build-deps
|
||||
branch = ffmpeg-macos-aarch64
|
||||
[submodule "third-party/nanors"]
|
||||
path = third-party/nanors
|
||||
url = https://github.com/sleepybishop/nanors.git
|
||||
branch = master
|
||||
|
||||
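Because several submodules moved under `third-party/` in this change, an existing checkout needs its submodule configuration re-synced. A minimal sketch using standard git commands:

```bash
# Re-sync .git/config with the new .gitmodules entries, then fetch the
# submodules at their new paths.
git submodule sync --recursive
git submodule update --init --recursive
```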
.prettierrc.json (new file, 1 line)
@@ -0,0 +1 @@
{}
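An empty `{}` config means Prettier runs with its default settings. A minimal sketch of invoking it, assuming Node.js/npx is available:

```bash
# Check formatting against the default Prettier rules implied by the empty config.
npx prettier --check .
```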
.readthedocs.yaml (new file, 44 lines)
@@ -0,0 +1,44 @@
---
# .readthedocs.yaml
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details

# Required
version: 2

# Set the version of Python
build:
  os: ubuntu-20.04
  tools:
    python: "3.10"

## apt packages required to run cmake on sunshine; note that additional packages are required
# apt_packages:
#   - cmake
#   - libboost-filesystem-dev
#   - libboost-log-dev
#   - libboost-thread-dev

## run cmake
# jobs:
#   pre_build:
#     - cmake .

## Include the submodules, required for cmake
# submodules:
#   include: all
#   recursive: true

# Build documentation in the docs/ directory with Sphinx
sphinx:
  builder: html
  configuration: docs/source/conf.py
  fail_on_warning: true

# Using Sphinx, build docs in additional formats
formats: all

python:
  install:
    - requirements: ./docs/requirements.txt
  system_packages: true
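The Read the Docs config above points Sphinx at `docs/source/conf.py` with requirements from `docs/requirements.txt`. A minimal sketch of an equivalent local build, assuming Python 3 and those requirements:

```bash
# Local documentation build mirroring the Read the Docs settings.
python3 -m venv venv && source venv/bin/activate
pip install -r docs/requirements.txt
# -W turns warnings into errors, mirroring fail_on_warning: true
sphinx-build -W -b html docs/source docs/build/html
```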
CHANGELOG.md (new file, 319 lines)
@@ -0,0 +1,319 @@
# Changelog

## [0.18.2] - 2023-02-13
### Fixed
- (Video/KMS/Linux) Fixed wayland capture on Nvidia for KMS
- (Video/Linux) Implement vaSyncBuffer stub for libva <2.9.0
- (UI) Fix issue where mime type was not being set for node_modules when using a reverse proxy
- (UI/macOS) Added missing audio sink config options
- (Linux) Specify correct Boost dependency versions
- (Video/AMF) Add missing encoder tunables

## [0.18.1] - 2023-01-31
### Fixed
- (Linux) Fixed missing dependencies for deb and rpm packages
- (Linux) Use dynamic boost

## [0.18.0] - 2023-01-29
Attention, this release contains critical security fixes. Please update as soon as possible. Additionally, we are
encouraging users to change their Sunshine password, especially if you expose the web UI (i.e. port 47990 by default)
to the internet, or have ever uploaded your logs with verbose output to a public resource.

### Added
- (Windows) Add support for Intel QuickSync
- (Linux) Added aarch64 deb and rpm packages
- (Windows) Add support for hybrid graphics systems, such as laptops with both integrated and discrete GPUs
- (Linux) Add support for streaming from Steam Deck Gaming Mode
- (Windows) Add HDR support, see https://docs.lizardbyte.dev/projects/sunshine/en/latest/about/usage.html#hdr-support
### Fixed
- (Network) Refactor code for UPnP port forwarding
- (Video) Enforce 10 FPS encoding frame rate minimum to improve static image quality
- (Linux) deb and rpm packages are now specific to destination distro and version
- (Docs) Add nvidia/nvenc preset migration guide
- (Network) Performance optimizations
- (Video/Windows) Fix streaming to multiple clients from hardware encoder
- (Linux) Fix child process spawning
- (Security) Fix security vulnerability in implementation of SimpleWebServer
- (Misc) Rename "Steam BigPicture" to "Steam Big Picture" in default apps.json
- (Security) Scrub basic authorization header from logs
- (Linux) The systemd service will now restart in the event of a crash
- (Video/KMS/Linux) Fixed error: `couldn't import RGB Image: 00003002 and 00003004`
- (Video/Windows) Fix stream freezing triggered by a resolution change
- (Installer/Windows) Fixed silent installation and other miscellaneous improvements
- (CPU) Significantly improved CPU usage

## [0.17.0] - 2023-01-08
If you are running Sunshine as a service on Windows, we are strongly urging you to update to v0.17.0 as soon as
possible. Older Windows versions of Sunshine had a security flaw in which the binary was located in a user-writable
location, which is problematic when running as a service or on a multi-user system. Additionally, when running Sunshine
as a service, games and applications were launched as SYSTEM. This could lead to issues with save files and other game
settings. In v0.17.0, games now run under your user account without elevated privileges.

### Breaking
- (Apps) Removed automatic desktop entry (Re-add by adding an empty application named "Desktop" with no commands, "desktop.png" can be added as the image.)
- (Windows) Improved user upgrade experience (Suggest to manually uninstall existing Sunshine version before this upgrade. Do NOT select to remove everything, if prompted. Make a backup of config files before uninstall.)
- (Windows) Move config files to specific directory (files will be migrated automatically if using Windows installer)
- (Dependencies) Fix npm path (breaking change for package maintainers)
### Added
- (macOS) Added initial support for arm64 on macOS through Macports portfile
- (Input) Added support for foreign keyboard input
- (Misc) Logs are now available inside the WebUI and are also written to a log file
- (UI/Windows) Added an Apply button to configuration page when running as a service
- (Input/Windows) Enable Mouse Keys while streaming for systems with no physical mouse
### Fixed
- (Video) Improved capture performance
- (Audio) Improved audio bitrate and quality handling
- (Apps/Windows) Fixed PATH environment variable handling
- (Apps/Windows) Use the proper environment variable for the Program Files (x86) folder
- (Service/Windows) Fix SunshineSvc hanging if an error occurs during startup
- (Service/Windows) Spawn Sunshine.exe in a job object, so it is terminated if SunshineSvc.exe dies
- (Video) windows/vram: fix fringing in NV12 colour conversion
- (Apps/Windows) Launch games under the correct user account
- (Video) nvenc, amdvce: rework all user presets/options
- (Network) Generate certificates with unique serial numbers
- (Service/Windows) Graceful termination on shutdown, logoff, and service stop
- (Apps/Windows) Fix launching apps when Sunshine is running as admin
- (Misc) Remove/fix calls to std::abort()
- (Misc) Remove prompt to press enter after Sunshine exits
- (Misc) Make log priority consistent for execution messages
- (Apps) Applications in Moonlight clients are now updated automatically after editing
- (Video/Linux) Fix wayland capture on nvidia
- (Audio) Fix 7.1 surround channel mapping
- (Video) Fix NVENC profile values not applying
- (Network) Fix origin_web_ui_allowed binding
- (Service/Windows) Self terminate/restart service if process hangs for 10 seconds
- (Input/Windows) Fix Windows masked cursor blending with GPU encoders
- (Video) Color conversion fixes and BT.2020 support
### Dependencies
- Bump ffmpeg from 4.4 to 5.1
- ffmpeg_patches: add amfenc delay/buffering fix
- CBS moved to ffmpeg submodules
- Migrate to upstream Simple-Web-Server submodule
- Bump third-party/TPCircularBuffer from `bce9170` to `8833b3a`
- Bump third-party/moonlight-common-c from `8169a31` to `ef9ad52`
- Bump third-party/miniupnp from `6f848ae` to `207cf44`
- Bump third-party/ViGEmClient from `f719a1d` to `9e842ba`
- Bump bootstrap from 5.0.0 to 5.2.3
- Bump @fortawesome/fontawesome-free from 6.2.0 to 6.2.1

## [0.16.0] - 2022-12-13
### Added
- Add cover finder
- (Docker) Add arm64 docker image
- (Flatpak) Add installation helper scripts
- (Windows) Add support for Unicode input messages
### Fixed
- (Linux) Reintroduce Ubuntu 20.04 and 22.04 specific deb packages
- (Linux) Fixed udev and systemd file locations
### Dependencies
- Bump babel from 2.10.3 to 2.11.0
- Bump sphinx-copybutton from 0.5.0 to 0.5.1
- Bump KSXGitHub/github-actions-deploy-aur from 2.5.0 to 2.6.0
- Use npm for web dependencies (breaking change for third-party package maintainers)
- Update moonlight-common-c
- Use pre-built ffmpeg from LizardByte/build-deps for all sunshine builds (breaking change for third-party package maintainers)
- Bump furo from 2022.9.29 to 2022.12.7
### Misc
- Misc org level workflow updates
- Fix misc typos in docs
- Fix winget release

## [0.15.0] - 2022-10-30
### Added
- (Windows) Add firewall rules scripts
- (Windows) Automatically add and remove firewall rules at install/uninstall
- (Windows) Automatically add and remove service at install/uninstall
- (Docker) Official image added
- (Linux) Add aarch64 flatpak package
### Changed
- (Windows/Linux/MacOS) Move default config and apps file to assets directory
- (MacOS) Bump boost to 1.80 for macport builds
- (Linux) Remove backup and restore of config files
### Fixed
- (Linux) Create sunshine config directory if it doesn't exist
- (Linux) Remove portable home and config directories for AppImage
- (Windows) Include service install and uninstall scripts again
- (Windows) Automatically delete start menu entry upon uninstall
- (Windows) Automatically delete program install directory upon uninstall, with user prompt
- (Linux) Handle the case of no default audio sink
- (Windows/Linux/MacOS) Fix default image paths
- (Linux) Fix CUDA RGBA to NV12 conversion

## [0.14.1] - 2022-08-09
### Added
- (Linux) Flatpak package added
- (Linux) AUR package automated updates
- (Windows) Winget package automated updates
### Changed
- (General) Moved repo to @LizardByte GitHub org
- (WebUI) Fixed button spacing on home page
- (WebUI) Added Discord WidgetBot Crate
### Fixed
- (Linux/Mac) Default config and app files now copied to user home directory
- (Windows) Default config and app files now copied to working directory

## [0.14.0] - 2022-06-15
### Added
- (Documentation) Added Sphinx documentation available at https://sunshinestream.readthedocs.io/en/latest/
- (Development) Initial support for Localization
- (Linux) Add rpm package as release asset
- (macOS) Add Portfile as release asset
- (Windows) Add DwmFlush() call to improve capture
- (Windows) Add Windows installer
### Fixed
- (AMD) Fixed hwdevice being destroyed before context
- (Linux) Added missing dependencies to AppImage
- (Linux) Fixed rumble events causing game to freeze
- (Linux) Improved Pulse/Pipewire compatibility
- (Linux) Moved to single deb package
- (macOS) Fixed missing TPCircularBuffer submodule
- (Stream) Properly catch exceptions in stream broadcast handlers
- (Stream/Video) AVPacket fix

## [0.13.0] - 2022-02-27
### Added
- (macOS) Initial support for macOS (#40)

## [0.12.0] - 2022-02-13
### Added
- New command line argument `--version`
- Custom png poster support
### Changed
- Correct software bitrate calculation
- Increase vbv-bufsize to 1/10 of requested bitrate
- Improvements to Web UI

## [0.11.1] - 2021-10-04
### Changed
- (Linux) Fix search path for config file and assets

## [0.11.0] - 2021-10-04
### Added
- (Linux) Added support for wlroots based compositors on Wayland.
- (Windows) Added an icon for the executable
### Changed
- Fixed a bug causing segfault when connecting multiple controllers.
- (Linux) Improved NVENC, it now offloads converting images from RGB to NV12
- (Linux) Fixed a bug causing stuttering

## [0.10.1] - 2021-08-21
### Changed
- (Linux) Re-enabled KMS

## [0.10.0] - 2021-08-20
### Added
- Added support for Rumble with gamepads.
- Added support for keyboard shortcuts <--- See the README for details.
- (Windows) A very basic script has been added in Sunshine-Windows\tools <-- This will start Sunshine at boot with the highest privileges, which is needed to display the login prompt.
### Changed
- Some cosmetic changes to the WebUI.
- The first time the WebUI is opened, it will request the creation of a username/password pair from the user.
- Fixed audio crackling introduced in version 0.8.0
- (Linux) VAAPI hardware encoding now works on Intel i7-6700 at least. <-- For the best experience, using ffmpeg version 4.3 or higher is recommended.
- (Linux) Installing from the debian package shouldn't overwrite your configuration files anymore. <-- It's recommended that you back up `/etc/sunshine/` before testing this.

## [0.9.0] - 2021-07-11
### Added
- Added audio encryption
- (Linux) Added basic NVENC support on Linux
- (Windows) The Windows version can now capture the lock screen and the UAC prompt as long as it's run through `PsExec.exe` https://docs.microsoft.com/en-us/sysinternals/downloads/psexec
### Changed
- Sunshine will now accept expired or not-yet-valid certificates, as long as they are signed properly.
- Fixed compatibility with iOS version of Moonlight
- Drastically reduced chance of being forced to skip error correction due to video frame size
- (Linux) sunshine.service will be installed automatically.

## [0.8.0] - 2021-06-30
### Added
- Added mDNS support: Moonlight will automatically find Sunshine.
- Added UPnP support. It's off by default.

## [0.7.7] - 2021-06-24
### Added
- (Linux) Added installation package for Debian
### Changed
- Fixed incorrect scaling for absolute mouse coordinates when using multiple monitors.
- Fixed incorrect colors when scaling for software encoder

## [0.7.1] - 2021-06-18
### Changed
- (Linux) Fixed an issue where it was impossible to start sunshine on ubuntu 20.04

## [0.7.0] - 2021-06-16
### Added
- Added a Web Manager. Accessible through: https://localhost:47990 or https://<ip of your pc>:47990
- (Linux) Added hardware encoding support for AMD on Linux
### Changed
- (Linux) Moved certificates and saved pairings generated during runtime to .config/sunshine on Linux

## [0.6.0] - 2021-05-26
### Added
- Added support for surround audio
### Changed
- Maintain aspect ratio when scaling video
- Fix issue where Sunshine is forced to drop frames when they are too large

## [0.5.0] - 2021-05-13
### Added
- Added support for absolute mouse coordinates
- (Linux) Added support for streaming specific monitor on Linux
- (Windows) Added support for AMF on Windows

## [0.4.0] - 2020-05-03
### Changed
- prep-cmd is now optional in apps.json
- Fixed bug causing video artifacts
- Fixed bug preventing Moonlight from closing app on exit
- Fixed bug preventing keyboard keys from repeating on latest version of Moonlight
- Fixed bug causing segfault when another session of sunshine was already running
- Fixed bug causing crash when monitor has resolution 1366x768

## [0.3.1] - 2020-04-24
### Changed
- Fix a memory leak.

## [0.3.0] - 2020-04-23
### Changed
- Hardware acceleration on Nvidia GPUs for video encoding on Windows

## [0.2.0] - 2020-03-21
### Changed
- Multicasting is now supported: You can set the maximum simultaneous connections with the configurable option: channels
- Configuration variables can be overwritten on the command line: "name=value" --> it can be useful to set min_log_level=debug without modifying the configuration file
- Switches to make testing the pairing mechanism more convenient have been added, see "sunshine --help" for details

## [0.1.1] - 2020-01-30
### Added
- (Linux) Added deb package and service for Linux

## [0.1.0] - 2020-01-27
### Added
- The first official release for Sunshine!

[0.1.0]: https://github.com/LizardByte/Sunshine/releases/tag/v0.1.0
[0.1.1]: https://github.com/LizardByte/Sunshine/releases/tag/v0.1.1
[0.2.0]: https://github.com/LizardByte/Sunshine/releases/tag/v0.2.0
[0.3.0]: https://github.com/LizardByte/Sunshine/releases/tag/v0.3.0
[0.3.1]: https://github.com/LizardByte/Sunshine/releases/tag/v0.3.1
[0.4.0]: https://github.com/LizardByte/Sunshine/releases/tag/v0.4.0
[0.5.0]: https://github.com/LizardByte/Sunshine/releases/tag/0.5.0
[0.6.0]: https://github.com/LizardByte/Sunshine/releases/tag/v0.6.0
[0.7.0]: https://github.com/LizardByte/Sunshine/releases/tag/v0.7.0
[0.7.1]: https://github.com/LizardByte/Sunshine/releases/tag/v0.7.1
[0.7.7]: https://github.com/LizardByte/Sunshine/releases/tag/v0.7.7
[0.8.0]: https://github.com/LizardByte/Sunshine/releases/tag/v0.8.0
[0.9.0]: https://github.com/LizardByte/Sunshine/releases/tag/v0.9.0
[0.10.0]: https://github.com/LizardByte/Sunshine/releases/tag/v0.10.0
[0.10.1]: https://github.com/LizardByte/Sunshine/releases/tag/v0.10.1
[0.11.0]: https://github.com/LizardByte/Sunshine/releases/tag/v0.11.0
[0.11.1]: https://github.com/LizardByte/Sunshine/releases/tag/v0.11.1
[0.12.0]: https://github.com/LizardByte/Sunshine/releases/tag/v0.12.0
[0.13.0]: https://github.com/LizardByte/Sunshine/releases/tag/v0.13.0
[0.14.0]: https://github.com/LizardByte/Sunshine/releases/tag/v0.14.0
[0.14.1]: https://github.com/LizardByte/Sunshine/releases/tag/v0.14.1
[0.15.0]: https://github.com/LizardByte/Sunshine/releases/tag/v0.15.0
[0.16.0]: https://github.com/LizardByte/Sunshine/releases/tag/v0.16.0
[0.17.0]: https://github.com/LizardByte/Sunshine/releases/tag/v0.17.0
[0.18.0]: https://github.com/LizardByte/Sunshine/releases/tag/v0.18.0
[0.18.1]: https://github.com/LizardByte/Sunshine/releases/tag/v0.18.1
[0.18.2]: https://github.com/LizardByte/Sunshine/releases/tag/v0.18.2
CMakeLists.txt (969 changed lines): file diff suppressed because it is too large.
DOCKER_README.md (new file, 134 lines)
@@ -0,0 +1,134 @@
# Docker

## Important note
Starting with v0.18.0, tag names have changed. You may no longer use `latest`, `master`, `vX.X.X`.

## Build your own containers
This image provides a method for you to easily use the latest Sunshine release in your own docker projects. It is not
intended to be used as a standalone container at this point, and should be considered experimental.

```dockerfile
ARG SUNSHINE_VERSION=latest
ARG SUNSHINE_OS=ubuntu-22.04
FROM lizardbyte/sunshine:${SUNSHINE_VERSION}-${SUNSHINE_OS}

# install Steam, Wayland, etc.

ENTRYPOINT steam && sunshine
```
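A minimal sketch of building the Dockerfile above with explicit build arguments; the image tag `my-sunshine-steam` is only an example name:

```bash
# Build a downstream image from a specific Sunshine base (values are examples).
docker build \
  --build-arg SUNSHINE_VERSION=v0.18.2 \
  --build-arg SUNSHINE_OS=ubuntu-22.04 \
  -t my-sunshine-steam .
```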
### SUNSHINE_VERSION
|
||||
- `latest`, `master`, `vX.X.X`
|
||||
- `nightly`
|
||||
- commit hash
|
||||
|
||||
### SUNSHINE_OS
|
||||
Sunshine images are available, based on the following base images.
|
||||
|
||||
- `debian-bullseye`
|
||||
- `fedora-36`
|
||||
- `fedora-37`
|
||||
- `ubuntu-20.04`
|
||||
- `ubuntu-22.04`
|
||||
|
||||
### Tags
|
||||
You must combine the `SUNSHINE_VERSION` and `SUNSHINE_OS` to determine the tag to pull. The format should be
|
||||
`<SUNSHINE_VERSION>-<SUNSHINE_OS>`. For example, `latest-ubuntu-22.04`.
|
||||
|
||||
See all our available tags on [docker hub](https://hub.docker.com/r/lizardbyte/sunshine/tags) or
|
||||
[ghcr](https://github.com/LizardByte/Sunshine/pkgs/container/sunshine/versions) for more info.
|
||||
|
||||
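A minimal sketch of pulling one such combined tag; the exact tag must exist on the registry, and `v0.18.2-ubuntu-22.04` is only an example:

```bash
# Pull a specific combination of version and base OS.
docker pull lizardbyte/sunshine:v0.18.2-ubuntu-22.04
# or from GitHub Container Registry
docker pull ghcr.io/lizardbyte/sunshine:v0.18.2-ubuntu-22.04
```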
## Where used
This is a list of docker projects using Sunshine. Something missing? Let us know about it!

- [Games on Whales](https://games-on-whales.github.io)

## Port and Volume mappings
Examples of the required mappings are below. The configuration file will be saved to `/config` in the container.

### Using docker run
Create and run the container (substitute your `<values>`):

```bash
docker run -d \
  --name=<image_name> \
  --restart=unless-stopped \
  -e PUID=<uid> \
  -e PGID=<gid> \
  -e TZ=<timezone> \
  -v <path to data>:/config \
  -p 47984-47990:47984-47990/tcp \
  -p 48010:48010 \
  -p 47998-48000:47998-48000/udp \
  <image>
```

### Using docker-compose
Create a `docker-compose.yml` file with the following contents (substitute your `<values>`):

```yaml
version: '3'
services:
  <image_name>:
    image: <image>
    container_name: sunshine
    restart: unless-stopped
    volumes:
      - <path to data>:/config
    environment:
      - PUID=<uid>
      - PGID=<gid>
      - TZ=<timezone>
    ports:
      - "47984-47990:47984-47990/tcp"
      - "48010:48010"
      - "47998-48000:47998-48000/udp"
```

### Parameters
You must substitute the `<values>` with your own settings.

Parameters are split into two halves separated by a colon. The left side represents the host and the right side the
container.

**Example:** `-p external:internal` - This shows the port mapping from internal to external of the container.
Therefore `-p 47990:47990` would expose port `47990` from inside the container to be accessible from the host's IP on
port `47990` (e.g. `http://<host_ip>:47990`). The internal port must be `47990`, but the external port may be changed
(e.g. `-p 8080:47990`). All the ports listed in the `docker run` and `docker-compose` examples are required.

| Parameter | Function | Example Value | Required |
|-----------------------------|----------------------|--------------------|----------|
| `-p <port>:47990` | Web UI Port | `47990` | True |
| `-v <path to data>:/config` | Volume mapping | `/home/sunshine` | True |
| `-e PUID=<uid>` | User ID | `1001` | False |
| `-e PGID=<gid>` | Group ID | `1001` | False |
| `-e TZ=<timezone>` | Lookup [TZ value][1] | `America/New_York` | False |

[1]: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones

#### User / Group Identifiers
When using data volumes (`-v` flags), permissions issues can arise between the host OS and the container. To avoid this
issue you can specify the user PUID and group PGID. Ensure the data volume directory on the host is owned by the same
user you specify.

In this instance `PUID=1001` and `PGID=1001`. To find yours, use `id <user>` as below:

```bash
$ id dockeruser
uid=1001(dockeruser) gid=1001(dockergroup) groups=1001(dockergroup)
```

If you want to change the PUID or PGID after the image has been built, it will require rebuilding the image.
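A minimal host-side sketch that matches the example `PUID`/`PGID` of 1001; the data path is only an example and should be replaced with your own:

```bash
# Create the host directory for /config and give it to the UID/GID passed to the container.
sudo mkdir -p /home/sunshine/config
sudo chown 1001:1001 /home/sunshine/config
# then mount it: -v /home/sunshine/config:/config -e PUID=1001 -e PGID=1001
```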
## Supported Architectures

Specifying `lizardbyte/sunshine:latest-<SUNSHINE_OS>` or `ghcr.io/lizardbyte/sunshine:latest-<SUNSHINE_OS>` should
retrieve the correct image for your architecture.

The architectures supported by these images are:

| Architecture    | Available |
|:---------------:|:---------:|
| amd64 / x86_64  | ✅        |
| arm64 / aarch64 | ✅        |
FindFFmpeg.cmake (deleted, 144 lines)
@@ -1,144 +0,0 @@
# - Try to find FFMPEG
# Once done this will define
#  FFMPEG_FOUND - System has FFMPEG
#  FFMPEG_INCLUDE_DIRS - The FFMPEG include directories
#  FFMPEG_LIBRARIES - The libraries needed to use FFMPEG
#  FFMPEG_LIBRARY_DIRS - The directory to find FFMPEG libraries
#
# written by Roy Shilkrot 2013 http://www.morethantechnical.com/
#

find_package(PkgConfig)

MACRO(FFMPEG_FIND varname shortname headername)

    IF(NOT WIN32)
        PKG_CHECK_MODULES(PC_${varname} ${shortname})

        FIND_PATH(${varname}_INCLUDE_DIR "${shortname}/${headername}"
            HINTS ${PC_${varname}_INCLUDEDIR} ${PC_${varname}_INCLUDE_DIRS}
            NO_DEFAULT_PATH
        )
    ELSE()
        FIND_PATH(${varname}_INCLUDE_DIR "${shortname}/${headername}")
    ENDIF()

    IF(${varname}_INCLUDE_DIR STREQUAL "${varname}_INCLUDE_DIR-NOTFOUND")
        message(STATUS "look for newer structure")
        IF(NOT WIN32)
            PKG_CHECK_MODULES(PC_${varname} "lib${shortname}")

            FIND_PATH(${varname}_INCLUDE_DIR "lib${shortname}/${headername}"
                HINTS ${PC_${varname}_INCLUDEDIR} ${PC_${varname}_INCLUDE_DIRS}
                NO_DEFAULT_PATH
            )
        ELSE()
            FIND_PATH(${varname}_INCLUDE_DIR "lib${shortname}/${headername}")
            IF(${${varname}_INCLUDE_DIR} STREQUAL "${varname}_INCLUDE_DIR-NOTFOUND")
                # Desperate times call for desperate measures
                MESSAGE(STATUS "globbing...")
                FILE(GLOB_RECURSE ${varname}_INCLUDE_DIR "/ffmpeg*/${headername}")
                MESSAGE(STATUS "found: ${${varname}_INCLUDE_DIR}")
                IF(${varname}_INCLUDE_DIR)
                    GET_FILENAME_COMPONENT(${varname}_INCLUDE_DIR "${${varname}_INCLUDE_DIR}" PATH)
                    GET_FILENAME_COMPONENT(${varname}_INCLUDE_DIR "${${varname}_INCLUDE_DIR}" PATH)
                ELSE()
                    SET(${varname}_INCLUDE_DIR "${varname}_INCLUDE_DIR-NOTFOUND")
                ENDIF()
            ENDIF()
        ENDIF()
    ENDIF()

    IF(${${varname}_INCLUDE_DIR} STREQUAL "${varname}_INCLUDE_DIR-NOTFOUND")
        MESSAGE(STATUS "Can't find includes for ${shortname}...")
    ELSE()
        MESSAGE(STATUS "Found ${shortname} include dirs: ${${varname}_INCLUDE_DIR}")

        # GET_DIRECTORY_PROPERTY(FFMPEG_PARENT DIRECTORY ${${varname}_INCLUDE_DIR} PARENT_DIRECTORY)
        GET_FILENAME_COMPONENT(FFMPEG_PARENT ${${varname}_INCLUDE_DIR} PATH)
        MESSAGE(STATUS "Using FFMpeg dir parent as hint: ${FFMPEG_PARENT}")

        IF(NOT WIN32)
            FIND_LIBRARY(${varname}_LIBRARIES NAMES ${shortname}
                HINTS ${PC_${varname}_LIBDIR} ${PC_${varname}_LIBRARY_DIR} ${FFMPEG_PARENT})
        ELSE()
            FIND_PATH(${varname}_LIBRARIES "${shortname}.dll.a" HINTS ${FFMPEG_PARENT})
            # FILE(GLOB_RECURSE ${varname}_LIBRARIES "${FFMPEG_PARENT}/*${shortname}.lib")
            # GLOBing is very bad... but windows sux, this is the only thing that works
        ENDIF()

        IF(${varname}_LIBRARIES STREQUAL "${varname}_LIBRARIES-NOTFOUND")
            MESSAGE(STATUS "look for newer structure for library")
            FIND_LIBRARY(${varname}_LIBRARIES NAMES lib${shortname}
                HINTS ${PC_${varname}_LIBDIR} ${PC_${varname}_LIBRARY_DIR} ${FFMPEG_PARENT})
        ENDIF()

        IF(${varname}_LIBRARIES STREQUAL "${varname}_LIBRARIES-NOTFOUND")
            MESSAGE(STATUS "Can't find lib for ${shortname}...")
        ELSE()
            MESSAGE(STATUS "Found ${shortname} libs: ${${varname}_LIBRARIES}")
        ENDIF()

        IF(NOT ${varname}_INCLUDE_DIR STREQUAL "${varname}_INCLUDE_DIR-NOTFOUND"
            AND NOT ${varname}_LIBRARIES STREQUAL ${varname}_LIBRARIES-NOTFOUND)

            MESSAGE(STATUS "found ${shortname}: include ${${varname}_INCLUDE_DIR} lib ${${varname}_LIBRARIES}")
            SET(FFMPEG_${varname}_FOUND 1)
            SET(FFMPEG_${varname}_INCLUDE_DIRS ${${varname}_INCLUDE_DIR})
            SET(FFMPEG_${varname}_LIBS ${${varname}_LIBRARIES})
        ELSE()
            MESSAGE(STATUS "Can't find ${shortname}")
        ENDIF()

    ENDIF()

ENDMACRO(FFMPEG_FIND)

FFMPEG_FIND(LIBAVFORMAT avformat avformat.h)
FFMPEG_FIND(LIBAVDEVICE avdevice avdevice.h)
FFMPEG_FIND(LIBAVCODEC avcodec avcodec.h)
FFMPEG_FIND(LIBAVUTIL avutil avutil.h)
FFMPEG_FIND(LIBSWSCALE swscale swscale.h)

SET(FFMPEG_FOUND "NO")
IF (FFMPEG_LIBAVFORMAT_FOUND AND
    FFMPEG_LIBAVDEVICE_FOUND AND
    FFMPEG_LIBAVCODEC_FOUND AND
    FFMPEG_LIBAVUTIL_FOUND AND
    FFMPEG_LIBSWSCALE_FOUND
)

    SET(FFMPEG_FOUND "YES")

    SET(FFMPEG_INCLUDE_DIRS ${FFMPEG_LIBAVFORMAT_INCLUDE_DIRS})

    SET(FFMPEG_LIBRARY_DIRS ${FFMPEG_LIBAVFORMAT_LIBRARY_DIRS})

    SET(FFMPEG_LIBRARIES
        ${FFMPEG_LIBAVFORMAT_LIBS}
        ${FFMPEG_LIBAVDEVICE_LIBS}
        ${FFMPEG_LIBAVCODEC_LIBS}
        ${FFMPEG_LIBAVUTIL_LIBS}
        ${FFMPEG_LIBSWSCALE_LIBS}
    )

ELSE ()

    MESSAGE(STATUS "Could not find FFMPEG")

ENDIF()

message(STATUS ${FFMPEG_LIBRARIES} ${FFMPEG_LIBAVFORMAT_LIBRARIES})

include(FindPackageHandleStandardArgs)
# handle the QUIETLY and REQUIRED arguments and set FFMPEG_FOUND to TRUE
# if all listed variables are TRUE
find_package_handle_standard_args(FFMPEG DEFAULT_MSG
    FFMPEG_LIBRARIES FFMPEG_INCLUDE_DIRS)

mark_as_advanced(FFMPEG_INCLUDE_DIRS FFMPEG_LIBRARY_DIRS FFMPEG_LIBRARIES)
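On non-Windows platforms the module above relies on pkg-config to locate each FFmpeg component. A minimal sketch for checking what pkg-config would report, assuming the FFmpeg development packages are installed:

```bash
# Check that pkg-config can resolve the FFmpeg components the module looks up.
for mod in libavformat libavdevice libavcodec libavutil libswscale; do
  pkg-config --exists "$mod" && echo "$mod: $(pkg-config --modversion "$mod")" || echo "$mod: missing"
done
```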
NOTICE (new file, 3 lines)
@@ -0,0 +1,3 @@
©2018 Valve Corporation. Steam and the Steam logo are trademarks and/or
registered trademarks of Valve Corporation in the U.S. and/or other countries. All
rights reserved.
README.md (deleted, 185 lines)
@@ -1,185 +0,0 @@
# Introduction
Sunshine is a Gamestream host for Moonlight

- [Building](README.md#building)
- [Credits](README.md#credits)

# Building
- [Linux](README.md#linux)
- [Windows](README.md#windows-10)

## Linux

### Requirements:
Ubuntu 20.04:

    sudo apt install cmake libssl-dev libavdevice-dev libboost-thread-dev libboost-filesystem-dev libboost-log-dev libpulse-dev libopus-dev libxtst-dev libx11-dev libxfixes-dev libevdev-dev libxcb1-dev libxcb-shm0-dev libxcb-xfixes0-dev

### Compilation:
- `git clone https://github.com/loki-47-6F-64/sunshine.git --recurse-submodules`
- `cd sunshine && mkdir build && cd build`
- `cmake ..`
- `make`: It is suggested to use the `-j N` flag with this command, `N` being the number of cores your PC has

### Setup:
sunshine needs access to uinput to create mouse and gamepad events:
- Add your user to the group 'input': `usermod -a -G input username`
- Create a file `/etc/udev/rules.d/85-sunshine-input.rules` with the following contents:
  - `KERNEL=="uinput", GROUP="input", MODE="0660"`
- assets/sunshine.conf is an example configuration file. Modify it as you see fit and use it by running: `sunshine path/to/sunshine.conf`
- path/to/build/dir/sunshine.service is used to start sunshine in the background:
  - `cp sunshine.service $HOME/.config/systemd/user/`
  - Modify $HOME/.config/systemd/user/sunshine.service to point to the sunshine executable
  - `systemctl --user start sunshine`
- assets/apps.json is an [example](README.md#application-list) of a list of applications that are started just before running a stream

A consolidated sketch of these setup steps is shown after this list.
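A minimal consolidated sketch of the uinput and systemd steps above, assuming your user is `username` and that `sunshine.service` is in the current directory:

```bash
# Allow Sunshine to create uinput devices (mouse/gamepad).
sudo usermod -a -G input username
echo 'KERNEL=="uinput", GROUP="input", MODE="0660"' | \
  sudo tee /etc/udev/rules.d/85-sunshine-input.rules
sudo udevadm control --reload-rules && sudo udevadm trigger

# Run Sunshine in the background as a user service.
mkdir -p "$HOME/.config/systemd/user"
cp sunshine.service "$HOME/.config/systemd/user/"
systemctl --user daemon-reload
systemctl --user start sunshine
```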
### Troubleshooting:
* If you get "Could not create Sunshine Gamepad: Permission Denied", ensure you are part of the group "input":
  * `groups`
* If Sunshine sends audio from the microphone instead of the speaker, try the following steps:
  * `pacmd list-sources | grep "name:"`
  * Copy the name to the configuration option "audio_sink"
  * Restart sunshine

## Windows 10

### Requirements:

MSYS2: mingw-w64-x86_64-openssl mingw-w64-x86_64-cmake mingw-w64-x86_64-toolchain mingw-w64-x86_64-ffmpeg mingw-w64-x86_64-boost

### Compilation:
- `git clone https://github.com/loki-47-6F-64/sunshine.git --recurse-submodules`
- `cd sunshine && mkdir build && cd build`
- `cmake -G"Unix Makefiles" ..`
- `make`

### Setup:
- **OPTIONAL** Gamepad support: Download and run 'ViGEmBus_Setup_1.16.116.exe' from https://github.com/ViGEm/ViGEmBus/releases

### Static build
#### Requirements:
MSYS2: mingw-w64-x86_64-openssl mingw-w64-x86_64-cmake mingw-w64-x86_64-toolchain mingw-w64-x86_64-ffmpeg mingw-w64-x86_64-boost git-lfs

#### Compilation:
- `git lfs install`
- `git clone https://github.com/loki-47-6F-64/sunshine.git --recurse-submodules`
- `cd sunshine && mkdir build && cd build`
- `cmake -DSUNSHINE_STANDALONE=ON -G"Unix Makefiles" ..`
- `make`

# Common

## Usage:
- Run `sunshine path/to/sunshine.conf`
- In Moonlight: Add PC manually
- When Moonlight requests that you enter the correct pin on sunshine, either:
  - Type in the URL bar of your browser: `xxx.xxx.xxx.xxx:47989/pin/####`
  - `wget xxx.xxx.xxx.xxx:47989/pin/####`
  - The x's are the IP of your instance, `####` is the pin
- Click on one of the Applications listed
- Have fun :)

## Note:
- The Windows key is not passed through by Moonlight, therefore Sunshine maps the Right-Alt key to the Windows key
- If you set Video Bitrate to 0.5Mb/s:
  - Sunshine will use CRF or QP to control the quality of the stream. (See the example configuration file for more details)
  - This is less CPU intensive and has lower average bandwidth requirements compared to manually setting the bitrate to an acceptable quality
  - However, it has higher peak bitrates, forcing Sunshine to drop entire frames when streaming 1080P due to their size.
  - When this happens, the video portion of the stream appears to be frozen.
  - This is rare enough that using this for the desktop environment is tolerable (in my opinion), however for gaming not so much.

## Credits:
- [Simple-Web-Server](https://gitlab.com/eidheim/Simple-Web-Server)
- [Moonlight](https://github.com/moonlight-stream)
- [Looking-Glass](https://github.com/gnif/LookingGlass) (For showing me how to properly capture frames on Windows, saving me a lot of time :)

## Application List:
- You can use Environment variables in place of values
  - $(HOME) will be replaced by the value of $HOME
  - $$ will be replaced by $ --> $$(HOME) will be replaced by $(HOME)
- env: Adds or overwrites Environment variables for the commands/applications run by Sunshine.
  - "Variable name":"Variable value"
- apps: The list of applications
  - Example:
```json
{
  "name":"An App",
  "cmd":"command to open app",
  "prep-cmd":[
    {
      "do":"somecommand",
      "undo":"undothatcommand"
    }
  ]
}
```
  - name: Self explanatory
  - output <optional>: The file where the output of the command is stored
    - If it is not specified, the output is ignored
  - prep-cmd: A list of commands to be run before/after the application
    - If any of the prep-commands fail, starting the application is aborted
    - do: Run before the application
      - If it fails, all 'undo' commands of the previously succeeded 'do' commands are run
    - undo <optional>: Run after the application has terminated
      - This should not fail considering it is supposed to undo the 'do' commands.
      - If it fails, Sunshine is terminated
  - cmd <optional>: The main application
    - If not specified, a process is started that sleeps indefinitely

1. When an application is started, if there is an application already running, it will be terminated.
2. When the application has been shut down, the stream shuts down as well.
3. In addition to the apps listed, one app "Desktop" is hardcoded into Sunshine. It does not start an application; instead it simply starts a stream.

Linux
```json
{
  "env":{
    "DISPLAY":":0",
    "DRI_PRIME":"1",
    "XAUTHORITY":"$(HOME)/.Xauthority",
    "PATH":"$(PATH):$(HOME)/.local/bin"
  },
  "apps":[
    {
      "name":"Low Res Desktop",
      "prep-cmd":[
        { "do":"xrandr --output HDMI-1 --mode 1920x1080", "undo":"xrandr --output HDMI-1 --mode 1920x1200" }
      ]
    },
    {
      "name":"Steam BigPicture",

      "output":"steam.txt",
      "cmd":"steam -bigpicture",
      "prep-cmd":[]
    }
  ]
}
```
Windows
```json
{
  "env":{
    "PATH":"$(PATH);C:\\Program Files (x86)\\Steam"
  },
  "apps":[
    {
      "name":"Steam BigPicture",

      "output":"steam.txt",
      "prep-cmd":[
        {"do":"steam \"steam://open/bigpicture\""}
      ]
    }
  ]
}
```
README.rst (new file, 132 lines)
@@ -0,0 +1,132 @@
Overview
========
LizardByte has the full documentation hosted on `Read the Docs <https://sunshinestream.readthedocs.io/>`_.

About
-----
Sunshine is a self-hosted game stream host for Moonlight.
Offering low latency, cloud gaming server capabilities with support for AMD, Intel, and Nvidia GPUs for hardware
encoding. Software encoding is also available. You can connect to Sunshine from any Moonlight client on a variety of
devices. A web UI is provided to allow configuration, and client pairing, from your favorite web browser. Pair from
the local server or any mobile device.

System Requirements
-------------------

.. warning:: This table is a work in progress. Do not purchase hardware based on this.

**Minimum Requirements**

+------------+------------------------------------------------------------+
| GPU | AMD: VCE 1.0 or higher, see `obs-amd hardware support`_ |
| +------------------------------------------------------------+
| | Intel: VAAPI-compatible, see: `VAAPI hardware support`_ |
| +------------------------------------------------------------+
| | Nvidia: NVENC enabled cards, see `nvenc support matrix`_ |
+------------+------------------------------------------------------------+
| CPU | AMD: Ryzen 3 or higher |
| +------------------------------------------------------------+
| | Intel: Core i3 or higher |
+------------+------------------------------------------------------------+
| RAM | 4GB or more |
+------------+------------------------------------------------------------+
| OS | Windows: 10+ (Windows Server not supported) |
| +------------------------------------------------------------+
| | macOS: 11.7+ |
| +------------------------------------------------------------+
| | Linux/Debian: 11 (bullseye) |
| +------------------------------------------------------------+
| | Linux/Fedora: 36+ |
| +------------------------------------------------------------+
| | Linux/Ubuntu: 20.04+ (focal) |
+------------+------------------------------------------------------------+
| Network | Host: 5GHz, 802.11ac |
| +------------------------------------------------------------+
| | Client: 5GHz, 802.11ac |
+------------+------------------------------------------------------------+

**4k Suggestions**

+------------+------------------------------------------------------------+
| GPU | AMD: Video Coding Engine 3.1 or higher |
| +------------------------------------------------------------+
| | Intel: HD Graphics 510 or higher |
| +------------------------------------------------------------+
| | Nvidia: GeForce GTX 1080 or higher |
+------------+------------------------------------------------------------+
| CPU | AMD: Ryzen 5 or higher |
| +------------------------------------------------------------+
| | Intel: Core i5 or higher |
+------------+------------------------------------------------------------+
| Network | Host: CAT5e ethernet or better |
| +------------------------------------------------------------+
| | Client: CAT5e ethernet or better |
+------------+------------------------------------------------------------+

**HDR Suggestions**

+------------+------------------------------------------------------------+
| GPU | AMD: Video Coding Engine 3.4 or higher |
| +------------------------------------------------------------+
| | Intel: UHD Graphics 730 or higher |
| +------------------------------------------------------------+
| | Nvidia: Pascal-based GPU (GTX 10-series) or higher |
+------------+------------------------------------------------------------+
| CPU | AMD: todo |
| +------------------------------------------------------------+
| | Intel: todo |
+------------+------------------------------------------------------------+
| Network | Host: CAT5e ethernet or better |
| +------------------------------------------------------------+
| | Client: CAT5e ethernet or better |
+------------+------------------------------------------------------------+

Integrations
------------

.. image:: https://img.shields.io/github/actions/workflow/status/lizardbyte/sunshine/CI.yml.svg?branch=master&label=CI%20build&logo=github&style=for-the-badge
   :alt: GitHub Workflow Status (CI)
   :target: https://github.com/LizardByte/Sunshine/actions/workflows/CI.yml?query=branch%3Amaster

.. image:: https://img.shields.io/github/actions/workflow/status/lizardbyte/sunshine/localize.yml.svg?branch=nightly&label=localize%20build&logo=github&style=for-the-badge
   :alt: GitHub Workflow Status (localize)
   :target: https://github.com/LizardByte/Sunshine/actions/workflows/localize.yml?query=branch%3Anightly

.. image:: https://img.shields.io/readthedocs/sunshinestream?label=Docs&style=for-the-badge&logo=readthedocs
   :alt: Read the Docs
   :target: http://sunshinestream.readthedocs.io/

.. image:: https://img.shields.io/badge/dynamic/json?color=blue&label=localized&style=for-the-badge&query=%24.progress..data.translationProgress&url=https%3A%2F%2Fbadges.awesome-crowdin.com%2Fstats-15178612-503956.json&logo=crowdin
   :alt: CrowdIn
   :target: https://crowdin.com/project/sunshinestream

Support
-------

Our support methods are listed in our
`LizardByte Docs <https://lizardbyte.readthedocs.io/en/latest/about/support.html>`_.

Downloads
---------

.. image:: https://img.shields.io/github/downloads/lizardbyte/sunshine/total?style=for-the-badge&logo=github
   :alt: GitHub Releases
   :target: https://github.com/LizardByte/Sunshine/releases/latest

.. image:: https://img.shields.io/docker/pulls/lizardbyte/sunshine?style=for-the-badge&logo=docker
   :alt: Docker
   :target: https://hub.docker.com/r/lizardbyte/sunshine

Stats
------
.. image:: https://img.shields.io/github/stars/lizardbyte/sunshine?logo=github&style=for-the-badge
   :alt: GitHub stars
   :target: https://github.com/LizardByte/Sunshine

.. image:: https://img.shields.io/badge/dynamic/json?color=blue&label=AUR&style=for-the-badge&query=$.results.0.NumVotes&url=https%3A%2F%2Fapp.lizardbyte.dev%2Funo%2Faur%2Fsunshine.json&logo=archlinux
   :alt: AUR votes
   :target: https://aur.archlinux.org/packages/sunshine

.. _nvenc support matrix: https://developer.nvidia.com/video-encode-and-decode-gpu-support-matrix-new
.. _obs-amd hardware support: https://github.com/obsproject/obs-amd-encoder/wiki/Hardware-Support
.. _VAAPI hardware support: https://www.intel.com/content/www/us/en/developer/articles/technical/linuxmedia-vaapi.html
Submodule Simple-Web-Server deleted from f37a41d48b
Submodule ViGEmClient deleted from 52682b59c4
appveyor.yml (deleted, 38 lines)
@@ -1,38 +0,0 @@
image:
  - Ubuntu
  - Visual Studio 2019

environment:
  matrix:
    - BUILD_TYPE: Debug
    - BUILD_TYPE: Release

install:
  - sh: sudo apt update
  - sh: sudo apt install -y build-essential cmake libssl-dev libavdevice-dev libboost-thread-dev libboost-filesystem-dev libboost-log-dev libpulse-dev libopus-dev libxtst-dev libx11-dev libxfixes-dev libevdev-dev libxcb1-dev libxcb-shm0-dev libxcb-xfixes0-dev
  - cmd: C:\msys64\usr\bin\bash -lc "pacman --needed --noconfirm -S mingw-w64-x86_64-openssl mingw-w64-x86_64-cmake mingw-w64-x86_64-toolchain mingw-w64-x86_64-opus mingw-w64-x86_64-x265 mingw-w64-x86_64-boost git yasm nasm diffutils make"

before_build:
  - git submodule update --init --recursive
  - mkdir build
  - cd build

build_script:
  - cmd: set OLDPATH=%PATH%
  - cmd: set PATH=C:\msys64\mingw64\bin
  - sh: cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DSUNSHINE_EXECUTABLE_PATH=sunshine -DSUNSHINE_ASSETS_DIR=/etc/sunshine ..
  - cmd: cmake -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DSUNSHINE_STANDALONE=ON -DSUNSHINE_ASSETS_DIR=assets -G "MinGW Makefiles" ..
  - sh: make -j$(nproc)
  - cmd: mingw32-make -j2
  - cmd: set PATH=%OLDPATH%

after_build:
  - sh: ./gen-deb
  - cmd: Del ..\assets\apps_linux.json
  - cmd: 7z a Sunshine-Windows.zip ..\assets
  - cmd: 7z a Sunshine-Windows.zip sunshine.exe
  - cmd: 7z a Sunshine-Windows.zip tools\dxgi-info.exe
  - cmd: 7z a Sunshine-Windows.zip tools\audio-info.exe
  - cmd: appveyor PushArtifact Sunshine-Windows.zip
  - sh: appveyor PushArtifact package-deb/sunshine.deb
  - sh: appveyor PushArtifact sunshine.service
@@ -1,19 +0,0 @@
{
  "env":{
    "PATH":"$(PATH):$(HOME)/.local/bin"
  },
  "apps":[
    {
      "name":"Low Res Desktop",
      "prep-cmd":[
        { "do":"xrandr --output HDMI-1 --mode 1920x1080", "undo":"xrandr --output HDMI-1 --mode 1920x1200" }
      ]
    },
    {
      "name":"Steam BigPicture",

      "output":"steam.txt",
      "cmd":"steam -bigpicture"
    }
  ]
}
@@ -1,15 +0,0 @@
{
  "env":{
    "PATH":"$(PATH);C:\\Program Files (x86)\\Steam"
  },
  "apps":[
    {
      "name":"Steam BigPicture",

      "output":"steam.txt",
      "prep-cmd":[
        {"do":"steam \"steam://open/bigpicture\""}
      ]
    }
  ]
}
@@ -1,182 +0,0 @@
# If no external IP address is given, the local IP address is used
# external_ip = 123.456.789.12

# The private key must be 2048 bits
# pkey = /dir/pkey.pem

# The certificate must be signed with a 2048 bit key
# cert = /dir/cert.pem

# The name displayed by Moonlight
# If not specified, the PC's hostname is used
# sunshine_name = Sunshine

# The minimum log level printed to standard out
#
# none -> no logs are printed to standard out
#
# verbose = [0]
# debug = [1]
# info = [2]
# warning = [3]
# error = [4]
# fatal = [5]
# none = [6]
#
# min_log_level = info

# The origin of the remote endpoint address that is not denied for HTTP method /pin
# Could be any of the following values:
# pc|lan|wan
# pc: Only localhost may access /pin
# lan: Only those in LAN may access /pin
# wan: Anyone may access /pin
#
# origin_pin_allowed = lan

# The file where the current state of Sunshine is stored
# file_state = sunshine_state.json

# How long to wait in milliseconds for data from moonlight before shutting down the stream
# ping_timeout = 2000

# The file where the configuration for the different applications that Sunshine can run during a stream is stored
# file_apps = apps.json

# How many error correcting packets must be sent for every video frame
# This is just some random number, don't know the optimal value
# The higher fec_percentage, the less space there is for the actual data to send per frame
#
# The value must be greater than 0 and lower than or equal to 100
# fec_percentage = 10

# When multicasting, it could be useful to have different configurations for each connected Client.
# For example:
# Clients connected through WAN and LAN have different bitrate constraints.
# Decoders may require different settings for color
#
# Unlike simply broadcasting to multiple Clients, this will generate distinct video streams.
# Note, CPU usage increases for each distinct video stream generated
# channels = 1

# The back/select button on the controller
# On the Shield, the home and power buttons are not passed to Moonlight
# If, after the timeout, the back button is still pressed down, a Home/Guide button press is emulated.
# If back_button_timeout < 0, then the Home/Guide button will not be emulated
# back_button_timeout = 2000

# !! Windows only !!
# Control how fast keys will repeat themselves
# The initial delay in milliseconds before repeating keys
# key_repeat_delay = 500
#
# How often keys repeat every second
# This configurable option supports decimals
# key_repeat_frequency = 24.9

# The name of the audio sink used for Audio Loopback
# If you do not specify this variable, pulseaudio will select the default monitor device.
#
# You can find the name of the audio sink using the following command:
# !! Linux only !!
# pacmd list-sources | grep "name:"
# audio_sink = alsa_output.pci-0000_09_00.3.analog-stereo.monitor
#
# !! Windows only !!
# tools\audio-info.exe
# audio_sink = {0.0.0.00000000}.{FD47D9CC-4218-4135-9CE2-0C195C87405B}

# !! Windows only !!
# You can select the video card you want to stream:
# The appropriate values can be found using the following command:
# tools\dxgi-info.exe
# adapter_name = Radeon RX 580 Series
# output_name = \\.\DISPLAY1


###############################################
# FFmpeg software encoding parameters
# Honestly, I have no idea what the optimal values would be.
# Play around with this :)

# Constant Rate Factor. Between 1 and 52. It allows QP to go up during motion and down with still image, resulting in constant perceived quality
# Higher value means more compression, but less quality
# If crf == 0, then use QP directly instead
# crf = 0

# Quantization Parameter
# Higher value means more compression, but less quality
# If crf != 0, then this parameter is ignored
# qp = 28

# Minimum number of threads used by ffmpeg to encode the video.
# Increasing the value slightly reduces encoding efficiency, but the tradeoff is usually
# worth it to gain the use of more CPU cores for encoding. The ideal value is the lowest
# value that can reliably encode at your desired streaming settings on your hardware.
# min_threads = 1

# Allows the client to request HEVC Main or HEVC Main10 video streams.
# HEVC is more CPU-intensive to encode, so enabling this may reduce performance when using software encoding.
# If set to 0 (default), Sunshine will specify support for HEVC based on the encoder
# If set to 1, Sunshine will not advertise support for HEVC
# If set to 2, Sunshine will advertise support for HEVC Main profile
# If set to 3, Sunshine will advertise support for HEVC Main and Main10 (HDR) profiles
# hevc_mode = 0

# Force a specific encoder, otherwise Sunshine will use the first encoder that is available
# supported encoders:
#     nvenc
#     software
#
# encoder = nvenc

##################################### Software #####################################
# See x264 --fullhelp for the different presets
# sw_preset = superfast
# sw_tune = zerolatency
#

##################################### NVENC #####################################
###### presets ###########
# default
# hp -- high performance
# hq -- high quality
# slow -- hq 2 passes
# medium -- hq 1 pass
# fast -- hp 1 pass
# bd
# ll -- low latency
# llhq
# llhp
# lossless
# losslesshp
##########################
# nv_preset = llhq
#
####### rate control #####
# auto -- let ffmpeg decide rate control
# constqp -- constant QP mode
# vbr -- variable bitrate
# cbr -- constant bitrate
# cbr_hq -- cbr high quality
# cbr_ld_hq -- cbr low delay high quality
# vbr_hq -- vbr high quality
##########################
# nv_rc = auto

###### h264 entropy ######
# auto -- let ffmpeg nvenc decide the entropy encoding
# cabac
# cavlc
##########################
# nv_coder = auto


##############################################
# Some configurable parameters are merely toggles for specific features
# The first occurrence turns it on, the second occurrence turns it off, the third occurrence turns it on again, etc.
# Here, you change the default state of any flag
#
# To set the initial state of flags -0 and -1 to on, set the following flags:
# flags = 01
#
# See: sunshine --help for all options under the header: flags
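A minimal sketch of using this configuration; the config path is only an example, and the `name=value` override form is the one described in the changelog:

```bash
# Start Sunshine with an explicit config file (path as documented in the README):
sunshine path/to/sunshine.conf

# Per the changelog, individual options can also be overridden as name=value on
# the command line, e.g. raising log verbosity without editing the file:
sunshine min_log_level=debug
```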
cmake/FindLIBCAP.cmake (new file, 21 lines)
@@ -0,0 +1,21 @@
# - Try to find Libcap
# Once done this will define
#
# LIBCAP_FOUND - system has Libcap
# LIBCAP_INCLUDE_DIRS - the Libcap include directory
# LIBCAP_LIBRARIES - the libraries needed to use Libcap
# LIBCAP_DEFINITIONS - Compiler switches required for using Libcap

# Use pkg-config to get the directories and then use these values
# in the find_path() and find_library() calls
find_package(PkgConfig)
pkg_check_modules(PC_LIBCAP libcap)

set(LIBCAP_DEFINITIONS ${PC_LIBCAP_CFLAGS})

find_path(LIBCAP_INCLUDE_DIRS sys/capability.h PATHS ${PC_LIBCAP_INCLUDEDIR} ${PC_LIBCAP_INCLUDE_DIRS})
find_library(LIBCAP_LIBRARIES NAMES libcap.so PATHS ${PC_LIBCAP_LIBDIR} ${PC_LIBCAP_LIBRARY_DIRS})
mark_as_advanced(LIBCAP_INCLUDE_DIRS LIBCAP_LIBRARIES)

include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(LIBCAP REQUIRED_VARS LIBCAP_LIBRARIES LIBCAP_INCLUDE_DIRS)
21
cmake/FindLIBDRM.cmake
Normal file
21
cmake/FindLIBDRM.cmake
Normal file
@@ -0,0 +1,21 @@
|
||||
# - Try to find Libdrm
|
||||
# Once done this will define
|
||||
#
|
||||
# LIBDRM_FOUND - system has Libdrm
|
||||
# LIBDRM_INCLUDE_DIRS - the Libdrm include directory
|
||||
# LIBDRM_LIBRARIES - the libraries needed to use Libdrm
|
||||
# LIBDRM_DEFINITIONS - Compiler switches required for using Libdrm
|
||||
|
||||
# Use pkg-config to get the directories and then use these values
|
||||
# in the find_path() and find_library() calls
|
||||
find_package(PkgConfig)
|
||||
pkg_check_modules(PC_LIBDRM libdrm)
|
||||
|
||||
set(LIBDRM_DEFINITIONS ${PC_LIBDRM_CFLAGS})
|
||||
|
||||
find_path(LIBDRM_INCLUDE_DIRS drm.h PATHS ${PC_LIBDRM_INCLUDEDIR} ${PC_LIBDRM_INCLUDE_DIRS} PATH_SUFFIXES libdrm)
|
||||
find_library(LIBDRM_LIBRARIES NAMES libdrm.so PATHS ${PC_LIBDRM_LIBDIR} ${PC_LIBDRM_LIBRARY_DIRS})
|
||||
mark_as_advanced(LIBDRM_INCLUDE_DIRS LIBDRM_LIBRARIES)
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
find_package_handle_standard_args(LIBDRM REQUIRED_VARS LIBDRM_LIBRARIES LIBDRM_INCLUDE_DIRS)
|
||||
80
cmake/FindWayland.cmake
Normal file
80
cmake/FindWayland.cmake
Normal file
@@ -0,0 +1,80 @@
|
||||
# Try to find Wayland on a Unix system
|
||||
#
|
||||
# This will define:
|
||||
#
|
||||
# WAYLAND_FOUND - True if Wayland is found
|
||||
# WAYLAND_LIBRARIES - Link these to use Wayland
|
||||
# WAYLAND_INCLUDE_DIRS - Include directory for Wayland
|
||||
# WAYLAND_DEFINITIONS - Compiler flags for using Wayland
|
||||
#
|
||||
# In addition the following more fine grained variables will be defined:
|
||||
#
|
||||
# Wayland_Client_FOUND WAYLAND_CLIENT_INCLUDE_DIRS WAYLAND_CLIENT_LIBRARIES
|
||||
# Wayland_Server_FOUND WAYLAND_SERVER_INCLUDE_DIRS WAYLAND_SERVER_LIBRARIES
|
||||
# Wayland_EGL_FOUND WAYLAND_EGL_INCLUDE_DIRS WAYLAND_EGL_LIBRARIES
|
||||
# Wayland_Cursor_FOUND WAYLAND_CURSOR_INCLUDE_DIRS WAYLAND_CURSOR_LIBRARIES
|
||||
#
|
||||
# Copyright (c) 2013 Martin Gräßlin <mgraesslin@kde.org>
|
||||
# 2020 Georges Basile Stavracas Neto <georges.stavracas@gmail.com>
|
||||
#
|
||||
# Redistribution and use is allowed according to the terms of the BSD license.
|
||||
# For details see the accompanying COPYING-CMAKE-SCRIPTS file.
|
||||
|
||||
IF (NOT WIN32)
|
||||
|
||||
# Use pkg-config to get the directories and then use these values
|
||||
# in the find_path() and find_library() calls
|
||||
find_package(PkgConfig)
|
||||
PKG_CHECK_MODULES(PKG_WAYLAND QUIET wayland-client wayland-server wayland-egl wayland-cursor)
|
||||
|
||||
set(WAYLAND_DEFINITIONS ${PKG_WAYLAND_CFLAGS})
|
||||
|
||||
find_path(WAYLAND_CLIENT_INCLUDE_DIRS NAMES wayland-client.h HINTS ${PKG_WAYLAND_INCLUDE_DIRS})
|
||||
find_library(WAYLAND_CLIENT_LIBRARIES NAMES wayland-client HINTS ${PKG_WAYLAND_LIBRARY_DIRS})
|
||||
if(WAYLAND_CLIENT_INCLUDE_DIRS AND WAYLAND_CLIENT_LIBRARIES)
|
||||
set(Wayland_Client_FOUND TRUE) # cmake-lint: disable=C0103
|
||||
else()
|
||||
set(Wayland_Client_FOUND FALSE) # cmake-lint: disable=C0103
|
||||
endif()
|
||||
mark_as_advanced(WAYLAND_CLIENT_INCLUDE_DIRS WAYLAND_CLIENT_LIBRARIES)
|
||||
|
||||
find_path(WAYLAND_CURSOR_INCLUDE_DIRS NAMES wayland-cursor.h HINTS ${PKG_WAYLAND_INCLUDE_DIRS})
|
||||
find_library(WAYLAND_CURSOR_LIBRARIES NAMES wayland-cursor HINTS ${PKG_WAYLAND_LIBRARY_DIRS})
|
||||
if(WAYLAND_CURSOR_INCLUDE_DIRS AND WAYLAND_CURSOR_LIBRARIES)
|
||||
set(Wayland_Cursor_FOUND TRUE) # cmake-lint: disable=C0103
|
||||
else()
|
||||
set(Wayland_Cursor_FOUND FALSE) # cmake-lint: disable=C0103
|
||||
endif()
|
||||
mark_as_advanced(WAYLAND_CURSOR_INCLUDE_DIRS WAYLAND_CURSOR_LIBRARIES)
|
||||
|
||||
find_path(WAYLAND_EGL_INCLUDE_DIRS NAMES wayland-egl.h HINTS ${PKG_WAYLAND_INCLUDE_DIRS})
|
||||
find_library(WAYLAND_EGL_LIBRARIES NAMES wayland-egl HINTS ${PKG_WAYLAND_LIBRARY_DIRS})
|
||||
if(WAYLAND_EGL_INCLUDE_DIRS AND WAYLAND_EGL_LIBRARIES)
|
||||
set(Wayland_EGL_FOUND TRUE) # cmake-lint: disable=C0103
|
||||
else()
|
||||
set(Wayland_EGL_FOUND FALSE) # cmake-lint: disable=C0103
|
||||
endif()
|
||||
mark_as_advanced(WAYLAND_EGL_INCLUDE_DIRS WAYLAND_EGL_LIBRARIES)
|
||||
|
||||
find_path(WAYLAND_SERVER_INCLUDE_DIRS NAMES wayland-server.h HINTS ${PKG_WAYLAND_INCLUDE_DIRS})
|
||||
find_library(WAYLAND_SERVER_LIBRARIES NAMES wayland-server HINTS ${PKG_WAYLAND_LIBRARY_DIRS})
|
||||
if(WAYLAND_SERVER_INCLUDE_DIRS AND WAYLAND_SERVER_LIBRARIES)
|
||||
set(Wayland_Server_FOUND TRUE) # cmake-lint: disable=C0103
|
||||
else()
|
||||
set(Wayland_Server_FOUND FALSE) # cmake-lint: disable=C0103
|
||||
endif()
|
||||
mark_as_advanced(WAYLAND_SERVER_INCLUDE_DIRS WAYLAND_SERVER_LIBRARIES)
|
||||
|
||||
set(WAYLAND_INCLUDE_DIRS ${WAYLAND_CLIENT_INCLUDE_DIRS} ${WAYLAND_SERVER_INCLUDE_DIRS}
|
||||
${WAYLAND_EGL_INCLUDE_DIRS} ${WAYLAND_CURSOR_INCLUDE_DIRS})
|
||||
set(WAYLAND_LIBRARIES ${WAYLAND_CLIENT_LIBRARIES} ${WAYLAND_SERVER_LIBRARIES}
|
||||
${WAYLAND_EGL_LIBRARIES} ${WAYLAND_CURSOR_LIBRARIES})
|
||||
mark_as_advanced(WAYLAND_INCLUDE_DIRS WAYLAND_LIBRARIES)
|
||||
|
||||
list(REMOVE_DUPLICATES WAYLAND_INCLUDE_DIRS)
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
|
||||
find_package_handle_standard_args(Wayland REQUIRED_VARS WAYLAND_LIBRARIES WAYLAND_INCLUDE_DIRS HANDLE_COMPONENTS)
|
||||
|
||||
ENDIF ()
|
||||
22
crowdin.yml
Normal file
22
crowdin.yml
Normal file
@@ -0,0 +1,22 @@
|
||||
---
|
||||
"base_path": "."
|
||||
"base_url": "https://api.crowdin.com" # optional (for Crowdin Enterprise only)
|
||||
"preserve_hierarchy": false # flatten tree on crowdin
|
||||
"pull_request_labels": [
|
||||
"crowdin",
|
||||
"l10n"
|
||||
]
|
||||
|
||||
"files": [
|
||||
{
|
||||
"source": "/locale/*.po",
|
||||
"translation": "/locale/%two_letters_code%/LC_MESSAGES/%original_file_name%",
|
||||
"languages_mapping": {
|
||||
"two_letters_code": {
|
||||
# map non-two letter codes here, left side is crowdin designation, right side is babel designation
|
||||
"en-GB": "en_GB",
|
||||
"en-US": "en_US"
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
156
docker/debian-bullseye.dockerfile
Normal file
156
docker/debian-bullseye.dockerfile
Normal file
@@ -0,0 +1,156 @@
|
||||
# syntax=docker/dockerfile:1.4
|
||||
# artifacts: true
|
||||
# platforms: linux/amd64,linux/arm64/v8
|
||||
# platforms_pr: linux/amd64
|
||||
ARG BASE=debian
|
||||
ARG TAG=bullseye
|
||||
FROM ${BASE}:${TAG} AS sunshine-base
|
||||
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
FROM sunshine-base as sunshine-build
|
||||
|
||||
ARG TARGETPLATFORM
|
||||
RUN echo "target_platform: ${TARGETPLATFORM}"
|
||||
|
||||
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
|
||||
# install dependencies
|
||||
RUN <<_DEPS
|
||||
#!/bin/bash
|
||||
apt-get update -y
|
||||
apt-get install -y --no-install-recommends \
|
||||
build-essential=12.9* \
|
||||
cmake=3.18.4* \
|
||||
libavdevice-dev=7:4.3.* \
|
||||
libboost-filesystem-dev=1.74.0* \
|
||||
libboost-log-dev=1.74.0* \
|
||||
libboost-program-options-dev=1.74.0* \
|
||||
libboost-thread-dev=1.74.0* \
|
||||
libcap-dev=1:2.44* \
|
||||
libcurl4-openssl-dev=7.74.0* \
|
||||
libdrm-dev=2.4.104* \
|
||||
libevdev-dev=1.11.0* \
|
||||
libnuma-dev=2.0.12* \
|
||||
libopus-dev=1.3.1* \
|
||||
libpulse-dev=14.2* \
|
||||
libssl-dev=1.1.1* \
|
||||
libva-dev=2.10.0* \
|
||||
libvdpau-dev=1.4* \
|
||||
libwayland-dev=1.18.0* \
|
||||
libx11-dev=2:1.7.2* \
|
||||
libxcb-shm0-dev=1.14* \
|
||||
libxcb-xfixes0-dev=1.14* \
|
||||
libxcb1-dev=1.14* \
|
||||
libxfixes-dev=1:5.0.3* \
|
||||
libxrandr-dev=2:1.5.1* \
|
||||
libxtst-dev=2:1.2.3* \
|
||||
nodejs=12.22* \
|
||||
npm=7.5.2* \
|
||||
wget=1.21*
|
||||
if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then
|
||||
apt-get install -y --no-install-recommends \
|
||||
libmfx-dev=21.1.0*
|
||||
fi
|
||||
apt-get clean
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
_DEPS
|
||||
|
||||
# install cuda
|
||||
WORKDIR /build/cuda
|
||||
# versions: https://developer.nvidia.com/cuda-toolkit-archive
|
||||
ENV CUDA_VERSION="11.8.0"
|
||||
ENV CUDA_BUILD="520.61.05"
|
||||
# hadolint ignore=SC3010
|
||||
RUN <<_INSTALL_CUDA
|
||||
#!/bin/bash
|
||||
cuda_prefix="https://developer.download.nvidia.com/compute/cuda/"
|
||||
cuda_suffix=""
|
||||
if [[ "${TARGETPLATFORM}" == 'linux/arm64' ]]; then
|
||||
cuda_suffix="_sbsa"
|
||||
fi
|
||||
url="${cuda_prefix}${CUDA_VERSION}/local_installers/cuda_${CUDA_VERSION}_${CUDA_BUILD}_linux${cuda_suffix}.run"
|
||||
echo "cuda url: ${url}"
|
||||
wget "$url" --progress=bar:force:noscroll -q --show-progress -O ./cuda.run
|
||||
chmod a+x ./cuda.run
|
||||
./cuda.run --silent --toolkit --toolkitpath=/build/cuda --no-opengl-libs --no-man-page --no-drm
|
||||
rm ./cuda.run
|
||||
_INSTALL_CUDA
|
||||
|
||||
# copy repository
|
||||
WORKDIR /build/sunshine/
|
||||
COPY .. .
|
||||
|
||||
# setup npm dependencies
|
||||
RUN npm install
|
||||
|
||||
# setup build directory
|
||||
WORKDIR /build/sunshine/build
|
||||
|
||||
# cmake and cpack
|
||||
RUN <<_MAKE
|
||||
#!/bin/bash
|
||||
cmake \
|
||||
-DCMAKE_CUDA_COMPILER:PATH=/build/cuda/bin/nvcc \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DCMAKE_INSTALL_PREFIX=/usr \
|
||||
-DSUNSHINE_ASSETS_DIR=share/sunshine \
|
||||
-DSUNSHINE_EXECUTABLE_PATH=/usr/bin/sunshine \
|
||||
-DSUNSHINE_ENABLE_WAYLAND=ON \
|
||||
-DSUNSHINE_ENABLE_X11=ON \
|
||||
-DSUNSHINE_ENABLE_DRM=ON \
|
||||
-DSUNSHINE_ENABLE_CUDA=ON \
|
||||
/build/sunshine
|
||||
make -j "$(nproc)"
|
||||
cpack -G DEB
|
||||
_MAKE
|
||||
|
||||
FROM scratch AS artifacts
|
||||
ARG BASE
|
||||
ARG TAG
|
||||
ARG TARGETARCH
|
||||
COPY --from=sunshine-build /build/sunshine/build/cpack_artifacts/Sunshine.deb /sunshine-${BASE}-${TAG}-${TARGETARCH}.deb
|
||||
|
||||
FROM sunshine-base as sunshine
|
||||
|
||||
# copy deb from builder
|
||||
COPY --from=artifacts /sunshine*.deb /sunshine.deb
|
||||
|
||||
# install sunshine
|
||||
RUN <<_INSTALL_SUNSHINE
|
||||
#!/bin/bash
|
||||
apt-get update -y
|
||||
apt-get install -y --no-install-recommends /sunshine.deb
|
||||
apt-get clean
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
_INSTALL_SUNSHINE
|
||||
|
||||
# network setup
|
||||
EXPOSE 47984-47990/tcp
|
||||
EXPOSE 48010
|
||||
EXPOSE 47998-48000/udp
|
||||
|
||||
# setup user
|
||||
ARG PGID=1000
|
||||
ENV PGID=${PGID}
|
||||
ARG PUID=1000
|
||||
ENV PUID=${PUID}
|
||||
ENV TZ="UTC"
|
||||
ARG UNAME=lizard
|
||||
ENV UNAME=${UNAME}
|
||||
|
||||
ENV HOME=/home/$UNAME
|
||||
|
||||
# setup user
|
||||
RUN <<_SETUP_USER
|
||||
groupadd -f -g "${PGID}" "${UNAME}"
|
||||
useradd -lm -d ${HOME} -s /bin/bash -g "${PGID}" -G input -u "${PUID}" "${UNAME}"
|
||||
mkdir -p ${HOME}/.config/sunshine
|
||||
ln -s ${HOME}/.config/sunshine /config
|
||||
chown -R ${UNAME} ${HOME}
|
||||
_SETUP_USER
|
||||
|
||||
USER ${UNAME}
|
||||
WORKDIR ${HOME}
|
||||
|
||||
# entrypoint
|
||||
ENTRYPOINT ["/usr/bin/sunshine"]
|
||||
154
docker/fedora-36.dockerfile
Normal file
154
docker/fedora-36.dockerfile
Normal file
@@ -0,0 +1,154 @@
|
||||
# syntax=docker/dockerfile:1.4
|
||||
# artifacts: true
|
||||
# platforms: linux/amd64,linux/arm64/v8
|
||||
# platforms_pr: linux/amd64
|
||||
ARG BASE=fedora
|
||||
ARG TAG=36
|
||||
FROM ${BASE}:${TAG} AS sunshine-base
|
||||
|
||||
FROM sunshine-base as sunshine-build
|
||||
|
||||
ARG TARGETPLATFORM
|
||||
RUN echo "target_platform: ${TARGETPLATFORM}"
|
||||
|
||||
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
|
||||
# install dependencies
|
||||
# hadolint ignore=DL3041
|
||||
RUN <<_DEPS
|
||||
#!/bin/bash
|
||||
dnf -y update
|
||||
dnf -y group install "Development Tools"
|
||||
dnf -y install \
|
||||
boost-devel-1.76.0* \
|
||||
cmake-3.22.2* \
|
||||
gcc-12.0.1* \
|
||||
gcc-c++-12.0.1* \
|
||||
libcap-devel-2.48* \
|
||||
libcurl-devel-7.82.0* \
|
||||
libdrm-devel-2.4.110* \
|
||||
libevdev-devel-1.12.0* \
|
||||
libva-devel-2.14.0* \
|
||||
libvdpau-devel-1.5* \
|
||||
libX11-devel-1.7.3* \
|
||||
libxcb-devel-1.13.1* \
|
||||
libXcursor-devel-1.2.0* \
|
||||
libXfixes-devel-6.0.0* \
|
||||
libXi-devel-1.8* \
|
||||
libXinerama-devel-1.1.4* \
|
||||
libXrandr-devel-1.5.2* \
|
||||
libXtst-devel-1.2.3* \
|
||||
mesa-libGL-devel-22.0.1* \
|
||||
npm-8.3.1* \
|
||||
numactl-devel-2.0.14* \
|
||||
openssl-devel-3.0.2* \
|
||||
opus-devel-1.3.1* \
|
||||
pulseaudio-libs-devel-15.0* \
|
||||
rpm-build-4.17.0* \
|
||||
wget-1.21.3* \
|
||||
which-2.21*
|
||||
if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then
|
||||
dnf -y install intel-mediasdk-devel-22.3.0*
|
||||
fi
|
||||
dnf clean all
|
||||
rm -rf /var/cache/yum
|
||||
_DEPS
|
||||
|
||||
# install cuda
|
||||
WORKDIR /build/cuda
|
||||
# versions: https://developer.nvidia.com/cuda-toolkit-archive
|
||||
ENV CUDA_VERSION="12.0.0"
|
||||
ENV CUDA_BUILD="525.60.13"
|
||||
# hadolint ignore=SC3010
|
||||
RUN <<_INSTALL_CUDA
|
||||
#!/bin/bash
|
||||
cuda_prefix="https://developer.download.nvidia.com/compute/cuda/"
|
||||
cuda_suffix=""
|
||||
if [[ "${TARGETPLATFORM}" == 'linux/arm64' ]]; then
|
||||
cuda_suffix="_sbsa"
|
||||
fi
|
||||
url="${cuda_prefix}${CUDA_VERSION}/local_installers/cuda_${CUDA_VERSION}_${CUDA_BUILD}_linux${cuda_suffix}.run"
|
||||
echo "cuda url: ${url}"
|
||||
wget "$url" --progress=bar:force:noscroll -q --show-progress -O ./cuda.run
|
||||
chmod a+x ./cuda.run
|
||||
./cuda.run --silent --toolkit --toolkitpath=/build/cuda --no-opengl-libs --no-man-page --no-drm
|
||||
rm ./cuda.run
|
||||
_INSTALL_CUDA
|
||||
|
||||
# copy repository
|
||||
WORKDIR /build/sunshine/
|
||||
COPY .. .
|
||||
|
||||
# setup npm dependencies
|
||||
RUN npm install
|
||||
|
||||
# setup build directory
|
||||
WORKDIR /build/sunshine/build
|
||||
|
||||
# cmake and cpack
|
||||
RUN <<_MAKE
|
||||
#!/bin/bash
|
||||
cmake \
|
||||
-DCMAKE_CUDA_COMPILER:PATH=/build/cuda/bin/nvcc \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DCMAKE_INSTALL_PREFIX=/usr \
|
||||
-DSUNSHINE_ASSETS_DIR=share/sunshine \
|
||||
-DSUNSHINE_EXECUTABLE_PATH=/usr/bin/sunshine \
|
||||
-DSUNSHINE_ENABLE_WAYLAND=ON \
|
||||
-DSUNSHINE_ENABLE_X11=ON \
|
||||
-DSUNSHINE_ENABLE_DRM=ON \
|
||||
-DSUNSHINE_ENABLE_CUDA=ON \
|
||||
/build/sunshine
|
||||
make -j "$(nproc)"
|
||||
cpack -G RPM
|
||||
_MAKE
|
||||
|
||||
FROM scratch AS artifacts
|
||||
ARG BASE
|
||||
ARG TAG
|
||||
ARG TARGETARCH
|
||||
COPY --from=sunshine-build /build/sunshine/build/cpack_artifacts/Sunshine.rpm /sunshine-${BASE}-${TAG}-${TARGETARCH}.rpm
|
||||
|
||||
FROM sunshine-base as sunshine
|
||||
|
||||
# copy deb from builder
|
||||
COPY --from=artifacts /sunshine*.rpm /sunshine.rpm
|
||||
|
||||
# install sunshine
|
||||
RUN <<_INSTALL_SUNSHINE
|
||||
#!/bin/bash
|
||||
dnf -y update
|
||||
dnf -y install /sunshine.rpm
|
||||
dnf clean all
|
||||
rm -rf /var/cache/yum
|
||||
_INSTALL_SUNSHINE
|
||||
|
||||
# network setup
|
||||
EXPOSE 47984-47990/tcp
|
||||
EXPOSE 48010
|
||||
EXPOSE 47998-48000/udp
|
||||
|
||||
# setup user
|
||||
ARG PGID=1000
|
||||
ENV PGID=${PGID}
|
||||
ARG PUID=1000
|
||||
ENV PUID=${PUID}
|
||||
ENV TZ="UTC"
|
||||
ARG UNAME=lizard
|
||||
ENV UNAME=${UNAME}
|
||||
|
||||
ENV HOME=/home/$UNAME
|
||||
|
||||
# setup user
|
||||
RUN <<_SETUP_USER
|
||||
groupadd -f -g "${PGID}" "${UNAME}"
|
||||
useradd -lm -d ${HOME} -s /bin/bash -g "${PGID}" -G input -u "${PUID}" "${UNAME}"
|
||||
mkdir -p ${HOME}/.config/sunshine
|
||||
ln -s ${HOME}/.config/sunshine /config
|
||||
chown -R ${UNAME} ${HOME}
|
||||
_SETUP_USER
|
||||
|
||||
USER ${UNAME}
|
||||
WORKDIR ${HOME}
|
||||
|
||||
# entrypoint
|
||||
ENTRYPOINT ["/usr/bin/sunshine"]
|
||||
154
docker/fedora-37.dockerfile
Normal file
154
docker/fedora-37.dockerfile
Normal file
@@ -0,0 +1,154 @@
|
||||
# syntax=docker/dockerfile:1.4
|
||||
# artifacts: true
|
||||
# platforms: linux/amd64,linux/arm64/v8
|
||||
# platforms_pr: linux/amd64
|
||||
ARG BASE=fedora
|
||||
ARG TAG=37
|
||||
FROM ${BASE}:${TAG} AS sunshine-base
|
||||
|
||||
FROM sunshine-base as sunshine-build
|
||||
|
||||
ARG TARGETPLATFORM
|
||||
RUN echo "target_platform: ${TARGETPLATFORM}"
|
||||
|
||||
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
|
||||
# install dependencies
|
||||
# hadolint ignore=DL3041
|
||||
RUN <<_DEPS
|
||||
#!/bin/bash
|
||||
dnf -y update
|
||||
dnf -y group install "Development Tools"
|
||||
dnf -y install \
|
||||
boost-devel-1.78.0* \
|
||||
cmake-3.24.1* \
|
||||
gcc-12.2.1* \
|
||||
gcc-c++-12.2.1* \
|
||||
libcap-devel-2.48* \
|
||||
libcurl-devel-7.85.0* \
|
||||
libdrm-devel-2.4.112* \
|
||||
libevdev-devel-1.13.0* \
|
||||
libva-devel-2.15.0* \
|
||||
libvdpau-devel-1.5* \
|
||||
libX11-devel-1.8.1* \
|
||||
libxcb-devel-1.13.1* \
|
||||
libXcursor-devel-1.2.1* \
|
||||
libXfixes-devel-6.0.0* \
|
||||
libXi-devel-1.8* \
|
||||
libXinerama-devel-1.1.4* \
|
||||
libXrandr-devel-1.5.2* \
|
||||
libXtst-devel-1.2.3* \
|
||||
mesa-libGL-devel-22.2.2* \
|
||||
npm-8.15.0* \
|
||||
numactl-devel-2.0.14* \
|
||||
openssl-devel-3.0.5* \
|
||||
opus-devel-1.3.1* \
|
||||
pulseaudio-libs-devel-16.1* \
|
||||
rpm-build-4.18.0* \
|
||||
wget-1.21.3* \
|
||||
which-2.21*
|
||||
if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then
|
||||
dnf -y install intel-mediasdk-devel-22.4.4*
|
||||
fi
|
||||
dnf clean all
|
||||
rm -rf /var/cache/yum
|
||||
_DEPS
|
||||
|
||||
# install cuda
|
||||
WORKDIR /build/cuda
|
||||
# versions: https://developer.nvidia.com/cuda-toolkit-archive
|
||||
ENV CUDA_VERSION="12.0.0"
|
||||
ENV CUDA_BUILD="525.60.13"
|
||||
# hadolint ignore=SC3010
|
||||
RUN <<_INSTALL_CUDA
|
||||
#!/bin/bash
|
||||
cuda_prefix="https://developer.download.nvidia.com/compute/cuda/"
|
||||
cuda_suffix=""
|
||||
if [[ "${TARGETPLATFORM}" == 'linux/arm64' ]]; then
|
||||
cuda_suffix="_sbsa"
|
||||
fi
|
||||
url="${cuda_prefix}${CUDA_VERSION}/local_installers/cuda_${CUDA_VERSION}_${CUDA_BUILD}_linux${cuda_suffix}.run"
|
||||
echo "cuda url: ${url}"
|
||||
wget "$url" --progress=bar:force:noscroll -q --show-progress -O ./cuda.run
|
||||
chmod a+x ./cuda.run
|
||||
./cuda.run --silent --toolkit --toolkitpath=/build/cuda --no-opengl-libs --no-man-page --no-drm
|
||||
rm ./cuda.run
|
||||
_INSTALL_CUDA
|
||||
|
||||
# copy repository
|
||||
WORKDIR /build/sunshine/
|
||||
COPY .. .
|
||||
|
||||
# setup npm dependencies
|
||||
RUN npm install
|
||||
|
||||
# setup build directory
|
||||
WORKDIR /build/sunshine/build
|
||||
|
||||
# cmake and cpack
|
||||
RUN <<_MAKE
|
||||
#!/bin/bash
|
||||
cmake \
|
||||
-DCMAKE_CUDA_COMPILER:PATH=/build/cuda/bin/nvcc \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DCMAKE_INSTALL_PREFIX=/usr \
|
||||
-DSUNSHINE_ASSETS_DIR=share/sunshine \
|
||||
-DSUNSHINE_EXECUTABLE_PATH=/usr/bin/sunshine \
|
||||
-DSUNSHINE_ENABLE_WAYLAND=ON \
|
||||
-DSUNSHINE_ENABLE_X11=ON \
|
||||
-DSUNSHINE_ENABLE_DRM=ON \
|
||||
-DSUNSHINE_ENABLE_CUDA=ON \
|
||||
/build/sunshine
|
||||
make -j "$(nproc)"
|
||||
cpack -G RPM
|
||||
_MAKE
|
||||
|
||||
FROM scratch AS artifacts
|
||||
ARG BASE
|
||||
ARG TAG
|
||||
ARG TARGETARCH
|
||||
COPY --from=sunshine-build /build/sunshine/build/cpack_artifacts/Sunshine.rpm /sunshine-${BASE}-${TAG}-${TARGETARCH}.rpm
|
||||
|
||||
FROM sunshine-base as sunshine
|
||||
|
||||
# copy deb from builder
|
||||
COPY --from=artifacts /sunshine*.rpm /sunshine.rpm
|
||||
|
||||
# install sunshine
|
||||
RUN <<_INSTALL_SUNSHINE
|
||||
#!/bin/bash
|
||||
dnf -y update
|
||||
dnf -y install /sunshine.rpm
|
||||
dnf clean all
|
||||
rm -rf /var/cache/yum
|
||||
_INSTALL_SUNSHINE
|
||||
|
||||
# network setup
|
||||
EXPOSE 47984-47990/tcp
|
||||
EXPOSE 48010
|
||||
EXPOSE 47998-48000/udp
|
||||
|
||||
# setup user
|
||||
ARG PGID=1000
|
||||
ENV PGID=${PGID}
|
||||
ARG PUID=1000
|
||||
ENV PUID=${PUID}
|
||||
ENV TZ="UTC"
|
||||
ARG UNAME=lizard
|
||||
ENV UNAME=${UNAME}
|
||||
|
||||
ENV HOME=/home/$UNAME
|
||||
|
||||
# setup user
|
||||
RUN <<_SETUP_USER
|
||||
groupadd -f -g "${PGID}" "${UNAME}"
|
||||
useradd -lm -d ${HOME} -s /bin/bash -g "${PGID}" -G input -u "${PUID}" "${UNAME}"
|
||||
mkdir -p ${HOME}/.config/sunshine
|
||||
ln -s ${HOME}/.config/sunshine /config
|
||||
chown -R ${UNAME} ${HOME}
|
||||
_SETUP_USER
|
||||
|
||||
USER ${UNAME}
|
||||
WORKDIR ${HOME}
|
||||
|
||||
# entrypoint
|
||||
ENTRYPOINT ["/usr/bin/sunshine"]
|
||||
210
docker/ubuntu-18.04.dockerfile-todo
Normal file
210
docker/ubuntu-18.04.dockerfile-todo
Normal file
@@ -0,0 +1,210 @@
|
||||
# syntax=docker/dockerfile:1.4
|
||||
# artifacts: true
|
||||
# platforms: linux/amd64,linux/arm64/v8
|
||||
# platforms_pr: linux/amd64
|
||||
ARG BASE=ubuntu
|
||||
ARG TAG=18.04
|
||||
FROM ${BASE}:${TAG} AS sunshine-base
|
||||
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
FROM sunshine-base as sunshine-build
|
||||
|
||||
ARG TARGETPLATFORM
|
||||
RUN echo "target_platform: ${TARGETPLATFORM}"
|
||||
|
||||
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
|
||||
# install dependencies
|
||||
RUN <<_DEPS
|
||||
#!/bin/bash
|
||||
apt-get update -y
|
||||
apt-get install -y --no-install-recommends \
|
||||
software-properties-common=0.96.24.32.18
|
||||
add-apt-repository ppa:ubuntu-toolchain-r/test
|
||||
apt-get install -y --no-install-recommends \
|
||||
bison=2:3.0.4* \
|
||||
build-essential=12.4* \
|
||||
gcc-10=10.3.0* \
|
||||
g++-10=10.3.0* \
|
||||
libavdevice-dev=7:3.4.* \
|
||||
libcap-dev=1:2.25* \
|
||||
libcurl-openssl1.0-dev=7.58.0* \
|
||||
libdrm-dev=2.4.101* \
|
||||
libevdev-dev=1.5.8* \
|
||||
libnuma-dev=2.0.11* \
|
||||
libopus-dev=1.1.2* \
|
||||
libpulse-dev=1:11.1* \
|
||||
libssl1.0-dev=1.0.2* \
|
||||
libva-dev=2.1.0* \
|
||||
libvdpau-dev=1.1.1* \
|
||||
libwayland-dev=1.16.0* \
|
||||
libx11-dev=2:1.6.4* \
|
||||
libxcb-shm0-dev=1.13* \
|
||||
libxcb-xfixes0-dev=1.13* \
|
||||
libxcb1-dev=1.13* \
|
||||
libxfixes-dev=1:5.0.3* \
|
||||
libxrandr-dev=2:1.5.1* \
|
||||
libxtst-dev=2:1.2.3* \
|
||||
npm=3.5.2* \
|
||||
node-gyp=3.6.2* \
|
||||
nodejs-dev=8.10.0* \
|
||||
wget=1.19.4*
|
||||
apt-get clean
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
_DEPS
|
||||
|
||||
# Update gcc alias
|
||||
# https://stackoverflow.com/a/70653945/11214013
|
||||
RUN <<_GCC_ALIAS
|
||||
#!/bin/bash
|
||||
update-alternatives --install \
|
||||
/usr/bin/gcc gcc /usr/bin/gcc-10 100 \
|
||||
--slave /usr/bin/g++ g++ /usr/bin/g++-10 \
|
||||
--slave /usr/bin/gcov gcov /usr/bin/gcov-10 \
|
||||
--slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-10 \
|
||||
--slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-10
|
||||
_GCC_ALIAS
|
||||
|
||||
# install boost
|
||||
# cannot install boost for aarch64 using ppa:savoury1/boost-defaults-1.71
|
||||
# otherwise add the repository and the following packages
|
||||
# libboost-filesystem1.71-dev=1.71.0* \
|
||||
# libboost-log1.71-dev=1.71.0* \
|
||||
# libboost-program-options1.71-dev=1.71.0* \
|
||||
# libboost-regex1.71-dev=1.71.0* \
|
||||
# libboost-thread1.71-dev=1.71.0* \
|
||||
WORKDIR /build/tmp
|
||||
RUN <<_INSTALL_BOOST
|
||||
url="https://boostorg.jfrog.io/artifactory/main/release/1.74.0/source/boost_1_74_0.tar.bz2"
|
||||
wget "${url}" --progress=bar:force:noscroll -q --show-progress -O ./boost.tar.bz2
|
||||
tar --bzip2 -xf boost.tar.bz2 --directory /build
|
||||
mv /build/boost_*/ /build/boost
|
||||
ls -a /build/boost
|
||||
cd /build/boost
|
||||
./bootstrap.sh --with-libraries=system,thread,log,program_options && \
|
||||
./b2 install variant=release link=static,shared runtime-link=shared -j "$(nproc)"
|
||||
_INSTALL_BOOST
|
||||
|
||||
# install cmake
|
||||
# sunshine requires cmake >= 3.18
|
||||
WORKDIR /build/cmake
|
||||
# https://cmake.org/download/
|
||||
ENV CMAKE_VERSION="3.25.1"
|
||||
# hadolint ignore=SC3010
|
||||
RUN <<_INSTALL_CMAKE
|
||||
#!/bin/bash
|
||||
cmake_prefix="https://github.com/Kitware/CMake/releases/download/v"
|
||||
if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then
|
||||
cmake_arch="x86_64"
|
||||
elif [[ "${TARGETPLATFORM}" == 'linux/arm64' ]]; then
|
||||
cmake_arch="aarch64"
|
||||
fi
|
||||
url="${cmake_prefix}${CMAKE_VERSION}/cmake-${CMAKE_VERSION}-linux-${cmake_arch}.sh"
|
||||
echo "cmake url: ${url}"
|
||||
wget "$url" --progress=bar:force:noscroll -q --show-progress -O ./cmake.sh
|
||||
sh ./cmake.sh --prefix=/usr/local --skip-license
|
||||
cmake --version
|
||||
_INSTALL_CMAKE
|
||||
|
||||
# install cuda
|
||||
WORKDIR /build/cuda
|
||||
# versions: https://developer.nvidia.com/cuda-toolkit-archive
|
||||
ENV CUDA_VERSION="11.8.0"
|
||||
ENV CUDA_BUILD="520.61.05"
|
||||
# hadolint ignore=SC3010
|
||||
RUN <<_INSTALL_CUDA
|
||||
#!/bin/bash
|
||||
cuda_prefix="https://developer.download.nvidia.com/compute/cuda/"
|
||||
cuda_suffix=""
|
||||
if [[ "${TARGETPLATFORM}" == 'linux/arm64' ]]; then
|
||||
cuda_suffix="_sbsa"
|
||||
fi
|
||||
url="${cuda_prefix}${CUDA_VERSION}/local_installers/cuda_${CUDA_VERSION}_${CUDA_BUILD}_linux${cuda_suffix}.run"
|
||||
echo "cuda url: ${url}"
|
||||
wget "$url" --progress=bar:force:noscroll -q --show-progress -O ./cuda.run
|
||||
chmod a+x ./cuda.run
|
||||
./cuda.run --silent --toolkit --toolkitpath=/build/cuda --no-opengl-libs --no-man-page --no-drm
|
||||
rm ./cuda.run
|
||||
_INSTALL_CUDA
|
||||
|
||||
# todo - install libmfx
|
||||
# https://github.com/Intel-Media-SDK/MediaSDK
|
||||
|
||||
# copy repository
|
||||
WORKDIR /build/sunshine/
|
||||
COPY .. .
|
||||
|
||||
# setup npm dependencies
|
||||
RUN npm install
|
||||
|
||||
# setup build directory
|
||||
WORKDIR /build/sunshine/build
|
||||
|
||||
# cmake and cpack
|
||||
RUN <<_MAKE
|
||||
#!/bin/bash
|
||||
cmake \
|
||||
-DCMAKE_CUDA_COMPILER:PATH=/build/cuda/bin/nvcc \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DCMAKE_INSTALL_PREFIX=/usr \
|
||||
-DSUNSHINE_ASSETS_DIR=share/sunshine \
|
||||
-DSUNSHINE_EXECUTABLE_PATH=/usr/bin/sunshine \
|
||||
-DSUNSHINE_ENABLE_WAYLAND=ON \
|
||||
-DSUNSHINE_ENABLE_X11=ON \
|
||||
-DSUNSHINE_ENABLE_DRM=ON \
|
||||
-DSUNSHINE_ENABLE_CUDA=ON \
|
||||
/build/sunshine
|
||||
make -j "$(nproc)"
|
||||
cpack -G DEB
|
||||
_MAKE
|
||||
|
||||
FROM scratch AS artifacts
|
||||
ARG BASE
|
||||
ARG TAG
|
||||
ARG TARGETARCH
|
||||
COPY --from=sunshine-build /build/sunshine/build/cpack_artifacts/Sunshine.deb /sunshine-${BASE}-${TAG}-${TARGETARCH}.deb
|
||||
|
||||
FROM sunshine-base as sunshine
|
||||
|
||||
# copy deb from builder
|
||||
COPY --from=artifacts /sunshine*.deb /sunshine.deb
|
||||
|
||||
# install sunshine
|
||||
RUN <<_INSTALL_SUNSHINE
|
||||
#!/bin/bash
|
||||
apt-get update -y
|
||||
apt-get install -y --no-install-recommends /sunshine.deb
|
||||
apt-get clean
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
_INSTALL_SUNSHINE
|
||||
|
||||
# network setup
|
||||
EXPOSE 47984-47990/tcp
|
||||
EXPOSE 48010
|
||||
EXPOSE 47998-48000/udp
|
||||
|
||||
# setup user
|
||||
ARG PGID=1000
|
||||
ENV PGID=${PGID}
|
||||
ARG PUID=1000
|
||||
ENV PUID=${PUID}
|
||||
ENV TZ="UTC"
|
||||
ARG UNAME=lizard
|
||||
ENV UNAME=${UNAME}
|
||||
|
||||
ENV HOME=/home/$UNAME
|
||||
|
||||
# setup user
|
||||
RUN <<_SETUP_USER
|
||||
groupadd -f -g "${PGID}" "${UNAME}"
|
||||
useradd -lm -d ${HOME} -s /bin/bash -g "${PGID}" -G input -u "${PUID}" "${UNAME}"
|
||||
mkdir -p ${HOME}/.config/sunshine
|
||||
ln -s ${HOME}/.config/sunshine /config
|
||||
chown -R ${UNAME} ${HOME}
|
||||
_SETUP_USER
|
||||
|
||||
USER ${UNAME}
|
||||
WORKDIR ${HOME}
|
||||
|
||||
# entrypoint
|
||||
ENTRYPOINT ["/usr/bin/sunshine"]
|
||||
190
docker/ubuntu-20.04.dockerfile
Normal file
190
docker/ubuntu-20.04.dockerfile
Normal file
@@ -0,0 +1,190 @@
|
||||
# syntax=docker/dockerfile:1.4
|
||||
# artifacts: true
|
||||
# platforms: linux/amd64,linux/arm64/v8
|
||||
# platforms_pr: linux/amd64
|
||||
ARG BASE=ubuntu
|
||||
ARG TAG=20.04
|
||||
FROM ${BASE}:${TAG} AS sunshine-base
|
||||
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
FROM sunshine-base as sunshine-build
|
||||
|
||||
ARG TARGETPLATFORM
|
||||
RUN echo "target_platform: ${TARGETPLATFORM}"
|
||||
|
||||
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
|
||||
# install dependencies
|
||||
RUN <<_DEPS
|
||||
#!/bin/bash
|
||||
apt-get update -y
|
||||
apt-get install -y --no-install-recommends \
|
||||
build-essential=12.8* \
|
||||
gcc-10=10.3.0* \
|
||||
g++-10=10.3.0* \
|
||||
libavdevice-dev=7:4.2.* \
|
||||
libboost-filesystem-dev=1.71.0* \
|
||||
libboost-log-dev=1.71.0* \
|
||||
libboost-program-options-dev=1.71.0* \
|
||||
libboost-thread-dev=1.71.0* \
|
||||
libcap-dev=1:2.32* \
|
||||
libcurl4-openssl-dev=7.68.0* \
|
||||
libdrm-dev=2.4.107* \
|
||||
libevdev-dev=1.9.0* \
|
||||
libnuma-dev=2.0.12* \
|
||||
libopus-dev=1.3.1* \
|
||||
libpulse-dev=1:13.99.1* \
|
||||
libssl-dev=1.1.1* \
|
||||
libva-dev=2.7.0* \
|
||||
libvdpau-dev=1.3* \
|
||||
libwayland-dev=1.18.0* \
|
||||
libx11-dev=2:1.6.9* \
|
||||
libxcb-shm0-dev=1.14* \
|
||||
libxcb-xfixes0-dev=1.14* \
|
||||
libxcb1-dev=1.14* \
|
||||
libxfixes-dev=1:5.0.3* \
|
||||
libxrandr-dev=2:1.5.2* \
|
||||
libxtst-dev=2:1.2.3* \
|
||||
nodejs=10.19.0* \
|
||||
npm=6.14.4* \
|
||||
wget=1.20.3*
|
||||
if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then
|
||||
apt-get install -y --no-install-recommends \
|
||||
libmfx-dev=20.1.0*
|
||||
fi
|
||||
apt-get clean
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
_DEPS
|
||||
|
||||
# Update gcc alias
|
||||
# https://stackoverflow.com/a/70653945/11214013
|
||||
RUN <<_GCC_ALIAS
|
||||
#!/bin/bash
|
||||
update-alternatives --install \
|
||||
/usr/bin/gcc gcc /usr/bin/gcc-10 100 \
|
||||
--slave /usr/bin/g++ g++ /usr/bin/g++-10 \
|
||||
--slave /usr/bin/gcov gcov /usr/bin/gcov-10 \
|
||||
--slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-10 \
|
||||
--slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-10
|
||||
_GCC_ALIAS
|
||||
|
||||
# install cmake
|
||||
# sunshine requires cmake >= 3.18
|
||||
WORKDIR /build/cmake
|
||||
# https://cmake.org/download/
|
||||
ENV CMAKE_VERSION="3.25.1"
|
||||
# hadolint ignore=SC3010
|
||||
RUN <<_INSTALL_CMAKE
|
||||
#!/bin/bash
|
||||
cmake_prefix="https://github.com/Kitware/CMake/releases/download/v"
|
||||
if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then
|
||||
cmake_arch="x86_64"
|
||||
elif [[ "${TARGETPLATFORM}" == 'linux/arm64' ]]; then
|
||||
cmake_arch="aarch64"
|
||||
fi
|
||||
url="${cmake_prefix}${CMAKE_VERSION}/cmake-${CMAKE_VERSION}-linux-${cmake_arch}.sh"
|
||||
echo "cmake url: ${url}"
|
||||
wget "$url" --progress=bar:force:noscroll -q --show-progress -O ./cmake.sh
|
||||
sh ./cmake.sh --prefix=/usr/local --skip-license
|
||||
cmake --version
|
||||
_INSTALL_CMAKE
|
||||
|
||||
# install cuda
|
||||
WORKDIR /build/cuda
|
||||
# versions: https://developer.nvidia.com/cuda-toolkit-archive
|
||||
ENV CUDA_VERSION="11.8.0"
|
||||
ENV CUDA_BUILD="520.61.05"
|
||||
# hadolint ignore=SC3010
|
||||
RUN <<_INSTALL_CUDA
|
||||
#!/bin/bash
|
||||
cuda_prefix="https://developer.download.nvidia.com/compute/cuda/"
|
||||
cuda_suffix=""
|
||||
if [[ "${TARGETPLATFORM}" == 'linux/arm64' ]]; then
|
||||
cuda_suffix="_sbsa"
|
||||
fi
|
||||
url="${cuda_prefix}${CUDA_VERSION}/local_installers/cuda_${CUDA_VERSION}_${CUDA_BUILD}_linux${cuda_suffix}.run"
|
||||
echo "cuda url: ${url}"
|
||||
wget "$url" --progress=bar:force:noscroll -q --show-progress -O ./cuda.run
|
||||
chmod a+x ./cuda.run
|
||||
./cuda.run --silent --toolkit --toolkitpath=/build/cuda --no-opengl-libs --no-man-page --no-drm
|
||||
rm ./cuda.run
|
||||
_INSTALL_CUDA
|
||||
|
||||
# copy repository
|
||||
WORKDIR /build/sunshine/
|
||||
COPY .. .
|
||||
|
||||
# setup npm dependencies
|
||||
RUN npm install
|
||||
|
||||
# setup build directory
|
||||
WORKDIR /build/sunshine/build
|
||||
|
||||
# cmake and cpack
|
||||
RUN <<_MAKE
|
||||
#!/bin/bash
|
||||
cmake \
|
||||
-DCMAKE_CUDA_COMPILER:PATH=/build/cuda/bin/nvcc \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DCMAKE_INSTALL_PREFIX=/usr \
|
||||
-DSUNSHINE_ASSETS_DIR=share/sunshine \
|
||||
-DSUNSHINE_EXECUTABLE_PATH=/usr/bin/sunshine \
|
||||
-DSUNSHINE_ENABLE_WAYLAND=ON \
|
||||
-DSUNSHINE_ENABLE_X11=ON \
|
||||
-DSUNSHINE_ENABLE_DRM=ON \
|
||||
-DSUNSHINE_ENABLE_CUDA=ON \
|
||||
/build/sunshine
|
||||
make -j "$(nproc)"
|
||||
cpack -G DEB
|
||||
_MAKE
|
||||
|
||||
FROM scratch AS artifacts
|
||||
ARG BASE
|
||||
ARG TAG
|
||||
ARG TARGETARCH
|
||||
COPY --from=sunshine-build /build/sunshine/build/cpack_artifacts/Sunshine.deb /sunshine-${BASE}-${TAG}-${TARGETARCH}.deb
|
||||
|
||||
FROM sunshine-base as sunshine
|
||||
|
||||
# copy deb from builder
|
||||
COPY --from=artifacts /sunshine*.deb /sunshine.deb
|
||||
|
||||
# install sunshine
|
||||
RUN <<_INSTALL_SUNSHINE
|
||||
#!/bin/bash
|
||||
apt-get update -y
|
||||
apt-get install -y --no-install-recommends /sunshine.deb
|
||||
apt-get clean
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
_INSTALL_SUNSHINE
|
||||
|
||||
# network setup
|
||||
EXPOSE 47984-47990/tcp
|
||||
EXPOSE 48010
|
||||
EXPOSE 47998-48000/udp
|
||||
|
||||
# setup user
|
||||
ARG PGID=1000
|
||||
ENV PGID=${PGID}
|
||||
ARG PUID=1000
|
||||
ENV PUID=${PUID}
|
||||
ENV TZ="UTC"
|
||||
ARG UNAME=lizard
|
||||
ENV UNAME=${UNAME}
|
||||
|
||||
ENV HOME=/home/$UNAME
|
||||
|
||||
# setup user
|
||||
RUN <<_SETUP_USER
|
||||
groupadd -f -g "${PGID}" "${UNAME}"
|
||||
useradd -lm -d ${HOME} -s /bin/bash -g "${PGID}" -G input -u "${PUID}" "${UNAME}"
|
||||
mkdir -p ${HOME}/.config/sunshine
|
||||
ln -s ${HOME}/.config/sunshine /config
|
||||
chown -R ${UNAME} ${HOME}
|
||||
_SETUP_USER
|
||||
|
||||
USER ${UNAME}
|
||||
WORKDIR ${HOME}
|
||||
|
||||
# entrypoint
|
||||
ENTRYPOINT ["/usr/bin/sunshine"]
|
||||
156
docker/ubuntu-22.04.dockerfile
Normal file
156
docker/ubuntu-22.04.dockerfile
Normal file
@@ -0,0 +1,156 @@
|
||||
# syntax=docker/dockerfile:1.4
|
||||
# artifacts: true
|
||||
# platforms: linux/amd64,linux/arm64/v8
|
||||
# platforms_pr: linux/amd64
|
||||
ARG BASE=ubuntu
|
||||
ARG TAG=22.04
|
||||
FROM ${BASE}:${TAG} AS sunshine-base
|
||||
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
FROM sunshine-base as sunshine-build
|
||||
|
||||
ARG TARGETPLATFORM
|
||||
RUN echo "target_platform: ${TARGETPLATFORM}"
|
||||
|
||||
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
|
||||
# install dependencies
|
||||
RUN <<_DEPS
|
||||
#!/bin/bash
|
||||
apt-get update -y
|
||||
apt-get install -y --no-install-recommends \
|
||||
build-essential=12.9* \
|
||||
cmake=3.22.1* \
|
||||
libavdevice-dev=7:4.4.* \
|
||||
libboost-filesystem-dev=1.74.0* \
|
||||
libboost-log-dev=1.74.0* \
|
||||
libboost-program-options-dev=1.74.0* \
|
||||
libboost-thread-dev=1.74.0* \
|
||||
libcap-dev=1:2.44* \
|
||||
libcurl4-openssl-dev=7.81.0* \
|
||||
libdrm-dev=2.4.113* \
|
||||
libevdev-dev=1.12.1* \
|
||||
libnuma-dev=2.0.14* \
|
||||
libopus-dev=1.3.1* \
|
||||
libpulse-dev=1:15.99.1* \
|
||||
libssl-dev=3.0.2* \
|
||||
libva-dev=2.14.0* \
|
||||
libvdpau-dev=1.4* \
|
||||
libwayland-dev=1.20.0* \
|
||||
libx11-dev=2:1.7.5* \
|
||||
libxcb-shm0-dev=1.14* \
|
||||
libxcb-xfixes0-dev=1.14* \
|
||||
libxcb1-dev=1.14* \
|
||||
libxfixes-dev=1:6.0.0* \
|
||||
libxrandr-dev=2:1.5.2* \
|
||||
libxtst-dev=2:1.2.3* \
|
||||
nodejs=12.22.9* \
|
||||
npm=8.5.1* \
|
||||
wget=1.21.2*
|
||||
if [[ "${TARGETPLATFORM}" == 'linux/amd64' ]]; then
|
||||
apt-get install -y --no-install-recommends \
|
||||
libmfx-dev=22.3.0*
|
||||
fi
|
||||
apt-get clean
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
_DEPS
|
||||
|
||||
# install cuda
|
||||
WORKDIR /build/cuda
|
||||
# versions: https://developer.nvidia.com/cuda-toolkit-archive
|
||||
ENV CUDA_VERSION="11.8.0"
|
||||
ENV CUDA_BUILD="520.61.05"
|
||||
# hadolint ignore=SC3010
|
||||
RUN <<_INSTALL_CUDA
|
||||
#!/bin/bash
|
||||
cuda_prefix="https://developer.download.nvidia.com/compute/cuda/"
|
||||
cuda_suffix=""
|
||||
if [[ "${TARGETPLATFORM}" == 'linux/arm64' ]]; then
|
||||
cuda_suffix="_sbsa"
|
||||
fi
|
||||
url="${cuda_prefix}${CUDA_VERSION}/local_installers/cuda_${CUDA_VERSION}_${CUDA_BUILD}_linux${cuda_suffix}.run"
|
||||
echo "cuda url: ${url}"
|
||||
wget "$url" --progress=bar:force:noscroll -q --show-progress -O ./cuda.run
|
||||
chmod a+x ./cuda.run
|
||||
./cuda.run --silent --toolkit --toolkitpath=/build/cuda --no-opengl-libs --no-man-page --no-drm
|
||||
rm ./cuda.run
|
||||
_INSTALL_CUDA
|
||||
|
||||
# copy repository
|
||||
WORKDIR /build/sunshine/
|
||||
COPY .. .
|
||||
|
||||
# setup npm dependencies
|
||||
RUN npm install
|
||||
|
||||
# setup build directory
|
||||
WORKDIR /build/sunshine/build
|
||||
|
||||
# cmake and cpack
|
||||
RUN <<_MAKE
|
||||
#!/bin/bash
|
||||
cmake \
|
||||
-DCMAKE_CUDA_COMPILER:PATH=/build/cuda/bin/nvcc \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DCMAKE_INSTALL_PREFIX=/usr \
|
||||
-DSUNSHINE_ASSETS_DIR=share/sunshine \
|
||||
-DSUNSHINE_EXECUTABLE_PATH=/usr/bin/sunshine \
|
||||
-DSUNSHINE_ENABLE_WAYLAND=ON \
|
||||
-DSUNSHINE_ENABLE_X11=ON \
|
||||
-DSUNSHINE_ENABLE_DRM=ON \
|
||||
-DSUNSHINE_ENABLE_CUDA=ON \
|
||||
/build/sunshine
|
||||
make -j "$(nproc)"
|
||||
cpack -G DEB
|
||||
_MAKE
|
||||
|
||||
FROM scratch AS artifacts
|
||||
ARG BASE
|
||||
ARG TAG
|
||||
ARG TARGETARCH
|
||||
COPY --from=sunshine-build /build/sunshine/build/cpack_artifacts/Sunshine.deb /sunshine-${BASE}-${TAG}-${TARGETARCH}.deb
|
||||
|
||||
FROM sunshine-base as sunshine
|
||||
|
||||
# copy deb from builder
|
||||
COPY --from=artifacts /sunshine*.deb /sunshine.deb
|
||||
|
||||
# install sunshine
|
||||
RUN <<_INSTALL_SUNSHINE
|
||||
#!/bin/bash
|
||||
apt-get update -y
|
||||
apt-get install -y --no-install-recommends /sunshine.deb
|
||||
apt-get clean
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
_INSTALL_SUNSHINE
|
||||
|
||||
# network setup
|
||||
EXPOSE 47984-47990/tcp
|
||||
EXPOSE 48010
|
||||
EXPOSE 47998-48000/udp
|
||||
|
||||
# setup user
|
||||
ARG PGID=1000
|
||||
ENV PGID=${PGID}
|
||||
ARG PUID=1000
|
||||
ENV PUID=${PUID}
|
||||
ENV TZ="UTC"
|
||||
ARG UNAME=lizard
|
||||
ENV UNAME=${UNAME}
|
||||
|
||||
ENV HOME=/home/$UNAME
|
||||
|
||||
# setup user
|
||||
RUN <<_SETUP_USER
|
||||
groupadd -f -g "${PGID}" "${UNAME}"
|
||||
useradd -lm -d ${HOME} -s /bin/bash -g "${PGID}" -G input -u "${PUID}" "${UNAME}"
|
||||
mkdir -p ${HOME}/.config/sunshine
|
||||
ln -s ${HOME}/.config/sunshine /config
|
||||
chown -R ${UNAME} ${HOME}
|
||||
_SETUP_USER
|
||||
|
||||
USER ${UNAME}
|
||||
WORKDIR ${HOME}
|
||||
|
||||
# entrypoint
|
||||
ENTRYPOINT ["/usr/bin/sunshine"]
|
||||
20
docs/Makefile
Normal file
20
docs/Makefile
Normal file
@@ -0,0 +1,20 @@
|
||||
# Minimal makefile for Sphinx documentation
|
||||
#
|
||||
|
||||
# You can set these variables from the command line, and also
|
||||
# from the environment for the first two.
|
||||
SPHINXOPTS ?=
|
||||
SPHINXBUILD ?= sphinx-build
|
||||
SOURCEDIR = source
|
||||
BUILDDIR = build
|
||||
|
||||
# Put it first so that "make" without argument is like "make help".
|
||||
help:
|
||||
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||
|
||||
.PHONY: help Makefile
|
||||
|
||||
# Catch-all target: route all unknown targets to Sphinx using the new
|
||||
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
|
||||
%: Makefile
|
||||
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||
35
docs/make.bat
Normal file
35
docs/make.bat
Normal file
@@ -0,0 +1,35 @@
|
||||
@ECHO OFF
|
||||
|
||||
pushd %~dp0
|
||||
|
||||
REM Command file for Sphinx documentation
|
||||
|
||||
if "%SPHINXBUILD%" == "" (
|
||||
set SPHINXBUILD=sphinx-build
|
||||
)
|
||||
set SOURCEDIR=source
|
||||
set BUILDDIR=build
|
||||
|
||||
%SPHINXBUILD% >NUL 2>NUL
|
||||
if errorlevel 9009 (
|
||||
echo.
|
||||
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
|
||||
echo.installed, then set the SPHINXBUILD environment variable to point
|
||||
echo.to the full path of the 'sphinx-build' executable. Alternatively you
|
||||
echo.may add the Sphinx directory to PATH.
|
||||
echo.
|
||||
echo.If you don't have Sphinx installed, grab it from
|
||||
echo.https://www.sphinx-doc.org/
|
||||
exit /b 1
|
||||
)
|
||||
|
||||
if "%1" == "" goto help
|
||||
|
||||
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
|
||||
goto end
|
||||
|
||||
:help
|
||||
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
|
||||
|
||||
:end
|
||||
popd
|
||||
4
docs/requirements.txt
Normal file
4
docs/requirements.txt
Normal file
@@ -0,0 +1,4 @@
|
||||
furo==2022.12.7
|
||||
m2r2==0.3.3.post2
|
||||
Sphinx==6.1.3
|
||||
sphinx-copybutton==0.5.1
|
||||
1296
docs/source/about/advanced_usage.rst
Normal file
1296
docs/source/about/advanced_usage.rst
Normal file
File diff suppressed because it is too large
Load Diff
3
docs/source/about/docker.rst
Normal file
3
docs/source/about/docker.rst
Normal file
@@ -0,0 +1,3 @@
|
||||
:github_url: https://github.com/LizardByte/Sunshine/tree/nightly/docs/DOCKER_README.md
|
||||
|
||||
.. mdinclude:: ../../../DOCKER_README.md
|
||||
243
docs/source/about/installation.rst
Normal file
243
docs/source/about/installation.rst
Normal file
@@ -0,0 +1,243 @@
|
||||
Installation
|
||||
============
|
||||
The recommended method for running Sunshine is to use the `binaries`_ bundled with the `latest release`_.
|
||||
|
||||
.. Attention:: Additional setup is required after installation. See
|
||||
:ref:`Setup <about/usage:setup>`.
|
||||
|
||||
Binaries
|
||||
--------
|
||||
Binaries of Sunshine are created for each release. They are available for Linux, macOS, and Windows.
|
||||
Binaries can be found in the `latest release`_.
|
||||
|
||||
.. Tip:: Some third party packages also exist. See
|
||||
:ref:`Third Party Packages <about/third_party_packages:third party packages>`.
|
||||
|
||||
Docker
|
||||
------
|
||||
Docker images are available on `Dockerhub.io`_ and `ghcr.io`_.
|
||||
|
||||
See :ref:`Docker <about/docker:docker>` for additional information.
|
||||
|
||||
Linux
|
||||
-----
|
||||
Follow the instructions for your preferred package type below.
|
||||
|
||||
**CUDA Compatibility**
|
||||
|
||||
CUDA is used for NVFBC capture.
|
||||
|
||||
.. Tip:: See `CUDA GPUS <https://developer.nvidia.com/cuda-gpus>`_ to cross reference Compute Capability to your GPU.
|
||||
|
||||
.. table::
|
||||
:widths: auto
|
||||
|
||||
=========================================== ============== ============== ================================
|
||||
Package CUDA Version Min Driver CUDA Compute Capabilities
|
||||
=========================================== ============== ============== ================================
|
||||
https://aur.archlinux.org/packages/sunshine User dependent User dependent User dependent
|
||||
sunshine.AppImage 11.8.0 450.80.02 50;52;60;61;62;70;75;80;86;90;35
|
||||
sunshine_{arch}.flatpak 11.8.0 450.80.02 50;52;60;61;62;70;75;80;86;90;35
|
||||
sunshine-debian-bullseye-{arch}.deb 11.8.0 450.80.02 50;52;60;61;62;70;75;80;86;90;35
|
||||
sunshine-fedora-36-{arch}.rpm 12.0.0 525.60.13 50;52;60;61;62;70;75;80;86;90
|
||||
sunshine-fedora-37-{arch}.rpm 12.0.0 525.60.13 50;52;60;61;62;70;75;80;86;90
|
||||
sunshine-ubuntu-20.04-{arch}.deb 11.8.0 450.80.02 50;52;60;61;62;70;75;80;86;90;35
|
||||
sunshine-ubuntu-22.04-{arch}.deb 11.8.0 450.80.02 50;52;60;61;62;70;75;80;86;90;35
|
||||
=========================================== ============== ============== ================================
|
||||
|
||||
AppImage
|
||||
^^^^^^^^
|
||||
According to AppImageLint the supported distro matrix of the AppImage is below.
|
||||
|
||||
- [✖] Debian oldstable (buster)
|
||||
- [✔] Debian stable (bullseye)
|
||||
- [✔] Debian testing (bookworm)
|
||||
- [✔] Debian unstable (sid)
|
||||
- [✔] Ubuntu kinetic
|
||||
- [✔] Ubuntu jammy
|
||||
- [✔] Ubuntu focal
|
||||
- [✖] Ubuntu bionic
|
||||
- [✖] Ubuntu xenial
|
||||
- [✖] Ubuntu trusty
|
||||
- [✖] CentOS 7
|
||||
|
||||
#. Download ``sunshine.AppImage`` to your home directory.
|
||||
#. Open terminal and run the following code.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
./sunshine.AppImage --install
|
||||
|
||||
Start:
|
||||
.. code-block:: bash
|
||||
|
||||
./sunshine.AppImage --install && ./sunshine.AppImage
|
||||
|
||||
Uninstall:
|
||||
.. code-block:: bash
|
||||
|
||||
./sunshine.AppImage --remove
|
||||
|
||||
AUR Package
|
||||
^^^^^^^^^^^
|
||||
#. Open terminal and run the following code.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
git clone https://aur.archlinux.org/sunshine.git
|
||||
cd sunshine
|
||||
makepkg -fi
|
||||
|
||||
Uninstall:
|
||||
.. code-block:: bash
|
||||
|
||||
pacman -R sunshine
|
||||
|
||||
Debian Package
|
||||
^^^^^^^^^^^^^^
|
||||
#. Download ``sunshine-{ubuntu-version}.deb`` and run the following code.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
sudo apt install -f ./sunshine-{ubuntu-version}.deb
|
||||
|
||||
.. Note:: The ``{ubuntu-version}`` is the version of ubuntu we built the package on. If you are not using Ubuntu and
|
||||
have an issue with one package, you can try another.
|
||||
|
||||
.. Tip:: You can double click the deb file to see details about the package and begin installation.
|
||||
|
||||
Uninstall:
|
||||
.. code-block:: bash
|
||||
|
||||
sudo apt remove sunshine
|
||||
|
||||
Flatpak Package
|
||||
^^^^^^^^^^^^^^^
|
||||
#. Install `Flatpak <https://flatpak.org/setup/>`_ as required.
|
||||
#. Download ``sunshine_{arch}.flatpak`` and run the following code.
|
||||
|
||||
.. Note:: Be sure to replace ``{arch}`` with the architecture for your operating system.
|
||||
|
||||
System level (recommended)
|
||||
.. code-block:: bash
|
||||
|
||||
flatpak install --system ./sunshine_{arch}.flatpak
|
||||
|
||||
User level
|
||||
.. code-block:: bash
|
||||
|
||||
flatpak install --user ./sunshine_{arch}.flatpak
|
||||
|
||||
Additional installation (required)
|
||||
.. code-block:: bash
|
||||
|
||||
flatpak run --command=additional-install.sh dev.lizardbyte.sunshine
|
||||
|
||||
Start:
|
||||
X11 and NVFBC capture (X11 Only)
|
||||
.. code-block:: bash
|
||||
|
||||
flatpak run dev.lizardbyte.sunshine
|
||||
|
||||
KMS capture (Wayland & X11)
|
||||
.. code-block:: bash
|
||||
|
||||
sudo -i PULSE_SERVER=unix:$(pactl info | awk '/Server String/{print$3}') flatpak run dev.lizardbyte.sunshine
|
||||
|
||||
Uninstall:
|
||||
.. code-block:: bash
|
||||
|
||||
flatpak run --command=remove-additional-install.sh dev.lizardbyte.sunshine
|
||||
flatpak uninstall --delete-data dev.lizardbyte.sunshine
|
||||
|
||||
RPM Package
|
||||
^^^^^^^^^^^
|
||||
#. Add `rpmfusion` repositories by running the following code.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
sudo dnf install https://mirrors.rpmfusion.org/free/fedora/rpmfusion-free-release-$(rpm -E %fedora).noarch.rpm \
|
||||
https://mirrors.rpmfusion.org/nonfree/fedora/rpmfusion-nonfree-release-$(rpm -E %fedora).noarch.rpm
|
||||
|
||||
#. Download ``sunshine.rpm`` and run the following code.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
sudo dnf install ./sunshine.rpm
|
||||
|
||||
.. Tip:: You can double click the rpm file to see details about the package and begin installation.
|
||||
|
||||
Uninstall:
|
||||
.. code-block:: bash
|
||||
|
||||
sudo dnf remove sunshine
|
||||
|
||||
macOS
|
||||
-----
|
||||
Sunshine on macOS is experimental. Gamepads do not work. Other features may not work as expected.
|
||||
|
||||
pkg
|
||||
^^^
|
||||
.. Warning:: The `pkg` does not include runtime dependencies.
|
||||
|
||||
#. Download the ``sunshine.pkg`` file and install it as normal.
|
||||
|
||||
Uninstall:
|
||||
.. code-block:: bash
|
||||
|
||||
cd /etc/sunshine/assets
|
||||
uninstall_pkg.sh
|
||||
|
||||
Portfile
|
||||
^^^^^^^^
|
||||
#. Install `MacPorts <https://www.macports.org>`_
|
||||
#. Update the Macports sources.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
sudo nano /opt/local/etc/macports/sources.conf
|
||||
|
||||
Add this line, replacing your username, below the line that starts with ``rsync``.
|
||||
``file:///Users/<username>/ports``
|
||||
|
||||
``Ctrl+x``, then ``Y`` to exit and save changes.
|
||||
|
||||
#. Download the ``Portfile`` to ``~/Downloads`` and run the following code.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
mkdir -p ~/ports/multimedia/sunshine
|
||||
mv ~/Downloads/Portfile ~/ports/multimedia/sunshine/
|
||||
cd ~/ports
|
||||
portindex
|
||||
sudo port install sunshine
|
||||
|
||||
#. The first time you start Sunshine, you will be asked to grant access to screen recording and your microphone.
|
||||
|
||||
Uninstall:
|
||||
.. code-block:: bash
|
||||
|
||||
sudo port uninstall sunshine
|
||||
|
||||
Windows
|
||||
-------
|
||||
|
||||
Installer
|
||||
^^^^^^^^^
|
||||
#. Download and install ``sunshine-windows.exe``
|
||||
|
||||
.. Attention:: You should carefully select or unselect the options you want to install. Do not blindly install or enable
|
||||
features.
|
||||
|
||||
To uninstall, find Sunshine in the list `here <ms-settings:installed-apps>`_ and select "Uninstall" from the overflow
|
||||
menu. Different versions of Windows may provide slightly different steps for uninstall.
|
||||
|
||||
Standalone
|
||||
^^^^^^^^^^
|
||||
#. Download and extract ``sunshine-windows.zip``
|
||||
|
||||
To uninstall, delete the extracted directory which contains the ``sunshine.exe`` file.
|
||||
|
||||
.. _latest release: https://github.com/LizardByte/Sunshine/releases/latest
|
||||
.. _Dockerhub.io: https://hub.docker.com/repository/docker/lizardbyte/sunshine
|
||||
.. _ghcr.io: https://github.com/orgs/LizardByte/packages?repo_name=sunshine
|
||||
1
docs/source/about/overview.rst
Normal file
1
docs/source/about/overview.rst
Normal file
@@ -0,0 +1 @@
|
||||
.. include:: ../../../README.rst
|
||||
54
docs/source/about/third_party_packages.rst
Normal file
54
docs/source/about/third_party_packages.rst
Normal file
@@ -0,0 +1,54 @@
|
||||
Third Party Packages
|
||||
====================
|
||||
|
||||
.. Danger:: These packages are not maintained by LizardByte. Use at your own risk.
|
||||
|
||||
Chocolatey
|
||||
----------
|
||||
|
||||
.. image:: https://img.shields.io/chocolatey/v/Sunshine?style=for-the-badge&logo=chocolatey
|
||||
:alt: Chocolatey Version
|
||||
:target: https://community.chocolatey.org/packages/sunshine
|
||||
|
||||
.. image:: https://img.shields.io/chocolatey/dt/sunshine?style=for-the-badge&logo=chocolatey
|
||||
:alt: Chocolatey
|
||||
|
||||
nixpkgs
|
||||
-------
|
||||
.. image:: https://img.shields.io/badge/dynamic/xml?color=orange&label=nixpkgs&style=for-the-badge&prefix=v&query=%2F%2Ftr%5B%40id%3D%27nix_unstable%27%5D%2Ftd%5B3%5D%2Fspan%2Fa&url=https%3A%2F%2Frepology.org%2Fproject%2Fsunshine%2Fversions&logo=nixos
|
||||
:alt: nixpgs Version
|
||||
:target: https://github.com/NixOS/nixpkgs/blob/master/pkgs/servers/sunshine/default.nix
|
||||
|
||||
Scoop
|
||||
-----
|
||||
|
||||
.. image:: https://img.shields.io/scoop/v/sunshine?bucket=extras&style=for-the-badge
|
||||
:alt: Scoop Version (extras bucket)
|
||||
:target: https://scoop.sh/#/apps?s=0&d=1&o=true&q=sunshine
|
||||
|
||||
Solus
|
||||
-----
|
||||
.. image:: https://img.shields.io/badge/dynamic/xml?color=orange&label=Solus&style=for-the-badge&prefix=v&query=%2F%2Ftr%5B%40id%3D%27solus%27%5D%2Ftd%5B3%5D%2Fspan%2Fa&url=https%3A%2F%2Frepology.org%2Fproject%2Fsunshine%2Fversions&logo=solus
|
||||
:alt: Solus Version
|
||||
:target: https://dev.getsol.us/source/sunshine
|
||||
|
||||
Winget
|
||||
------
|
||||
.. image:: https://img.shields.io/badge/dynamic/xml?color=orange&label=Winget&style=for-the-badge&prefix=v&query=%2F%2Ftr%5B%40id%3D%27winget%27%5D%2Ftd%5B3%5D%2Fspan%2Fa&url=https%3A%2F%2Frepology.org%2Fproject%2Fsunshine%2Fversions&logo=microsoft
|
||||
:alt: Winget Version
|
||||
:target: https://github.com/microsoft/winget-pkgs/tree/master/manifests/l/LizardByte/Sunshine
|
||||
|
||||
Legacy GitHub Repo
|
||||
------------------
|
||||
|
||||
.. Attention:: This repo is not maintained. Thank you to Loki for bringing this amazing project to life!
|
||||
|
||||
.. image:: https://img.shields.io/static/v1?label=repo&message=loki-47-6F-64/sunshine&color=blue&style=for-the-badge&logo=github
|
||||
:alt: GitHub Maintainer
|
||||
:target: https://github.com/loki-47-6F-64/sunshine/releases
|
||||
|
||||
.. image:: https://img.shields.io/github/last-commit/loki-47-6F-64/sunshine?style=for-the-badge&logo=github
|
||||
:alt: GitHub last commit
|
||||
|
||||
.. image:: https://img.shields.io/github/release-date/loki-47-6F-64/sunshine?style=for-the-badge&logo=github
|
||||
:alt: GitHub Release Date
|
||||
281
docs/source/about/usage.rst
Normal file
281
docs/source/about/usage.rst
Normal file
@@ -0,0 +1,281 @@
|
||||
Usage
|
||||
=====
|
||||
#. See the `setup`_ section for your specific OS.
|
||||
#. If you did not install the service, then start sunshine with the following command, unless a start command is listed
|
||||
in the specified package :ref:`installation <about/installation:installation>` instructions.
|
||||
|
||||
.. Note:: A service is a process that runs in the background. Running multiple instances of Sunshine is not
|
||||
advised.
|
||||
|
||||
**Basic usage**
|
||||
.. code-block:: bash
|
||||
|
||||
sunshine
|
||||
|
||||
**Specify config file**
|
||||
.. code-block:: bash
|
||||
|
||||
sunshine <directory of conf file>/sunshine.conf
|
||||
|
||||
.. Note:: You do not need to specify a config file. If no config file is entered the default location will be used.
|
||||
|
||||
.. Attention:: The configuration file specified will be created if it doesn't exist.
|
||||
|
||||
#. Configure Sunshine in the web ui
|
||||
|
||||
The web ui is available on `https://localhost:47990 <https://localhost:47990>`_ by default. You may replace
|
||||
`localhost` with your internal ip address.
|
||||
|
||||
.. Attention:: Ignore any warning given by your browser about "insecure website". This is due to the SSL certificate
|
||||
being self signed.
|
||||
|
||||
.. Caution:: If running for the first time, make sure to note the username and password that you created.
|
||||
|
||||
**Add games and applications.**
|
||||
This can be configured in the web ui.
|
||||
|
||||
.. Note:: Additionally, apps can be configured manually. `src_assets/<os>/config/apps.json` is an example of a
|
||||
list of applications that are started just before running a stream. This is the directory within the GitHub
|
||||
repo.
|
||||
|
||||
#. In Moonlight, you may need to add the PC manually.
|
||||
#. When Moonlight request you insert the correct pin on sunshine:
|
||||
|
||||
- Login to the web ui
|
||||
- Go to "PIN" in the Navbar
|
||||
- Type in your PIN and press Enter, you should get a Success Message
|
||||
- In Moonlight, select one of the Applications listed
|
||||
|
||||
Network
|
||||
-------
|
||||
The Sunshine user interface will be available on port 47990 by default.
|
||||
|
||||
.. Warning:: Exposing ports to the internet can be dangerous. Do this at your own risk.
|
||||
|
||||
Arguments
|
||||
---------
|
||||
To get a list of available arguments run the following:
|
||||
.. code-block:: bash
|
||||
|
||||
sunshine --help
|
||||
|
||||
Setup
|
||||
-----
|
||||
|
||||
Linux
|
||||
^^^^^
|
||||
The `deb`, `rpm`, `Flatpak` and `AppImage` packages handle these steps automatically. Third party packages may not.
|
||||
|
||||
Sunshine needs access to `uinput` to create mouse and gamepad events.
|
||||
|
||||
#. Add user to group `input`, if this is the first time installing.
|
||||
.. code-block:: bash
|
||||
|
||||
sudo usermod -a -G input $USER
|
||||
|
||||
#. Create `udev` rules.
|
||||
.. code-block::
|
||||
|
||||
echo 'KERNEL=="uinput", GROUP="input", MODE="0660", OPTIONS+="static_node=uinput"' | \
|
||||
sudo tee /etc/udev/rules.d/85-sunshine-input.rules
|
||||
|
||||
#. Optionally, configure autostart service
|
||||
|
||||
- filename: ``~/.config/systemd/user/sunshine.service``
|
||||
- contents:
|
||||
.. code-block::
|
||||
|
||||
[Unit]
|
||||
Description=Sunshine self-hosted game stream host for Moonlight.
|
||||
StartLimitIntervalSec=500
|
||||
StartLimitBurst=5
|
||||
|
||||
[Service]
|
||||
ExecStart=<see table>
|
||||
Restart=on-failure
|
||||
RestartSec=5s
|
||||
#Flatpak Only
|
||||
#ExecStop=flatpak kill dev.lizardbyte.sunshine
|
||||
|
||||
[Install]
|
||||
WantedBy=graphical-session.target
|
||||
|
||||
.. table::
|
||||
:widths: auto
|
||||
|
||||
======== ============================================== ===============
|
||||
package ExecStart Auto Configured
|
||||
======== ============================================== ===============
|
||||
aur /usr/bin/sunshine ✔
|
||||
deb /usr/bin/sunshine ✔
|
||||
rpm /usr/bin/sunshine ✔
|
||||
AppImage ~/sunshine.AppImage ✔
|
||||
Flatpak flatpak run dev.lizardbyte.sunshine ✔
|
||||
======== ============================================== ===============
|
||||
|
||||
**Start once**
|
||||
.. code-block:: bash
|
||||
|
||||
systemctl --user start sunshine
|
||||
|
||||
**Start on boot**
|
||||
.. code-block:: bash
|
||||
|
||||
systemctl --user enable sunshine
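**Check status and logs**

To verify the service is running and to inspect its output, the standard systemd user commands apply:

.. code-block:: bash

    systemctl --user status sunshine
    journalctl --user -u sunshine -f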
|
||||
|
||||
#. Additional Setup for KMS
|
||||
.. Note:: ``cap_sys_admin`` grants nearly the same power as running as root, but without running Sunshine as root. It is necessary to
|
||||
allow Sunshine to use KMS.
|
||||
|
||||
**Enable**
|
||||
.. code-block:: bash
|
||||
|
||||
sudo setcap cap_sys_admin+p $(readlink -f $(which sunshine))
|
||||
|
||||
**Disable (for Xorg/X11)**
|
||||
.. code-block:: bash
|
||||
|
||||
sudo setcap -r $(readlink -f $(which sunshine))
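To check whether the capability is currently applied, ``getcap`` (from the same libcap tools as ``setcap``) can be used:

.. code-block:: bash

    getcap $(readlink -f $(which sunshine))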
|
||||
|
||||
#. Reboot
|
||||
.. code-block:: bash
|
||||
|
||||
sudo reboot now
|
||||
|
||||
macOS
|
||||
^^^^^
|
||||
Sunshine can only access microphones on macOS due to system limitations. To stream system audio use
|
||||
`Soundflower <https://github.com/mattingalls/Soundflower>`_ or
|
||||
`BlackHole <https://github.com/ExistentialAudio/BlackHole>`_.
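As a hedged example, BlackHole is commonly installed through Homebrew; the package name below is an assumption, so check the BlackHole project page if it is not found:

.. code-block:: bash

    brew install blackhole-2ch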
|
||||
|
||||
.. Note:: Command Keys are not forwarded by Moonlight. Right Option-Key is mapped to CMD-Key.
|
||||
|
||||
.. Caution:: Gamepads are not currently supported.
|
||||
|
||||
Configure autostart service
|
||||
**MacPorts**
|
||||
.. code-block:: bash
|
||||
|
||||
sudo port load Sunshine
|
||||
|
||||
Windows
|
||||
^^^^^^^
|
||||
For gamepad support, install `ViGEmBus <https://github.com/ViGEm/ViGEmBus/releases/latest>`_.
|
||||
|
||||
Sunshine firewall
|
||||
**Add rule**
|
||||
.. code-block:: batch
|
||||
|
||||
cd /d "C:\Program Files\Sunshine\scripts"
|
||||
add-firewall-rule.bat
|
||||
|
||||
**Remove rule**
|
||||
.. code-block:: batch
|
||||
|
||||
cd /d "C:\Program Files\Sunshine\scripts"
|
||||
remove-firewall-rule.bat
|
||||
|
||||
Sunshine service
|
||||
**Enable**
|
||||
.. code-block:: batch
|
||||
|
||||
cd /d "C:\Program Files\Sunshine\scripts"
|
||||
install-service.bat
|
||||
|
||||
**Disable**
|
||||
.. code-block:: batch
|
||||
|
||||
cd /d "C:\Program Files\Sunshine\scripts"
|
||||
uninstall-service.bat
|
||||
|
||||
Shortcuts
|
||||
---------
|
||||
All shortcuts start with ``CTRL + ALT + SHIFT``, just like Moonlight.
|
||||
|
||||
- ``CTRL + ALT + SHIFT + N`` - Hide/Unhide the cursor (This may be useful for Remote Desktop Mode for Moonlight)
|
||||
- ``CTRL + ALT + SHIFT + F1/F12`` - Switch to a different monitor for streaming
|
||||
|
||||
Application List
|
||||
----------------
|
||||
- Applications should be configured via the web UI.
|
||||
- A basic understanding of working directories and commands is required.
|
||||
- You can use Environment variables in place of values
|
||||
- ``$(HOME)`` will be replaced by the value of ``$HOME``
|
||||
- ``$$`` will be replaced by ``$``, e.g. ``$$(HOME)`` will become ``$(HOME)``
|
||||
- ``env`` - Adds or overwrites Environment variables for the commands/applications run by Sunshine
|
||||
- ``"Variable name":"Variable value"``
|
||||
- ``apps`` - The list of applications
|
||||
- Advanced users may want to edit the application list manually. The format is ``json``.
|
||||
- Example application:
|
||||
.. code-block:: json
|
||||
|
||||
{
|
||||
"cmd": "command to open app",
|
||||
"detached": [
|
||||
"some-command",
|
||||
"another-command"
|
||||
],
|
||||
"image-path": "/full-path/to/png-image",
|
||||
"name": "An App",
|
||||
"output": "/full-path/to/command-log-file",
|
||||
"prep-cmd": [
|
||||
{
|
||||
"do": "some-command",
|
||||
"undo": "undo-that-command"
|
||||
}
|
||||
],
|
||||
"working-dir": "/full-path/to/working-directory"
|
||||
}
|
||||
|
||||
- ``cmd`` - The main application
|
||||
- ``detached`` - A list of commands to be run and forgotten about
|
||||
|
||||
- If not specified, a process is started that sleeps indefinitely
|
||||
|
||||
- ``image-path`` - The full path to the cover art image to use.
|
||||
- ``name`` - The name of the application/game
|
||||
- ``output`` - The file where the output of the command is stored
|
||||
- ``prep-cmd`` - A list of commands to be run before/after the application
|
||||
|
||||
- If any of the prep-commands fail, starting the application is aborted
|
||||
- ``do`` - Run before the application
|
||||
|
||||
- If it fails, all ``undo`` commands of the previously succeeded ``do`` commands are run
|
||||
|
||||
- ``undo`` - Run after the application has terminated
|
||||
|
||||
- Failures of ``undo`` commands are ignored
|
||||
|
||||
- ``working-dir`` - The working directory to use. If not specified, Sunshine will use the application directory.
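Putting the fields above together, a minimal sketch of the format is shown below. The application name, command, and paths are hypothetical, and in normal use the web UI writes the real apps file for you, so treat this purely as an illustration of the structure:

.. code-block:: bash

    # write an example apps file to a throwaway location (not Sunshine's real apps file)
    # the quoted 'EOF' keeps $(HOME) literal so Sunshine, not the shell, performs the substitution
    cat > ~/sunshine-apps-example.json <<'EOF'
    {
      "env": {
        "EXAMPLE_VARIABLE": "example value"
      },
      "apps": [
        {
          "name": "My Game",
          "cmd": "/usr/bin/my-game",
          "working-dir": "$(HOME)/games"
        }
      ]
    }
    EOF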
|
||||
|
||||
Considerations
|
||||
--------------
|
||||
- When an application is started, if there is an application already running, it will be terminated.
|
||||
- When the application has been shut down, the stream shuts down as well.
|
||||
|
||||
- For example, if you attempt to run ``steam`` as a ``cmd`` instead of ``detached`` the stream will immediately fail.
|
||||
This is due to the method in which the steam process is executed. Other applications may behave similarly.
|
||||
|
||||
- In addition to the apps listed, one app "Desktop" is hardcoded into Sunshine. It does not start an application;
|
||||
instead, it simply starts a stream.
|
||||
- For the Linux flatpak you must prepend commands with ``flatpak-spawn --host``.
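For example, a command that would normally be ``steam`` becomes the following when using the Flatpak (assuming Steam is installed on the host):

.. code-block:: bash

    flatpak-spawn --host steam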
|
||||
|
||||
HDR Support
|
||||
-----------
|
||||
Streaming HDR content is supported for Windows hosts with NVIDIA, AMD, or Intel GPUs that support encoding HEVC Main 10.
|
||||
You must have an HDR-capable display or EDID emulator dongle connected to your host PC to activate HDR in Windows.
|
||||
|
||||
- Ensure you enable the HDR option in your Moonlight client settings, otherwise the stream will be SDR.
|
||||
- A good HDR experience relies on proper HDR display calibration both in Windows and in game. HDR calibration can differ significantly between client and host displays.
|
||||
- We recommend calibrating the display by streaming the Windows HDR Calibration app to your client device and saving an HDR calibration profile to use while streaming.
|
||||
- You may also need to tune the brightness slider or HDR calibration options in game to the different HDR brightness capabilities of your client's display.
|
||||
- Older games that use NVIDIA-specific NVAPI HDR rather than native Windows 10 OS HDR support may not display in HDR.
|
||||
- Some GPUs can produce lower image quality or encoding performance when streaming in HDR compared to SDR.
|
||||
|
||||
Tutorials
|
||||
---------
|
||||
Tutorial videos are available `here <https://www.youtube.com/playlist?list=PLMYr5_xSeuXAbhxYHz86hA1eCDugoxXY0>`_.
|
||||
|
||||
.. admonition:: Community!
|
||||
|
||||
Tutorials are community generated. Want to contribute? Reach out to us on our discord server.
|
||||
docs/source/building/build.rst (new file, 32 lines)
|
||||
Build
|
||||
=====
|
||||
Sunshine binaries are built using `CMake <https://cmake.org/>`_. Cross compilation is not
|
||||
supported. That means the binaries must be built on the target operating system and architecture.
|
||||
|
||||
Building Locally
|
||||
----------------
|
||||
|
||||
Clone
|
||||
^^^^^
|
||||
Ensure `git <https://git-scm.com/>`_ is installed and run the following:
|
||||
.. code-block:: bash
|
||||
|
||||
git clone https://github.com/lizardbyte/sunshine.git --recurse-submodules
|
||||
cd sunshine && mkdir build && cd build
|
||||
|
||||
Compile
|
||||
^^^^^^^
|
||||
See the section specific to your OS.
|
||||
|
||||
- :ref:`Linux <building/linux:linux>`
|
||||
- :ref:`macOS <building/macos:macos>`
|
||||
- :ref:`Windows <building/windows:windows>`
|
||||
|
||||
Remote Build
|
||||
------------
|
||||
It may be beneficial to build remotely in some cases. This will enable easier building on different operating systems.
|
||||
|
||||
#. Fork the project
|
||||
#. Activate workflows
|
||||
#. Trigger the `CI` workflow manually
|
||||
#. Download the artifacts/binaries from the workflow run summary
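If you use the GitHub CLI, the manual dispatch can typically be triggered from your fork as shown below; the workflow name is an assumption, so check ``.github/workflows`` in the repository for the exact name:

.. code-block:: bash

    gh workflow run CI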
|
||||
docs/source/building/linux.rst (new file, 204 lines)
|
||||
Linux
|
||||
=====
|
||||
|
||||
Requirements
|
||||
------------
|
||||
|
||||
Debian Bullseye
|
||||
^^^^^^^^^^^^^^^
|
||||
End of Life: TBD
|
||||
|
||||
Install Requirements

.. Note:: In this and the following package lists, the inline ``#`` comments are annotations only. Remove them before
   running the command, since a trailing backslash must be the last character on a continued line.
|
||||
.. code-block:: bash
|
||||
|
||||
sudo apt update && sudo apt install \
|
||||
build-essential \
|
||||
cmake \
|
||||
libavdevice-dev \
|
||||
libboost-filesystem-dev \
|
||||
libboost-log-dev \
|
||||
libboost-program-options-dev \
|
||||
libboost-thread-dev \
|
||||
libcap-dev \ # KMS
|
||||
libcurl4-openssl-dev \
|
||||
libdrm-dev \ # KMS
|
||||
libevdev-dev \
|
||||
libmfx-dev \ # x86_64 only
|
||||
libnuma-dev \
|
||||
libopus-dev \
|
||||
libpulse-dev \
|
||||
libssl-dev \
|
||||
libva-dev \
|
||||
libvdpau-dev \
|
||||
libwayland-dev \ # Wayland
|
||||
libx11-dev \ # X11
|
||||
libxcb-shm0-dev \ # X11
|
||||
libxcb-xfixes0-dev \ # X11
|
||||
libxcb1-dev \ # X11
|
||||
libxfixes-dev \ # X11
|
||||
libxrandr-dev \ # X11
|
||||
libxtst-dev \ # X11
|
||||
nodejs \
|
||||
npm \
|
||||
nvidia-cuda-dev \ # Cuda, NvFBC
|
||||
nvidia-cuda-toolkit # Cuda, NvFBC
|
||||
|
||||
Fedora 36, 37
|
||||
^^^^^^^^^^^^^
|
||||
End of Life: TBD
|
||||
|
||||
Install Requirements
|
||||
.. code-block:: bash
|
||||
|
||||
sudo dnf update && \
|
||||
sudo dnf group install "Development Tools" && \
|
||||
sudo dnf install \
|
||||
boost-devel \
|
||||
cmake \
|
||||
gcc \
|
||||
gcc-c++ \
|
||||
intel-mediasdk-devel \ # x86_64 only
|
||||
libcap-devel \
|
||||
libcurl-devel \
|
||||
libdrm-devel \
|
||||
libevdev-devel \
|
||||
libva-devel \
|
||||
libvdpau-devel \
|
||||
libX11-devel \ # X11
|
||||
libxcb-devel \ # X11
|
||||
libXcursor-devel \ # X11
|
||||
libXfixes-devel \ # X11
|
||||
libXi-devel \ # X11
|
||||
libXinerama-devel \ # X11
|
||||
libXrandr-devel \ # X11
|
||||
libXtst-devel \ # X11
|
||||
mesa-libGL-devel \
|
||||
npm \
|
||||
numactl-devel \
|
||||
openssl-devel \
|
||||
opus-devel \
|
||||
pulseaudio-libs-devel \
|
||||
rpm-build \ # if you want to build an RPM binary package
|
||||
wget \ # necessary for cuda install with `run` file
|
||||
which # necessary for cuda install with `run` file
|
||||
|
||||
Ubuntu 20.04
|
||||
^^^^^^^^^^^^
|
||||
End of Life: April 2030
|
||||
|
||||
Install Requirements
|
||||
.. code-block:: bash
|
||||
|
||||
sudo apt update && sudo apt install \
|
||||
build-essential \
|
||||
cmake \
|
||||
g++-10 \
|
||||
libavdevice-dev \
|
||||
libboost-filesystem-dev \
|
||||
libboost-log-dev \
|
||||
libboost-thread-dev \
|
||||
libboost-program-options-dev \
|
||||
libcap-dev \ # KMS
|
||||
libdrm-dev \ # KMS
|
||||
libevdev-dev \
|
||||
libmfx-dev \ # x86_64 only
|
||||
libnuma-dev \
|
||||
libopus-dev \
|
||||
libpulse-dev \
|
||||
libssl-dev \
|
||||
libva-dev \
|
||||
libvdpau-dev \
|
||||
libwayland-dev \ # Wayland
|
||||
libx11-dev \ # X11
|
||||
libxcb-shm0-dev \ # X11
|
||||
libxcb-xfixes0-dev \ # X11
|
||||
libxcb1-dev \ # X11
|
||||
libxfixes-dev \ # X11
|
||||
libxrandr-dev \ # X11
|
||||
libxtst-dev \ # X11
|
||||
nodejs \
|
||||
npm \
|
||||
wget # necessary for cuda install with `run` file
|
||||
|
||||
Update gcc alias
|
||||
.. code-block:: bash
|
||||
|
||||
update-alternatives --install \
|
||||
/usr/bin/gcc gcc /usr/bin/gcc-10 100 \
|
||||
--slave /usr/bin/g++ g++ /usr/bin/g++-10 \
|
||||
--slave /usr/bin/gcov gcov /usr/bin/gcov-10 \
|
||||
--slave /usr/bin/gcc-ar gcc-ar /usr/bin/gcc-ar-10 \
|
||||
--slave /usr/bin/gcc-ranlib gcc-ranlib /usr/bin/gcc-ranlib-10
|
||||
|
||||
Ubuntu 22.04
|
||||
^^^^^^^^^^^^
|
||||
End of Life: April 2027
|
||||
|
||||
Install Requirements
|
||||
.. code-block:: bash
|
||||
|
||||
sudo apt update && sudo apt install \
|
||||
build-essential \
|
||||
cmake \
|
||||
libavdevice-dev \
|
||||
libboost-filesystem-dev \
|
||||
libboost-log-dev \
|
||||
libboost-thread-dev \
|
||||
libboost-program-options-dev \
|
||||
libcap-dev \ # KMS
|
||||
libdrm-dev \ # KMS
|
||||
libevdev-dev \
|
||||
libmfx-dev \ # x86_64 only
|
||||
libnuma-dev \
|
||||
libopus-dev \
|
||||
libpulse-dev \
|
||||
libssl-dev \
|
||||
libwayland-dev \ # Wayland
|
||||
libx11-dev \ # X11
|
||||
libxcb-shm0-dev \ # X11
|
||||
libxcb-xfixes0-dev \ # X11
|
||||
libxcb1-dev \ # X11
|
||||
libxfixes-dev \ # X11
|
||||
libxrandr-dev \ # X11
|
||||
libxtst-dev \ # X11
|
||||
nodejs \
|
||||
npm \
|
||||
nvidia-cuda-dev \ # CUDA, NvFBC
|
||||
nvidia-cuda-toolkit # CUDA, NvFBC
|
||||
|
||||
CUDA
|
||||
----
|
||||
If the version of CUDA available from your distro is not adequate, manually install CUDA.
|
||||
|
||||
.. Tip:: The version of CUDA you use will determine compatibility with various GPU generations.
|
||||
See `CUDA compatibility <https://docs.nvidia.com/deploy/cuda-compatibility/index.html>`_ for more info.
|
||||
|
||||
Select the appropriate run file based on your desired CUDA version and architecture according to
|
||||
`CUDA Toolkit Archive <https://developer.nvidia.com/cuda-toolkit-archive>`_.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
wget https://developer.download.nvidia.com/compute/cuda/11.4.2/local_installers/cuda_11.4.2_470.57.02_linux.run \
|
||||
--progress=bar:force:noscroll -q --show-progress -O ./cuda.run
|
||||
chmod a+x ./cuda.run
|
||||
./cuda.run --silent --toolkit --toolkitpath=/usr --no-opengl-libs --no-man-page --no-drm
|
||||
rm ./cuda.run
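With the install prefix used above (``--toolkitpath=/usr``), ``nvcc`` should already be on your path; you can confirm the installed version with:

.. code-block:: bash

    nvcc --version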
|
||||
|
||||
npm dependencies
|
||||
----------------
|
||||
Install npm dependencies.
|
||||
.. code-block:: bash
|
||||
|
||||
npm install
|
||||
|
||||
Build
|
||||
-----
|
||||
.. Attention:: Ensure you are in the build directory created during the clone step earlier before continuing.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
cmake ..
|
||||
make -j $(nproc)
|
||||
|
||||
cpack -G DEB # optionally, create a deb package
|
||||
cpack -G RPM # optionally, create a rpm package
|
||||
docs/source/building/macos.rst (new file, 46 lines)
|
||||
macOS
|
||||
=====
|
||||
|
||||
Requirements
|
||||
------------
|
||||
macOS Big Sur and Xcode 12.5+
|
||||
|
||||
Use either `MacPorts <https://www.macports.org>`_ or `Homebrew <https://brew.sh>`_
|
||||
|
||||
MacPorts
|
||||
""""""""
|
||||
Install Requirements
|
||||
.. code-block:: bash
|
||||
|
||||
sudo port install avahi boost180 cmake curl libopus npm9 pkgconfig
|
||||
|
||||
Homebrew
|
||||
""""""""
|
||||
Install Requirements
|
||||
.. code-block:: bash
|
||||
|
||||
brew install boost cmake node opus
|
||||
# if there are issues with an SSL header that is not found:
|
||||
cd /usr/local/include
|
||||
ln -s ../opt/openssl/include/openssl .
|
||||
|
||||
npm dependencies
|
||||
----------------
|
||||
Install npm dependencies.
|
||||
.. code-block:: bash
|
||||
|
||||
npm install
|
||||
|
||||
Build
|
||||
-----
|
||||
.. Attention:: Ensure you are in the build directory created during the clone step earlier before continuing.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
cmake ..
|
||||
make -j $(sysctl -n hw.ncpu)
|
||||
|
||||
cpack -G DragNDrop # optionally, create a macOS dmg package
|
||||
|
||||
If cmake fails complaining to find Boost, try to set the path explicitly.
|
||||
``cmake -DBOOST_ROOT=[boost path] ..``, e.g., ``cmake -DBOOST_ROOT=/opt/local/libexec/boost/1.80 ..``
|
||||
docs/source/building/windows.rst (new file, 41 lines)
|
||||
Windows
|
||||
=======
|
||||
|
||||
Requirements
|
||||
------------
|
||||
First you need to install `MSYS2 <https://www.msys2.org>`_, then start "MSYS2 MinGW 64-bit" and execute the following
|
||||
commands.
|
||||
|
||||
Update all packages:
|
||||
.. code-block:: bash
|
||||
|
||||
pacman -Suy
|
||||
|
||||
Install dependencies:
|
||||
.. code-block:: bash
|
||||
|
||||
pacman -S base-devel cmake diffutils gcc git make mingw-w64-x86_64-binutils \
|
||||
mingw-w64-x86_64-boost mingw-w64-x86_64-cmake mingw-w64-x86_64-curl \
|
||||
mingw-w64-x86_64-libmfx mingw-w64-x86_64-openssl mingw-w64-x86_64-opus \
|
||||
mingw-w64-x86_64-toolchain
|
||||
|
||||
npm dependencies
|
||||
----------------
|
||||
Install nodejs and npm. Downloads available `here <https://nodejs.org/en/download/>`_.
|
||||
|
||||
Install npm dependencies.
|
||||
.. code-block:: bash
|
||||
|
||||
npm install
|
||||
|
||||
Build
|
||||
-----
|
||||
.. Attention:: Ensure you are in the build directory created during the clone step earlier before continuing.
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
cmake -G "MinGW Makefiles" ..
|
||||
mingw32-make -j$(nproc)
|
||||
|
||||
cpack -G NSIS # optionally, create a windows installer
|
||||
cpack -G ZIP # optionally, create a windows standalone package
|
||||
docs/source/conf.py (new file, 85 lines)
|
||||
# Configuration file for the Sphinx documentation builder.
|
||||
#
|
||||
# This file only contains a selection of the most common options. For a full
|
||||
# list see the documentation:
|
||||
# https://www.sphinx-doc.org/en/master/usage/configuration.html
|
||||
|
||||
# standard imports
|
||||
from datetime import datetime
|
||||
import os
|
||||
import re
|
||||
|
||||
|
||||
# -- Path setup --------------------------------------------------------------
|
||||
|
||||
# If extensions (or modules to document with autodoc) are in another directory,
|
||||
# add these directories to sys.path here. If the directory is relative to the
|
||||
# documentation root, use os.path.abspath to make it absolute, like shown here.
|
||||
|
||||
script_dir = os.path.dirname(os.path.abspath(__file__)) # the directory of this file
|
||||
source_dir = os.path.dirname(script_dir) # the source folder directory
|
||||
root_dir = os.path.dirname(source_dir) # the root folder directory
|
||||
|
||||
# -- Project information -----------------------------------------------------
|
||||
project = 'Sunshine'
|
||||
copyright = f'{datetime.now().year}, {project}'
|
||||
author = 'ReenigneArcher'
|
||||
|
||||
# The full version, including alpha/beta/rc tags
|
||||
with open(os.path.join(root_dir, 'CMakeLists.txt'), 'r') as f:
|
||||
version = re.search(r"project\(Sunshine VERSION ((\d+)\.(\d+)\.(\d+))", str(f.read())).group(1)
|
||||
"""
|
||||
To use cmake method for obtaining version instead of regex,
|
||||
1. Within CMakeLists.txt add the following line without backticks:
|
||||
``configure_file(docs/source/conf.py.in "${CMAKE_CURRENT_SOURCE_DIR}/docs/source/conf.py" @ONLY)``
|
||||
2. Rename this file to ``conf.py.in``
|
||||
3. Uncomment the next line
|
||||
"""
|
||||
# version = '@PROJECT_VERSION@' # use this for cmake configure_file method
|
||||
|
||||
# -- General configuration ---------------------------------------------------
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = [
|
||||
'm2r2', # enable markdown files
|
||||
'sphinx.ext.autosectionlabel',
|
||||
'sphinx.ext.todo', # enable to-do sections
|
||||
'sphinx.ext.viewcode', # add links to view source code
|
||||
'sphinx_copybutton', # add a copy button to code blocks
|
||||
]
|
||||
|
||||
# Add any paths that contain templates here, relative to this directory.
|
||||
# templates_path = ['_templates']
|
||||
|
||||
# List of patterns, relative to source directory, that match files and
|
||||
# directories to ignore when looking for source files.
|
||||
# This pattern also affects html_static_path and html_extra_path.
|
||||
exclude_patterns = ['toc.rst']
|
||||
|
||||
# Extensions to include.
|
||||
source_suffix = ['.rst', '.md']
|
||||
|
||||
|
||||
# -- Options for HTML output -------------------------------------------------
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
# html_static_path = ['_static']
|
||||
|
||||
html_logo = os.path.join(root_dir, 'sunshine.png')
|
||||
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
html_theme = 'furo'
|
||||
|
||||
html_theme_options = {
|
||||
"top_of_page_button": "edit",
|
||||
"source_edit_link": "https://github.com/lizardbyte/sunshine/tree/nightly/docs/source/{filename}",
|
||||
}
|
||||
|
||||
# extension config options
|
||||
autosectionlabel_prefix_document = True # Make sure the target is unique
|
||||
todo_include_todos = True
|
||||
docs/source/contributing/contributing.rst (new file, 5 lines)
|
||||
Contributing
|
||||
============
|
||||
|
||||
Read our contribution guide in our organization level
|
||||
`docs <https://lizardbyte.readthedocs.io/en/latest/developers/contributing.html>`_.
|
||||
docs/source/contributing/localization.rst (new file, 81 lines)
|
||||
Localization
|
||||
============
|
||||
Sunshine is being localized into various languages. The default language is `en` (English) and is highlighted green.
|
||||
|
||||
.. image:: https://img.shields.io/badge/dynamic/json?color=blue&label=de&style=for-the-badge&query=%24.progress.0.data.translationProgress&url=https%3A%2F%2Fbadges.awesome-crowdin.com%2Fstats-15178612-503956.json
|
||||
.. image:: https://img.shields.io/badge/dynamic/json?color=green&label=en&style=for-the-badge&query=%24.progress.1.data.translationProgress&url=https%3A%2F%2Fbadges.awesome-crowdin.com%2Fstats-15178612-503956.json
|
||||
.. image:: https://img.shields.io/badge/dynamic/json?color=blue&label=en-GB&style=for-the-badge&query=%24.progress.2.data.translationProgress&url=https%3A%2F%2Fbadges.awesome-crowdin.com%2Fstats-15178612-503956.json
|
||||
.. image:: https://img.shields.io/badge/dynamic/json?color=blue&label=en-US&style=for-the-badge&query=%24.progress.3.data.translationProgress&url=https%3A%2F%2Fbadges.awesome-crowdin.com%2Fstats-15178612-503956.json
|
||||
.. image:: https://img.shields.io/badge/dynamic/json?color=blue&label=es-ES&style=for-the-badge&query=%24.progress.4.data.translationProgress&url=https%3A%2F%2Fbadges.awesome-crowdin.com%2Fstats-15178612-503956.json
|
||||
.. image:: https://img.shields.io/badge/dynamic/json?color=blue&label=fr&style=for-the-badge&query=%24.progress.5.data.translationProgress&url=https%3A%2F%2Fbadges.awesome-crowdin.com%2Fstats-15178612-503956.json
|
||||
.. image:: https://img.shields.io/badge/dynamic/json?color=blue&label=it&style=for-the-badge&query=%24.progress.6.data.translationProgress&url=https%3A%2F%2Fbadges.awesome-crowdin.com%2Fstats-15178612-503956.json
|
||||
.. image:: https://img.shields.io/badge/dynamic/json?color=blue&label=ru&style=for-the-badge&query=%24.progress.7.data.translationProgress&url=https%3A%2F%2Fbadges.awesome-crowdin.com%2Fstats-15178612-503956.json
|
||||
|
||||
Graph
|
||||
.. image:: https://badges.awesome-crowdin.com/translation-15178612-503956.png
|
||||
|
||||
CrowdIn
|
||||
-------
|
||||
The translations occur on
|
||||
`CrowdIn <https://crowdin.com/project/sunshinestream>`_. Feel free to contribute to localization there.
|
||||
Only elements of the API are planned to be translated.
|
||||
|
||||
.. Attention:: The REST API has not yet been implemented.
|
||||
|
||||
**Translations Basics**
|
||||
- The brand names `LizardByte` and `Sunshine` should never be translated.
|
||||
- Other brand names should never be translated.
|
||||
Examples:
|
||||
|
||||
- AMD
|
||||
- Nvidia
|
||||
|
||||
**CrowdIn Integration**
|
||||
How does it work?
|
||||
|
||||
When a change is made to sunshine source code, a workflow generates new translation templates
|
||||
that get pushed to CrowdIn automatically.
|
||||
|
||||
When translations are updated on CrowdIn, a push gets made to the `l10n_nightly` branch and a PR is made against the
|
||||
`nightly` branch. Once the PR is merged, all updated translations are part of the project and will be included in the
|
||||
next release.
|
||||
|
||||
Extraction
|
||||
----------
|
||||
There should be few cases where strings need to be extracted from source code; however, it may be necessary in some
|
||||
situations. For example, if a system tray icon is added, it should be localized since it is user-facing.
|
||||
|
||||
- Wrap the string to be extracted in a function as shown.
|
||||
.. code-block:: cpp
|
||||
|
||||
#include <boost/locale.hpp>
|
||||
boost::locale::translate("Hello world!")
|
||||
|
||||
.. Tip:: More examples can be found in the documentation for
|
||||
`boost locale <https://www.boost.org/doc/libs/1_70_0/libs/locale/doc/html/messages_formatting.html>`_.
|
||||
|
||||
.. Warning:: This is for information only. Contributors should never include manually updated template files, or
|
||||
manually compiled language files in Pull Requests.
|
||||
|
||||
Strings are automatically extracted from the code to the `locale/sunshine.po` template file. The generated file is
|
||||
used by CrowdIn to generate language specific template files. The file is generated using the
|
||||
`.github/workflows/localize.yml` workflow and is run on any push event into the `nightly` branch. Jobs are only run if
|
||||
any of the following paths are modified.
|
||||
|
||||
.. code-block:: yaml
|
||||
|
||||
- 'src/**'
|
||||
|
||||
When testing locally it may be desirable to manually extract, initialize, update, and compile strings. Python is
|
||||
required for this, along with the python dependencies in the `./scripts/requirements.txt` file. Additionally,
|
||||
`xgettext <https://www.gnu.org/software/gettext/>`_ must be installed.
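The Python dependencies can be installed with pip, for example:

.. code-block:: bash

    python -m pip install -r ./scripts/requirements.txt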
|
||||
|
||||
**Extract, initialize, and update**
|
||||
.. code-block:: bash
|
||||
|
||||
python ./scripts/_locale.py --extract --init --update
|
||||
|
||||
**Compile**
|
||||
.. code-block:: bash
|
||||
|
||||
python ./scripts/_locale.py --compile
|
||||
docs/source/contributing/testing.rst (new file, 38 lines)
|
||||
Testing
|
||||
=======
|
||||
|
||||
Clang Format
|
||||
------------
|
||||
Source code is tested against the `.clang-format` file for linting errors. The workflow file responsible for clang
|
||||
format testing is `.github/workflows/cpp-clang-format-lint.yml`.
|
||||
|
||||
Test clang-format locally.
|
||||
.. code-block:: bash
|
||||
|
||||
find ./ -iname "*.cpp" -o -iname "*.h" -o -iname "*.m" -o -iname "*.mm" | xargs clang-format -i
|
||||
|
||||
Sphinx
|
||||
------
|
||||
Sunshine uses `Sphinx <https://www.sphinx-doc.org/en/master/>`_ for documentation building. Sphinx, along with other
|
||||
required documentation dependencies are included in the `./docs/requirements.txt` file. Python is required to build
|
||||
sphinx docs. Installation and setup of python will not be covered here.
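The documentation dependencies can be installed with pip before building, for example:

.. code-block:: bash

    python -m pip install -r ./docs/requirements.txt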
|
||||
|
||||
The config file for Sphinx is `docs/source/conf.py`. This is already included in the repo and should not be modified.
|
||||
|
||||
Test with Sphinx
|
||||
.. code-block:: bash
|
||||
|
||||
cd docs
|
||||
make html
|
||||
|
||||
Alternatively
|
||||
|
||||
.. code-block:: bash
|
||||
|
||||
cd docs
|
||||
sphinx-build -b html source build
|
||||
|
||||
Unit Testing
|
||||
------------
|
||||
.. Todo:: Sunshine does not currently have any unit tests. If you would like to help us improve please get in contact
|
||||
with us, or make a PR with suggested changes.
|
||||
docs/source/gamestream/gamestream.rst (new file, 20 lines)
|
||||
GameStream
|
||||
==========
|
||||
Nvidia announced that their GameStream service for Nvidia Games clients will be discontinued in February 2023.
|
||||
Luckily, Sunshine performance is now on par with Nvidia GameStream. Many users have even reported that Sunshine
|
||||
outperforms GameStream, so rest assured that Sunshine will be equally performant moving forward.
|
||||
|
||||
Migration
|
||||
---------
|
||||
We have developed a simple migration tool to help you migrate your GameStream games and apps to Sunshine automatically.
|
||||
Please check out our `GSMS <https://github.com/LizardByte/GSMS>`_ project if you're interested in an automated
|
||||
migration option. At the time of writing, GSMS offers the ability to migrate your custom games and apps. The
|
||||
working directory, command, and image are all set in Sunshine's ``apps.json`` file. The box-art image is also copied
|
||||
to a specified directory.
|
||||
|
||||
Limitations
|
||||
-----------
|
||||
Sunshine does have some limitations, as compared to Nvidia GameStream.
|
||||
|
||||
- Automatic game/application list.
|
||||
- Changing game settings automatically, to optimize streaming.
|
||||
docs/source/index.rst (new file, 5 lines)
|
||||
:github_url: https://github.com/LizardByte/Sunshine/tree/nightly/docs/source/index.rst
|
||||
|
||||
Table of Contents
|
||||
=================
|
||||
.. include:: toc.rst
|
||||
docs/source/legal/legal.rst (new file, 21 lines)
|
||||
Legal
|
||||
=====
|
||||
.. Attention:: This documentation is for informational purposes only and is not intended as legal advice. If you have
|
||||
any legal questions or concerns about using Sunshine, we recommend consulting with a lawyer.
|
||||
|
||||
Sunshine is licensed under the GPL-3.0 license, which allows for free use and modification of the software.
|
||||
The full text of the license can be reviewed `here <https://github.com/LizardByte/Sunshine/blob/master/LICENSE>`_.
|
||||
|
||||
Commercial Use
|
||||
--------------
|
||||
Sunshine can be used in commercial applications without any limitations. This means that businesses and organizations
|
||||
can use Sunshine to create and sell products or services without needing to seek permission or pay a fee.
|
||||
|
||||
However, it is important to note that the GPL-3.0 license does not grant any rights to distribute or sell the encoders
|
||||
contained within Sunshine. If you plan to sell access to Sunshine as part of your distribution, you are responsible
|
||||
for obtaining the necessary licenses to do so. This may include obtaining a license from the
|
||||
Motion Picture Experts Group (MPEG-LA) and/or any other necessary licensing requirements.
|
||||
|
||||
In summary, while Sunshine is free to use, it is the user's responsibility to ensure compliance with all applicable
|
||||
licensing requirements when redistributing the software as part of a commercial offering. If you have any questions or
|
||||
concerns about using Sunshine in a commercial setting, we recommend consulting with a lawyer.
|
||||
docs/source/toc.rst (new file, 48 lines)
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: About
|
||||
|
||||
about/overview
|
||||
about/installation
|
||||
about/docker
|
||||
about/third_party_packages
|
||||
about/usage
|
||||
about/advanced_usage
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: GameStream
|
||||
|
||||
gamestream/gamestream
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: Troubleshooting
|
||||
|
||||
troubleshooting/general
|
||||
troubleshooting/linux
|
||||
troubleshooting/macos
|
||||
troubleshooting/windows
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: Build
|
||||
|
||||
building/build
|
||||
building/linux
|
||||
building/macos
|
||||
building/windows
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: Contributing
|
||||
|
||||
contributing/contributing
|
||||
contributing/localization
|
||||
contributing/testing
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: Legal
|
||||
|
||||
legal/legal
|
||||
docs/source/troubleshooting/general.rst (new file, 24 lines)
|
||||
General
|
||||
=======
|
||||
|
||||
Forgotten Credentials
|
||||
---------------------
|
||||
If you forgot your credentials to the web UI, try this.
|
||||
.. code-block:: bash
|
||||
|
||||
sunshine --creds <new username> <new password>
|
||||
|
||||
Web UI Access
|
||||
-------------
|
||||
Can't access the web UI?
|
||||
#. Check firewall rules.
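For example, on a Linux host using ufw, the default web UI port can be opened as shown below (47990 is the default; adjust if you changed it, and prefer limiting access to your local network):

.. code-block:: bash

    sudo ufw allow 47990/tcp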
|
||||
|
||||
Nvidia issues
|
||||
-------------
|
||||
NvFBC, NvENC, or general issues with Nvidia graphics cards.
|
||||
- Consumer-grade Nvidia cards are software-limited to a specific number of encodes. See
|
||||
`Video Encode and Decode GPU Support Matrix <https://developer.nvidia.com/video-encode-and-decode-gpu-support-matrix-new>`_
|
||||
for more info.
|
||||
- You can usually bypass the restriction with a driver patch. See Keylase's
|
||||
`Linux <https://github.com/keylase/nvidia-patch>`_
|
||||
or `Windows <https://github.com/keylase/nvidia-patch/blob/master/win>`_ patches for more guidance.
|
||||
docs/source/troubleshooting/linux.rst (new file, 9 lines)
|
||||
Linux
|
||||
=====
|
||||
|
||||
KMS Streaming fails
|
||||
-------------------
|
||||
If screencasting fails with KMS, you may need to run the following to force unprivileged screencasting.
|
||||
.. code-block:: bash
|
||||
|
||||
sudo setcap -r $(readlink -f $(which sunshine))
|
||||
docs/source/troubleshooting/macos.rst (new file, 13 lines)
|
||||
macOS
|
||||
=====
|
||||
|
||||
Dynamic session lookup failed
|
||||
-----------------------------
|
||||
If you get this error:
|
||||
`Dynamic session lookup supported but failed: launchd did not provide a socket path, verify that
|
||||
org.freedesktop.dbus-session.plist is loaded!`
|
||||
|
||||
Try this.
|
||||
.. code-block:: bash
|
||||
|
||||
launchctl load -w /Library/LaunchAgents/org.freedesktop.dbus-session.plist
|
||||
docs/source/troubleshooting/windows.rst (new file, 6 lines)
|
||||
Windows
|
||||
=======
|
||||
|
||||
No gamepad detected
|
||||
-------------------
|
||||
#. Verify that you've installed `ViGEmBus <https://github.com/ViGEm/ViGEmBus/releases/latest>`_.
|
||||
gen-deb.in (deleted, 76 lines)
|
||||
#!/bin/sh
|
||||
|
||||
if [ ! "@SUNSHINE_UNDEFINED_VARIABLE@" = "" ]; then
|
||||
echo "Please run gen-deb generated by cmake inside the build directory"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
if [ -d package-deb ]; then
|
||||
echo "package-deb already exists: It will be replaced"
|
||||
rm -rf package-deb
|
||||
fi
|
||||
|
||||
export DEBIAN=package-deb/sunshine/DEBIAN
|
||||
export RULES=package-deb/sunshine/etc/udev/rules.d
|
||||
export BIN=package-deb/sunshine/usr/bin
|
||||
export ASSETS=package-deb/sunshine/etc/sunshine
|
||||
|
||||
mkdir -p $DEBIAN
|
||||
mkdir -p $RULES
|
||||
mkdir -p $BIN
|
||||
mkdir -p $ASSETS
|
||||
|
||||
if [ ! -f sunshine ]; then
|
||||
echo "Error: Can't find sunshine"
|
||||
exit 1
|
||||
fi
|
||||
|
||||
cat << 'EOF' > $DEBIAN/conffiles
|
||||
/etc/sunshine/sunshine.conf
|
||||
/etc/sunshine/apps_linux.json
|
||||
EOF
|
||||
|
||||
cat << 'EOF' > $DEBIAN/control
|
||||
Package: sunshine
|
||||
Architecture: amd64
|
||||
Maintainer: @loki
|
||||
Priority: optional
|
||||
Version: 0.2.1
|
||||
Depends: libssl1.1, libavdevice58, libboost-thread1.71.0, libboost-filesystem1.71.0, libboost-log1.71.0, libpulse0, libopus0, libxcb-shm0, libxcb-xfixes0
|
||||
Description: Gamestream host for Moonlight
|
||||
EOF
|
||||
|
||||
cat << 'EOF' > $DEBIAN/postinst
|
||||
#!/bin/sh
|
||||
|
||||
export GROUP_INPUT=input
|
||||
|
||||
if [ -f /etc/group ]; then
|
||||
if ! grep -q $GROUP_INPUT /etc/group; then
|
||||
echo "Creating group $GROUP_INPUT"
|
||||
|
||||
groupadd $GROUP_INPUT
|
||||
fi
|
||||
else
|
||||
echo "Warning: /etc/group not found"
|
||||
fi
|
||||
EOF
|
||||
|
||||
cat << 'EOF' > $RULES/85-sunshine-rules.rules
|
||||
KERNEL=="uinput", GROUP="input", MODE="0660"
|
||||
EOF
|
||||
|
||||
cp sunshine $BIN/sunshine
|
||||
cp @CMAKE_CURRENT_SOURCE_DIR@/assets/apps_linux.json $ASSETS/apps_linux.json
|
||||
cp @CMAKE_CURRENT_SOURCE_DIR@/assets/sunshine.conf $ASSETS/sunshine.conf
|
||||
|
||||
chmod 755 $DEBIAN/postinst
|
||||
chmod 755 $BIN/sunshine
|
||||
chmod 644 $RULES/85-sunshine-rules.rules
|
||||
|
||||
cd package-deb
|
||||
if fakeroot dpkg-deb --build sunshine; then
|
||||
echo "generated debian package: @CMAKE_CURRENT_BINARY_DIR@/package-deb/sunshine.deb"
|
||||
fi
|
||||
cd ..
|
||||
|
||||
Submodule moonlight-common-c deleted from cfeb0ffd90
package.json (new file, 7 lines)
|
||||
{
|
||||
"dependencies": {
|
||||
"@fortawesome/fontawesome-free": "6.2.1",
|
||||
"bootstrap": "5.2.3",
|
||||
"vue": "2.6.12"
|
||||
}
|
||||
}
|
||||
packaging/linux/AppImage/AppRun (new file, 110 lines)
|
||||
#!/bin/bash
|
||||
|
||||
# custom AppRun for Sunshine AppImage
|
||||
|
||||
# path of the extracted AppRun
|
||||
HERE="$(dirname "$(readlink -f "${0}")")"
|
||||
SUNSHINE_PATH=/usr/bin/sunshine
|
||||
SUNSHINE_BIN_HERE=$HERE/usr/bin/sunshine
|
||||
SUNSHINE_SHARE_HERE=$HERE/usr/share/sunshine
|
||||
|
||||
# Set ARGV0 when running directly from the AppDir (i.e. not launched via the AppImage runtime):
|
||||
if [ -z "$APPDIR" ]; then
|
||||
ARGV0="AppRun"
|
||||
fi
|
||||
|
||||
cd "$HERE" || exit 1
|
||||
|
||||
function help() {
|
||||
echo "
|
||||
------------------------------
|
||||
Sunshine AppImage package.
|
||||
------------------------------
|
||||
|
||||
sunshine.AppImage options
|
||||
------------------------
|
||||
|
||||
Usage: $ARGV0 --help, -h
|
||||
------ # This message
|
||||
|
||||
$ARGV0 --install, -i
|
||||
# Install input rules sunshine.service files. Restart required.
|
||||
|
||||
$ARGV0 --remove, -r
|
||||
# Remove input rules sunshine.service files.
|
||||
|
||||
$ARGV0 --appimage-help
|
||||
# Show available AppImage options
|
||||
|
||||
sunshine options
|
||||
----------------
|
||||
"
|
||||
# print sunshine binary help, replacing the sunshine command in usage statement
|
||||
"$SUNSHINE_BIN_HERE" --help | sed -e "s#$SUNSHINE_BIN_HERE#$ARGV0#g"
|
||||
}
|
||||
|
||||
function install() {
|
||||
# user input rules
|
||||
sudo usermod -a -G input $USER
|
||||
# shellcheck disable=SC2002
|
||||
cat "$SUNSHINE_SHARE_HERE/udev/rules.d/85-sunshine.rules" | sudo tee /etc/udev/85-sunshine.rules
|
||||
|
||||
# sunshine service
|
||||
mkdir -p ~/.config/systemd/user
|
||||
cp -r "$SUNSHINE_SHARE_HERE/systemd/user/" ~/.config/systemd/
|
||||
# patch service executable path
|
||||
sed -i -e "s#$SUNSHINE_PATH#$(readlink -f $ARGV0)#g" ~/.config/systemd/user/sunshine.service
|
||||
|
||||
# setcap
|
||||
sudo setcap cap_sys_admin+p "$(readlink -f "$SUNSHINE_BIN_HERE")"
|
||||
|
||||
while true
|
||||
do
|
||||
read -r -p "This installation requires a reboot. Do you want to reboot NOW? [y/n] " input
|
||||
|
||||
case $input in
|
||||
[yY][eE][sS]|[yY])
|
||||
echo "Yes"
|
||||
sudo reboot now
|
||||
;;
|
||||
[nN][oO]|[nN])
|
||||
echo "No"
|
||||
break
|
||||
;;
|
||||
*)
|
||||
echo "Invalid input..."
|
||||
;;
|
||||
esac
|
||||
done
|
||||
}
|
||||
|
||||
function remove() {
|
||||
# remove input rules
|
||||
sudo rm -f /etc/udev/rules.d/85-sunshine.rules
|
||||
|
||||
# remove service
|
||||
sudo rm -f ~/.config/systemd/user/sunshine.service
|
||||
}
|
||||
|
||||
# process arguments
|
||||
if [ "x$1" == "xhelp" ] || [ "x$1" == "x--help" ] || [ "x$1" == "x-h" ] ; then
|
||||
help
|
||||
exit $?
|
||||
fi
|
||||
|
||||
if [ "x$1" == "xinstall" ] || [ "x$1" == "x--install" ] || [ "x$1" == "x-i" ] ; then
|
||||
install
|
||||
exit $?
|
||||
fi
|
||||
|
||||
if [ "x$1" == "xremove" ] || [ "x$1" == "x--remove" ] || [ "x$1" == "x-r" ] ; then
|
||||
remove
|
||||
exit $?
|
||||
fi
|
||||
|
||||
# create config directory if it doesn't exist
|
||||
# https://github.com/LizardByte/Sunshine/issues/324
|
||||
mkdir -p ~/.config/sunshine
|
||||
|
||||
# run sunshine
|
||||
"$SUNSHINE_BIN_HERE" $@
|
||||
packaging/linux/aur/PKGBUILD (new file, 50 lines)
|
||||
# Edit on github: https://github.com/LizardByte/Sunshine/tree/nightly/packaging/linux/aur/PKGBUILD
|
||||
# Reference: https://wiki.archlinux.org/title/PKGBUILD
|
||||
|
||||
pkgname=@SUNSHINE_AUR_PKG@
|
||||
pkgver=@PROJECT_VERSION@@SUNSHINE_SUB_VERSION@
|
||||
pkgrel=1
|
||||
pkgdesc="@PROJECT_DESCRIPTION@"
|
||||
arch=('x86_64' 'i686')
|
||||
url=@PROJECT_HOMEPAGE_URL@
|
||||
license=('GPL3')
|
||||
|
||||
depends=('avahi' 'boost-libs' 'curl' 'libevdev' 'libmfx' 'libpulse' 'libva' 'libvdpau' 'libx11' 'libxcb' 'libxfixes' 'libxrandr' 'libxtst' 'numactl' 'openssl' 'opus' 'udev')
|
||||
makedepends=('boost' 'cmake' 'git' 'make' 'nodejs' 'npm')
|
||||
optdepends=('cuda: NvFBC capture support'
|
||||
'libcap'
|
||||
'libdrm')
|
||||
|
||||
provides=(@SUNSHINE_AUR_PROVIDES@)
|
||||
conflicts=(@SUNSHINE_AUR_CONFLICTS@)
|
||||
|
||||
source=("$pkgname::git+@GITHUB_CLONE_URL@#commit=@GITHUB_COMMIT@")
|
||||
sha256sums=('SKIP')
|
||||
|
||||
prepare() {
|
||||
cd "$pkgname"
|
||||
git submodule update --recursive --init
|
||||
}
|
||||
|
||||
build() {
|
||||
pushd "$pkgname"
|
||||
npm install
|
||||
popd
|
||||
|
||||
export CFLAGS="${CFLAGS/-Werror=format-security/}"
|
||||
export CXXFLAGS="${CXXFLAGS/-Werror=format-security/}"
|
||||
|
||||
cmake \
|
||||
-S "$pkgname" \
|
||||
-B build \
|
||||
-Wno-dev \
|
||||
-D CMAKE_INSTALL_PREFIX=/usr \
|
||||
-D SUNSHINE_EXECUTABLE_PATH=/usr/bin/sunshine \
|
||||
-D SUNSHINE_ASSETS_DIR="share/sunshine"
|
||||
|
||||
make -C build
|
||||
}
|
||||
|
||||
package() {
|
||||
make -C build install DESTDIR="$pkgdir"
|
||||
}
|
||||
packaging/linux/flatpak/dev.lizardbyte.sunshine.yml (new file, 243 lines)
|
||||
---
|
||||
app-id: dev.lizardbyte.sunshine
|
||||
runtime: org.freedesktop.Platform
|
||||
runtime-version: "21.08"
|
||||
sdk: org.freedesktop.Sdk
|
||||
sdk-extensions:
|
||||
- org.freedesktop.Sdk.Extension.node18
|
||||
command: sunshine
|
||||
separate-locales: false
|
||||
finish-args:
|
||||
- --device=all # access all devices
|
||||
- --env=PULSE_PROP_media.category=Manager # allow sunshine to manage audio sinks
|
||||
- --filesystem=home # need to save files in user's home directory
|
||||
- --share=ipc # required for X11 shared memory extension
|
||||
- --share=network # access network
|
||||
- --socket=pulseaudio # play sounds using pulseaudio
|
||||
- --socket=wayland # show windows using Wayland
|
||||
- --socket=x11 # show windows using X11
|
||||
- --system-talk-name=org.freedesktop.Avahi # talk to avahi on the system bus
|
||||
- --talk-name=org.freedesktop.Flatpak # talk to flatpak on the session bus
|
||||
|
||||
cleanup:
|
||||
- /include
|
||||
- /lib/cmake
|
||||
- /lib/pkgconfig
|
||||
- /lib/*.la
|
||||
- /lib/*.a
|
||||
- /share/man
|
||||
|
||||
modules:
|
||||
- name: boost
|
||||
disabled: false
|
||||
buildsystem: simple
|
||||
build-commands:
|
||||
- cd tools/build && bison -y -d -o src/engine/jamgram.cpp src/engine/jamgram.y
|
||||
- ./bootstrap.sh --prefix=$FLATPAK_DEST --with-libraries=system,thread,log,program_options || cat bootstrap.log
|
||||
- ./b2 install variant=release link=shared runtime-link=shared cxxflags="$CXXFLAGS" linkflags="$LDFLAGS"
|
||||
-j $FLATPAK_BUILDER_N_JOBS
|
||||
sources:
|
||||
- type: archive
|
||||
url: http://archive.ubuntu.com/ubuntu/pool/main/b/boost1.74/boost1.74_1.74.0.orig.tar.xz
|
||||
sha256: 2467be4af625b5ae4b3c93fc7af196a09eba39c11a7338cd9e8b356fa44d2f45
|
||||
- type: archive
|
||||
url: http://archive.ubuntu.com/ubuntu/pool/main/b/boost1.74/boost1.74_1.74.0-17ubuntu1.debian.tar.xz
|
||||
sha256: 22e623d98c84eb3fec57e19ea371157a5bc8225ba4c5907f7e5155072317a31d
|
||||
- type: shell
|
||||
commands:
|
||||
- for n in $(cat patches/series); do if [[ $n != "#"* ]]; then patch -Np1 -i "patches/$n" -d .; fi; done
|
||||
|
||||
- name: avahi
|
||||
disabled: false
|
||||
cleanup:
|
||||
- /bin
|
||||
- /lib/avahi
|
||||
- /share
|
||||
config-opts:
|
||||
- --with-distro=none
|
||||
- --disable-gobject
|
||||
- --disable-introspection
|
||||
- --disable-qt3
|
||||
- --disable-qt4
|
||||
- --disable-qt5
|
||||
- --disable-gtk
|
||||
- --disable-core-docs
|
||||
- --disable-manpages
|
||||
- --disable-libdaemon
|
||||
- --disable-python
|
||||
- --disable-pygobject
|
||||
- --disable-mono
|
||||
- --disable-monodoc
|
||||
- --disable-autoipd
|
||||
- --disable-doxygen-doc
|
||||
- --disable-doxygen-dot
|
||||
- --disable-doxygen-xml
|
||||
- --disable-doxygen-html
|
||||
- --disable-manpages
|
||||
- --disable-xmltoman
|
||||
- --disable-libevent
|
||||
sources:
|
||||
- type: archive
|
||||
url: http://archive.ubuntu.com/ubuntu/pool/main/a/avahi/avahi_0.8.orig.tar.gz
|
||||
sha256: 060309d7a333d38d951bc27598c677af1796934dbd98e1024e7ad8de798fedda
|
||||
- type: archive
|
||||
url: http://archive.ubuntu.com/ubuntu/pool/main/a/avahi/avahi_0.8-6ubuntu1.debian.tar.xz
|
||||
sha256: ebf1dfe5e853b6bc6843e3bd784cb6af632041f305abd0e5415114f80c1dcea4
|
||||
- type: shell
|
||||
commands:
|
||||
- for n in $(cat patches/series); do if [[ $n != "#"* ]]; then patch -Np1 -i "patches/$n" -d .; fi; done
|
||||
- autoreconf -ivf
|
||||
|
||||
- name: libevdev
|
||||
disabled: false
|
||||
buildsystem: meson
|
||||
config-opts:
|
||||
- -Ddocumentation=disabled
|
||||
- -Dtests=disabled
|
||||
cleanup:
|
||||
- /bin
|
||||
sources:
|
||||
- type: archive
|
||||
url: http://archive.ubuntu.com/ubuntu/pool/main/libe/libevdev/libevdev_1.13.0+dfsg.orig.tar.xz
|
||||
sha256: a882e13ef1dd6bd227318080cabf60fe5af3c06471259d3acfc9dbfb202351a7
|
||||
- type: archive
|
||||
url: http://archive.ubuntu.com/ubuntu/pool/main/libe/libevdev/libevdev_1.13.0+dfsg-1.debian.tar.xz
|
||||
sha256: d33c56acbbfff2dc540e45c57a38d92210b5e7fd0947ac47fbe48183468aad74
|
||||
- type: shell
|
||||
commands:
|
||||
- for n in $(cat patches/series); do if [[ $n != "#"* ]]; then patch -Np1 -i "patches/$n" -d .; fi; done
|
||||
|
||||
- name: intel-mediasdk
|
||||
disabled: false
|
||||
buildsystem: cmake
|
||||
config-opts:
|
||||
- -DENABLE_OPENCL=ON
|
||||
- -DENABLE_X11_DRI3=ON
|
||||
- -DENABLE_WAYLAND=ON
|
||||
- -DENABLE_ITT=OFF
|
||||
- -DENABLE_TEXTLOG=OFF
|
||||
- -DENABLE_STAT=OFF
|
||||
- -DBUILD_ALL=OFF
|
||||
- -DBUILD_RUNTIME=ON
|
||||
- -DBUILD_SAMPLES=OFF
|
||||
- -DBUILD_TESTS=OFF
|
||||
- -DBUILD_TOOLS=OFF
|
||||
- -DUSE_SYSTEM_GTEST=OFF
|
||||
- -DMFX_ENABLE_KERNELS=ON
|
||||
only-arches:
|
||||
- x86_64
|
||||
sources:
|
||||
- type: archive
|
||||
url: http://archive.ubuntu.com/ubuntu/pool/universe/i/intel-mediasdk/intel-mediasdk_22.3.0.orig.tar.gz
|
||||
sha256: e1e74229f409e969b70c2b35b1955068de3d40db85ecc42bd6ff501468bc76d7
|
||||
- type: archive
|
||||
url: http://archive.ubuntu.com/ubuntu/pool/universe/i/intel-mediasdk/intel-mediasdk_22.3.0-1.debian.tar.xz
|
||||
sha256: 024d98d2f63443d2765a90cfe997d104e7b897694889f199ca8fb4d9ffdcf1dc
|
||||
- type: shell
|
||||
commands:
|
||||
- for n in $(cat patches/series); do if [[ $n != "#"* ]]; then patch -Np1 -i "patches/$n" -d .; fi; done
|
||||
modules:
|
||||
- name: libdrm
|
||||
disabled: false
|
||||
buildsystem: meson
|
||||
sources:
|
||||
- type: archive
|
||||
url: http://archive.ubuntu.com/ubuntu/pool/main/libd/libdrm/libdrm_2.4.110.orig.tar.xz
|
||||
sha256: eecee4c4b47ed6d6ce1a9be3d6d92102548ea35e442282216d47d05293cf9737
|
||||
- type: archive
|
||||
url: http://archive.ubuntu.com/ubuntu/pool/main/libd/libdrm/libdrm_2.4.110-1ubuntu1.debian.tar.xz
|
||||
sha256: 464b9553861f39beddfaee6b8924734b02a0febfae3968e4ca1360f2972bba8b
|
||||
- type: shell
|
||||
commands:
|
||||
- for n in $(cat patches/series); do if [[ $n != "#"* ]]; then patch -Np1 -i "patches/$n" -d .; fi; done
|
||||
|
||||
- name: numactl
|
||||
buildsystem: autotools
|
||||
make-args:
|
||||
- install
|
||||
sources:
|
||||
- type: archive
|
||||
url: http://archive.ubuntu.com/ubuntu/pool/main/n/numactl/numactl_2.0.14.orig.tar.gz
|
||||
sha256: 1ee27abd07ff6ba140aaf9bc6379b37825e54496e01d6f7343330cf1a4487035
|
||||
- type: archive
|
||||
url: http://archive.ubuntu.com/ubuntu/pool/main/n/numactl/numactl_2.0.14-3ubuntu2.debian.tar.xz
|
||||
sha256: 49089e5be5367f6367f8b0389d1d523944432607783b53f0605705792e1015ee
|
||||
- type: shell
|
||||
commands:
|
||||
- for n in $(cat patches/series); do if [[ $n != "#"* ]]; then patch -Np1 -i "patches/$n" -d .; fi; done
|
||||
cleanup:
|
||||
- "/bin"
|
||||
|
||||
# Caching is configured until here, not including CUDA
|
||||
- name: cuda
|
||||
disabled: false
|
||||
buildsystem: simple
|
||||
only-arches:
|
||||
- x86_64
|
||||
- aarch64
|
||||
cleanup:
|
||||
- "*"
|
||||
build-commands:
|
||||
- chmod u+x ./cuda.run
|
||||
- ./cuda.run --silent --toolkit --toolkitpath=$FLATPAK_DEST/cuda --no-opengl-libs --no-man-page --no-drm
|
||||
--tmpdir=$FLATPAK_BUILDER_BUILDDIR
|
||||
- rm -r $FLATPAK_DEST/cuda/nsight-systems-*
|
||||
- rm ./cuda.run
|
||||
sources:
|
||||
- type: file
|
||||
only-arches:
|
||||
- x86_64
|
||||
url: https://developer.download.nvidia.com/compute/cuda/11.8.0/local_installers/cuda_11.8.0_520.61.05_linux.run
|
||||
sha256: 9223c4af3aebe4a7bbed9abd9b163b03a1b34b855fbc2b4a0d1b706ac09a5a16
|
||||
dest-filename: cuda.run
|
||||
- type: file
|
||||
only-arches:
|
||||
- aarch64
|
||||
url: https://developer.download.nvidia.com/compute/cuda/11.8.0/local_installers/cuda_11.8.0_520.61.05_linux_sbsa.run # yamllint disable-line rule:line-length
|
||||
sha256: e6e9a8d31163c9776b5e313fd7590877c5684e1ecddee741154f95704d4ed27c
|
||||
dest-filename: cuda.run
|
||||
|
||||
- name: sunshine
|
||||
disabled: false
|
||||
buildsystem: cmake
|
||||
no-make-install: false
|
||||
builddir: true
|
||||
build-options:
|
||||
append-path: /usr/lib/sdk/node18/bin
|
||||
build-args:
|
||||
- --share=network
|
||||
cxxflags: -I${C_INCLUDE_PATH}/libevdev-1.0
|
||||
env:
|
||||
npm_config_nodedir: /usr/lib/sdk/node18
|
||||
NPM_CONFIG_LOGLEVEL: info
|
||||
build-commands:
|
||||
# Install npm dependencies
|
||||
- cd ${FLATPAK_BUILDER_BUILDDIR} && npm install
|
||||
config-opts:
|
||||
- -DCMAKE_BUILD_TYPE=Release
|
||||
- -DCMAKE_INSTALL_PREFIX=/app
|
||||
- -DCMAKE_CUDA_COMPILER=/app/cuda/bin/nvcc
|
||||
- -DSUNSHINE_ASSETS_DIR=share/sunshine
|
||||
- -DSUNSHINE_EXECUTABLE_PATH=/app/bin/sunshine
|
||||
- -DSUNSHINE_ENABLE_WAYLAND=ON
|
||||
- -DSUNSHINE_ENABLE_X11=ON
|
||||
- -DSUNSHINE_ENABLE_DRM=ON
|
||||
- -DSUNSHINE_ENABLE_CUDA=ON
|
||||
- -DSUNSHINE_CONFIGURE_FLATPAK=ON
|
||||
sources:
|
||||
- type: git
|
||||
url: "@GITHUB_CLONE_URL@"
|
||||
branch: "@GITHUB_BRANCH@"
|
||||
commit: "@GITHUB_COMMIT@"
|
||||
post-install:
|
||||
# use `sed` to update apps.json with prefixes required for flatpak
|
||||
# -r (regex)
|
||||
# -z (handle new lines) https://linuxhint.com/sed-replace-newline-with-space
|
||||
# `/gm` global and multiline
|
||||
- sed -r -z -i -e
|
||||
's/("((do)|(undo)|(cmd)|(detached))"\s*:\s*\[?\n*\s*")(.*")/\1flatpak-spawn --host \7/gm'
|
||||
/app/share/sunshine/apps.json
|
||||
- sed -i
|
||||
's%/app/bin/sunshine%flatpak run dev.lizardbyte.sunshine\nExecStop=flatpak kill dev.lizardbyte.sunshine%g'
|
||||
/app/share/sunshine/systemd/user/sunshine.service
|
||||
- install -D $FLATPAK_BUILDER_BUILDDIR/packaging/linux/flatpak/scripts/* /app/bin
|
||||
packaging/linux/flatpak/scripts/additional-install.sh (new file, 13 lines)
|
||||
#!/bin/sh
|
||||
|
||||
# User Service
|
||||
mkdir -p ~/.config/systemd/user
|
||||
cp /app/share/sunshine/systemd/user/sunshine.service $HOME/.config/systemd/user/sunshine.service
|
||||
echo Sunshine User Service has been installed.
|
||||
echo Use [systemctl --user enable sunshine] once to autostart Sunshine on login.
|
||||
|
||||
# Udev rule and input group
|
||||
UDEV=$(cat /app/share/sunshine/udev/rules.d/85-sunshine.rules)
|
||||
echo Configuring mouse permission.
|
||||
flatpak-spawn --host pkexec sh -c "usermod -a -G input $USER && echo '$UDEV' > /etc/udev/rules.d/85-sunshine.rules"
|
||||
echo Restart computer for mouse permission to take effect.
|
||||
packaging/linux/flatpak/scripts/remove-additional-install.sh (new file, 11 lines)
|
||||
#!/bin/sh
|
||||
|
||||
# User Service
|
||||
systemctl --user stop sunshine
|
||||
rm $HOME/.config/systemd/user/sunshine.service
|
||||
systemctl --user daemon-reload
|
||||
echo Sunshine User Service has been removed.
|
||||
|
||||
# Udev rule and input group
|
||||
flatpak-spawn --host pkexec sh -c "gpasswd -d $USER input && rm /etc/udev/rules.d/85-sunshine.rules"
|
||||
echo Mouse permission removed. Restart computer to take effect.
|
||||
packaging/linux/sunshine.desktop (new file, 12 lines)
|
||||
[Desktop Entry]
|
||||
Type=Application
|
||||
Name=@PROJECT_NAME@
|
||||
Exec=sunshine
|
||||
Version=1.0
|
||||
Comment=@PROJECT_DESCRIPTION@
|
||||
Icon=sunshine
|
||||
Categories=Utility;
|
||||
Terminal=true
|
||||
X-AppImage-Name=sunshine
|
||||
X-AppImage-Version=@PROJECT_VERSION@
|
||||
X-AppImage-Arch=x86_64
|
||||
packaging/macos/Portfile (new file, 65 lines)
|
||||
# -*- coding: utf-8; mode: tcl; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- vim:fenc=utf-8:ft=tcl:et:sw=4:ts=4:sts=4
|
||||
|
||||
# initial PR into macports: https://github.com/macports/macports-ports/pull/15143
|
||||
|
||||
PortSystem 1.0
|
||||
PortGroup cmake 1.1
|
||||
PortGroup github 1.0
|
||||
PortGroup boost 1.0
|
||||
|
||||
name @PROJECT_NAME@
|
||||
version @PROJECT_VERSION@
|
||||
revision 0
|
||||
categories multimedia emulators games
|
||||
platforms darwin
|
||||
license GPL-3
|
||||
maintainers @LizardByte
|
||||
description @PROJECT_DESCRIPTION@
|
||||
|
||||
# long_description will not be split into multiple lines as it's configured by CMakeLists
|
||||
long_description @PROJECT_LONG_DESCRIPTION@
|
||||
homepage @PROJECT_HOMEPAGE_URL@
|
||||
master_sites https://github.com/lizardbyte/sunshine/releases
|
||||
|
||||
compiler.cxx_standard 2017
|
||||
fetch.type git
|
||||
|
||||
git.url @GITHUB_CLONE_URL@
|
||||
git.branch @GITHUB_COMMIT@
|
||||
|
||||
post-fetch {
|
||||
system -W ${worksrcpath} "${git.cmd} submodule update --init --recursive"
|
||||
}
|
||||
|
||||
depends_lib port:avahi \
|
||||
port:curl \
|
||||
port:libopus \
|
||||
port:npm9 \
|
||||
port:pkgconfig
|
||||
|
||||
boost.version 1.80
|
||||
|
||||
configure.args -DCMAKE_INSTALL_PREFIX=${prefix} \
|
||||
-DSUNSHINE_ASSETS_DIR=etc/sunshine/assets
|
||||
|
||||
startupitem.create yes
|
||||
startupitem.executable "${prefix}/bin/${name}"
|
||||
startupitem.location LaunchDaemons
|
||||
startupitem.name ${name}
|
||||
startupitem.netchange yes
|
||||
|
||||
platform darwin {
|
||||
if { ${os.major} < 20 } {
|
||||
# See: https://github.com/LizardByte/Sunshine/discussions/117#discussioncomment-2513494
|
||||
notes-append "Port is limited to software encoding, when used with macOS releases prior to Big Sur."
|
||||
}
|
||||
}
|
||||
|
||||
pre-build {
|
||||
system -W ${worksrcpath} "npm install"
|
||||
}
|
||||
|
||||
notes-append "Run @PROJECT_NAME@ by executing 'sunshine <path to user config>', e.g. 'sunshine ~/sunshine.conf' "
|
||||
notes-append "The config file will be created if it doesn't exist."
|
||||
notes-append "It is recommended to set a location for the apps file in the config."
|
||||
notes-append "See our documentation at 'https://docs.lizardbyte.dev/projects/sunshine/en/v@PROJECT_VERSION@/' for further info."
|
||||
Submodule pre-compiled deleted from afd9a9bbfc
scripts/_locale.py (new file, 170 lines)
|
||||
"""
|
||||
..
|
||||
_locale.py
|
||||
|
||||
Functions related to building, initializing, updating, and compiling localization translations.
|
||||
|
||||
Borrowed from RetroArcher.
|
||||
"""
|
||||
# standard imports
|
||||
import argparse
|
||||
import datetime
|
||||
import os
|
||||
import subprocess
|
||||
|
||||
project_name = 'Sunshine'
|
||||
project_owner = 'LizardByte'
|
||||
|
||||
script_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
root_dir = os.path.dirname(script_dir)
|
||||
locale_dir = os.path.join(root_dir, 'locale')
|
||||
project_dir = os.path.join(root_dir, 'src')
|
||||
|
||||
year = datetime.datetime.now().year
|
||||
|
||||
# retroarcher target locales
|
||||
target_locales = [
|
||||
'de', # Deutsch
|
||||
'en', # English
|
||||
'en_GB', # English (United Kingdom)
|
||||
'en_US', # English (United States)
|
||||
'es', # español
|
||||
'fr', # français
|
||||
'it', # italiano
|
||||
'ru', # русский
|
||||
]
|
||||
|
||||
|
||||
def x_extract():
|
||||
"""Executes `xgettext extraction` in subprocess."""
|
||||
|
||||
pot_filepath = os.path.join(locale_dir, f'{project_name.lower()}.po')
|
||||
|
||||
commands = [
|
||||
'xgettext',
|
||||
'--keyword=translate:1,1t',
|
||||
'--keyword=translate:1c,2,2t',
|
||||
'--keyword=translate:1,2,3t',
|
||||
'--keyword=translate:1c,2,3,4t',
|
||||
'--keyword=gettext:1',
|
||||
'--keyword=pgettext:1c,2',
|
||||
'--keyword=ngettext:1,2',
|
||||
'--keyword=npgettext:1c,2,3',
|
||||
f'--default-domain={project_name.lower()}',
|
||||
f'--output={pot_filepath}',
|
||||
'--language=C++',
|
||||
'--boost',
|
||||
'--from-code=utf-8',
|
||||
'-F',
|
||||
f'--msgid-bugs-address=github.com/{project_owner.lower()}/{project_name.lower()}',
|
||||
f'--copyright-holder={project_owner}',
|
||||
f'--package-name={project_name}',
|
||||
'--package-version=v0'
|
||||
]
|
||||
|
||||
extensions = ['cpp', 'h', 'm', 'mm']
|
||||
|
||||
# find input files
|
||||
for root, dirs, files in os.walk(project_dir, topdown=True):
|
||||
for name in files:
|
||||
filename = os.path.join(root, name)
|
||||
extension = filename.rsplit('.', 1)[-1]
|
||||
if extension in extensions: # append input files
|
||||
commands.append(filename)
|
||||
|
||||
print(commands)
|
||||
subprocess.check_output(args=commands, cwd=root_dir)
|
||||
|
||||
try:
|
||||
# fix header
|
||||
body = ""
|
||||
with open(file=pot_filepath, mode='r') as file:
|
||||
for line in file.readlines():
|
||||
if line != '"Language: \\n"\n': # do not include this line
|
||||
if line == '# SOME DESCRIPTIVE TITLE.\n':
|
||||
body += f'# Translations template for {project_name}.\n'
|
||||
elif line.startswith('#') and 'YEAR' in line:
|
||||
body += line.replace('YEAR', str(year))
|
||||
elif line.startswith('#') and 'PACKAGE' in line:
|
||||
body += line.replace('PACKAGE', project_name)
|
||||
else:
|
||||
body += line
|
||||
|
||||
# rewrite pot file with updated header
|
||||
with open(file=pot_filepath, mode='w+') as file:
|
||||
file.write(body)
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
|
||||
|
||||
def babel_init(locale_code: str):
|
||||
"""Executes `pybabel init` in subprocess.
|
||||
|
||||
:param locale_code: str - locale code
|
||||
"""
|
||||
commands = [
|
||||
'pybabel',
|
||||
'init',
|
||||
'-i', os.path.join(locale_dir, f'{project_name.lower()}.po'),
|
||||
'-d', locale_dir,
|
||||
'-D', project_name.lower(),
|
||||
'-l', locale_code
|
||||
]
|
||||
|
||||
print(commands)
|
||||
subprocess.check_output(args=commands, cwd=root_dir)
|
||||
|
||||
|
||||
def babel_update():
|
||||
"""Executes `pybabel update` in subprocess."""
|
||||
commands = [
|
||||
'pybabel',
|
||||
'update',
|
||||
'-i', os.path.join(locale_dir, f'{project_name.lower()}.po'),
|
||||
'-d', locale_dir,
|
||||
'-D', project_name.lower(),
|
||||
'--update-header-comment'
|
||||
]
|
||||
|
||||
print(commands)
|
||||
subprocess.check_output(args=commands, cwd=root_dir)
|
||||
|
||||
|
||||
def babel_compile():
|
||||
"""Executes `pybabel compile` in subprocess."""
|
||||
commands = [
|
||||
'pybabel',
|
||||
'compile',
|
||||
'-d', locale_dir,
|
||||
'-D', project_name.lower()
|
||||
]
|
||||
|
||||
print(commands)
|
||||
subprocess.check_output(args=commands, cwd=root_dir)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
# Set up and gather command line arguments
|
||||
parser = argparse.ArgumentParser(
|
||||
description='Script helps update locale translations. Translations must be done manually.')
|
||||
|
||||
parser.add_argument('--extract', action='store_true', help='Extract messages from c++ files.')
|
||||
parser.add_argument('--init', action='store_true', help='Initialize any new locales specified in target locales.')
|
||||
parser.add_argument('--update', action='store_true', help='Update existing locales.')
|
||||
parser.add_argument('--compile', action='store_true', help='Compile translated locales.')
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
if args.extract:
|
||||
x_extract()
|
||||
|
||||
if args.init:
|
||||
for locale_id in target_locales:
|
||||
if not os.path.isdir(os.path.join(locale_dir, locale_id)):
|
||||
babel_init(locale_code=locale_id)
|
||||
|
||||
if args.update:
|
||||
babel_update()
|
||||
|
||||
if args.compile:
|
||||
babel_compile()
|
||||
1
scripts/requirements.txt
Normal file
@@ -0,0 +1 @@
Babel==2.11.0
281
src/audio.cpp
Normal file
@@ -0,0 +1,281 @@
|
||||
#include <thread>
|
||||
|
||||
#include <opus/opus_multistream.h>
|
||||
|
||||
#include "platform/common.h"
|
||||
|
||||
#include "audio.h"
|
||||
#include "config.h"
|
||||
#include "main.h"
|
||||
#include "thread_safe.h"
|
||||
#include "utility.h"
|
||||
|
||||
namespace audio {
|
||||
using namespace std::literals;
|
||||
using opus_t = util::safe_ptr<OpusMSEncoder, opus_multistream_encoder_destroy>;
|
||||
using sample_queue_t = std::shared_ptr<safe::queue_t<std::vector<std::int16_t>>>;
|
||||
|
||||
struct audio_ctx_t {
|
||||
// We want to change the sink for the first stream only
|
||||
std::unique_ptr<std::atomic_bool> sink_flag;
|
||||
|
||||
std::unique_ptr<platf::audio_control_t> control;
|
||||
|
||||
bool restore_sink;
|
||||
platf::sink_t sink;
|
||||
};
|
||||
|
||||
static int start_audio_control(audio_ctx_t &ctx);
|
||||
static void stop_audio_control(audio_ctx_t &);
|
||||
|
||||
int map_stream(int channels, bool quality);
|
||||
|
||||
constexpr auto SAMPLE_RATE = 48000;
|
||||
|
||||
opus_stream_config_t stream_configs[MAX_STREAM_CONFIG] {
|
||||
{
|
||||
SAMPLE_RATE,
|
||||
2,
|
||||
1,
|
||||
1,
|
||||
platf::speaker::map_stereo,
|
||||
96000,
|
||||
},
|
||||
{
|
||||
SAMPLE_RATE,
|
||||
2,
|
||||
1,
|
||||
1,
|
||||
platf::speaker::map_stereo,
|
||||
512000,
|
||||
},
|
||||
{
|
||||
SAMPLE_RATE,
|
||||
6,
|
||||
4,
|
||||
2,
|
||||
platf::speaker::map_surround51,
|
||||
256000,
|
||||
},
|
||||
{
|
||||
SAMPLE_RATE,
|
||||
6,
|
||||
6,
|
||||
0,
|
||||
platf::speaker::map_surround51,
|
||||
1536000,
|
||||
},
|
||||
{
|
||||
SAMPLE_RATE,
|
||||
8,
|
||||
5,
|
||||
3,
|
||||
platf::speaker::map_surround71,
|
||||
450000,
|
||||
},
|
||||
{
|
||||
SAMPLE_RATE,
|
||||
8,
|
||||
8,
|
||||
0,
|
||||
platf::speaker::map_surround71,
|
||||
2048000,
|
||||
},
|
||||
};
|
||||
|
||||
auto control_shared = safe::make_shared<audio_ctx_t>(start_audio_control, stop_audio_control);
|
||||
|
||||
void encodeThread(sample_queue_t samples, config_t config, void *channel_data) {
|
||||
auto packets = mail::man->queue<packet_t>(mail::audio_packets);
|
||||
auto stream = &stream_configs[map_stream(config.channels, config.flags[config_t::HIGH_QUALITY])];
|
||||
|
||||
// Encoding takes place on this thread
|
||||
platf::adjust_thread_priority(platf::thread_priority_e::high);
|
||||
|
||||
opus_t opus { opus_multistream_encoder_create(
|
||||
stream->sampleRate,
|
||||
stream->channelCount,
|
||||
stream->streams,
|
||||
stream->coupledStreams,
|
||||
stream->mapping,
|
||||
OPUS_APPLICATION_RESTRICTED_LOWDELAY,
|
||||
nullptr) };
|
||||
|
||||
opus_multistream_encoder_ctl(opus.get(), OPUS_SET_BITRATE(stream->bitrate));
|
||||
opus_multistream_encoder_ctl(opus.get(), OPUS_SET_VBR(0));
|
||||
|
||||
auto frame_size = config.packetDuration * stream->sampleRate / 1000;
|
||||
while(auto sample = samples->pop()) {
|
||||
buffer_t packet { 1400 };
|
||||
|
||||
int bytes = opus_multistream_encode(opus.get(), sample->data(), frame_size, std::begin(packet), packet.size());
|
||||
if(bytes < 0) {
|
||||
BOOST_LOG(error) << "Couldn't encode audio: "sv << opus_strerror(bytes);
|
||||
packets->stop();
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
packet.fake_resize(bytes);
|
||||
packets->raise(channel_data, std::move(packet));
|
||||
}
|
||||
}
|
||||
|
||||
void capture(safe::mail_t mail, config_t config, void *channel_data) {
|
||||
auto shutdown_event = mail->event<bool>(mail::shutdown);
|
||||
auto stream = &stream_configs[map_stream(config.channels, config.flags[config_t::HIGH_QUALITY])];
|
||||
|
||||
auto ref = control_shared.ref();
|
||||
if(!ref) {
|
||||
return;
|
||||
}
|
||||
|
||||
auto &control = ref->control;
|
||||
if(!control) {
|
||||
shutdown_event->view();
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
// Order of priority:
|
||||
// 1. Config
|
||||
// 2. Virtual if available
|
||||
// 3. Host
|
||||
std::string *sink = &ref->sink.host;
|
||||
if(!config::audio.sink.empty()) {
|
||||
sink = &config::audio.sink;
|
||||
}
|
||||
else if(ref->sink.null) {
|
||||
auto &null = *ref->sink.null;
|
||||
switch(stream->channelCount) {
|
||||
case 2:
|
||||
sink = &null.stereo;
|
||||
break;
|
||||
case 6:
|
||||
sink = &null.surround51;
|
||||
break;
|
||||
case 8:
|
||||
sink = &null.surround71;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Only the first to start a session may change the default sink
|
||||
if(!ref->sink_flag->exchange(true, std::memory_order_acquire)) {
|
||||
ref->restore_sink = !config.flags[config_t::HOST_AUDIO];
|
||||
|
||||
// If the sink is empty (the host has no sink!), definitely switch to the virtual sink.
|
||||
if(ref->sink.host.empty()) {
|
||||
if(control->set_sink(*sink)) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
// If the client requests audio on the host, don't change the default sink
|
||||
else if(!config.flags[config_t::HOST_AUDIO] && control->set_sink(*sink)) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Capture takes place on this thread
|
||||
platf::adjust_thread_priority(platf::thread_priority_e::critical);
|
||||
|
||||
auto samples = std::make_shared<sample_queue_t::element_type>(30);
|
||||
std::thread thread { encodeThread, samples, config, channel_data };
|
||||
|
||||
auto fg = util::fail_guard([&]() {
|
||||
samples->stop();
|
||||
thread.join();
|
||||
|
||||
shutdown_event->view();
|
||||
});
|
||||
|
||||
auto frame_size = config.packetDuration * stream->sampleRate / 1000;
|
||||
int samples_per_frame = frame_size * stream->channelCount;
|
||||
|
||||
auto mic = control->microphone(stream->mapping, stream->channelCount, stream->sampleRate, frame_size);
|
||||
if(!mic) {
|
||||
BOOST_LOG(error) << "Couldn't create audio input"sv;
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
while(!shutdown_event->peek()) {
|
||||
std::vector<std::int16_t> sample_buffer;
|
||||
sample_buffer.resize(samples_per_frame);
|
||||
|
||||
auto status = mic->sample(sample_buffer);
|
||||
switch(status) {
|
||||
case platf::capture_e::ok:
|
||||
break;
|
||||
case platf::capture_e::timeout:
|
||||
continue;
|
||||
case platf::capture_e::reinit:
|
||||
mic.reset();
|
||||
mic = control->microphone(stream->mapping, stream->channelCount, stream->sampleRate, frame_size);
|
||||
if(!mic) {
|
||||
BOOST_LOG(error) << "Couldn't re-initialize audio input"sv;
|
||||
|
||||
return;
|
||||
}
|
||||
return;
|
||||
default:
|
||||
return;
|
||||
}
|
||||
|
||||
samples->raise(std::move(sample_buffer));
|
||||
}
|
||||
}
|
||||
|
||||
int map_stream(int channels, bool quality) {
|
||||
int shift = quality ? 1 : 0;
|
||||
switch(channels) {
|
||||
case 2:
|
||||
return STEREO + shift;
|
||||
case 6:
|
||||
return SURROUND51 + shift;
|
||||
case 8:
|
||||
return SURROUND71 + shift;
|
||||
}
|
||||
return STEREO;
|
||||
}
|
||||
|
||||
int start_audio_control(audio_ctx_t &ctx) {
|
||||
auto fg = util::fail_guard([]() {
|
||||
BOOST_LOG(warning) << "There will be no audio"sv;
|
||||
});
|
||||
|
||||
ctx.sink_flag = std::make_unique<std::atomic_bool>(false);
|
||||
|
||||
// The default sink has not been replaced yet.
|
||||
ctx.restore_sink = false;
|
||||
|
||||
if(!(ctx.control = platf::audio_control())) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
auto sink = ctx.control->sink_info();
|
||||
if(!sink) {
|
||||
// Let the calling code know it failed
|
||||
ctx.control.reset();
|
||||
return 0;
|
||||
}
|
||||
|
||||
ctx.sink = std::move(*sink);
|
||||
|
||||
fg.disable();
|
||||
return 0;
|
||||
}
|
||||
|
||||
void stop_audio_control(audio_ctx_t &ctx) {
|
||||
// restore audio-sink if applicable
|
||||
if(!ctx.restore_sink) {
|
||||
return;
|
||||
}
|
||||
|
||||
const std::string &sink = config::audio.sink.empty() ? ctx.sink.host : config::audio.sink;
|
||||
if(!sink.empty()) {
|
||||
// Best effort, it's allowed to fail
|
||||
ctx.control->set_sink(sink);
|
||||
}
|
||||
}
|
||||
} // namespace audio
|
||||
47
src/audio.h
Normal file
@@ -0,0 +1,47 @@
|
||||
#ifndef SUNSHINE_AUDIO_H
|
||||
#define SUNSHINE_AUDIO_H
|
||||
|
||||
#include "thread_safe.h"
|
||||
#include "utility.h"
|
||||
namespace audio {
|
||||
enum stream_config_e : int {
|
||||
STEREO,
|
||||
HIGH_STEREO,
|
||||
SURROUND51,
|
||||
HIGH_SURROUND51,
|
||||
SURROUND71,
|
||||
HIGH_SURROUND71,
|
||||
MAX_STREAM_CONFIG
|
||||
};
|
||||
|
||||
struct opus_stream_config_t {
|
||||
std::int32_t sampleRate;
|
||||
int channelCount;
|
||||
int streams;
|
||||
int coupledStreams;
|
||||
const std::uint8_t *mapping;
|
||||
int bitrate;
|
||||
};
|
||||
|
||||
extern opus_stream_config_t stream_configs[MAX_STREAM_CONFIG];
|
||||
|
||||
struct config_t {
|
||||
enum flags_e : int {
|
||||
HIGH_QUALITY,
|
||||
HOST_AUDIO,
|
||||
MAX_FLAGS
|
||||
};
|
||||
|
||||
int packetDuration;
|
||||
int channels;
|
||||
int mask;
|
||||
|
||||
std::bitset<MAX_FLAGS> flags;
|
||||
};
|
||||
|
||||
using buffer_t = util::buffer_t<std::uint8_t>;
|
||||
using packet_t = std::pair<void *, buffer_t>;
|
||||
void capture(safe::mail_t mail, config_t config, void *channel_data);
|
||||
} // namespace audio
|
||||
|
||||
#endif
|
||||
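The table in src/audio.cpp and the enum in src/audio.h line up one-to-one: map_stream() adds 1 to the base index when the HIGH_QUALITY flag is set. The following is a minimal illustrative sketch, not taken from the repository; the include path and the local re-declaration of map_stream() (which the header does not expose) are assumptions made only for this example.

#include <cstdio>

#include "src/audio.h" // assumed include path for the header shown above

// map_stream() is defined in src/audio.cpp but not declared in the header,
// so it is re-declared here purely for illustration.
namespace audio {
int map_stream(int channels, bool quality);
}

int main() {
  // A 6 channel (5.1) stream with the HIGH_QUALITY flag maps to HIGH_SURROUND51.
  int index = audio::map_stream(6, true);
  auto &cfg = audio::stream_configs[index];

  std::printf("index=%d channels=%d streams=%d coupled=%d bitrate=%d\n",
    index, cfg.channelCount, cfg.streams, cfg.coupledStreams, cfg.bitrate);
  // Expected: index == audio::HIGH_SURROUND51 and cfg.bitrate == 1536000.
  return 0;
}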
300
src/cbs.cpp
Normal file
@@ -0,0 +1,300 @@
|
||||
extern "C" {
|
||||
#include <cbs/cbs_h264.h>
|
||||
#include <cbs/cbs_h265.h>
|
||||
#include <cbs/h264_levels.h>
|
||||
#include <libavcodec/avcodec.h>
|
||||
#include <libavutil/pixdesc.h>
|
||||
}
|
||||
|
||||
#include "cbs.h"
|
||||
#include "main.h"
|
||||
#include "utility.h"
|
||||
|
||||
using namespace std::literals;
|
||||
namespace cbs {
|
||||
void close(CodedBitstreamContext *c) {
|
||||
ff_cbs_close(&c);
|
||||
}
|
||||
|
||||
using ctx_t = util::safe_ptr<CodedBitstreamContext, close>;
|
||||
|
||||
class frag_t : public CodedBitstreamFragment {
|
||||
public:
|
||||
frag_t(frag_t &&o) {
|
||||
std::copy((std::uint8_t *)&o, (std::uint8_t *)(&o + 1), (std::uint8_t *)this);
|
||||
|
||||
o.data = nullptr;
|
||||
o.units = nullptr;
|
||||
};
|
||||
|
||||
frag_t() {
|
||||
std::fill_n((std::uint8_t *)this, sizeof(*this), 0);
|
||||
}
|
||||
|
||||
frag_t &operator=(frag_t &&o) {
|
||||
std::copy((std::uint8_t *)&o, (std::uint8_t *)(&o + 1), (std::uint8_t *)this);
|
||||
|
||||
o.data = nullptr;
|
||||
o.units = nullptr;
|
||||
|
||||
return *this;
|
||||
};
|
||||
|
||||
|
||||
~frag_t() {
|
||||
if(data || units) {
|
||||
ff_cbs_fragment_free(this);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
util::buffer_t<std::uint8_t> write(const cbs::ctx_t &cbs_ctx, std::uint8_t nal, void *uh, AVCodecID codec_id) {
|
||||
cbs::frag_t frag;
|
||||
auto err = ff_cbs_insert_unit_content(&frag, -1, nal, uh, nullptr);
|
||||
if(err < 0) {
|
||||
char err_str[AV_ERROR_MAX_STRING_SIZE] { 0 };
|
||||
BOOST_LOG(error) << "Could not insert NAL unit SPS: "sv << av_make_error_string(err_str, AV_ERROR_MAX_STRING_SIZE, err);
|
||||
|
||||
return {};
|
||||
}
|
||||
|
||||
err = ff_cbs_write_fragment_data(cbs_ctx.get(), &frag);
|
||||
if(err < 0) {
|
||||
char err_str[AV_ERROR_MAX_STRING_SIZE] { 0 };
|
||||
BOOST_LOG(error) << "Could not write fragment data: "sv << av_make_error_string(err_str, AV_ERROR_MAX_STRING_SIZE, err);
|
||||
|
||||
return {};
|
||||
}
|
||||
|
||||
// frag.data_size * 8 - frag.data_bit_padding == bits in fragment
|
||||
util::buffer_t<std::uint8_t> data { frag.data_size };
|
||||
std::copy_n(frag.data, frag.data_size, std::begin(data));
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
util::buffer_t<std::uint8_t> write(std::uint8_t nal, void *uh, AVCodecID codec_id) {
|
||||
cbs::ctx_t cbs_ctx;
|
||||
ff_cbs_init(&cbs_ctx, codec_id, nullptr);
|
||||
|
||||
return write(cbs_ctx, nal, uh, codec_id);
|
||||
}
|
||||
|
||||
util::buffer_t<std::uint8_t> make_sps_h264(const AVCodecContext *ctx) {
|
||||
H264RawSPS sps {};
|
||||
|
||||
/* b_per_p == ctx->max_b_frames for h264 */
|
||||
/* desired_b_depth == avoption("b_depth") == 1 */
|
||||
/* max_b_depth == std::min(av_log2(ctx->b_per_p) + 1, desired_b_depth) ==> 1 */
|
||||
auto max_b_depth = 1;
|
||||
auto dpb_frame = ctx->gop_size == 1 ? 0 : 1 + max_b_depth;
|
||||
auto mb_width = (FFALIGN(ctx->width, 16) / 16) * 16;
|
||||
auto mb_height = (FFALIGN(ctx->height, 16) / 16) * 16;
|
||||
|
||||
|
||||
sps.nal_unit_header.nal_ref_idc = 3;
|
||||
sps.nal_unit_header.nal_unit_type = H264_NAL_SPS;
|
||||
|
||||
sps.profile_idc = FF_PROFILE_H264_HIGH & 0xFF;
|
||||
|
||||
sps.constraint_set1_flag = 1;
|
||||
|
||||
if(ctx->level != FF_LEVEL_UNKNOWN) {
|
||||
sps.level_idc = ctx->level;
|
||||
}
|
||||
else {
|
||||
auto framerate = ctx->framerate;
|
||||
|
||||
auto level = ff_h264_guess_level(
|
||||
sps.profile_idc,
|
||||
ctx->bit_rate,
|
||||
framerate.num / framerate.den,
|
||||
mb_width,
|
||||
mb_height,
|
||||
dpb_frame);
|
||||
|
||||
if(!level) {
|
||||
BOOST_LOG(error) << "Could not guess h264 level"sv;
|
||||
|
||||
return {};
|
||||
}
|
||||
sps.level_idc = level->level_idc;
|
||||
}
|
||||
|
||||
sps.seq_parameter_set_id = 0;
|
||||
sps.chroma_format_idc = 1;
|
||||
|
||||
sps.log2_max_frame_num_minus4 = 3; // 4;
|
||||
sps.pic_order_cnt_type = 0;
|
||||
sps.log2_max_pic_order_cnt_lsb_minus4 = 0; // 4;
|
||||
|
||||
sps.max_num_ref_frames = dpb_frame;
|
||||
|
||||
sps.pic_width_in_mbs_minus1 = mb_width / 16 - 1;
|
||||
sps.pic_height_in_map_units_minus1 = mb_height / 16 - 1;
|
||||
|
||||
sps.frame_mbs_only_flag = 1;
|
||||
sps.direct_8x8_inference_flag = 1;
|
||||
|
||||
if(ctx->width != mb_width || ctx->height != mb_height) {
|
||||
sps.frame_cropping_flag = 1;
|
||||
sps.frame_crop_left_offset = 0;
|
||||
sps.frame_crop_top_offset = 0;
|
||||
sps.frame_crop_right_offset = (mb_width - ctx->width) / 2;
|
||||
sps.frame_crop_bottom_offset = (mb_height - ctx->height) / 2;
|
||||
}
|
||||
|
||||
sps.vui_parameters_present_flag = 1;
|
||||
|
||||
auto &vui = sps.vui;
|
||||
|
||||
vui.video_format = 5;
|
||||
vui.colour_description_present_flag = 1;
|
||||
vui.video_signal_type_present_flag = 1;
|
||||
vui.video_full_range_flag = ctx->color_range == AVCOL_RANGE_JPEG;
|
||||
vui.colour_primaries = ctx->color_primaries;
|
||||
vui.transfer_characteristics = ctx->color_trc;
|
||||
vui.matrix_coefficients = ctx->colorspace;
|
||||
|
||||
vui.low_delay_hrd_flag = 1 - vui.fixed_frame_rate_flag;
|
||||
|
||||
vui.bitstream_restriction_flag = 1;
|
||||
vui.motion_vectors_over_pic_boundaries_flag = 1;
|
||||
vui.log2_max_mv_length_horizontal = 15;
|
||||
vui.log2_max_mv_length_vertical = 15;
|
||||
vui.max_num_reorder_frames = max_b_depth;
|
||||
vui.max_dec_frame_buffering = max_b_depth + 1;
|
||||
|
||||
return write(sps.nal_unit_header.nal_unit_type, (void *)&sps.nal_unit_header, AV_CODEC_ID_H264);
|
||||
}
|
||||
|
||||
hevc_t make_sps_hevc(const AVCodecContext *avctx, const AVPacket *packet) {
|
||||
cbs::ctx_t ctx;
|
||||
if(ff_cbs_init(&ctx, AV_CODEC_ID_H265, nullptr)) {
|
||||
return {};
|
||||
}
|
||||
|
||||
cbs::frag_t frag;
|
||||
|
||||
int err = ff_cbs_read_packet(ctx.get(), &frag, packet);
|
||||
if(err < 0) {
|
||||
char err_str[AV_ERROR_MAX_STRING_SIZE] { 0 };
|
||||
BOOST_LOG(error) << "Couldn't read packet: "sv << av_make_error_string(err_str, AV_ERROR_MAX_STRING_SIZE, err);
|
||||
|
||||
return {};
|
||||
}
|
||||
|
||||
|
||||
auto vps_p = ((CodedBitstreamH265Context *)ctx->priv_data)->active_vps;
|
||||
auto sps_p = ((CodedBitstreamH265Context *)ctx->priv_data)->active_sps;
|
||||
|
||||
H265RawSPS sps { *sps_p };
|
||||
H265RawVPS vps { *vps_p };
|
||||
|
||||
vps.profile_tier_level.general_profile_compatibility_flag[4] = 1;
|
||||
sps.profile_tier_level.general_profile_compatibility_flag[4] = 1;
|
||||
|
||||
auto &vui = sps.vui;
|
||||
std::memset(&vui, 0, sizeof(vui));
|
||||
|
||||
sps.vui_parameters_present_flag = 1;
|
||||
|
||||
// skip sample aspect ratio
|
||||
|
||||
vui.video_format = 5;
|
||||
vui.colour_description_present_flag = 1;
|
||||
vui.video_signal_type_present_flag = 1;
|
||||
vui.video_full_range_flag = avctx->color_range == AVCOL_RANGE_JPEG;
|
||||
vui.colour_primaries = avctx->color_primaries;
|
||||
vui.transfer_characteristics = avctx->color_trc;
|
||||
vui.matrix_coefficients = avctx->colorspace;
|
||||
|
||||
|
||||
vui.vui_timing_info_present_flag = vps.vps_timing_info_present_flag;
|
||||
vui.vui_num_units_in_tick = vps.vps_num_units_in_tick;
|
||||
vui.vui_time_scale = vps.vps_time_scale;
|
||||
vui.vui_poc_proportional_to_timing_flag = vps.vps_poc_proportional_to_timing_flag;
|
||||
vui.vui_num_ticks_poc_diff_one_minus1 = vps.vps_num_ticks_poc_diff_one_minus1;
|
||||
vui.vui_hrd_parameters_present_flag = 0;
|
||||
|
||||
vui.bitstream_restriction_flag = 1;
|
||||
vui.motion_vectors_over_pic_boundaries_flag = 1;
|
||||
vui.restricted_ref_pic_lists_flag = 1;
|
||||
vui.max_bytes_per_pic_denom = 0;
|
||||
vui.max_bits_per_min_cu_denom = 0;
|
||||
vui.log2_max_mv_length_horizontal = 15;
|
||||
vui.log2_max_mv_length_vertical = 15;
|
||||
|
||||
cbs::ctx_t write_ctx;
|
||||
ff_cbs_init(&write_ctx, AV_CODEC_ID_H265, nullptr);
|
||||
|
||||
|
||||
return hevc_t {
|
||||
nal_t {
|
||||
write(write_ctx, vps.nal_unit_header.nal_unit_type, (void *)&vps.nal_unit_header, AV_CODEC_ID_H265),
|
||||
write(ctx, vps_p->nal_unit_header.nal_unit_type, (void *)&vps_p->nal_unit_header, AV_CODEC_ID_H265),
|
||||
},
|
||||
|
||||
nal_t {
|
||||
write(write_ctx, sps.nal_unit_header.nal_unit_type, (void *)&sps.nal_unit_header, AV_CODEC_ID_H265),
|
||||
write(ctx, sps_p->nal_unit_header.nal_unit_type, (void *)&sps_p->nal_unit_header, AV_CODEC_ID_H265),
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
util::buffer_t<std::uint8_t> read_sps_h264(const AVPacket *packet) {
|
||||
cbs::ctx_t ctx;
|
||||
if(ff_cbs_init(&ctx, AV_CODEC_ID_H264, nullptr)) {
|
||||
return {};
|
||||
}
|
||||
|
||||
cbs::frag_t frag;
|
||||
|
||||
int err = ff_cbs_read_packet(ctx.get(), &frag, &*packet);
|
||||
if(err < 0) {
|
||||
char err_str[AV_ERROR_MAX_STRING_SIZE] { 0 };
|
||||
BOOST_LOG(error) << "Couldn't read packet: "sv << av_make_error_string(err_str, AV_ERROR_MAX_STRING_SIZE, err);
|
||||
|
||||
return {};
|
||||
}
|
||||
|
||||
auto h264 = (H264RawNALUnitHeader *)((CodedBitstreamH264Context *)ctx->priv_data)->active_sps;
|
||||
return write(h264->nal_unit_type, (void *)h264, AV_CODEC_ID_H264);
|
||||
}
|
||||
|
||||
h264_t make_sps_h264(const AVCodecContext *ctx, const AVPacket *packet) {
|
||||
return h264_t {
|
||||
make_sps_h264(ctx),
|
||||
read_sps_h264(packet),
|
||||
};
|
||||
}
|
||||
|
||||
bool validate_sps(const AVPacket *packet, int codec_id) {
|
||||
cbs::ctx_t ctx;
|
||||
if(ff_cbs_init(&ctx, (AVCodecID)codec_id, nullptr)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
cbs::frag_t frag;
|
||||
|
||||
int err = ff_cbs_read_packet(ctx.get(), &frag, packet);
|
||||
if(err < 0) {
|
||||
char err_str[AV_ERROR_MAX_STRING_SIZE] { 0 };
|
||||
BOOST_LOG(error) << "Couldn't read packet: "sv << av_make_error_string(err_str, AV_ERROR_MAX_STRING_SIZE, err);
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
if(codec_id == AV_CODEC_ID_H264) {
|
||||
auto h264 = (CodedBitstreamH264Context *)ctx->priv_data;
|
||||
|
||||
if(!h264->active_sps->vui_parameters_present_flag) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
return ((CodedBitstreamH265Context *)ctx->priv_data)->active_sps->vui_parameters_present_flag;
|
||||
}
|
||||
} // namespace cbs
|
||||
34
src/cbs.h
Normal file
@@ -0,0 +1,34 @@
|
||||
#ifndef SUNSHINE_CBS_H
|
||||
#define SUNSHINE_CBS_H
|
||||
|
||||
#include "utility.h"
|
||||
|
||||
struct AVPacket;
|
||||
struct AVCodecContext;
|
||||
|
||||
namespace cbs {
|
||||
|
||||
struct nal_t {
|
||||
util::buffer_t<std::uint8_t> _new;
|
||||
util::buffer_t<std::uint8_t> old;
|
||||
};
|
||||
|
||||
struct hevc_t {
|
||||
nal_t vps;
|
||||
nal_t sps;
|
||||
};
|
||||
|
||||
struct h264_t {
|
||||
nal_t sps;
|
||||
};
|
||||
|
||||
hevc_t make_sps_hevc(const AVCodecContext *ctx, const AVPacket *packet);
|
||||
h264_t make_sps_h264(const AVCodecContext *ctx, const AVPacket *packet);
|
||||
|
||||
/**
|
||||
* Check if SPS->VUI is present
|
||||
*/
|
||||
bool validate_sps(const AVPacket *packet, int codec_id);
|
||||
} // namespace cbs
|
||||
|
||||
#endif
|
||||
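To make the intent of these declarations concrete, here is a small hypothetical caller, not taken from the repository: if an encoder's packet carries an SPS without VUI parameters, a replacement SPS is generated from the codec context. The include paths and the helper name are assumptions for illustration only.

extern "C" {
#include <libavcodec/avcodec.h> // AVCodecContext, AVPacket, AV_CODEC_ID_H264 (FFmpeg)
}

#include "src/cbs.h" // assumed include path for the header shown above

// Hypothetical helper: ensure the H.264 SPS carries VUI parameters.
static void fix_h264_headers_if_needed(const AVCodecContext *avctx, const AVPacket *packet) {
  if(cbs::validate_sps(packet, AV_CODEC_ID_H264)) {
    return; // the SPS already contains VUI parameters, nothing to do
  }

  // hdr.sps.old is the SPS as read from the packet; hdr.sps._new is the rebuilt one.
  cbs::h264_t hdr = cbs::make_sps_h264(avctx, packet);

  // A real caller would now splice hdr.sps._new into the outgoing bitstream in
  // place of hdr.sps.old before handing the packet to the client.
  (void)hdr;
}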
1078
src/config.cpp
Normal file
File diff suppressed because it is too large
src/config.h
@@ -1,16 +1,17 @@
|
||||
#ifndef SUNSHINE_CONFIG_H
|
||||
#define SUNSHINE_CONFIG_H
|
||||
|
||||
#include <chrono>
|
||||
#include <string>
|
||||
#include <bitset>
|
||||
#include <chrono>
|
||||
#include <optional>
|
||||
#include <string>
|
||||
#include <unordered_map>
|
||||
#include <vector>
|
||||
|
||||
namespace config {
|
||||
struct video_t {
|
||||
// ffmpeg params
|
||||
int crf; // higher == more compression and less quality
|
||||
int qp; // higher == more compression and less quality, ignored if crf != 0
|
||||
int qp; // higher == more compression and less quality
|
||||
|
||||
int hevc_mode;
|
||||
|
||||
@@ -22,17 +23,44 @@ struct video_t {
|
||||
|
||||
struct {
|
||||
std::optional<int> preset;
|
||||
std::optional<int> tune;
|
||||
std::optional<int> rc;
|
||||
int coder;
|
||||
} nv;
|
||||
|
||||
struct {
|
||||
std::optional<int> preset;
|
||||
std::optional<int> cavlc;
|
||||
} qsv;
|
||||
|
||||
struct {
|
||||
std::optional<int> quality_h264;
|
||||
std::optional<int> quality_hevc;
|
||||
std::optional<int> rc_h264;
|
||||
std::optional<int> rc_hevc;
|
||||
std::optional<int> usage_h264;
|
||||
std::optional<int> usage_hevc;
|
||||
std::optional<int> preanalysis;
|
||||
std::optional<int> vbaq;
|
||||
int coder;
|
||||
} amd;
|
||||
|
||||
struct {
|
||||
int allow_sw;
|
||||
int require_sw;
|
||||
int realtime;
|
||||
int coder;
|
||||
} vt;
|
||||
|
||||
std::string encoder;
|
||||
std::string adapter_name;
|
||||
std::string output_name;
|
||||
bool dwmflush;
|
||||
};
|
||||
|
||||
struct audio_t {
|
||||
std::string sink;
|
||||
std::string virtual_sink;
|
||||
};
|
||||
|
||||
struct stream_t {
|
||||
@@ -50,6 +78,7 @@ struct nvhttp_t {
|
||||
// Could be any of the following values:
|
||||
// pc|lan|wan
|
||||
std::string origin_pin_allowed;
|
||||
std::string origin_web_ui_allowed;
|
||||
|
||||
std::string pkey; // must be 2048 bits
|
||||
std::string cert; // must be signed with a key of 2048 bits
|
||||
@@ -59,26 +88,50 @@ struct nvhttp_t {
|
||||
std::string file_state;
|
||||
|
||||
std::string external_ip;
|
||||
std::vector<std::string> resolutions;
|
||||
std::vector<int> fps;
|
||||
};
|
||||
|
||||
struct input_t {
|
||||
std::unordered_map<int, int> keybindings;
|
||||
|
||||
std::chrono::milliseconds back_button_timeout;
|
||||
std::chrono::milliseconds key_repeat_delay;
|
||||
std::chrono::duration<double> key_repeat_period;
|
||||
|
||||
std::string gamepad;
|
||||
};
|
||||
|
||||
namespace flag {
|
||||
enum flag_e : std::size_t {
|
||||
PIN_STDIN = 0, // Read PIN from stdin instead of http
|
||||
FRESH_STATE, // Do not load or save state
|
||||
PIN_STDIN = 0, // Read PIN from stdin instead of http
|
||||
FRESH_STATE, // Do not load or save state
|
||||
FORCE_VIDEO_HEADER_REPLACE, // force replacing headers inside video data
|
||||
UPNP, // Try Universal Plug 'n Play
|
||||
CONST_PIN, // Use "universal" pin
|
||||
FLAG_SIZE
|
||||
};
|
||||
}
|
||||
|
||||
struct sunshine_t {
|
||||
int min_log_level;
|
||||
|
||||
std::bitset<flag::FLAG_SIZE> flags;
|
||||
std::string credentials_file;
|
||||
|
||||
std::string username;
|
||||
std::string password;
|
||||
std::string salt;
|
||||
|
||||
std::string config_file;
|
||||
|
||||
struct cmd_t {
|
||||
std::string name;
|
||||
int argc;
|
||||
char **argv;
|
||||
} cmd;
|
||||
|
||||
std::uint16_t port;
|
||||
std::string log_file;
|
||||
};
|
||||
|
||||
extern video_t video;
|
||||
@@ -89,6 +142,6 @@ extern input_t input;
|
||||
extern sunshine_t sunshine;
|
||||
|
||||
int parse(int argc, char *argv[]);
|
||||
}
|
||||
|
||||
std::unordered_map<std::string, std::string> parse_config(const std::string_view &file_content);
|
||||
} // namespace config
|
||||
#endif
|
||||
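The flag enum above pairs with the flags bitset in sunshine_t. A one-line illustrative check, not taken from the repository (the include path and function name are assumptions), would look like this:

#include "src/config.h" // assumed include path for the header shown above

// Hypothetical helper: should the pairing PIN be read from stdin instead of HTTP?
static bool pin_from_stdin() {
  return config::sunshine.flags[config::flag::PIN_STDIN];
}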
761
src/confighttp.cpp
Normal file
@@ -0,0 +1,761 @@
|
||||
// Created by TheElixZammuto on 2021-05-09.
|
||||
// TODO: Authentication, better handling of routes common to nvhttp, cleanup
|
||||
|
||||
#define BOOST_BIND_GLOBAL_PLACEHOLDERS
|
||||
|
||||
#include "process.h"
|
||||
|
||||
#include <filesystem>
|
||||
#include <set>
|
||||
|
||||
#include <boost/property_tree/json_parser.hpp>
|
||||
#include <boost/property_tree/ptree.hpp>
|
||||
#include <boost/property_tree/xml_parser.hpp>
|
||||
|
||||
#include <boost/algorithm/string.hpp>
|
||||
|
||||
#include <boost/asio/ssl/context.hpp>
|
||||
|
||||
#include <boost/filesystem.hpp>
|
||||
|
||||
#include <Simple-Web-Server/crypto.hpp>
|
||||
#include <Simple-Web-Server/server_https.hpp>
|
||||
#include <boost/asio/ssl/context_base.hpp>
|
||||
|
||||
#include "config.h"
|
||||
#include "confighttp.h"
|
||||
#include "crypto.h"
|
||||
#include "httpcommon.h"
|
||||
#include "main.h"
|
||||
#include "network.h"
|
||||
#include "nvhttp.h"
|
||||
#include "platform/common.h"
|
||||
#include "rtsp.h"
|
||||
#include "utility.h"
|
||||
#include "uuid.h"
|
||||
|
||||
using namespace std::literals;
|
||||
|
||||
namespace confighttp {
|
||||
namespace fs = std::filesystem;
|
||||
namespace pt = boost::property_tree;
|
||||
|
||||
using https_server_t = SimpleWeb::Server<SimpleWeb::HTTPS>;
|
||||
|
||||
using args_t = SimpleWeb::CaseInsensitiveMultimap;
|
||||
using resp_https_t = std::shared_ptr<typename SimpleWeb::ServerBase<SimpleWeb::HTTPS>::Response>;
|
||||
using req_https_t = std::shared_ptr<typename SimpleWeb::ServerBase<SimpleWeb::HTTPS>::Request>;
|
||||
|
||||
enum class op_e {
|
||||
ADD,
|
||||
REMOVE
|
||||
};
|
||||
|
||||
void print_req(const req_https_t &request) {
|
||||
BOOST_LOG(debug) << "METHOD :: "sv << request->method;
|
||||
BOOST_LOG(debug) << "DESTINATION :: "sv << request->path;
|
||||
|
||||
for(auto &[name, val] : request->header) {
|
||||
BOOST_LOG(debug) << name << " -- " << (name == "Authorization" ? "CREDENTIALS REDACTED" : val);
|
||||
}
|
||||
|
||||
BOOST_LOG(debug) << " [--] "sv;
|
||||
|
||||
for(auto &[name, val] : request->parse_query_string()) {
|
||||
BOOST_LOG(debug) << name << " -- " << val;
|
||||
}
|
||||
|
||||
BOOST_LOG(debug) << " [--] "sv;
|
||||
}
|
||||
|
||||
void send_unauthorized(resp_https_t response, req_https_t request) {
|
||||
auto address = request->remote_endpoint().address().to_string();
|
||||
BOOST_LOG(info) << "Web UI: ["sv << address << "] -- not authorized"sv;
|
||||
const SimpleWeb::CaseInsensitiveMultimap headers {
|
||||
{ "WWW-Authenticate", R"(Basic realm="Sunshine Gamestream Host", charset="UTF-8")" }
|
||||
};
|
||||
response->write(SimpleWeb::StatusCode::client_error_unauthorized, headers);
|
||||
}
|
||||
|
||||
void send_redirect(resp_https_t response, req_https_t request, const char *path) {
|
||||
auto address = request->remote_endpoint().address().to_string();
|
||||
BOOST_LOG(info) << "Web UI: ["sv << address << "] -- not authorized"sv;
|
||||
const SimpleWeb::CaseInsensitiveMultimap headers {
|
||||
{ "Location", path }
|
||||
};
|
||||
response->write(SimpleWeb::StatusCode::redirection_temporary_redirect, headers);
|
||||
}
|
||||
|
||||
bool authenticate(resp_https_t response, req_https_t request) {
|
||||
auto address = request->remote_endpoint().address().to_string();
|
||||
auto ip_type = net::from_address(address);
|
||||
|
||||
if(ip_type > http::origin_web_ui_allowed) {
|
||||
BOOST_LOG(info) << "Web UI: ["sv << address << "] -- denied"sv;
|
||||
response->write(SimpleWeb::StatusCode::client_error_forbidden);
|
||||
return false;
|
||||
}
|
||||
|
||||
// If no credentials have been set up yet, redirect the user to the /welcome page
|
||||
if(config::sunshine.username.empty()) {
|
||||
send_redirect(response, request, "/welcome");
|
||||
return false;
|
||||
}
|
||||
|
||||
auto fg = util::fail_guard([&]() {
|
||||
send_unauthorized(response, request);
|
||||
});
|
||||
|
||||
auto auth = request->header.find("authorization");
|
||||
if(auth == request->header.end()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
auto &rawAuth = auth->second;
|
||||
auto authData = SimpleWeb::Crypto::Base64::decode(rawAuth.substr("Basic "sv.length()));
|
||||
|
||||
int index = authData.find(':');
|
||||
if(index >= authData.size() - 1) {
|
||||
return false;
|
||||
}
|
||||
|
||||
auto username = authData.substr(0, index);
|
||||
auto password = authData.substr(index + 1);
|
||||
auto hash = util::hex(crypto::hash(password + config::sunshine.salt)).to_string();
|
||||
|
||||
if(username != config::sunshine.username || hash != config::sunshine.password) {
|
||||
return false;
|
||||
}
|
||||
|
||||
fg.disable();
|
||||
return true;
|
||||
}
|
||||
|
||||
void not_found(resp_https_t response, req_https_t request) {
|
||||
pt::ptree tree;
|
||||
tree.put("root.<xmlattr>.status_code", 404);
|
||||
|
||||
std::ostringstream data;
|
||||
|
||||
pt::write_xml(data, tree);
|
||||
response->write(data.str());
|
||||
|
||||
*response << "HTTP/1.1 404 NOT FOUND\r\n"
|
||||
<< data.str();
|
||||
}
|
||||
|
||||
// todo - combine these functions into a single function that accepts the page, i.e "index", "pin", "apps"
|
||||
void getIndexPage(resp_https_t response, req_https_t request) {
|
||||
if(!authenticate(response, request)) return;
|
||||
|
||||
print_req(request);
|
||||
|
||||
std::string header = read_file(WEB_DIR "header.html");
|
||||
std::string content = read_file(WEB_DIR "index.html");
|
||||
response->write(header + content);
|
||||
}
|
||||
|
||||
void getPinPage(resp_https_t response, req_https_t request) {
|
||||
if(!authenticate(response, request)) return;
|
||||
|
||||
print_req(request);
|
||||
|
||||
std::string header = read_file(WEB_DIR "header.html");
|
||||
std::string content = read_file(WEB_DIR "pin.html");
|
||||
response->write(header + content);
|
||||
}
|
||||
|
||||
void getAppsPage(resp_https_t response, req_https_t request) {
|
||||
if(!authenticate(response, request)) return;
|
||||
|
||||
print_req(request);
|
||||
|
||||
SimpleWeb::CaseInsensitiveMultimap headers;
|
||||
headers.emplace("Access-Control-Allow-Origin", "https://images.igdb.com/");
|
||||
|
||||
std::string header = read_file(WEB_DIR "header.html");
|
||||
std::string content = read_file(WEB_DIR "apps.html");
|
||||
response->write(header + content, headers);
|
||||
}
|
||||
|
||||
void getClientsPage(resp_https_t response, req_https_t request) {
|
||||
if(!authenticate(response, request)) return;
|
||||
|
||||
print_req(request);
|
||||
|
||||
std::string header = read_file(WEB_DIR "header.html");
|
||||
std::string content = read_file(WEB_DIR "clients.html");
|
||||
response->write(header + content);
|
||||
}
|
||||
|
||||
void getConfigPage(resp_https_t response, req_https_t request) {
|
||||
if(!authenticate(response, request)) return;
|
||||
|
||||
print_req(request);
|
||||
|
||||
std::string header = read_file(WEB_DIR "header.html");
|
||||
std::string content = read_file(WEB_DIR "config.html");
|
||||
response->write(header + content);
|
||||
}
|
||||
|
||||
void getPasswordPage(resp_https_t response, req_https_t request) {
|
||||
if(!authenticate(response, request)) return;
|
||||
|
||||
print_req(request);
|
||||
|
||||
std::string header = read_file(WEB_DIR "header.html");
|
||||
std::string content = read_file(WEB_DIR "password.html");
|
||||
response->write(header + content);
|
||||
}
|
||||
|
||||
void getWelcomePage(resp_https_t response, req_https_t request) {
|
||||
print_req(request);
|
||||
if(!config::sunshine.username.empty()) {
|
||||
send_redirect(response, request, "/");
|
||||
return;
|
||||
}
|
||||
std::string header = read_file(WEB_DIR "header-no-nav.html");
|
||||
std::string content = read_file(WEB_DIR "welcome.html");
|
||||
response->write(header + content);
|
||||
}
|
||||
|
||||
void getTroubleshootingPage(resp_https_t response, req_https_t request) {
|
||||
if(!authenticate(response, request)) return;
|
||||
|
||||
print_req(request);
|
||||
|
||||
std::string header = read_file(WEB_DIR "header.html");
|
||||
std::string content = read_file(WEB_DIR "troubleshooting.html");
|
||||
response->write(header + content);
|
||||
}
|
||||
|
||||
void getFaviconImage(resp_https_t response, req_https_t request) {
|
||||
// todo - combine function with getSunshineLogoImage and possibly getNodeModules
|
||||
// todo - use mime_types map
|
||||
print_req(request);
|
||||
|
||||
std::ifstream in(WEB_DIR "images/favicon.ico", std::ios::binary);
|
||||
SimpleWeb::CaseInsensitiveMultimap headers;
|
||||
headers.emplace("Content-Type", "image/x-icon");
|
||||
response->write(SimpleWeb::StatusCode::success_ok, in, headers);
|
||||
}
|
||||
|
||||
void getSunshineLogoImage(resp_https_t response, req_https_t request) {
|
||||
// todo - combine function with getFaviconImage and possibly getNodeModules
|
||||
// todo - use mime_types map
|
||||
print_req(request);
|
||||
|
||||
std::ifstream in(WEB_DIR "images/logo-sunshine-45.png", std::ios::binary);
|
||||
SimpleWeb::CaseInsensitiveMultimap headers;
|
||||
headers.emplace("Content-Type", "image/png");
|
||||
response->write(SimpleWeb::StatusCode::success_ok, in, headers);
|
||||
}
|
||||
|
||||
bool isChildPath(fs::path const &base, fs::path const &query) {
|
||||
auto relPath = fs::relative(base, query);
|
||||
return *(relPath.begin()) != fs::path("..");
|
||||
}
|
||||
|
||||
void getNodeModules(resp_https_t response, req_https_t request) {
|
||||
print_req(request);
|
||||
fs::path webDirPath(WEB_DIR);
|
||||
fs::path nodeModulesPath(webDirPath / "node_modules");
|
||||
|
||||
// .relative_path is needed to shed any leading slash that might exist in the request path
|
||||
auto filePath = fs::weakly_canonical(webDirPath / fs::path(request->path).relative_path());
|
||||
|
||||
// Don't do anything if file does not exist or is outside the node_modules directory
|
||||
if(!isChildPath(filePath, nodeModulesPath)) {
|
||||
BOOST_LOG(warning) << "Someone requested a path " << filePath << " that is outside the node_modules folder";
|
||||
response->write(SimpleWeb::StatusCode::client_error_bad_request, "Bad Request");
|
||||
}
|
||||
else if(!fs::exists(filePath)) {
|
||||
response->write(SimpleWeb::StatusCode::client_error_not_found);
|
||||
}
|
||||
else {
|
||||
auto relPath = fs::relative(filePath, webDirPath);
|
||||
// get the mime type from the file extension mime_types map
|
||||
// remove the leading period from the extension
|
||||
auto mimeType = mime_types.find(relPath.extension().string().substr(1));
|
||||
// check whether the extension exists in the map
|
||||
if(mimeType != mime_types.end()) {
|
||||
// if it is, set the content type to the mime type
|
||||
SimpleWeb::CaseInsensitiveMultimap headers;
|
||||
headers.emplace("Content-Type", mimeType->second);
|
||||
std::ifstream in(filePath.string(), std::ios::binary);
|
||||
response->write(SimpleWeb::StatusCode::success_ok, in, headers);
|
||||
}
|
||||
// do not return any file if the type is not in the map
|
||||
}
|
||||
}
|
||||
|
||||
void getApps(resp_https_t response, req_https_t request) {
|
||||
if(!authenticate(response, request)) return;
|
||||
|
||||
print_req(request);
|
||||
|
||||
std::string content = read_file(config::stream.file_apps.c_str());
|
||||
response->write(content);
|
||||
}
|
||||
|
||||
void getLogs(resp_https_t response, req_https_t request) {
|
||||
if(!authenticate(response, request)) return;
|
||||
|
||||
print_req(request);
|
||||
|
||||
std::string content = read_file(config::sunshine.log_file.c_str());
|
||||
SimpleWeb::CaseInsensitiveMultimap headers;
|
||||
headers.emplace("Content-Type", "text/plain");
|
||||
response->write(SimpleWeb::StatusCode::success_ok, content, headers);
|
||||
}
|
||||
|
||||
void saveApp(resp_https_t response, req_https_t request) {
|
||||
if(!authenticate(response, request)) return;
|
||||
|
||||
print_req(request);
|
||||
|
||||
std::stringstream ss;
|
||||
ss << request->content.rdbuf();
|
||||
|
||||
pt::ptree outputTree;
|
||||
auto g = util::fail_guard([&]() {
|
||||
std::ostringstream data;
|
||||
|
||||
pt::write_json(data, outputTree);
|
||||
response->write(data.str());
|
||||
});
|
||||
|
||||
pt::ptree inputTree, fileTree;
|
||||
|
||||
BOOST_LOG(fatal) << config::stream.file_apps;
|
||||
try {
|
||||
// TODO: Input Validation
|
||||
pt::read_json(ss, inputTree);
|
||||
pt::read_json(config::stream.file_apps, fileTree);
|
||||
|
||||
if(inputTree.get_child("prep-cmd").empty()) {
|
||||
inputTree.erase("prep-cmd");
|
||||
}
|
||||
|
||||
if(inputTree.get_child("detached").empty()) {
|
||||
inputTree.erase("detached");
|
||||
}
|
||||
|
||||
auto &apps_node = fileTree.get_child("apps"s);
|
||||
int index = inputTree.get<int>("index");
|
||||
|
||||
inputTree.erase("index");
|
||||
|
||||
if(index == -1) {
|
||||
apps_node.push_back(std::make_pair("", inputTree));
|
||||
}
|
||||
else {
|
||||
// Unfortunately Boost PT does not allow editing the array in place; copying should do the trick
|
||||
pt::ptree newApps;
|
||||
int i = 0;
|
||||
for(const auto &kv : apps_node) {
|
||||
if(i == index) {
|
||||
newApps.push_back(std::make_pair("", inputTree));
|
||||
}
|
||||
else {
|
||||
newApps.push_back(std::make_pair("", kv.second));
|
||||
}
|
||||
i++;
|
||||
}
|
||||
fileTree.erase("apps");
|
||||
fileTree.push_back(std::make_pair("apps", newApps));
|
||||
}
|
||||
pt::write_json(config::stream.file_apps, fileTree);
|
||||
}
|
||||
catch(std::exception &e) {
|
||||
BOOST_LOG(warning) << "SaveApp: "sv << e.what();
|
||||
|
||||
outputTree.put("status", "false");
|
||||
outputTree.put("error", "Invalid Input JSON");
|
||||
return;
|
||||
}
|
||||
|
||||
outputTree.put("status", "true");
|
||||
proc::refresh(config::stream.file_apps);
|
||||
}
|
||||
|
||||
void deleteApp(resp_https_t response, req_https_t request) {
|
||||
if(!authenticate(response, request)) return;
|
||||
|
||||
print_req(request);
|
||||
|
||||
pt::ptree outputTree;
|
||||
auto g = util::fail_guard([&]() {
|
||||
std::ostringstream data;
|
||||
|
||||
pt::write_json(data, outputTree);
|
||||
response->write(data.str());
|
||||
});
|
||||
pt::ptree fileTree;
|
||||
try {
|
||||
pt::read_json(config::stream.file_apps, fileTree);
|
||||
auto &apps_node = fileTree.get_child("apps"s);
|
||||
int index = stoi(request->path_match[1]);
|
||||
|
||||
if(index < 0) {
|
||||
outputTree.put("status", "false");
|
||||
outputTree.put("error", "Invalid Index");
|
||||
return;
|
||||
}
|
||||
else {
|
||||
// Unfortunately Boost PT does not allow editing the array in place; copying should do the trick
|
||||
pt::ptree newApps;
|
||||
int i = 0;
|
||||
for(const auto &kv : apps_node) {
|
||||
if(i++ != index) {
|
||||
newApps.push_back(std::make_pair("", kv.second));
|
||||
}
|
||||
}
|
||||
fileTree.erase("apps");
|
||||
fileTree.push_back(std::make_pair("apps", newApps));
|
||||
}
|
||||
pt::write_json(config::stream.file_apps, fileTree);
|
||||
}
|
||||
catch(std::exception &e) {
|
||||
BOOST_LOG(warning) << "DeleteApp: "sv << e.what();
|
||||
outputTree.put("status", "false");
|
||||
outputTree.put("error", "Invalid File JSON");
|
||||
return;
|
||||
}
|
||||
|
||||
outputTree.put("status", "true");
|
||||
proc::refresh(config::stream.file_apps);
|
||||
}
|
||||
|
||||
void uploadCover(resp_https_t response, req_https_t request) {
|
||||
if(!authenticate(response, request)) return;
|
||||
|
||||
std::stringstream ss;
|
||||
std::stringstream configStream;
|
||||
ss << request->content.rdbuf();
|
||||
pt::ptree outputTree;
|
||||
auto g = util::fail_guard([&]() {
|
||||
std::ostringstream data;
|
||||
|
||||
SimpleWeb::StatusCode code = SimpleWeb::StatusCode::success_ok;
|
||||
if(outputTree.get_child_optional("error").has_value()) {
|
||||
code = SimpleWeb::StatusCode::client_error_bad_request;
|
||||
}
|
||||
|
||||
pt::write_json(data, outputTree);
|
||||
response->write(code, data.str());
|
||||
});
|
||||
pt::ptree inputTree;
|
||||
try {
|
||||
pt::read_json(ss, inputTree);
|
||||
}
|
||||
catch(std::exception &e) {
|
||||
BOOST_LOG(warning) << "UploadCover: "sv << e.what();
|
||||
outputTree.put("status", "false");
|
||||
outputTree.put("error", e.what());
|
||||
return;
|
||||
}
|
||||
|
||||
auto key = inputTree.get("key", "");
|
||||
if(key.empty()) {
|
||||
outputTree.put("error", "Cover key is required");
|
||||
return;
|
||||
}
|
||||
auto url = inputTree.get("url", "");
|
||||
|
||||
const std::string coverdir = platf::appdata().string() + "/covers/";
|
||||
if(!boost::filesystem::exists(coverdir)) {
|
||||
boost::filesystem::create_directory(coverdir);
|
||||
}
|
||||
|
||||
std::basic_string path = coverdir + http::url_escape(key) + ".png";
|
||||
if(!url.empty()) {
|
||||
if(http::url_get_host(url) != "images.igdb.com") {
|
||||
outputTree.put("error", "Only images.igdb.com is allowed");
|
||||
return;
|
||||
}
|
||||
if(!http::download_file(url, path)) {
|
||||
outputTree.put("error", "Failed to download cover");
|
||||
return;
|
||||
}
|
||||
}
|
||||
else {
|
||||
auto data = SimpleWeb::Crypto::Base64::decode(inputTree.get<std::string>("data"));
|
||||
|
||||
std::ofstream imgfile(path);
|
||||
imgfile.write(data.data(), (int)data.size());
|
||||
}
|
||||
outputTree.put("path", path);
|
||||
}
|
||||
|
||||
void getConfig(resp_https_t response, req_https_t request) {
|
||||
if(!authenticate(response, request)) return;
|
||||
|
||||
print_req(request);
|
||||
|
||||
pt::ptree outputTree;
|
||||
auto g = util::fail_guard([&]() {
|
||||
std::ostringstream data;
|
||||
|
||||
pt::write_json(data, outputTree);
|
||||
response->write(data.str());
|
||||
});
|
||||
|
||||
outputTree.put("status", "true");
|
||||
outputTree.put("platform", SUNSHINE_PLATFORM);
|
||||
outputTree.put("restart_supported", platf::restart_supported());
|
||||
|
||||
auto vars = config::parse_config(read_file(config::sunshine.config_file.c_str()));
|
||||
|
||||
for(auto &[name, value] : vars) {
|
||||
outputTree.put(std::move(name), std::move(value));
|
||||
}
|
||||
}
|
||||
|
||||
void saveConfig(resp_https_t response, req_https_t request) {
|
||||
if(!authenticate(response, request)) return;
|
||||
|
||||
print_req(request);
|
||||
|
||||
std::stringstream ss;
|
||||
std::stringstream configStream;
|
||||
ss << request->content.rdbuf();
|
||||
pt::ptree outputTree;
|
||||
auto g = util::fail_guard([&]() {
|
||||
std::ostringstream data;
|
||||
|
||||
pt::write_json(data, outputTree);
|
||||
response->write(data.str());
|
||||
});
|
||||
pt::ptree inputTree;
|
||||
try {
|
||||
// TODO: Input Validation
|
||||
pt::read_json(ss, inputTree);
|
||||
for(const auto &kv : inputTree) {
|
||||
std::string value = inputTree.get<std::string>(kv.first);
|
||||
if(value.length() == 0 || value.compare("null") == 0) continue;
|
||||
|
||||
configStream << kv.first << " = " << value << std::endl;
|
||||
}
|
||||
write_file(config::sunshine.config_file.c_str(), configStream.str());
|
||||
}
|
||||
catch(std::exception &e) {
|
||||
BOOST_LOG(warning) << "SaveConfig: "sv << e.what();
|
||||
outputTree.put("status", "false");
|
||||
outputTree.put("error", e.what());
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
void restart(resp_https_t response, req_https_t request) {
|
||||
if(!authenticate(response, request)) return;
|
||||
|
||||
print_req(request);
|
||||
|
||||
std::stringstream ss;
|
||||
std::stringstream configStream;
|
||||
ss << request->content.rdbuf();
|
||||
pt::ptree outputTree;
|
||||
auto g = util::fail_guard([&]() {
|
||||
std::ostringstream data;
|
||||
|
||||
pt::write_json(data, outputTree);
|
||||
response->write(data.str());
|
||||
});
|
||||
|
||||
if(!platf::restart_supported()) {
|
||||
outputTree.put("status", false);
|
||||
outputTree.put("error", "Restart is not currently supported on this platform");
|
||||
return;
|
||||
}
|
||||
|
||||
if(!platf::restart()) {
|
||||
outputTree.put("status", false);
|
||||
outputTree.put("error", "Restart failed");
|
||||
return;
|
||||
}
|
||||
|
||||
outputTree.put("status", true);
|
||||
}
|
||||
|
||||
void savePassword(resp_https_t response, req_https_t request) {
|
||||
if(!config::sunshine.username.empty() && !authenticate(response, request)) return;
|
||||
|
||||
print_req(request);
|
||||
|
||||
std::stringstream ss;
|
||||
std::stringstream configStream;
|
||||
ss << request->content.rdbuf();
|
||||
|
||||
pt::ptree inputTree, outputTree;
|
||||
|
||||
auto g = util::fail_guard([&]() {
|
||||
std::ostringstream data;
|
||||
pt::write_json(data, outputTree);
|
||||
response->write(data.str());
|
||||
});
|
||||
|
||||
try {
|
||||
// TODO: Input Validation
|
||||
pt::read_json(ss, inputTree);
|
||||
auto username = inputTree.count("currentUsername") > 0 ? inputTree.get<std::string>("currentUsername") : "";
|
||||
auto newUsername = inputTree.get<std::string>("newUsername");
|
||||
auto password = inputTree.count("currentPassword") > 0 ? inputTree.get<std::string>("currentPassword") : "";
|
||||
auto newPassword = inputTree.count("newPassword") > 0 ? inputTree.get<std::string>("newPassword") : "";
|
||||
auto confirmPassword = inputTree.count("confirmNewPassword") > 0 ? inputTree.get<std::string>("confirmNewPassword") : "";
|
||||
if(newUsername.length() == 0) newUsername = username;
|
||||
if(newUsername.length() == 0) {
|
||||
outputTree.put("status", false);
|
||||
outputTree.put("error", "Invalid Username");
|
||||
}
|
||||
else {
|
||||
auto hash = util::hex(crypto::hash(password + config::sunshine.salt)).to_string();
|
||||
if(config::sunshine.username.empty() || (username == config::sunshine.username && hash == config::sunshine.password)) {
|
||||
if(newPassword.empty() || newPassword != confirmPassword) {
|
||||
outputTree.put("status", false);
|
||||
outputTree.put("error", "Password Mismatch");
|
||||
}
|
||||
else {
|
||||
http::save_user_creds(config::sunshine.credentials_file, newUsername, newPassword);
|
||||
http::reload_user_creds(config::sunshine.credentials_file);
|
||||
outputTree.put("status", true);
|
||||
}
|
||||
}
|
||||
else {
|
||||
outputTree.put("status", false);
|
||||
outputTree.put("error", "Invalid Current Credentials");
|
||||
}
|
||||
}
|
||||
}
|
||||
catch(std::exception &e) {
|
||||
BOOST_LOG(warning) << "SavePassword: "sv << e.what();
|
||||
outputTree.put("status", false);
|
||||
outputTree.put("error", e.what());
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
void savePin(resp_https_t response, req_https_t request) {
|
||||
if(!authenticate(response, request)) return;
|
||||
|
||||
print_req(request);
|
||||
|
||||
std::stringstream ss;
|
||||
ss << request->content.rdbuf();
|
||||
|
||||
pt::ptree inputTree, outputTree;
|
||||
|
||||
auto g = util::fail_guard([&]() {
|
||||
std::ostringstream data;
|
||||
pt::write_json(data, outputTree);
|
||||
response->write(data.str());
|
||||
});
|
||||
|
||||
try {
|
||||
// TODO: Input Validation
|
||||
pt::read_json(ss, inputTree);
|
||||
std::string pin = inputTree.get<std::string>("pin");
|
||||
outputTree.put("status", nvhttp::pin(pin));
|
||||
}
|
||||
catch(std::exception &e) {
|
||||
BOOST_LOG(warning) << "SavePin: "sv << e.what();
|
||||
outputTree.put("status", false);
|
||||
outputTree.put("error", e.what());
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
void unpairAll(resp_https_t response, req_https_t request) {
|
||||
if(!authenticate(response, request)) return;
|
||||
|
||||
print_req(request);
|
||||
|
||||
pt::ptree outputTree;
|
||||
|
||||
auto g = util::fail_guard([&]() {
|
||||
std::ostringstream data;
|
||||
pt::write_json(data, outputTree);
|
||||
response->write(data.str());
|
||||
});
|
||||
nvhttp::erase_all_clients();
|
||||
outputTree.put("status", true);
|
||||
}
|
||||
|
||||
void closeApp(resp_https_t response, req_https_t request) {
|
||||
if(!authenticate(response, request)) return;
|
||||
|
||||
print_req(request);
|
||||
|
||||
pt::ptree outputTree;
|
||||
|
||||
auto g = util::fail_guard([&]() {
|
||||
std::ostringstream data;
|
||||
pt::write_json(data, outputTree);
|
||||
response->write(data.str());
|
||||
});
|
||||
|
||||
proc::proc.terminate();
|
||||
outputTree.put("status", true);
|
||||
}
|
||||
|
||||
void start() {
|
||||
auto shutdown_event = mail::man->event<bool>(mail::shutdown);
|
||||
|
||||
auto port_https = map_port(PORT_HTTPS);
|
||||
|
||||
https_server_t server { config::nvhttp.cert, config::nvhttp.pkey };
|
||||
server.default_resource["GET"] = not_found;
|
||||
server.resource["^/$"]["GET"] = getIndexPage;
|
||||
server.resource["^/pin$"]["GET"] = getPinPage;
|
||||
server.resource["^/apps$"]["GET"] = getAppsPage;
|
||||
server.resource["^/clients$"]["GET"] = getClientsPage;
|
||||
server.resource["^/config$"]["GET"] = getConfigPage;
|
||||
server.resource["^/password$"]["GET"] = getPasswordPage;
|
||||
server.resource["^/welcome$"]["GET"] = getWelcomePage;
|
||||
server.resource["^/troubleshooting$"]["GET"] = getTroubleshootingPage;
|
||||
server.resource["^/api/pin$"]["POST"] = savePin;
|
||||
server.resource["^/api/apps$"]["GET"] = getApps;
|
||||
server.resource["^/api/logs$"]["GET"] = getLogs;
|
||||
server.resource["^/api/apps$"]["POST"] = saveApp;
|
||||
server.resource["^/api/config$"]["GET"] = getConfig;
|
||||
server.resource["^/api/config$"]["POST"] = saveConfig;
|
||||
server.resource["^/api/restart$"]["POST"] = restart;
|
||||
server.resource["^/api/password$"]["POST"] = savePassword;
|
||||
server.resource["^/api/apps/([0-9]+)$"]["DELETE"] = deleteApp;
|
||||
server.resource["^/api/clients/unpair$"]["POST"] = unpairAll;
|
||||
server.resource["^/api/apps/close$"]["POST"] = closeApp;
|
||||
server.resource["^/api/covers/upload$"]["POST"] = uploadCover;
|
||||
server.resource["^/images/favicon.ico$"]["GET"] = getFaviconImage;
|
||||
server.resource["^/images/logo-sunshine-45.png$"]["GET"] = getSunshineLogoImage;
|
||||
server.resource["^/node_modules\\/.+$"]["GET"] = getNodeModules;
|
||||
server.config.reuse_address = true;
|
||||
server.config.address = "0.0.0.0"s;
|
||||
server.config.port = port_https;
|
||||
|
||||
auto accept_and_run = [&](auto *server) {
|
||||
try {
|
||||
server->start([](unsigned short port) {
|
||||
BOOST_LOG(info) << "Configuration UI available at [https://localhost:"sv << port << "]";
|
||||
});
|
||||
}
|
||||
catch(boost::system::system_error &err) {
|
||||
// It's possible the exception gets thrown after calling server->stop() from a different thread
|
||||
if(shutdown_event->peek()) {
|
||||
return;
|
||||
}
|
||||
|
||||
BOOST_LOG(fatal) << "Couldn't start Configuration HTTPS server on port ["sv << port_https << "]: "sv << err.what();
|
||||
shutdown_event->raise(true);
|
||||
return;
|
||||
}
|
||||
};
|
||||
std::thread tcp { accept_and_run, &server };
|
||||
|
||||
// Wait for any event
|
||||
shutdown_event->view();
|
||||
|
||||
server.stop();
|
||||
|
||||
tcp.join();
|
||||
}
|
||||
} // namespace confighttp
|
||||
38
src/confighttp.h
Normal file
@@ -0,0 +1,38 @@
|
||||
// Created by loki on 6/3/19.
|
||||
|
||||
#ifndef SUNSHINE_CONFIGHTTP_H
|
||||
#define SUNSHINE_CONFIGHTTP_H
|
||||
|
||||
#include <functional>
|
||||
#include <string>
|
||||
|
||||
#include "thread_safe.h"
|
||||
|
||||
#define WEB_DIR SUNSHINE_ASSETS_DIR "/web/"
|
||||
|
||||
|
||||
namespace confighttp {
|
||||
constexpr auto PORT_HTTPS = 1;
|
||||
void start();
|
||||
} // namespace confighttp
|
||||
|
||||
// mime types map
|
||||
const std::map<std::string, std::string> mime_types = {
|
||||
{ "css", "text/css" },
|
||||
{ "gif", "image/gif" },
|
||||
{ "htm", "text/html" },
|
||||
{ "html", "text/html" },
|
||||
{ "ico", "image/x-icon" },
|
||||
{ "jpeg", "image/jpeg" },
|
||||
{ "jpg", "image/jpeg" },
|
||||
{ "js", "application/javascript" },
|
||||
{ "json", "application/json" },
|
||||
{ "png", "image/png" },
|
||||
{ "svg", "image/svg+xml" },
|
||||
{ "ttf", "font/ttf" },
|
||||
{ "txt", "text/plain" },
|
||||
{ "woff2", "font/woff2" },
|
||||
{ "xml", "text/xml" },
|
||||
};
|
||||
|
||||
#endif // SUNSHINE_CONFIGHTTP_H
|
||||
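The mime_types table above is keyed by bare file extension. As an illustration (the helper name is hypothetical), a lookup when serving a file from WEB_DIR could look like:

#include <filesystem>
#include <string>

// Hypothetical helper: map a file's extension to a Content-Type, falling back
// to a generic binary type for anything not listed in mime_types.
inline std::string mime_type_for(const std::filesystem::path &file) {
  auto ext = file.extension().string(); // e.g. ".png"
  if(!ext.empty() && ext.front() == '.') {
    ext.erase(0, 1); // "png"
  }
  auto it = mime_types.find(ext);
  return it != mime_types.end() ? it->second : "application/octet-stream";
}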
489	src/crypto.cpp	Normal file
@@ -0,0 +1,489 @@
|
||||
// Created by loki on 5/31/19.
|
||||
|
||||
#include "crypto.h"
|
||||
#include <openssl/pem.h>
|
||||
|
||||
namespace crypto {
|
||||
using asn1_string_t = util::safe_ptr<ASN1_STRING, ASN1_STRING_free>;
|
||||
|
||||
cert_chain_t::cert_chain_t() : _certs {}, _cert_ctx { X509_STORE_CTX_new() } {}
|
||||
void cert_chain_t::add(x509_t &&cert) {
|
||||
x509_store_t x509_store { X509_STORE_new() };
|
||||
|
||||
X509_STORE_add_cert(x509_store.get(), cert.get());
|
||||
_certs.emplace_back(std::make_pair(std::move(cert), std::move(x509_store)));
|
||||
}
|
||||
|
||||
static int openssl_verify_cb(int ok, X509_STORE_CTX *ctx) {
|
||||
int err_code = X509_STORE_CTX_get_error(ctx);
|
||||
|
||||
switch(err_code) {
|
||||
// FIXME: Checking for X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT_LOCALLY is a temporary workaround to get moonlight-embedded to work on the Raspberry Pi
|
||||
case X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT_LOCALLY:
|
||||
return 1;
|
||||
|
||||
// Expired or not-yet-valid certificates are fine. Sometimes Moonlight is running on embedded devices
|
||||
// that don't have accurate clocks (or haven't yet synchronized by the time Moonlight first runs).
|
||||
// This behavior also matches what GeForce Experience does.
|
||||
case X509_V_ERR_CERT_NOT_YET_VALID:
|
||||
case X509_V_ERR_CERT_HAS_EXPIRED:
|
||||
return 1;
|
||||
|
||||
default:
|
||||
return ok;
|
||||
}
|
||||
}
|
||||
|
||||
/*
 * When certificates from two or more instances of Moonlight have been added to a single x509_store_t,
 * only one of them is verified by X509_verify_cert, so only one Moonlight instance would be able to
 * use Sunshine.
 *
 * To work around this, a separate x509_store_t is created for each certificate.
 */
|
||||
const char *cert_chain_t::verify(x509_t::element_type *cert) {
|
||||
int err_code = 0;
|
||||
for(auto &[_, x509_store] : _certs) {
|
||||
auto fg = util::fail_guard([this]() {
|
||||
X509_STORE_CTX_cleanup(_cert_ctx.get());
|
||||
});
|
||||
|
||||
X509_STORE_CTX_init(_cert_ctx.get(), x509_store.get(), cert, nullptr);
|
||||
X509_STORE_CTX_set_verify_cb(_cert_ctx.get(), openssl_verify_cb);
|
||||
|
||||
// We don't care to validate the entire chain for the purposes of client auth.
|
||||
// Some versions of clients forked from Moonlight Embedded produce client certs
|
||||
// that OpenSSL doesn't detect as self-signed due to some X509v3 extensions.
|
||||
X509_STORE_CTX_set_flags(_cert_ctx.get(), X509_V_FLAG_PARTIAL_CHAIN);
|
||||
|
||||
auto err = X509_verify_cert(_cert_ctx.get());
|
||||
|
||||
if(err == 1) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
err_code = X509_STORE_CTX_get_error(_cert_ctx.get());
|
||||
|
||||
if(err_code != X509_V_ERR_DEPTH_ZERO_SELF_SIGNED_CERT && err_code != X509_V_ERR_INVALID_CA) {
|
||||
return X509_verify_cert_error_string(err_code);
|
||||
}
|
||||
}
|
||||
|
||||
return X509_verify_cert_error_string(err_code);
|
||||
}
|
||||
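Taken together, add() and verify() mean a presented client certificate only needs to match one of the previously paired certificates. A minimal usage sketch (the surrounding plumbing is assumed, not shown in this diff):

#include <string_view>

#include "crypto.h"

// Returns true if the certificate presented by a client verifies against any
// certificate previously added to the chain. verify() yields nullptr on success,
// otherwise an OpenSSL error string.
bool client_cert_ok(crypto::cert_chain_t &chain, const std::string_view &client_pem) {
  auto cert = crypto::x509(client_pem);
  if(!cert) {
    return false;
  }
  return chain.verify(cert.get()) == nullptr;
}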
|
||||
namespace cipher {
|
||||
|
||||
static int init_decrypt_gcm(cipher_ctx_t &ctx, aes_t *key, aes_t *iv, bool padding) {
|
||||
ctx.reset(EVP_CIPHER_CTX_new());
|
||||
|
||||
if(!ctx) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if(EVP_DecryptInit_ex(ctx.get(), EVP_aes_128_gcm(), nullptr, nullptr, nullptr) != 1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_SET_IVLEN, iv->size(), nullptr) != 1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if(EVP_DecryptInit_ex(ctx.get(), nullptr, nullptr, key->data(), iv->data()) != 1) {
|
||||
return -1;
|
||||
}
|
||||
EVP_CIPHER_CTX_set_padding(ctx.get(), padding);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int init_encrypt_gcm(cipher_ctx_t &ctx, aes_t *key, aes_t *iv, bool padding) {
|
||||
ctx.reset(EVP_CIPHER_CTX_new());
|
||||
|
||||
// 128-bit AES GCM
|
||||
if(EVP_EncryptInit_ex(ctx.get(), EVP_aes_128_gcm(), nullptr, nullptr, nullptr) != 1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if(EVP_CIPHER_CTX_ctrl(ctx.get(), EVP_CTRL_GCM_SET_IVLEN, iv->size(), nullptr) != 1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if(EVP_EncryptInit_ex(ctx.get(), nullptr, nullptr, key->data(), iv->data()) != 1) {
|
||||
return -1;
|
||||
}
|
||||
EVP_CIPHER_CTX_set_padding(ctx.get(), padding);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int init_encrypt_cbc(cipher_ctx_t &ctx, aes_t *key, aes_t *iv, bool padding) {
|
||||
ctx.reset(EVP_CIPHER_CTX_new());
|
||||
|
||||
// 128-bit AES CBC
|
||||
if(EVP_EncryptInit_ex(ctx.get(), EVP_aes_128_cbc(), nullptr, key->data(), iv->data()) != 1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
EVP_CIPHER_CTX_set_padding(ctx.get(), padding);
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
int gcm_t::decrypt(const std::string_view &tagged_cipher, std::vector<std::uint8_t> &plaintext, aes_t *iv) {
|
||||
if(!decrypt_ctx && init_decrypt_gcm(decrypt_ctx, &key, iv, padding)) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
// Calling with cipher == nullptr results in a parameter change
|
||||
// without requiring a reallocation of the internal cipher ctx.
|
||||
if(EVP_DecryptInit_ex(decrypt_ctx.get(), nullptr, nullptr, nullptr, iv->data()) != 1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
auto cipher = tagged_cipher.substr(tag_size);
|
||||
auto tag = tagged_cipher.substr(0, tag_size);
|
||||
|
||||
plaintext.resize((cipher.size() + 15) / 16 * 16);
|
||||
|
||||
int size;
|
||||
if(EVP_DecryptUpdate(decrypt_ctx.get(), plaintext.data(), &size, (const std::uint8_t *)cipher.data(), cipher.size()) != 1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if(EVP_CIPHER_CTX_ctrl(decrypt_ctx.get(), EVP_CTRL_GCM_SET_TAG, tag.size(), const_cast<char *>(tag.data())) != 1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
int len = size;
|
||||
if(EVP_DecryptFinal_ex(decrypt_ctx.get(), plaintext.data() + size, &len) != 1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
plaintext.resize(size + len);
|
||||
return 0;
|
||||
}
|
||||
|
||||
int gcm_t::encrypt(const std::string_view &plaintext, std::uint8_t *tagged_cipher, aes_t *iv) {
|
||||
if(!encrypt_ctx && init_encrypt_gcm(encrypt_ctx, &key, iv, padding)) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
// Calling with cipher == nullptr results in a parameter change
|
||||
// without requiring a reallocation of the internal cipher ctx.
|
||||
if(EVP_EncryptInit_ex(encrypt_ctx.get(), nullptr, nullptr, nullptr, iv->data()) != 1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
auto tag = tagged_cipher;
|
||||
auto cipher = tag + tag_size;
|
||||
|
||||
int len;
|
||||
int size = round_to_pkcs7_padded(plaintext.size());
|
||||
|
||||
// Encrypt into the caller's buffer
|
||||
if(EVP_EncryptUpdate(encrypt_ctx.get(), cipher, &size, (const std::uint8_t *)plaintext.data(), plaintext.size()) != 1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
// GCM encryption won't ever fill ciphertext here but we have to call it anyway
|
||||
if(EVP_EncryptFinal_ex(encrypt_ctx.get(), cipher + size, &len) != 1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if(EVP_CIPHER_CTX_ctrl(encrypt_ctx.get(), EVP_CTRL_GCM_GET_TAG, tag_size, tag) != 1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
return len + size;
|
||||
}
|
||||
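The tagged_cipher buffer passed to gcm_t::encrypt holds the 16-byte GCM tag first, followed by the ciphertext, and the return value counts only the ciphertext bytes. A sizing sketch based on the header comment in crypto.h (the helper name is illustrative):

#include <string_view>
#include <vector>

#include "crypto.h"

std::vector<std::uint8_t> encrypt_tagged(crypto::cipher::gcm_t &gcm,
                                         const std::string_view &plaintext,
                                         crypto::aes_t &iv) {
  // tag_size bytes for the tag + room for the (padded) ciphertext
  std::vector<std::uint8_t> tagged(
    crypto::cipher::tag_size + crypto::cipher::round_to_pkcs7_padded(plaintext.size()));

  auto bytes = gcm.encrypt(plaintext, tagged.data(), &iv);
  if(bytes < 0) {
    return {};
  }
  tagged.resize(crypto::cipher::tag_size + bytes); // keep tag + actual ciphertext
  return tagged;
}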
|
||||
int ecb_t::decrypt(const std::string_view &cipher, std::vector<std::uint8_t> &plaintext) {
|
||||
int len;
|
||||
|
||||
auto fg = util::fail_guard([this]() {
|
||||
EVP_CIPHER_CTX_reset(decrypt_ctx.get());
|
||||
});
|
||||
|
||||
// Gen 7 servers use 128-bit AES ECB
|
||||
if(EVP_DecryptInit_ex(decrypt_ctx.get(), EVP_aes_128_ecb(), nullptr, key.data(), nullptr) != 1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
EVP_CIPHER_CTX_set_padding(decrypt_ctx.get(), padding);
|
||||
|
||||
plaintext.resize((cipher.size() + 15) / 16 * 16);
|
||||
auto size = (int)plaintext.size();
|
||||
// Decrypt into the caller's buffer
|
||||
if(EVP_DecryptUpdate(decrypt_ctx.get(), plaintext.data(), &size, (const std::uint8_t *)cipher.data(), cipher.size()) != 1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if(EVP_DecryptFinal_ex(decrypt_ctx.get(), plaintext.data(), &len) != 1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
plaintext.resize(len + size);
|
||||
return 0;
|
||||
}
|
||||
|
||||
int ecb_t::encrypt(const std::string_view &plaintext, std::vector<std::uint8_t> &cipher) {
|
||||
auto fg = util::fail_guard([this]() {
|
||||
EVP_CIPHER_CTX_reset(encrypt_ctx.get());
|
||||
});
|
||||
|
||||
// Gen 7 servers use 128-bit AES ECB
|
||||
if(EVP_EncryptInit_ex(encrypt_ctx.get(), EVP_aes_128_ecb(), nullptr, key.data(), nullptr) != 1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
EVP_CIPHER_CTX_set_padding(encrypt_ctx.get(), padding);
|
||||
|
||||
int len;
|
||||
|
||||
cipher.resize((plaintext.size() + 15) / 16 * 16);
|
||||
auto size = (int)cipher.size();
|
||||
|
||||
// Encrypt into the caller's buffer
|
||||
if(EVP_EncryptUpdate(encrypt_ctx.get(), cipher.data(), &size, (const std::uint8_t *)plaintext.data(), plaintext.size()) != 1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if(EVP_EncryptFinal_ex(encrypt_ctx.get(), cipher.data() + size, &len) != 1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
cipher.resize(len + size);
|
||||
return 0;
|
||||
}
|
||||
|
||||
int cbc_t::encrypt(const std::string_view &plaintext, std::uint8_t *cipher, aes_t *iv) {
|
||||
if(!encrypt_ctx && init_encrypt_cbc(encrypt_ctx, &key, iv, padding)) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
// Calling with cipher == nullptr results in a parameter change
|
||||
// without requiring a reallocation of the internal cipher ctx.
|
||||
if(EVP_EncryptInit_ex(encrypt_ctx.get(), nullptr, nullptr, nullptr, iv->data()) != 1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
int len;
|
||||
|
||||
int size = plaintext.size(); // round_to_pkcs7_padded(plaintext.size());
|
||||
|
||||
// Encrypt into the caller's buffer
|
||||
if(EVP_EncryptUpdate(encrypt_ctx.get(), cipher, &size, (const std::uint8_t *)plaintext.data(), plaintext.size()) != 1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if(EVP_EncryptFinal_ex(encrypt_ctx.get(), cipher + size, &len) != 1) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
return size + len;
|
||||
}
|
||||
|
||||
ecb_t::ecb_t(const aes_t &key, bool padding)
|
||||
: cipher_t { EVP_CIPHER_CTX_new(), EVP_CIPHER_CTX_new(), key, padding } {}
|
||||
|
||||
cbc_t::cbc_t(const aes_t &key, bool padding)
|
||||
: cipher_t { nullptr, nullptr, key, padding } {}
|
||||
|
||||
gcm_t::gcm_t(const crypto::aes_t &key, bool padding)
|
||||
: cipher_t { nullptr, nullptr, key, padding } {}
|
||||
|
||||
} // namespace cipher
|
||||
|
||||
aes_t gen_aes_key(const std::array<uint8_t, 16> &salt, const std::string_view &pin) {
|
||||
aes_t key;
|
||||
|
||||
std::string salt_pin;
|
||||
salt_pin.reserve(salt.size() + pin.size());
|
||||
|
||||
salt_pin.insert(std::end(salt_pin), std::begin(salt), std::end(salt));
|
||||
salt_pin.insert(std::end(salt_pin), std::begin(pin), std::end(pin));
|
||||
|
||||
auto hsh = hash(salt_pin);
|
||||
|
||||
std::copy(std::begin(hsh), std::begin(hsh) + key.size(), std::begin(key));
|
||||
|
||||
return key;
|
||||
}
|
||||
|
||||
sha256_t hash(const std::string_view &plaintext) {
|
||||
sha256_t hsh;
|
||||
EVP_Digest(plaintext.data(), plaintext.size(), hsh.data(), nullptr, EVP_sha256(), nullptr);
|
||||
return hsh;
|
||||
}
|
||||
|
||||
x509_t x509(const std::string_view &x) {
|
||||
bio_t io { BIO_new(BIO_s_mem()) };
|
||||
|
||||
BIO_write(io.get(), x.data(), x.size());
|
||||
|
||||
x509_t p;
|
||||
PEM_read_bio_X509(io.get(), &p, nullptr, nullptr);
|
||||
|
||||
return p;
|
||||
}
|
||||
|
||||
pkey_t pkey(const std::string_view &k) {
|
||||
bio_t io { BIO_new(BIO_s_mem()) };
|
||||
|
||||
BIO_write(io.get(), k.data(), k.size());
|
||||
|
||||
pkey_t p = nullptr;
|
||||
PEM_read_bio_PrivateKey(io.get(), &p, nullptr, nullptr);
|
||||
|
||||
return p;
|
||||
}
|
||||
|
||||
std::string pem(x509_t &x509) {
|
||||
bio_t bio { BIO_new(BIO_s_mem()) };
|
||||
|
||||
PEM_write_bio_X509(bio.get(), x509.get());
|
||||
BUF_MEM *mem_ptr;
|
||||
BIO_get_mem_ptr(bio.get(), &mem_ptr);
|
||||
|
||||
return { mem_ptr->data, mem_ptr->length };
|
||||
}
|
||||
|
||||
std::string pem(pkey_t &pkey) {
|
||||
bio_t bio { BIO_new(BIO_s_mem()) };
|
||||
|
||||
PEM_write_bio_PrivateKey(bio.get(), pkey.get(), nullptr, nullptr, 0, nullptr, nullptr);
|
||||
BUF_MEM *mem_ptr;
|
||||
BIO_get_mem_ptr(bio.get(), &mem_ptr);
|
||||
|
||||
return { mem_ptr->data, mem_ptr->length };
|
||||
}
|
||||
|
||||
std::string_view signature(const x509_t &x) {
|
||||
// X509_ALGOR *_ = nullptr;
|
||||
|
||||
const ASN1_BIT_STRING *asn1 = nullptr;
|
||||
X509_get0_signature(&asn1, nullptr, x.get());
|
||||
|
||||
return { (const char *)asn1->data, (std::size_t)asn1->length };
|
||||
}
|
||||
|
||||
std::string rand(std::size_t bytes) {
|
||||
std::string r;
|
||||
r.resize(bytes);
|
||||
|
||||
RAND_bytes((uint8_t *)r.data(), r.size());
|
||||
|
||||
return r;
|
||||
}
|
||||
|
||||
std::vector<uint8_t> sign(const pkey_t &pkey, const std::string_view &data, const EVP_MD *md) {
|
||||
md_ctx_t ctx { EVP_MD_CTX_create() };
|
||||
|
||||
if(EVP_DigestSignInit(ctx.get(), nullptr, md, nullptr, pkey.get()) != 1) {
|
||||
return {};
|
||||
}
|
||||
|
||||
if(EVP_DigestSignUpdate(ctx.get(), data.data(), data.size()) != 1) {
|
||||
return {};
|
||||
}
|
||||
|
||||
std::size_t slen = digest_size;
|
||||
|
||||
std::vector<uint8_t> digest;
|
||||
digest.resize(slen);
|
||||
|
||||
if(EVP_DigestSignFinal(ctx.get(), digest.data(), &slen) != 1) {
|
||||
return {};
|
||||
}
|
||||
|
||||
return digest;
|
||||
}
|
||||
|
||||
creds_t gen_creds(const std::string_view &cn, std::uint32_t key_bits) {
|
||||
x509_t x509 { X509_new() };
|
||||
pkey_ctx_t ctx { EVP_PKEY_CTX_new_id(EVP_PKEY_RSA, nullptr) };
|
||||
pkey_t pkey;
|
||||
|
||||
EVP_PKEY_keygen_init(ctx.get());
|
||||
EVP_PKEY_CTX_set_rsa_keygen_bits(ctx.get(), key_bits);
|
||||
EVP_PKEY_keygen(ctx.get(), &pkey);
|
||||
|
||||
X509_set_version(x509.get(), 2);
|
||||
|
||||
// Generate a real serial number to avoid SEC_ERROR_REUSED_ISSUER_AND_SERIAL with Firefox
|
||||
bignum_t serial { BN_new() };
|
||||
BN_rand(serial.get(), 159, BN_RAND_TOP_ANY, BN_RAND_BOTTOM_ANY); // 159 bits to fit in 20 bytes in DER format
|
||||
BN_set_negative(serial.get(), 0); // Serial numbers must be positive
|
||||
BN_to_ASN1_INTEGER(serial.get(), X509_get_serialNumber(x509.get()));
|
||||
|
||||
constexpr auto year = 60 * 60 * 24 * 365;
|
||||
#if OPENSSL_VERSION_NUMBER < 0x10100000L
|
||||
X509_gmtime_adj(X509_get_notBefore(x509.get()), 0);
|
||||
X509_gmtime_adj(X509_get_notAfter(x509.get()), 20 * year);
|
||||
#else
|
||||
asn1_string_t not_before { ASN1_STRING_dup(X509_get0_notBefore(x509.get())) };
|
||||
asn1_string_t not_after { ASN1_STRING_dup(X509_get0_notAfter(x509.get())) };
|
||||
|
||||
X509_gmtime_adj(not_before.get(), 0);
|
||||
X509_gmtime_adj(not_after.get(), 20 * year);
|
||||
|
||||
X509_set1_notBefore(x509.get(), not_before.get());
|
||||
X509_set1_notAfter(x509.get(), not_after.get());
|
||||
#endif
|
||||
|
||||
X509_set_pubkey(x509.get(), pkey.get());
|
||||
|
||||
auto name = X509_get_subject_name(x509.get());
|
||||
X509_NAME_add_entry_by_txt(name, "CN", MBSTRING_ASC,
|
||||
(const std::uint8_t *)cn.data(), cn.size(),
|
||||
-1, 0);
|
||||
|
||||
X509_set_issuer_name(x509.get(), name);
|
||||
X509_sign(x509.get(), pkey.get(), EVP_sha256());
|
||||
|
||||
return { pem(x509), pem(pkey) };
|
||||
}
|
||||
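gen_creds() returns the certificate and private key as PEM strings. One way (illustrative only, not necessarily how Sunshine wires it up) to hand such an in-memory pair to a Boost.Asio TLS context:

#include <boost/asio/buffer.hpp>
#include <boost/asio/ssl/context.hpp>

#include "crypto.h"

boost::asio::ssl::context make_tls_context() {
  auto creds = crypto::gen_creds("Sunshine Gamestream Host", 2048);

  boost::asio::ssl::context ctx { boost::asio::ssl::context::tls };
  ctx.use_certificate(boost::asio::buffer(creds.x509), boost::asio::ssl::context::pem);
  ctx.use_private_key(boost::asio::buffer(creds.pkey), boost::asio::ssl::context::pem);

  return ctx;
}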
|
||||
std::vector<uint8_t> sign256(const pkey_t &pkey, const std::string_view &data) {
|
||||
return sign(pkey, data, EVP_sha256());
|
||||
}
|
||||
|
||||
bool verify(const x509_t &x509, const std::string_view &data, const std::string_view &signature, const EVP_MD *md) {
|
||||
auto pkey = X509_get_pubkey(x509.get());
|
||||
|
||||
md_ctx_t ctx { EVP_MD_CTX_create() };
|
||||
|
||||
if(EVP_DigestVerifyInit(ctx.get(), nullptr, md, nullptr, pkey) != 1) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if(EVP_DigestVerifyUpdate(ctx.get(), data.data(), data.size()) != 1) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if(EVP_DigestVerifyFinal(ctx.get(), (const uint8_t *)signature.data(), signature.size()) != 1) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool verify256(const x509_t &x509, const std::string_view &data, const std::string_view &signature) {
|
||||
return verify(x509, data, signature, EVP_sha256());
|
||||
}
|
||||
|
||||
void md_ctx_destroy(EVP_MD_CTX *ctx) {
|
||||
EVP_MD_CTX_destroy(ctx);
|
||||
}
|
||||
|
||||
std::string rand_alphabet(std::size_t bytes, const std::string_view &alphabet) {
|
||||
auto value = rand(bytes);
|
||||
|
||||
for(std::size_t i = 0; i != value.size(); ++i) {
|
||||
value[i] = alphabet[value[i] % alphabet.length()];
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
} // namespace crypto
|
||||
135	src/crypto.h	Normal file
@@ -0,0 +1,135 @@
|
||||
// Created by loki on 6/1/19.
|
||||
|
||||
#ifndef SUNSHINE_CRYPTO_H
|
||||
#define SUNSHINE_CRYPTO_H
|
||||
|
||||
#include <array>
|
||||
#include <openssl/evp.h>
|
||||
#include <openssl/rand.h>
|
||||
#include <openssl/sha.h>
|
||||
#include <openssl/x509.h>
|
||||
|
||||
#include "utility.h"
|
||||
|
||||
namespace crypto {
|
||||
struct creds_t {
|
||||
std::string x509;
|
||||
std::string pkey;
|
||||
};
|
||||
constexpr std::size_t digest_size = 256;
|
||||
|
||||
void md_ctx_destroy(EVP_MD_CTX *);
|
||||
|
||||
using sha256_t = std::array<std::uint8_t, SHA256_DIGEST_LENGTH>;
|
||||
|
||||
using aes_t = std::array<std::uint8_t, 16>;
|
||||
using x509_t = util::safe_ptr<X509, X509_free>;
|
||||
using x509_store_t = util::safe_ptr<X509_STORE, X509_STORE_free>;
|
||||
using x509_store_ctx_t = util::safe_ptr<X509_STORE_CTX, X509_STORE_CTX_free>;
|
||||
using cipher_ctx_t = util::safe_ptr<EVP_CIPHER_CTX, EVP_CIPHER_CTX_free>;
|
||||
using md_ctx_t = util::safe_ptr<EVP_MD_CTX, md_ctx_destroy>;
|
||||
using bio_t = util::safe_ptr<BIO, BIO_free_all>;
|
||||
using pkey_t = util::safe_ptr<EVP_PKEY, EVP_PKEY_free>;
|
||||
using pkey_ctx_t = util::safe_ptr<EVP_PKEY_CTX, EVP_PKEY_CTX_free>;
|
||||
using bignum_t = util::safe_ptr<BIGNUM, BN_free>;
|
||||
|
||||
sha256_t hash(const std::string_view &plaintext);
|
||||
|
||||
aes_t gen_aes_key(const std::array<uint8_t, 16> &salt, const std::string_view &pin);
|
||||
|
||||
x509_t x509(const std::string_view &x);
|
||||
pkey_t pkey(const std::string_view &k);
|
||||
std::string pem(x509_t &x509);
|
||||
std::string pem(pkey_t &pkey);
|
||||
|
||||
std::vector<uint8_t> sign256(const pkey_t &pkey, const std::string_view &data);
|
||||
bool verify256(const x509_t &x509, const std::string_view &data, const std::string_view &signature);
|
||||
|
||||
creds_t gen_creds(const std::string_view &cn, std::uint32_t key_bits);
|
||||
|
||||
std::string_view signature(const x509_t &x);
|
||||
|
||||
std::string rand(std::size_t bytes);
|
||||
std::string rand_alphabet(std::size_t bytes,
|
||||
const std::string_view &alphabet = std::string_view { "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789!%&()=-" });
|
||||
|
||||
class cert_chain_t {
|
||||
public:
|
||||
KITTY_DECL_CONSTR(cert_chain_t)
|
||||
|
||||
void add(x509_t &&cert);
|
||||
|
||||
const char *verify(x509_t::element_type *cert);
|
||||
|
||||
private:
|
||||
std::vector<std::pair<x509_t, x509_store_t>> _certs;
|
||||
x509_store_ctx_t _cert_ctx;
|
||||
};
|
||||
|
||||
namespace cipher {
|
||||
constexpr std::size_t tag_size = 16;
|
||||
constexpr std::size_t round_to_pkcs7_padded(std::size_t size) {
|
||||
return ((size + 15) / 16) * 16;
|
||||
}
|
||||
|
||||
class cipher_t {
|
||||
public:
|
||||
cipher_ctx_t decrypt_ctx;
|
||||
cipher_ctx_t encrypt_ctx;
|
||||
|
||||
aes_t key;
|
||||
|
||||
bool padding;
|
||||
};
|
||||
|
||||
class ecb_t : public cipher_t {
|
||||
public:
|
||||
ecb_t() = default;
|
||||
ecb_t(ecb_t &&) noexcept = default;
|
||||
ecb_t &operator=(ecb_t &&) noexcept = default;
|
||||
|
||||
ecb_t(const aes_t &key, bool padding = true);
|
||||
|
||||
int encrypt(const std::string_view &plaintext, std::vector<std::uint8_t> &cipher);
|
||||
int decrypt(const std::string_view &cipher, std::vector<std::uint8_t> &plaintext);
|
||||
};
|
||||
|
||||
class gcm_t : public cipher_t {
|
||||
public:
|
||||
gcm_t() = default;
|
||||
gcm_t(gcm_t &&) noexcept = default;
|
||||
gcm_t &operator=(gcm_t &&) noexcept = default;
|
||||
|
||||
gcm_t(const crypto::aes_t &key, bool padding = true);
|
||||
|
||||
/**
|
||||
* length of cipher must be at least: round_to_pkcs7_padded(plaintext.size()) + crypto::cipher::tag_size
|
||||
*
|
||||
* return -1 on error
|
||||
* return bytes written on success
|
||||
*/
|
||||
int encrypt(const std::string_view &plaintext, std::uint8_t *tagged_cipher, aes_t *iv);
|
||||
|
||||
int decrypt(const std::string_view &cipher, std::vector<std::uint8_t> &plaintext, aes_t *iv);
|
||||
};
|
||||
|
||||
class cbc_t : public cipher_t {
|
||||
public:
|
||||
cbc_t() = default;
|
||||
cbc_t(cbc_t &&) noexcept = default;
|
||||
cbc_t &operator=(cbc_t &&) noexcept = default;
|
||||
|
||||
cbc_t(const crypto::aes_t &key, bool padding = true);
|
||||
|
||||
/**
|
||||
* length of cipher must be at least: round_to_pkcs7_padded(plaintext.size())
|
||||
*
|
||||
* return -1 on error
|
||||
* return bytes written on success
|
||||
*/
|
||||
int encrypt(const std::string_view &plaintext, std::uint8_t *cipher, aes_t *iv);
|
||||
};
|
||||
} // namespace cipher
|
||||
} // namespace crypto
|
||||
|
||||
#endif //SUNSHINE_CRYPTO_H
|
||||
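A quick worked check of round_to_pkcs7_padded as declared above: sizes round up to the next 16-byte boundary, and exact multiples are left unchanged by this particular formula.

#include "crypto.h"

static_assert(crypto::cipher::round_to_pkcs7_padded(1) == 16);
static_assert(crypto::cipher::round_to_pkcs7_padded(15) == 16);
static_assert(crypto::cipher::round_to_pkcs7_padded(16) == 16);
static_assert(crypto::cipher::round_to_pkcs7_padded(17) == 32);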
235	src/httpcommon.cpp	Normal file
@@ -0,0 +1,235 @@
|
||||
#define BOOST_BIND_GLOBAL_PLACEHOLDERS
|
||||
|
||||
#include "process.h"
|
||||
|
||||
#include <filesystem>
|
||||
|
||||
#include <boost/property_tree/json_parser.hpp>
|
||||
#include <boost/property_tree/ptree.hpp>
|
||||
#include <boost/property_tree/xml_parser.hpp>
|
||||
|
||||
#include <boost/asio/ssl/context.hpp>
|
||||
|
||||
#include <Simple-Web-Server/server_http.hpp>
|
||||
#include <Simple-Web-Server/server_https.hpp>
|
||||
#include <boost/asio/ssl/context_base.hpp>
|
||||
#include <curl/curl.h>
|
||||
|
||||
#include "config.h"
|
||||
#include "crypto.h"
|
||||
#include "httpcommon.h"
|
||||
#include "main.h"
|
||||
#include "network.h"
|
||||
#include "nvhttp.h"
|
||||
#include "platform/common.h"
|
||||
#include "rtsp.h"
|
||||
#include "utility.h"
|
||||
#include "uuid.h"
|
||||
|
||||
namespace http {
|
||||
using namespace std::literals;
|
||||
namespace fs = std::filesystem;
|
||||
namespace pt = boost::property_tree;
|
||||
|
||||
int reload_user_creds(const std::string &file);
|
||||
bool user_creds_exist(const std::string &file);
|
||||
|
||||
std::string unique_id;
|
||||
net::net_e origin_pin_allowed;
|
||||
net::net_e origin_web_ui_allowed;
|
||||
|
||||
int init() {
|
||||
bool clean_slate = config::sunshine.flags[config::flag::FRESH_STATE];
|
||||
origin_pin_allowed = net::from_enum_string(config::nvhttp.origin_pin_allowed);
|
||||
origin_web_ui_allowed = net::from_enum_string(config::nvhttp.origin_web_ui_allowed);
|
||||
|
||||
if(clean_slate) {
|
||||
unique_id = util::uuid_t::generate().string();
|
||||
auto dir = std::filesystem::temp_directory_path() / "Sunshine"sv;
|
||||
config::nvhttp.cert = (dir / ("cert-"s + unique_id)).string();
|
||||
config::nvhttp.pkey = (dir / ("pkey-"s + unique_id)).string();
|
||||
}
|
||||
|
||||
if(!fs::exists(config::nvhttp.pkey) || !fs::exists(config::nvhttp.cert)) {
|
||||
if(create_creds(config::nvhttp.pkey, config::nvhttp.cert)) {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
if(user_creds_exist(config::sunshine.credentials_file)) {
|
||||
if(reload_user_creds(config::sunshine.credentials_file)) return -1;
|
||||
}
|
||||
else {
|
||||
BOOST_LOG(info) << "Open the Web UI to set your new username and password and getting started";
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
int save_user_creds(const std::string &file, const std::string &username, const std::string &password, bool run_our_mouth) {
|
||||
pt::ptree outputTree;
|
||||
|
||||
if(fs::exists(file)) {
|
||||
try {
|
||||
pt::read_json(file, outputTree);
|
||||
}
|
||||
catch(std::exception &e) {
|
||||
BOOST_LOG(error) << "Couldn't read user credentials: "sv << e.what();
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
auto salt = crypto::rand_alphabet(16);
|
||||
outputTree.put("username", username);
|
||||
outputTree.put("salt", salt);
|
||||
outputTree.put("password", util::hex(crypto::hash(password + salt)).to_string());
|
||||
try {
|
||||
pt::write_json(file, outputTree);
|
||||
}
|
||||
catch(std::exception &e) {
|
||||
BOOST_LOG(error) << "generating user credentials: "sv << e.what();
|
||||
return -1;
|
||||
}
|
||||
|
||||
BOOST_LOG(info) << "New credentials have been created"sv;
|
||||
return 0;
|
||||
}
|
||||
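save_user_creds stores the username, a random 16-character salt, and hex(SHA-256(password + salt)). The matching check at login time therefore hashes the attempt the same way. A minimal sketch (the real check lives elsewhere and is not shown here):

#include <string>

#include "crypto.h"
#include "utility.h"

// Compare a login attempt against the stored salted hash. A constant-time
// comparison would be preferable in production; this sketch keeps it simple.
bool password_matches(const std::string &attempt,
                      const std::string &salt,
                      const std::string &stored_hash_hex) {
  return util::hex(crypto::hash(attempt + salt)).to_string() == stored_hash_hex;
}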
|
||||
bool user_creds_exist(const std::string &file) {
|
||||
if(!fs::exists(file)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
pt::ptree inputTree;
|
||||
try {
|
||||
pt::read_json(file, inputTree);
|
||||
return inputTree.find("username") != inputTree.not_found() &&
|
||||
inputTree.find("password") != inputTree.not_found() &&
|
||||
inputTree.find("salt") != inputTree.not_found();
|
||||
}
|
||||
catch(std::exception &e) {
|
||||
BOOST_LOG(error) << "validating user credentials: "sv << e.what();
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
int reload_user_creds(const std::string &file) {
|
||||
pt::ptree inputTree;
|
||||
try {
|
||||
pt::read_json(file, inputTree);
|
||||
config::sunshine.username = inputTree.get<std::string>("username");
|
||||
config::sunshine.password = inputTree.get<std::string>("password");
|
||||
config::sunshine.salt = inputTree.get<std::string>("salt");
|
||||
}
|
||||
catch(std::exception &e) {
|
||||
BOOST_LOG(error) << "loading user credentials: "sv << e.what();
|
||||
return -1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
int create_creds(const std::string &pkey, const std::string &cert) {
|
||||
fs::path pkey_path = pkey;
|
||||
fs::path cert_path = cert;
|
||||
|
||||
auto creds = crypto::gen_creds("Sunshine Gamestream Host"sv, 2048);
|
||||
|
||||
auto pkey_dir = pkey_path;
|
||||
auto cert_dir = cert_path;
|
||||
pkey_dir.remove_filename();
|
||||
cert_dir.remove_filename();
|
||||
|
||||
std::error_code err_code {};
|
||||
fs::create_directories(pkey_dir, err_code);
|
||||
if(err_code) {
|
||||
BOOST_LOG(error) << "Couldn't create directory ["sv << pkey_dir << "] :"sv << err_code.message();
|
||||
return -1;
|
||||
}
|
||||
|
||||
fs::create_directories(cert_dir, err_code);
|
||||
if(err_code) {
|
||||
BOOST_LOG(error) << "Couldn't create directory ["sv << cert_dir << "] :"sv << err_code.message();
|
||||
return -1;
|
||||
}
|
||||
|
||||
if(write_file(pkey.c_str(), creds.pkey)) {
|
||||
BOOST_LOG(error) << "Couldn't open ["sv << config::nvhttp.pkey << ']';
|
||||
return -1;
|
||||
}
|
||||
|
||||
if(write_file(cert.c_str(), creds.x509)) {
|
||||
BOOST_LOG(error) << "Couldn't open ["sv << config::nvhttp.cert << ']';
|
||||
return -1;
|
||||
}
|
||||
|
||||
fs::permissions(pkey_path,
|
||||
fs::perms::owner_read | fs::perms::owner_write,
|
||||
fs::perm_options::replace, err_code);
|
||||
|
||||
if(err_code) {
|
||||
BOOST_LOG(error) << "Couldn't change permissions of ["sv << config::nvhttp.pkey << "] :"sv << err_code.message();
|
||||
return -1;
|
||||
}
|
||||
|
||||
fs::permissions(cert_path,
|
||||
fs::perms::owner_read | fs::perms::group_read | fs::perms::others_read | fs::perms::owner_write,
|
||||
fs::perm_options::replace, err_code);
|
||||
|
||||
if(err_code) {
|
||||
BOOST_LOG(error) << "Couldn't change permissions of ["sv << config::nvhttp.cert << "] :"sv << err_code.message();
|
||||
return -1;
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
bool download_file(const std::string &url, const std::string &file) {
|
||||
CURL *curl = curl_easy_init();
|
||||
if(!curl) {
|
||||
BOOST_LOG(error) << "Couldn't create CURL instance";
|
||||
return false;
|
||||
}
|
||||
FILE *fp = fopen(file.c_str(), "wb");
|
||||
if(!fp) {
|
||||
BOOST_LOG(error) << "Couldn't open ["sv << file << ']';
|
||||
curl_easy_cleanup(curl);
|
||||
return false;
|
||||
}
|
||||
curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
|
||||
curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, fwrite);
|
||||
curl_easy_setopt(curl, CURLOPT_WRITEDATA, fp);
|
||||
#ifdef _WIN32
|
||||
curl_easy_setopt(curl, CURLOPT_SSL_OPTIONS, CURLSSLOPT_NATIVE_CA);
|
||||
#endif
|
||||
CURLcode result = curl_easy_perform(curl);
|
||||
if(result != CURLE_OK) {
|
||||
BOOST_LOG(error) << "Couldn't download ["sv << url << ", code:" << result << ']';
|
||||
}
|
||||
curl_easy_cleanup(curl);
|
||||
fclose(fp);
|
||||
return result == CURLE_OK;
|
||||
}
|
||||
|
||||
std::string url_escape(const std::string &url) {
|
||||
CURL *curl = curl_easy_init();
|
||||
char *string = curl_easy_escape(curl, url.c_str(), url.length());
|
||||
std::string result(string);
|
||||
curl_free(string);
|
||||
curl_easy_cleanup(curl);
|
||||
return result;
|
||||
}
|
||||
|
||||
std::string url_get_host(const std::string &url) {
|
||||
CURLU *curlu = curl_url();
|
||||
curl_url_set(curlu, CURLUPART_URL, url.c_str(), url.length());
|
||||
char *host;
|
||||
if(curl_url_get(curlu, CURLUPART_HOST, &host, 0) != CURLUE_OK) {
|
||||
curl_url_cleanup(curlu);
|
||||
return "";
|
||||
}
|
||||
std::string result(host);
|
||||
curl_free(host);
|
||||
curl_url_cleanup(curlu);
|
||||
return result;
|
||||
}
|
||||
|
||||
} // namespace http
|
||||
23	src/httpcommon.h	Normal file
@@ -0,0 +1,23 @@
#include "network.h"
#include "thread_safe.h"

namespace http {

int init();
int create_creds(const std::string &pkey, const std::string &cert);
int save_user_creds(
  const std::string &file,
  const std::string &username,
  const std::string &password,
  bool run_our_mouth = false);

int reload_user_creds(const std::string &file);
bool download_file(const std::string &url, const std::string &file);
std::string url_escape(const std::string &url);
std::string url_get_host(const std::string &url);

extern std::string unique_id;
extern net::net_e origin_pin_allowed;
extern net::net_e origin_web_ui_allowed;

} // namespace http
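Putting the pieces together, the declarations above are what the rest of Sunshine calls into at startup. A rough, illustrative sketch of the flow (function name and example values are hypothetical):

#include "httpcommon.h"

// Illustrative only: init() loads or creates the TLS credentials and, if present,
// the stored web-UI user credentials; save_user_creds() is what the password
// endpoint would call later with user-supplied values.
int start_http_layer() {
  if(http::init()) {
    return -1;
  }

  // e.g. from the web UI, with hypothetical values:
  // http::save_user_creds(config::sunshine.credentials_file, "admin", "changeme");

  return 0;
}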
Some files were not shown because too many files have changed in this diff.