mirror of
https://gitlab.freedesktop.org/gstreamer/gstreamer-rs.git
synced 2024-06-10 10:19:25 +00:00
Compare commits
2252 commits
Author | SHA1 | Date | |
---|---|---|---|
d8b7356f3d | |||
bc96d439d0 | |||
e6ed67cbc5 | |||
8a3ea1192d | |||
b045708353 | |||
c545154472 | |||
b20ea25147 | |||
4ebec84f5e | |||
10aff0d66e | |||
9d3ec9da53 | |||
413a6baa8c | |||
9e2c6268cb | |||
4cda565a39 | |||
805cd6c591 | |||
a0e58ec359 | |||
9f151466b7 | |||
1b537c17c8 | |||
c3619b45aa | |||
f59029b57c | |||
b468280353 | |||
0ef80c4fe7 | |||
455996c60b | |||
83fe420466 | |||
5af4a262b8 | |||
b8dbfc66ca | |||
b15e0e1633 | |||
a430291725 | |||
0ee36ea4b5 | |||
a7a0bf226d | |||
19ea814a09 | |||
2a9d0d035f | |||
1e293e5cb8 | |||
fe1fe5b114 | |||
238768f525 | |||
2f99c4c560 | |||
9fca740851 | |||
9490735655 | |||
81b20b9329 | |||
ba4bd5c631 | |||
4b79dddc14 | |||
01b32ce143 | |||
873aeff133 | |||
87cc9fe6e4 | |||
bac0828260 | |||
200d8b1c0c | |||
dc04a53207 | |||
0bb334e14c | |||
46226106b4 | |||
b7b5352353 | |||
88a6977777 | |||
cb560e59a3 | |||
241338f43c | |||
5c8a989029 | |||
57050f66c6 | |||
63654c67da | |||
70a15e8dbe | |||
953e3747f2 | |||
e117010bc0 | |||
694d1fd39b | |||
db03c8edd1 | |||
ea25c9262b | |||
0d872ae6f8 | |||
7433ea79c9 | |||
46be4a0b1e | |||
43c82da25a | |||
da1f53f4c7 | |||
0524435190 | |||
917c458a86 | |||
5eaa0ca46d | |||
5400979e28 | |||
c43c08804a | |||
a7ebe45ff3 | |||
2b53c55ee6 | |||
04c840a1d9 | |||
6111663e26 | |||
7cea7ba6f1 | |||
e2e38d9494 | |||
676e41064b | |||
4524af89ee | |||
e5830c2ea9 | |||
d7fe0709a5 | |||
983e8b3308 | |||
6aff1773bd | |||
ebc06257b5 | |||
86d02890ca | |||
29c82cd54d | |||
c05563d22e | |||
9e80250b49 | |||
0b027c853b | |||
3246f4fb5b | |||
82f6accc31 | |||
ffad1188b9 | |||
353e3d1611 | |||
b5cb4ae831 | |||
14576fdf26 | |||
70045a36fb | |||
28451435a4 | |||
fcc7ab6b88 | |||
18a02f6d34 | |||
c5111ddcc2 | |||
d5917be045 | |||
4e6ddf6663 | |||
ab0a29b765 | |||
8062a8748f | |||
28fe70f479 | |||
c8b98dde8c | |||
b47aba1837 | |||
28931e2f09 | |||
1649e268c5 | |||
d575cd1f95 | |||
ac275fe10e | |||
b10f395c2c | |||
22796cee0c | |||
7f9dd58718 | |||
798ee29b98 | |||
040a194700 | |||
b69fee9abe | |||
a87a844604 | |||
5d19b26974 | |||
2613c57739 | |||
f82b9cc197 | |||
12c9ada9e0 | |||
a784ea2d0b | |||
90e6108ed7 | |||
77b2800caf | |||
c4636fc0cd | |||
6a1441203d | |||
811e564ff9 | |||
0eacca7102 | |||
aab36d9745 | |||
d96dbef08c | |||
7ec3c8713a | |||
36792404a9 | |||
ebc18ea0b8 | |||
9cf270611f | |||
79846af1e6 | |||
90bb9182c2 | |||
a3b3017d75 | |||
e16832a2a8 | |||
a165f1aa96 | |||
e192eac554 | |||
a31940a916 | |||
230c906626 | |||
fde0c061c2 | |||
a51b5bdfd3 | |||
8230a7ccba | |||
2dff5b8ae2 | |||
14ffbfbe83 | |||
771dbb4ed5 | |||
aaea288abf | |||
cfc1aed3c3 | |||
ba91648bd5 | |||
95c00c4a5c | |||
193bcbf055 | |||
ce5dca918d | |||
e6ce8e4f71 | |||
a83680ffeb | |||
e13e9a7a7f | |||
446bb7ec3e | |||
047f4a3f75 | |||
c376bfac4d | |||
6cea21617c | |||
c5357064cf | |||
b5c7c402b9 | |||
e35782a3a4 | |||
92064a0c41 | |||
c66fc90566 | |||
0b4c602c6f | |||
3b3c3baee5 | |||
5fe76aa785 | |||
5f01bcf8f4 | |||
60e8c44abb | |||
08fa853c7e | |||
52c764b986 | |||
b5f4246445 | |||
ea002e2e11 | |||
dae3d30fae | |||
52bd716a80 | |||
26611a66bd | |||
003ebbdf1c | |||
0533160d94 | |||
dff595193d | |||
86cf7a7d81 | |||
38a9b7a242 | |||
2fb93e1c12 | |||
8b9862052b | |||
5c88bd0b5f | |||
96de51a7b7 | |||
cd30854c2b | |||
33e3e25b49 | |||
248b6d2f31 | |||
2139f368e9 | |||
e72a3bfc8d | |||
34fee6b691 | |||
fddeacc358 | |||
5d8652e872 | |||
3daab0112d | |||
0ec7b2608c | |||
84ca72a833 | |||
ceed45cfd7 | |||
a29d7c0e19 | |||
f055c113ac | |||
9bcf48050b | |||
a649e7dead | |||
f255b82b55 | |||
7f234c88ac | |||
59420b1590 | |||
a05e3fed14 | |||
ca8309a5dd | |||
897c7dfd39 | |||
c82ba6ffe0 | |||
e56061c25e | |||
4c3de8b80e | |||
db2028c4c5 | |||
9ab8dee59c | |||
6371b82c48 | |||
9617731206 | |||
53f1ab938e | |||
6cb371d3e5 | |||
1946973c25 | |||
c4413f1db7 | |||
3ac254d34c | |||
30d8a7893b | |||
4c6bb9eefa | |||
39b472ce8b | |||
9bc2a3dbf5 | |||
9419730ea4 | |||
ec3a3610d3 | |||
6403d3c0ee | |||
b8fff2d6fc | |||
7cabb4f22c | |||
bf568714b6 | |||
73180e530b | |||
5fdd56747f | |||
0f3d2d6d09 | |||
ce98a4755e | |||
1ec4560b62 | |||
5741b6a52e | |||
5ee8ee8545 | |||
754b6487d4 | |||
41519511aa | |||
3051401aa4 | |||
4b3d9f586e | |||
f471501df5 | |||
eb6d3a6c6c | |||
86d470e82c | |||
e84af103a1 | |||
339bec6aef | |||
b901322c46 | |||
4a015d94af | |||
fc4a0d29c6 | |||
130dc49b22 | |||
bd4122e334 | |||
bd9b1d6e38 | |||
a26fcaf0ad | |||
16acea71d4 | |||
d5ba6c1336 | |||
4d19d7b0b6 | |||
414019af21 | |||
a41dc25eba | |||
a04ed127af | |||
5312131069 | |||
62f58620b7 | |||
4c8d16d09e | |||
547cfb44e2 | |||
3f16233a01 | |||
27a0bc5af0 | |||
abdd4df415 | |||
b158ca83f9 | |||
a2387d1f84 | |||
8df470b85c | |||
b42b01ba86 | |||
3a5f69b64c | |||
a6470f13c9 | |||
7bde0285ff | |||
d7494bf1db | |||
fa3ce573d7 | |||
44602238d9 | |||
c0696d872d | |||
635b31614c | |||
72a5b1bdb9 | |||
3e5316c869 | |||
f8effdda61 | |||
6eb01dc916 | |||
61d559521b | |||
e8387bf4cf | |||
4957921cfa | |||
2a00236a1f | |||
8e3994f641 | |||
c071d8cba7 | |||
a60cb26c27 | |||
51075c71f6 | |||
b80a723de8 | |||
b93113c4c6 | |||
3988df8463 | |||
7167fb78ce | |||
3228c36ef7 | |||
54979d859d | |||
ba202a5f87 | |||
0306dd6b53 | |||
5e32d2efbf | |||
c7662ce15a | |||
4b87796c92 | |||
83a562e227 | |||
f4486f5d61 | |||
4976e4ac4b | |||
13835a9f03 | |||
db16dca822 | |||
277cb517cd | |||
76b8281709 | |||
a7be931474 | |||
faf03c73ca | |||
9c6e48119c | |||
7a7b2c7b21 | |||
af100377ed | |||
e95e62d871 | |||
03417068dc | |||
63852b3b19 | |||
783b95884d | |||
03b614372d | |||
ab153de47f | |||
bac88e88b8 | |||
00e42854fa | |||
e4d7748241 | |||
7a79fc89d6 | |||
cdd548acba | |||
1e2e57836f | |||
c0f9551fc4 | |||
16e1f92489 | |||
0d7555bed0 | |||
d96d164c8d | |||
f8bb992aaf | |||
a53069208b | |||
2abf75122d | |||
8fba09e1ed | |||
75f4c66f14 | |||
92327be3b1 | |||
63c79d377f | |||
29ffd10b35 | |||
8b6c99a84e | |||
88c21505d2 | |||
a4247d5199 | |||
214f61abc5 | |||
19cf78d85f | |||
2fe62d3107 | |||
387c51f860 | |||
89ab9d09c8 | |||
f9fa7f55fc | |||
b156ba2c59 | |||
7cf66dbc61 | |||
9eacba1569 | |||
c2cda2c067 | |||
4ec5f99142 | |||
7ac1a2b753 | |||
8cf099f0ab | |||
6a703508bc | |||
2740c38cdd | |||
617652dce0 | |||
80abcf6ca3 | |||
67b8c29274 | |||
bc979b7ce9 | |||
354f072ff3 | |||
f806967a2e | |||
c6015043f5 | |||
a913a895c0 | |||
90aad36c34 | |||
42fe22301b | |||
296a12d1ea | |||
96c7eb9563 | |||
eff6b4c952 | |||
12bed29738 | |||
af57f6f17b | |||
1bc197db79 | |||
a66fc95d2a | |||
7f7c7a4165 | |||
7c67375d99 | |||
b59680f437 | |||
e2c1dad0d5 | |||
49bf604276 | |||
750206067c | |||
1df5b0d028 | |||
827cb31bac | |||
d72884685a | |||
df67a2d860 | |||
b89b135c93 | |||
2becc79dfb | |||
5f8aaed96b | |||
093bc9b9cc | |||
a5fa1dab79 | |||
f75aa5f1f8 | |||
130805fc50 | |||
5f16254059 | |||
2290262c2e | |||
fadca54d51 | |||
53173eb46c | |||
71b77c513d | |||
3ffba2453d | |||
041a1f2a8e | |||
b6919d7c99 | |||
a13dcad033 | |||
6ab34e1656 | |||
4d7809424f | |||
2f75087830 | |||
adea2428af | |||
4f8862e15b | |||
074a2b1578 | |||
4384934b32 | |||
9453d63631 | |||
7cb5473ba6 | |||
13f0483a44 | |||
ccf3b57a8b | |||
33d4969493 | |||
13f4085456 | |||
9d61e39ed2 | |||
3d82f9cb65 | |||
3699da7314 | |||
5c331e7e77 | |||
40578ae7e9 | |||
5a24f2d9db | |||
0585476687 | |||
b1577af7cf | |||
5dd1decd6a | |||
86eb6c2bd8 | |||
145664ec0d | |||
e026d922e4 | |||
5c156737a4 | |||
e108a908dc | |||
98a8b75646 | |||
68e0ae9a9c | |||
6fc969932b | |||
e75391139c | |||
3cb2454fd3 | |||
341ac517d1 | |||
2dbd99bd18 | |||
03d046daba | |||
a11e5cfd75 | |||
4d787df819 | |||
ca2fd54230 | |||
fdbef66c24 | |||
ce0ddc7be0 | |||
8b0398aa32 | |||
a88b06f73e | |||
a252de69b1 | |||
e584fdb17b | |||
ca9d822042 | |||
98db1b546e | |||
9647ce8895 | |||
b34718697c | |||
8eae37c525 | |||
68db910bc8 | |||
6cb19c1f18 | |||
85e427345e | |||
0ab72911ee | |||
954d88154c | |||
81a5f25f81 | |||
a7131fc051 | |||
bc81e5a6a2 | |||
6d3c9d931b | |||
d2ef4023f7 | |||
7344e4bab2 | |||
273f084c06 | |||
368e1cacb6 | |||
fe038b7a14 | |||
43b5cffc85 | |||
ce0fab9cf9 | |||
b243ba1577 | |||
01d3cef73e | |||
c4a968a403 | |||
0bd6e07346 | |||
4b112a9bb8 | |||
37785b222a | |||
32a02b51e1 | |||
0b922b0e89 | |||
0056c62cec | |||
58475f4a25 | |||
8b6bf18bb0 | |||
b6e253acbf | |||
de01403658 | |||
b3980b389d | |||
862f4d014c | |||
cd8e8cea5a | |||
12b4a9a03d | |||
70b42a4846 | |||
f0bb4e5bef | |||
6ffb1db482 | |||
2ac85cd8de | |||
60ad9b5038 | |||
6f06a26327 | |||
7d9c12f8d1 | |||
13140d8b8a | |||
d284fcdca0 | |||
b9f36c1b42 | |||
f6f0465655 | |||
9155d6eb7b | |||
c2dd69dfac | |||
8692caa0a6 | |||
172e22c4b3 | |||
91eec7a33f | |||
3ddbdbd383 | |||
a021aaa3ce | |||
77866a52df | |||
00281e98f0 | |||
c4dc549a5e | |||
478606e44c | |||
c62530e181 | |||
f1db72dab2 | |||
51611ba779 | |||
a7f670df7a | |||
5f05f7ec05 | |||
01bbb07744 | |||
5cea810820 | |||
f24b38470b | |||
7efc22ebc9 | |||
ecfb10649a | |||
4b99aae824 | |||
e82a1d0eb8 | |||
8585158880 | |||
848ed9c1e5 | |||
f56febfd35 | |||
0f1c937465 | |||
73a9a5b3a3 | |||
72d1f72edc | |||
c33fb38ca1 | |||
2c2a6c3871 | |||
d6bf3e1c3b | |||
36f89e19fc | |||
dc5e408c2d | |||
eadb3c6db2 | |||
0f859b9029 | |||
8ef4420257 | |||
d5e24b9fbd | |||
38dd1f462b | |||
257a049673 | |||
24a16a43b6 | |||
d6e9cd33c0 | |||
56498aa856 | |||
a6abe15678 | |||
82cf6e7842 | |||
c2e7abd128 | |||
094d74e391 | |||
0a6ed3c717 | |||
dd284a80ea | |||
a68c37e4eb | |||
f8d17555c5 | |||
8aa3e4417c | |||
2a11b9d2cf | |||
64abf69987 | |||
3e2eb6e652 | |||
3a340d0a89 | |||
003554876c | |||
ea136515cf | |||
6152dd7e3b | |||
aa23ddbbdb | |||
37bfb78fdc | |||
f235dc987d | |||
0fccb73eb6 | |||
608cae3703 | |||
f07727ee6d | |||
1b022a6b7c | |||
da2eb50dd3 | |||
84720eee66 | |||
89326c7e7c | |||
277d63601c | |||
ba1d8c5ce6 | |||
7b279b9d6c | |||
8d30bcbf4b | |||
5e852fa0ff | |||
315704fe67 | |||
1e793f3b65 | |||
32a608b76b | |||
b94bfe1613 | |||
a028e807df | |||
7490846309 | |||
567ce0a3bf | |||
45c145ad50 | |||
dce54ee237 | |||
0e51e70c5a | |||
6ff5f89832 | |||
0ed3d95c60 | |||
81bcf5c8dd | |||
30838b6549 | |||
cbd3035a5b | |||
8ed150c853 | |||
ae688406f8 | |||
06a0dbacba | |||
036a020b62 | |||
882513d33a | |||
aeca82c095 | |||
b4d3bf297e | |||
e6e5d25e48 | |||
e9ecdcdd72 | |||
3d0f563111 | |||
141a71d69e | |||
dfc6344f59 | |||
146b4fc08e | |||
cdc7aa4846 | |||
dcfea9c35a | |||
d6cc452cf3 | |||
f166e80a79 | |||
e70536f6b2 | |||
f7b848b3cd | |||
cd49019941 | |||
7ecc11564d | |||
7b3b4f56ee | |||
bed4b8644b | |||
6c7196732e | |||
8344cbc829 | |||
3764bf75f8 | |||
d7a292ec00 | |||
7113458128 | |||
2e2b572215 | |||
8c22ba25f8 | |||
857f3740e2 | |||
c32607a010 | |||
9deb507286 | |||
d867077c3a | |||
c3a950f207 | |||
e3ad1918dc | |||
e579eb5d84 | |||
0fa8d0d62f | |||
2a935320e7 | |||
ef5d5f50e4 | |||
355ebc243f | |||
b58d518aa3 | |||
de04e3f827 | |||
0e7f5a19df | |||
2be477b753 | |||
3a61276cdd | |||
750422d8eb | |||
1935e04dd8 | |||
3ab7bc7648 | |||
a54c234fc6 | |||
e8859951d3 | |||
7012fa638e | |||
40a5a9edaa | |||
6a5d16d817 | |||
73ac98218f | |||
407f03af0c | |||
9d1cc19162 | |||
b89b98b6e9 | |||
b5fcbc9657 | |||
0bc16c65f0 | |||
ae4dd88f3d | |||
0b013001b7 | |||
92d8df4ac3 | |||
e87de384e1 | |||
3da927a856 | |||
cbfc19ccfc | |||
1ee4845544 | |||
1fb531bba2 | |||
e763e4fdde | |||
d388cbda13 | |||
b482ef4af5 | |||
fa5491e6b3 | |||
5a0152b469 | |||
1264eb10ac | |||
56b9b66027 | |||
774fafd987 | |||
77fd187986 | |||
88791294a5 | |||
9ba448427d | |||
a6ef6dd1ec | |||
c69ff617f5 | |||
f1c8869447 | |||
58e51417c0 | |||
d3543efec2 | |||
32c432655f | |||
220da7f1e0 | |||
e2b38774cf | |||
7079768dd7 | |||
19949fa2c9 | |||
efb85f416e | |||
01e24d2018 | |||
951f000622 | |||
c6cbf86012 | |||
2cad43cd0f | |||
2bffc4e5ec | |||
708ad14422 | |||
90b8ee2022 | |||
87ea535bc1 | |||
67ecf0823d | |||
92215f4a63 | |||
031133c179 | |||
7331dbc440 | |||
47394a437f | |||
bf7c770457 | |||
f439a0ab85 | |||
a1165a7456 | |||
2a4e1069a1 | |||
0e5a4f05f8 | |||
4ebdfb8602 | |||
bfbcdb5925 | |||
130cc9d63b | |||
7423b1dea6 | |||
7ad75d4b1f | |||
9c282ec7c3 | |||
5495acf77d | |||
67e3183922 | |||
19f7aaf413 | |||
9a9ce252a4 | |||
d80227e566 | |||
f9690817ad | |||
935609ad6c | |||
cc59067811 | |||
77252708a4 | |||
Rafael Caricio | 20a493ee49 | ||
8d3ada5d89 | |||
49faa03c98 | |||
384783b242 | |||
f5a902ba49 | |||
7721030c15 | |||
4bdbe0022b | |||
6f51444c1f | |||
b230d8aab6 | |||
39bb9abc3c | |||
6d4ad712c7 | |||
734afa998b | |||
69f05a1577 | |||
b9e4b91a3d | |||
7d10f9f4c8 | |||
8aa1634902 | |||
5cdf98b76d | |||
344026a178 | |||
08cac646be | |||
b89a285178 | |||
dcf6d16496 | |||
08551bb1bc | |||
f6336b1be3 | |||
8e2c621a9f | |||
a4d901ee63 | |||
7bd4d770ff | |||
f17781e188 | |||
25c53c4276 | |||
274a5bd020 | |||
04b6710f84 | |||
8a7813d04c | |||
dac3cf67f4 | |||
3cd902513d | |||
dcd53bd16e | |||
6300c39998 | |||
d8a57e6d4a | |||
238f9baee5 | |||
c64b6ac68b | |||
ade0aad6b0 | |||
bff1354b74 | |||
e2fe1d6371 | |||
57d8d46ab6 | |||
eb4d997f0a | |||
79377337a4 | |||
Rafael Caricio | 69ea30dbda | ||
4f2d974aa6 | |||
21e342c947 | |||
ccb9e3a7f8 | |||
e294bb9713 | |||
a2150b8368 | |||
92c6eefadd | |||
99e344af4d | |||
86549dc06e | |||
8595b67218 | |||
183a399d1d | |||
2b4fcb131e | |||
1411c9e35e | |||
960befb2f5 | |||
0d229b4c69 | |||
432cb36611 | |||
8e6afe147e | |||
a1dbc7a0ee | |||
e0d9f886e3 | |||
55ef309b16 | |||
b8e1c25c85 | |||
253ba03a7a | |||
a75d881272 | |||
64829ac47a | |||
1b10c5324d | |||
59efe09fe5 | |||
8a6de3ca4f | |||
f294339240 | |||
7fa76eac1b | |||
dec112d0a8 | |||
77029602ae | |||
eb798fb83b | |||
97e8913cb9 | |||
f014505e1c | |||
069c6a2c01 | |||
879d6a4548 | |||
099ffdc4b4 | |||
8d982d8b51 | |||
efe13948cc | |||
5163e212ed | |||
a7618aecc2 | |||
e5a785c9a5 | |||
d00b289621 | |||
35c6af424d | |||
201eed8400 | |||
6c594b4c38 | |||
b6c8938f1a | |||
5227af5935 | |||
8ca9692924 | |||
29bc304c88 | |||
be356dbf08 | |||
9a9b7bc5ce | |||
f7248408cb | |||
Rafael Caricio | 6163914605 | ||
44d899a0eb | |||
8a89be67f7 | |||
870ad5e4d7 | |||
a207765e36 | |||
ebab45d7b0 | |||
e229288ecd | |||
a341b4972f | |||
1e24e2d133 | |||
0c9158f404 | |||
64c09353b7 | |||
e9d95eda59 | |||
70d95dca4c | |||
f65d410fc5 | |||
ffa6d67868 | |||
76f01516e3 | |||
4c7581ab03 | |||
0e72d934e1 | |||
8d9abb935b | |||
be2de21602 | |||
fbad0fd0bc | |||
0921028507 | |||
212099c55f | |||
fad06dd47d | |||
c5d19e7c80 | |||
c0fd8a8aad | |||
2dcd5cf9eb | |||
4e221c1b48 | |||
b06a692dea | |||
583ef0395d | |||
2cb7fc8122 | |||
446f09fec6 | |||
8a50bbcaa3 | |||
9018d1e3ac | |||
7a9537c0fd | |||
751a5b4788 | |||
138bc71051 | |||
32fbb04fa4 | |||
95b541cf82 | |||
2d17d46c50 | |||
414222aa99 | |||
dccd0f9e01 | |||
8cb13ac752 | |||
b5920ee729 | |||
16655828ab | |||
afe594089b | |||
4201727769 | |||
cf1a64ecf8 | |||
99fb9423d2 | |||
8720c9c386 | |||
2090452e3d | |||
538eb95e51 | |||
a787197254 | |||
865df62f8d | |||
fe319af598 | |||
fbceaab941 | |||
0e17878bed | |||
f2a2e6702c | |||
17005f220b | |||
4dade30f0d | |||
f30fcbcf9a | |||
65629426cb | |||
07562c592f | |||
90aaa32957 | |||
fb724aedeb | |||
8e9e135128 | |||
542bb130f4 | |||
f333991a5a | |||
c61d913ec6 | |||
9e77ab3212 | |||
14f485d0b1 | |||
20ac231146 | |||
c9d07219c8 | |||
7754b8dd70 | |||
310ff531fb | |||
b28664ccbd | |||
6b47d646c0 | |||
5e3ace0ecc | |||
2c373f5d24 | |||
50f3eee8eb | |||
01b8279bc1 | |||
35aff36c61 | |||
f35baffb52 | |||
c254c5fd1a | |||
cb1f08a425 | |||
aaf601831e | |||
d4430ecc6a | |||
8f1c94f45d | |||
5ed8416717 | |||
e76ae3f434 | |||
0055399471 | |||
7283fe69b2 | |||
9d873d6755 | |||
7075a4b3ea | |||
4d7bd9bfcb | |||
d53dfcf94e | |||
5e27da5a1c | |||
842dad3fa5 | |||
5d2f36fac3 | |||
268e351dde | |||
8c69388eff | |||
5349822962 | |||
f68efd0cab | |||
351f070c8e | |||
98cc289bcc | |||
e263bd8945 | |||
d6095900e9 | |||
e82d388410 | |||
a51235048d | |||
f3bba21faa | |||
56a46fc6fd | |||
081d42ac86 | |||
b62d22852e | |||
45e16f7753 | |||
c1d3ed5eac | |||
cd49659fae | |||
0aa59be45c | |||
45856ac35b | |||
1b22be2e15 | |||
94b5a13f4b | |||
fec2718aef | |||
a34bc85ca1 | |||
d83faa044e | |||
a16c3888e5 | |||
80b0b378fc | |||
ba955a22bd | |||
b50a7993de | |||
4a28a188d9 | |||
90ac00a6f6 | |||
d3eb9db8ce | |||
0f89be2751 | |||
2b7a63fc8e | |||
e04bb8b524 | |||
7fa0b4e84d | |||
0e0d9f532b | |||
7420a72831 | |||
40dcdcbf5c | |||
61a3f5296c | |||
f095b6f0ba | |||
af89799e06 | |||
062ceaa00a | |||
3b18f10342 | |||
8d001d8b16 | |||
7d2ab1450e | |||
c82aefd2a4 | |||
680d4af732 | |||
4651c9db4d | |||
5052a93ee3 | |||
a90aed0d41 | |||
60a192d525 | |||
9a8ca0cadd | |||
627f4d798e | |||
bcf4f63ac6 | |||
d92e49a797 | |||
b36b63c60a | |||
8dd2af610f | |||
3532c9adb2 | |||
d5e4705605 | |||
1274a59472 | |||
23e4d1efa8 | |||
72027a6d60 | |||
89837f0757 | |||
8d03a0d032 | |||
715f7cd6c2 | |||
eed648831d | |||
4d002786ec | |||
73ab9054c4 | |||
8908d4bc98 | |||
5c14350079 | |||
e31e2ad9bd | |||
b7afdd4dd0 | |||
44b2eba600 | |||
ae234bb01e | |||
2c7c46e153 | |||
69599e90d4 | |||
07be60a22f | |||
208e1ef7a4 | |||
21dbe86c8e | |||
c251ca5bc3 | |||
fbe0728da5 | |||
215cdfd548 | |||
2943c310cd | |||
cd2e3aa7f1 | |||
e6dc3f5b3e | |||
609a744d6c | |||
3fbe37a2ec | |||
faa1311fcd | |||
fe7850720c | |||
3080c37897 | |||
1a11fda103 | |||
12f1483d43 | |||
54440e47df | |||
2f55ffe419 | |||
39e90b603b | |||
6f2cd17232 | |||
a3b95d60ae | |||
4b3362039e | |||
3234a18677 | |||
389d406e73 | |||
163130cad7 | |||
a8dc789ef8 | |||
44d39f8824 | |||
240da14042 | |||
beb3ad2010 | |||
b69043d462 | |||
1f7a0f29d9 | |||
0f22eb7633 | |||
319f1c68a5 | |||
ab04715dc7 | |||
bf660cf22a | |||
fc1185b951 | |||
972c4e75ac | |||
ad1c2d83a4 | |||
40be06d5f6 | |||
bdac7cb99c | |||
b451f692cf | |||
586fc75ffc | |||
462e19dee9 | |||
2426d7ce8c | |||
7ad71949d7 | |||
d68c359db5 | |||
a1fed48e8c | |||
d1f978f9f1 | |||
98e12e835a | |||
033dac2270 | |||
0173b73170 | |||
ddd4e3a79a | |||
8ae97b8c64 | |||
47ae1c8658 | |||
899703fb8e | |||
2176e17add | |||
70e95d4576 | |||
4956346b23 | |||
74557132e1 | |||
7d1e7478ac | |||
23565c8723 | |||
deced8b3b8 | |||
cb5edb1e5b | |||
fb49d26b11 | |||
0dc1ee439f | |||
0403fd9aa4 | |||
7ccab6cc8b | |||
60cd874db7 | |||
52e09fd83d | |||
a734f38ee6 | |||
26f2ed30ae | |||
15763a941b | |||
06cf5de45f | |||
13eb483444 | |||
03ad59f006 | |||
bb94064e11 | |||
1ebfaee644 | |||
eecce101da | |||
b66a00ed07 | |||
ab0eb8a1a4 | |||
5e2249d368 | |||
65c833bc80 | |||
68d492bb33 | |||
fb56bbda6e | |||
ee9157a239 | |||
8f5e373882 | |||
8fa1076dfa | |||
971f35754c | |||
8b09a84bf1 | |||
22a86a66b4 | |||
5a9b103f93 | |||
7444975421 | |||
ce1a25a128 | |||
2bb500db0b | |||
5151b76729 | |||
711a4bb76e | |||
605c633579 | |||
847e800269 | |||
cf50295b73 | |||
07f2c46e0e | |||
9eb479cf78 | |||
e35d26fcd3 | |||
d245056e7b | |||
a3875eab94 | |||
45708832dd | |||
843d3749b7 | |||
1b46bca2db | |||
6d63180bfc | |||
ac9b1e881c | |||
27dd305cb4 | |||
0f6d1cbe31 | |||
1dae136ae3 | |||
bbcd221e8d | |||
fc452036d2 | |||
56dfe0fe59 | |||
74e70eb580 | |||
1e18c59d2a | |||
e448a762ca | |||
c30eff771a | |||
4215c6fab5 | |||
97f59d34fa | |||
9bf5f641ff | |||
0526471689 | |||
1541fbf32a | |||
d1cc8d1298 | |||
d9a88b7cb7 | |||
5a75f3bf8e | |||
ee1a5e8395 | |||
8f3ed6d289 | |||
668a9bbf58 | |||
33f75191d5 | |||
f6dc14b946 | |||
7cc1523e7d | |||
7d968d78bb | |||
badec2252c | |||
8e6c08f8fd | |||
c965217e54 | |||
15fbb17a09 | |||
e0e17b8b25 | |||
1d78ac9323 | |||
abd2d4dfd7 | |||
31b78f483e | |||
14dadf4c62 | |||
f31aa2efee | |||
27613a8901 | |||
1ffcea4da7 | |||
86b07ce5b9 | |||
668b2b3fdf | |||
9263c9f42e | |||
21d38e41d6 | |||
3b28bccf03 | |||
ddf398e3da | |||
2471150e72 | |||
63f6d6a7d3 | |||
7c1ed9ff4d | |||
f3647adfbc | |||
8faaf8a629 | |||
e61b83f592 | |||
04a87085f6 | |||
279f1a404b | |||
e68790d579 | |||
e74f54a64f | |||
38078c71f8 | |||
41b1263061 | |||
3288992f95 | |||
0d391d5031 | |||
6527ffbce0 | |||
a2e22416fe | |||
df36a7c57a | |||
1eba9b64e5 | |||
0b544ffbd3 | |||
dd9e12822f | |||
eda7cebb42 | |||
c6c499aed2 | |||
d788a10856 | |||
eb1c62612e | |||
3cf2104726 | |||
f27a2bddd6 | |||
1480c65c32 | |||
e3a65a3a88 | |||
213020165a | |||
0d009bca31 | |||
0444660d1f | |||
220c500799 | |||
a3015ab507 | |||
9901f0c6a2 | |||
30bb699cbc | |||
82b7239379 | |||
3780dc7667 | |||
c942a1d563 | |||
b4282c9444 | |||
cdfe4b3d67 | |||
082d109f27 | |||
d785bf8c3a | |||
c00a97f5ec | |||
0a5100ca9b | |||
b2746c6c29 | |||
b43d785d83 | |||
e657e676d5 | |||
42feed5441 | |||
c2e43f86a5 | |||
502a0080f0 | |||
18c7c6a3a6 | |||
fa72d53437 | |||
92f9f3be39 | |||
b4a3738b82 | |||
8cc018c164 | |||
984ab87c79 | |||
06916b23c7 | |||
9e1fb4570f | |||
e95a75fdab | |||
7f0ca0011c | |||
12fec484c7 | |||
cc76442601 | |||
4579fb006b | |||
fc49a0cfd8 | |||
18e45dd991 | |||
a7be9ea9d9 | |||
febd0ee144 | |||
e6b78d1489 | |||
f315d3a052 | |||
4bbd201377 | |||
f83b385487 | |||
deb22b264e | |||
986a136492 | |||
051df59cd1 | |||
675fe349a7 | |||
67af5ac569 | |||
cdddf48f08 | |||
6e3878c8f5 | |||
8b77bd4068 | |||
d01ed466d0 | |||
aab0a45cfc | |||
87e7a84a27 | |||
3d39faac28 | |||
7037f6d836 | |||
e989899467 | |||
3670076f07 | |||
219616ced0 | |||
dc7e705f44 | |||
f6cf6c8863 | |||
7e71c74505 | |||
b9cd38b796 | |||
6d88e18772 | |||
8fb37c5134 | |||
37c3d2d8e0 | |||
d7fe610767 | |||
6e6f5108a9 | |||
1c7b4516f9 | |||
71e58bfec2 | |||
72b373c454 | |||
650787ee7c | |||
da61c19b69 | |||
b13f2060dc | |||
757530bd8a | |||
c4cbcecc7c | |||
e8d25438e7 | |||
8f51cbc899 | |||
eb6939f8b7 | |||
baccf52e43 | |||
931e35cdf9 | |||
f5f5fae815 | |||
d0902f19a8 | |||
513fb4cdc3 | |||
e5977c2d20 | |||
4e235b0492 | |||
c2a2549b66 | |||
c05d2e24af | |||
14edcf9aaf | |||
bcddcc2c88 | |||
226419003d | |||
669bf52291 | |||
0e435578b7 | |||
64ec364259 | |||
13165938ab | |||
0fee9489a5 | |||
e2a9f83f5b | |||
c66287843f | |||
a1c4a58180 | |||
4b8f85c4c6 | |||
1856d47a06 | |||
672b7abe1d | |||
60c3272224 | |||
538ce91987 | |||
7c5303c492 | |||
80ee1edec5 | |||
9f1b8f6165 | |||
514c18ad5a | |||
4b2f0a936b | |||
6b4f6ca45c | |||
acb2a9e522 | |||
ce09e333b3 | |||
4951fb8630 | |||
2e57783c9f | |||
a3a7f85f7f | |||
a208919c88 | |||
212d4ef008 | |||
430abb1e9d | |||
c68b601c74 | |||
59b5ad9c8a | |||
db30c121a0 | |||
adfb6e35a4 | |||
12887f1931 | |||
782d84bede | |||
ece84597fd | |||
068b078edf | |||
6db5ab5631 | |||
323c67dee4 | |||
bdccaeee0d | |||
43bfd1ae85 | |||
c5c9fd81e4 | |||
2ded2837ba | |||
abfa75b334 | |||
f504493134 | |||
cb475e0965 | |||
3ad9d3340b | |||
f84fc0dc59 | |||
e146672851 | |||
26a6fba6e2 | |||
0564b2f9fb | |||
e2264a3f3f | |||
38dfeb9b00 | |||
a0d82ec741 | |||
b1afc4804a | |||
044a72d9f6 | |||
6ad0e9e0a0 | |||
a091ea201c | |||
386bd05817 | |||
93c794b338 | |||
b73b4428ba | |||
eb5df0ae26 | |||
45f8035ccb | |||
7380f18ae7 | |||
bea15b8ebd | |||
ffa9697edd | |||
6275b4ba70 | |||
c30ea09e4c | |||
e008923a30 | |||
edac1a1d3a | |||
c9b88afcb4 | |||
393d8959bd | |||
a06cf66e6d | |||
ac75382d8c | |||
35c2086e2a | |||
c0904af4a7 | |||
55c10235cf | |||
6770a57f24 | |||
ee07ba4bc4 | |||
1cec94af1a | |||
e784f56ea5 | |||
0adc8c44df | |||
7bb9a0df70 | |||
9123143b57 | |||
611d65f15d | |||
7c0fa02ffb | |||
dfeb777513 | |||
346573875b | |||
a94d84e780 | |||
156dffd00e | |||
6a5f17ae06 | |||
c723ecac5d | |||
64d1598336 | |||
b18fd9cc6d | |||
0a71e3c8da | |||
62f963d61d | |||
aaa9eddc85 | |||
e31ce9d07b | |||
90bb458e64 | |||
8dda8def6e | |||
03e8e6d22f | |||
532af96ef9 | |||
dc763501ae | |||
fde3d2088d | |||
b8d50a342f | |||
110bfce7e6 | |||
a6d5f6f5a5 | |||
738a07b279 | |||
a85b30eacb | |||
a828d3cc12 | |||
8226c94110 | |||
510e4cd2b1 | |||
ae6d3e2b1b | |||
5b4a28e63d | |||
aa60c40c61 | |||
6588144300 | |||
7d13ecf1aa | |||
0d26534233 | |||
5b27f8c94e | |||
7e465e4639 | |||
af6efa3efe | |||
899a1f6ea4 | |||
563beca4d6 | |||
bdc26746b6 | |||
9fd768425e | |||
a7d559116d | |||
60b6fe9749 | |||
6e2cba928a | |||
fc1716cd8a | |||
a2bb72b604 | |||
3d8d1ffb19 | |||
9b22b09efe | |||
b0fc500ed1 | |||
abf82e6f7a | |||
96266ff922 | |||
7fcd560d18 | |||
170f7214eb | |||
b19dcd0b66 | |||
be33e4f776 | |||
09d39a9cbb | |||
f78cb39615 | |||
1c05f61bb8 | |||
9b600416a1 | |||
d43234b65e | |||
4b3ea270d8 | |||
800ad5fc4a | |||
5ebf6de6eb | |||
d192d1caf8 | |||
4f479326f3 | |||
1e58dd553c | |||
5ba79976a9 | |||
04e6c1b547 | |||
33484258d1 | |||
b23bb2039e | |||
a21f334290 | |||
849a524b09 | |||
e453701b50 | |||
34f3e7710c | |||
4960782ba0 | |||
819d60ec8b | |||
b15d8e4c2e | |||
b2083d0973 | |||
0f73912133 | |||
eaed54e279 | |||
21b3bf1f78 | |||
8e05ee28ed | |||
7c592a5b9b | |||
37a352dc35 | |||
10573d882f | |||
c1c06868e9 | |||
88c4f70195 | |||
7e4627baf8 | |||
dcd0695567 | |||
7e892b507f | |||
4ae0168aea | |||
87fb66f283 | |||
1b1a4aa30b | |||
540062b97c | |||
b02f2acae5 | |||
b1088a6d11 | |||
6b8bfd2701 | |||
1fa5028b90 | |||
11a85449e7 | |||
840a1129ba | |||
6fdf35846e | |||
d30e626e79 | |||
255c0ff95e | |||
65fd79f973 | |||
d203745cfc | |||
dae2fd361f | |||
e8c90c9186 | |||
43a4bde600 | |||
d50d7b4a95 | |||
aa3cc48756 | |||
5a915e4732 | |||
5505715cc4 | |||
2364361f6b | |||
4b2e001f8e | |||
8753357c04 | |||
b05b2b3aaa | |||
9c86aa9f76 | |||
49583597ed | |||
01b6d342a4 | |||
7b0a3487c8 | |||
c77d3bf0ee | |||
a49ea94d14 | |||
579ac6c4e4 | |||
7cb151dd46 | |||
1eb0c483fe | |||
41f8d00620 | |||
28cf8434d0 | |||
66b60a0822 | |||
a36fe2a3f5 | |||
980304ce36 | |||
e52e4328b3 | |||
10c93807fb | |||
e8f340c60a | |||
baa29777a4 | |||
6ee12d49ee | |||
bdb60b57ad | |||
f89e840d27 | |||
e6a81edb3c | |||
3ad7a18792 | |||
ffe6821813 | |||
890cd03632 | |||
060a7df448 | |||
da156e8ce7 | |||
5dade6a93c | |||
8213f684bc | |||
1d59ea91c5 | |||
5d11bba67e | |||
1a07bfc710 | |||
2c40ce10ab | |||
827ff99965 | |||
5b7b39c448 | |||
eda1d3d4a7 | |||
0eb5845934 | |||
89605bd345 | |||
3af9de175c | |||
4be08cc7df | |||
801b1f2371 | |||
9dec3c359f | |||
1b2b3a75bc | |||
3f508963e7 | |||
52f736f7fb | |||
e62558dde4 | |||
7ede94fec8 | |||
666ea908a5 | |||
f00c57cd6f | |||
c3946eef7c | |||
ba0b9801e3 | |||
c22a863a57 | |||
9d3888d294 | |||
15295f299f | |||
56b7c1916f | |||
7d8485612e | |||
1395d773c3 | |||
6ab9164dca | |||
e80a29372a | |||
b8b944b72b | |||
f7472c82e3 | |||
3f442d9775 | |||
ab8cc6d4ba | |||
172a4d47ab | |||
6cf3771be5 | |||
bf6669a9cb | |||
6c3cc3c422 | |||
430d89539e | |||
bf1941beee | |||
43d7238b44 | |||
05edd4155e | |||
9cc488db4c | |||
01f0988a48 | |||
0d338a003e | |||
2e85ebe789 | |||
34919c99d0 | |||
39f8eb98ee | |||
4eddd377e1 | |||
d596ea45c5 | |||
eda46ac74d | |||
d200a869bc | |||
b8756a3bd8 | |||
48f74d39e8 | |||
1b9c0482d1 | |||
99616ec0b4 | |||
53be8e5f58 | |||
08545cbefc | |||
2d093ef1cd | |||
d392d968f9 | |||
46080de113 | |||
1c18a54177 | |||
a65d2df25c | |||
7e7f358ba2 | |||
4c53bda67f | |||
7bd5212484 | |||
a775f58753 | |||
1609a7c923 | |||
5e8634e9eb | |||
8ab8f00005 | |||
28cfa91b40 | |||
a310cf8842 | |||
d8cd01027e | |||
1a7972246c | |||
8da8e31d63 | |||
8d983066f5 | |||
21ff418b0f | |||
b6f6758454 | |||
cd3d114d6c | |||
6ec0e3ac4b | |||
7f9fcb09e2 | |||
7328d7ada5 | |||
50a537b986 | |||
3df876a998 | |||
da0636a63c | |||
a38561fbcd | |||
b83b8c8675 | |||
d69f8793d5 | |||
762450bb53 | |||
d66394f7c4 | |||
c51a645ee2 | |||
e7e629e6e6 | |||
89ba883ea8 | |||
534dcf3110 | |||
75ba9eaf24 | |||
a0358ada40 | |||
fb01529f9f | |||
f4553e3d01 | |||
f4ad451956 | |||
15505cc5b8 | |||
27385104d8 | |||
5cf6cd2e1d | |||
4804da1273 | |||
ea239c587e | |||
6fa48890bc | |||
bcec97373f | |||
0b61713398 | |||
a311591310 | |||
f059d75d48 | |||
0803dd411e | |||
b8c20c07ce | |||
c58f2b09d2 | |||
932b8f813d | |||
8b71f5331a | |||
5822785191 | |||
6d6438b179 | |||
a316d610eb | |||
cc6a51e73d | |||
efb249f7b9 | |||
06e0555555 | |||
b5d5838d96 | |||
2b0b3910ee | |||
7160e94042 | |||
0ea48e9894 | |||
59d91b2abf | |||
d746bf91e1 | |||
ce67076f26 | |||
89c7883202 | |||
6b94083a07 | |||
439db15c20 | |||
3b4bdb85f9 | |||
cedc8bf543 | |||
5f5218e1ab | |||
435c6ae5b0 | |||
dbd2911fbc | |||
6487380021 | |||
8d685a77c2 | |||
e20ab8d80c | |||
3d076c05e1 | |||
ad468e676e | |||
5da6e82b5e | |||
631be6b534 | |||
faa6463bda | |||
a8eca0edb8 | |||
219a14550c | |||
0a119cada6 | |||
3c0281db0c | |||
9d86cef2da | |||
31b68201a5 | |||
7a014e4024 | |||
280433ebc1 | |||
e1a964fb00 | |||
32a96dd72c | |||
86cc982982 | |||
fb55cdfeff | |||
1ffa02fc1f | |||
796f93f7f4 | |||
0d5a488b86 | |||
68253e1c28 | |||
4bfa11aec5 | |||
0e8b95f1d0 | |||
ea0eb4fa70 | |||
e11b12df7c | |||
beee75dabe | |||
72f191b79b | |||
927cca106d | |||
01e1cfce54 | |||
4a92966ed5 | |||
a3f6f710a9 | |||
89acc11e2c | |||
835273923e | |||
2fcaaa53ce | |||
c4a06e515b | |||
4cb6b64e2d | |||
9890803cc6 | |||
46cb1595c0 | |||
9dc110be07 | |||
7ed20090d1 | |||
14f7707b51 | |||
3c610e12e5 | |||
29326ebfdd | |||
b273c4e8bf | |||
27ff7b74eb | |||
3f50ef7a92 | |||
9023ce0a94 | |||
f10d6990e4 | |||
ab2bbd6aca | |||
fc2d7fc4d0 | |||
2376f53fd4 | |||
d84336dc12 | |||
59397c84be | |||
1523382b18 | |||
71a3354226 | |||
01b16fe6c0 | |||
ec2a287842 | |||
60a5ccc95d | |||
03f928ee0c | |||
3428e5c0c7 | |||
477b00cce8 | |||
68839c0e79 | |||
53aae7ddba | |||
08c54145d3 | |||
8549046994 | |||
62436aa8d3 | |||
6e119b7256 | |||
5740a70dd2 | |||
87446d4d8e | |||
a216f85abf | |||
2f0e386037 | |||
8ff1a78dfc | |||
ce1148b474 | |||
d0b0006d27 | |||
27dc5b40ce | |||
f3b5340875 | |||
dfa3812ccc | |||
38b94abc59 | |||
3b165b070d | |||
ff5a36561a | |||
0dce894b13 | |||
85e46f39f3 | |||
a215610167 | |||
132477f51f | |||
0ff16c589f | |||
bc5b44ddad | |||
1d726d6a1e | |||
fb56af8d84 | |||
959568f124 | |||
9424eabd9a | |||
bb8e7c3e8f | |||
82b4726bb7 | |||
a7348023a0 | |||
040772ab61 | |||
9ff39bae6f | |||
c215acb7f9 | |||
15d8774e6b | |||
01ae47c90c | |||
1b1a99b320 | |||
d7bc916dfa | |||
604902ed7c | |||
18d6823dd8 | |||
c4e82ce7b9 | |||
8a309b39f2 | |||
f260fa4f8d | |||
c95bd4f47a | |||
12c74d681b | |||
736f58a631 | |||
57bc1c7f42 | |||
d9769aeb6a | |||
0763d2645d | |||
2447664df6 | |||
9f0befa033 | |||
f6700f1ff1 | |||
1199e89dca | |||
15722ec5d2 | |||
584a87163f | |||
5afca49a7b | |||
88321a4d61 | |||
92df13eeb1 | |||
ceeaf97793 | |||
47102b0e76 | |||
217dfdff10 | |||
1c22ab66ee | |||
59a4f06d17 | |||
275425ae9f | |||
5744d33ec7 | |||
8b9ef8b109 | |||
c529d4d4ae | |||
e404d4f213 | |||
7cc57f4164 | |||
6fbe7a1739 | |||
dd23ac7093 | |||
506c19dab4 | |||
f754860e32 | |||
3cda842052 | |||
22b0644579 | |||
d7dcf5ebcb | |||
6fe1b26a10 | |||
b2f3363c8c | |||
1d3a522afb | |||
2ddec80b78 | |||
4cc5772361 | |||
efec7d4e31 | |||
75bcc8402d | |||
55badab13d | |||
f66727297a | |||
e5dbd137bf | |||
5b520d7c47 | |||
9577965782 | |||
a87cc4c3a3 | |||
94b5184c99 | |||
1eba80c04b | |||
8f9d76bb8c | |||
2d87561193 | |||
7daac635c4 | |||
d2591d0019 | |||
c459ca8338 | |||
e36f0b45a6 | |||
513cf47447 | |||
56c2b730f8 | |||
b0b2b8d4ed | |||
6e6c5fd428 | |||
128b3b251e | |||
9dd8bd9095 | |||
28438d245a | |||
2bd095f2f7 | |||
9a96dd05b8 | |||
5b7c882497 | |||
de0ed0040a | |||
7dc5a90b8a | |||
a240aeb902 | |||
dd8ae99cc7 | |||
1b288add4c | |||
8dd288f2b9 | |||
4686b61d78 | |||
1099955249 | |||
1e9d3e717c | |||
0327e98b5f | |||
73193bad86 | |||
e18cc55049 | |||
113c8fa055 | |||
db8b8be2dd | |||
1ea6fac9f4 | |||
ddad791418 | |||
0b1dd5ecfc | |||
e7d6167409 | |||
01f5138e76 | |||
3c8e798f73 | |||
96782e8a8e | |||
0c17f32eee | |||
34656810ad | |||
d31badf9ac | |||
4bd7f7af1a | |||
ef135d46d8 | |||
a8325694f5 | |||
240285ec0f | |||
2bb3385aaf | |||
508fd2cde1 | |||
34074ff0af | |||
4a928136ed | |||
1e489ee1e2 | |||
5a70dd1756 | |||
e7843d68d0 | |||
88a7c18746 | |||
80b4cb590e | |||
9d9522016b | |||
b933931d6c | |||
9889bc990a | |||
f88f9034b5 | |||
4d52ab7d37 | |||
75f6babb57 | |||
89c4f68fa3 | |||
389fa306aa | |||
b84cead5ec | |||
29d9f8c834 | |||
3129027b9d | |||
e88994a0b7 | |||
aeb1b70581 | |||
bd75778fcb | |||
9daa8d20a9 | |||
2ce40e4973 | |||
2a169e0fdd | |||
3f373f623a | |||
7db21a3375 | |||
8aba0597e4 | |||
755496d0f9 | |||
9fb0c4937b | |||
09f1a87dc9 | |||
3d3bdf9aa3 | |||
b861f724c4 | |||
29c9bcf667 | |||
145f0ed6f5 | |||
4eaf574cf8 | |||
1d53b66858 | |||
fec4e12410 | |||
ca96014c47 | |||
be2f5c690b | |||
a7b0d42964 | |||
9379c730b9 | |||
f6ace04caf | |||
54e2ef5e4a | |||
5903496e5b | |||
b91123d298 | |||
99fbbc32cb | |||
3bab13b228 | |||
773d92f9f5 | |||
447e53bcca | |||
027de84349 | |||
0dd9275b8f | |||
ec220201e7 | |||
4667c52f27 | |||
b673e679c4 | |||
595ba7bb1b | |||
1cd822c548 | |||
9ddda41d4c | |||
5d2fe6d069 | |||
2c82e1b684 | |||
9403a0ab15 | |||
60d48f838b | |||
9ec3100ee3 | |||
0b5cc2c83e | |||
918a30a60b | |||
bdcb133fc2 | |||
a3395062f1 | |||
b23fcd3904 | |||
9c141a2fbf | |||
dd4dd364da | |||
67f5c0767a | |||
36291d738e | |||
4cd6e09f3d | |||
f08bd7ef88 | |||
7cca791580 | |||
7ce39a050c | |||
ef120ce923 | |||
c9ab2527e8 | |||
35f19e17bf | |||
c833e9ed69 | |||
b5c376d315 | |||
56b8ebf129 | |||
1b20aa407b | |||
109eac2b9a | |||
f657a56947 | |||
ff64c6606a | |||
f8170e3c46 | |||
37dad014ec | |||
a7749b7933 | |||
a6c8fe0c8a | |||
4c216bca3a | |||
d815e85440 | |||
ae9d97dfca | |||
9efe39ff81 | |||
ba719ac90c | |||
0207e41160 | |||
501934a29b | |||
80eb584a7a | |||
18c9a37f48 | |||
1b00160388 | |||
73ea24ca11 | |||
da5d6df694 | |||
fff69b3a23 | |||
025f215bd3 | |||
c987bb0c7d | |||
fa3f6eefc9 | |||
3c705a3f3d | |||
a8ed2ac6e0 | |||
531014a35c | |||
cb362e6fbc | |||
10bc09ba82 | |||
7f274ebe91 | |||
5ad45cef42 | |||
ad1d78b599 | |||
096ce958a4 | |||
dc9a9c4d78 | |||
0b70e52032 | |||
6e404f1831 | |||
7aeb0e8f92 | |||
323bb1269b | |||
9379098a3f | |||
b59b5ada7e | |||
4778d05238 | |||
c01f7072fb | |||
4120ded424 | |||
0d1b65154a | |||
8ff3cdde7e | |||
57c58f255e | |||
da4efdfa90 | |||
8825a27c3e | |||
a540274c72 | |||
4f10eefe1e | |||
8bfb5b58fa | |||
8aaec57dac | |||
6ce9a9ce12 | |||
55efe30258 | |||
0877e6fcb1 | |||
f1b4592d17 | |||
d2590ff190 | |||
ac0cadfd2d | |||
a4fb36f972 | |||
6e593ef4aa | |||
ea3d0246bf | |||
beaa22fabb | |||
160728965b | |||
2ac5a68896 | |||
82b469bc59 | |||
fb779b42a8 | |||
cc525cdf70 | |||
42768a3159 | |||
2ead28defe | |||
0ec95b4aae | |||
8d9bb82492 | |||
a4239c3462 | |||
23174629b6 | |||
4e6766e6ed | |||
febb0dfd16 | |||
dfe87cf5c9 | |||
70fd572a46 | |||
15d5d169a3 | |||
888d3ca29a | |||
757ee2c703 | |||
e40bc9bffa | |||
d9ab90facf | |||
267d645ec1 | |||
a9cf1769a0 | |||
6abad01d88 | |||
746741f910 | |||
43d1d0a4e9 | |||
9ffe2238a7 | |||
4648cf1b34 | |||
a426c03720 | |||
01397b213d | |||
1d3df7eaa5 | |||
9cb40878f0 | |||
b8eaef3d02 | |||
f8301ea9ce | |||
4f5b2f5060 | |||
3adc3d9337 | |||
81aba1b8a2 | |||
4246a560fc | |||
6752ef8a69 | |||
7d28106c2e | |||
0db75a5186 | |||
a430fd93c8 | |||
f40821ba66 | |||
d2467b4b65 | |||
ca8b8f6a38 | |||
92a1771db0 | |||
f92f8157fd | |||
4b9859633e | |||
b2a2883b0a | |||
3d7181caec | |||
74249ddb38 | |||
093dcb14d0 | |||
80babba79c | |||
2427bfbcef | |||
046adf4d15 | |||
b096a24857 | |||
b227a77c45 | |||
03c05f564f | |||
2562fe9990 | |||
e4efbec6d3 | |||
87feebf827 | |||
204c57e065 | |||
ad316840c5 | |||
d5ebc6a16d | |||
6aebe5ddc3 | |||
ba98d9cece | |||
27f694f995 | |||
e2c1cdee24 | |||
a983541a75 | |||
7e929b83df | |||
a8583c03a1 | |||
2b15e4b3d5 | |||
65e4f212bc | |||
efe335fe29 | |||
8c43b2886a | |||
ebea076675 | |||
c455fe4f45 | |||
3f21d0d197 | |||
dbf7f1f919 | |||
98c722881b | |||
6fc1452a6d | |||
4644ec39ab | |||
5bdfce0932 | |||
7cefac11e5 | |||
20972ae65f | |||
3f87529914 | |||
67b410c127 | |||
2348fb3443 | |||
ebab0d8a1d | |||
01ee083ac7 | |||
6f338f7376 | |||
115d2ce8ba | |||
8bc2cbd6b5 | |||
acde0bab70 | |||
299f8b5927 | |||
f66fcbde91 | |||
0002f2e531 | |||
c9f736985c | |||
8b470e234c | |||
63dd366c31 | |||
9441ce85b4 | |||
8ac0f92d7d | |||
a6c69264b3 | |||
7327666652 | |||
1bd4efe6e5 | |||
c130f620c3 | |||
3f66e87839 | |||
6fac36a355 | |||
d9e5f9d469 | |||
2484fe12d8 | |||
da9db6b80d | |||
f8cebe8aa9 | |||
51ea339a19 | |||
4cfcba0811 | |||
3dc291d51a | |||
018b0ee7c1 | |||
50d33513ea | |||
0aebe626b1 | |||
ac2cf29fb1 | |||
be0f64ce93 | |||
77a9f4b459 | |||
78c1f50b53 | |||
8e52090356 | |||
8a907825e0 | |||
6bc25c6b66 | |||
957cba4605 | |||
861e410380 | |||
7e7567ecec | |||
134564ca3c | |||
4e2c2d5774 | |||
2e4b6be986 | |||
2f49b935df | |||
0687fdc2a8 | |||
f7034258cd | |||
a43d8ff6ee | |||
e274b5495b | |||
96d43f44ac | |||
c69669e770 | |||
b88cfe5685 | |||
a1fa410a33 | |||
e73bc4d9cd | |||
bb24497d63 | |||
980baacf6c | |||
e383b504a2 | |||
80c151c8a4 | |||
cef8bf5c8f | |||
414eba2baa | |||
29d17faf09 | |||
c9c16c9a25 | |||
7e79ddd335 | |||
e456ca724b | |||
2a73d29e71 | |||
2a307d5361 | |||
797e82dce3 | |||
c4666a4208 | |||
75cda423fe | |||
ee9473d833 | |||
e254bdd33d | |||
6b197df98c | |||
53ad4d0862 | |||
3b2b70ea0a | |||
0ca8a2f505 | |||
983d1a3405 | |||
f34757b21f | |||
302075dd1c | |||
c6654b8e17 | |||
96296b8c11 | |||
bfba540f71 | |||
f9ff027a64 | |||
b4127c7df9 | |||
a4113052f2 | |||
304c5dd2c2 | |||
d82ecdc46c | |||
a273ab7944 | |||
2d1fa498c1 | |||
4294d8cea9 | |||
3ea0419e57 | |||
b4739d2be4 | |||
9f5821fa62 | |||
d079a32650 | |||
2b0c3507b4 | |||
edd3178656 | |||
549ad086d5 | |||
3a668fd0a1 | |||
d44c09f12b | |||
fdcc8ff36e | |||
1eb7832602 | |||
7386cdad1e | |||
c3235e87d0 | |||
eddee893bc | |||
9a490b9c28 | |||
25142fc3b5 | |||
d9078774c3 | |||
710c4d218a | |||
03bf57d5d4 | |||
283ff82a53 | |||
f1c28ef10f | |||
a9a4608562 | |||
c1ae289fe8 | |||
0d33dfdcff | |||
0c4431e68e | |||
8312dd237d | |||
b3b0e5e4ff | |||
338f1c1768 | |||
81dca53767 | |||
7e39cbbfed | |||
217a8671a5 | |||
3bf11dd4b4 | |||
9c390351c4 | |||
b488594eed | |||
605af9a77b | |||
7819907118 | |||
0cdf5744c3 | |||
6af207c3e4 | |||
3204009d98 | |||
7e2df42638 | |||
d84e24e744 | |||
9b9a0fd149 | |||
6855a40588 | |||
67913348f5 | |||
b46863b073 | |||
6e7506894c | |||
fcee935e0d | |||
2f22e8c325 | |||
7a914c481b | |||
ab27a1609e | |||
f3b1ae8b1c | |||
d68a12a413 | |||
461e3027c8 | |||
92f50caeaa | |||
da9a9c5673 | |||
fa4cca3cb3 | |||
79acf5bec5 | |||
3702447957 | |||
48a61bc754 | |||
1c667845e5 | |||
bcc17efdfb | |||
8e49fa9f49 | |||
a296d16b5e | |||
5a1f57062f | |||
042dd08090 | |||
d501b15ff9 | |||
8e91ceb6e1 | |||
0726597615 | |||
2e76ecd46e | |||
4b228f5f7f | |||
1349ab5b30 | |||
b3cc2251ba | |||
52b7c65080 | |||
8b735ff536 | |||
8bac723bc4 | |||
24c6202b25 | |||
f3beca2007 | |||
0dbb5d6f19 | |||
06dca7eb5e | |||
c5b07d02fd | |||
658fe26bf4 | |||
15d893e556 | |||
84a7a0ff6a | |||
0d79fa041c | |||
6bdd779fd0 | |||
2593c0c82a | |||
b695c6d521 | |||
3f6d486142 | |||
b04215bfff | |||
9f9307360c | |||
b293d7d03e | |||
f2698880d9 | |||
252abbaeda | |||
669b24f661 | |||
8c9d7d6454 | |||
c4875a5946 | |||
0981a9c38b | |||
8c0b6e6605 | |||
d51c5d24b9 | |||
fa326c86cb | |||
fbadb070c6 | |||
a6c8f62099 | |||
01d79cc53c | |||
706bf6687a | |||
c7582139c7 | |||
385ff00de5 | |||
0dc9558ea2 | |||
6401bef067 | |||
92b40d02a4 | |||
1e63a820a6 | |||
d572897521 | |||
80a0bfbc8d | |||
57956cafad | |||
1fdcf48303 | |||
235840cf05 | |||
7b7c4e6ef5 | |||
faab4f5ef6 | |||
0e69b703cd | |||
b9cd7122ff | |||
9a28247f6b | |||
ca59991a37 | |||
3a2097e040 |
1
.gitignore
vendored
1
.gitignore
vendored
|
@ -1,3 +1,2 @@
|
|||
target/
|
||||
**/*.rs.bk
|
||||
Cargo.lock
|
||||
|
|
597
.gitlab-ci.yml
597
.gitlab-ci.yml
|
@ -1,8 +1,8 @@
|
|||
# We use https://gitlab.freedesktop.org/freedesktop/ci-templates
|
||||
# to build the images used by the ci.
|
||||
#
|
||||
# Here is how to properly update thoses images:
|
||||
# - new Rust stable version: update GST_RS_IMG_TAG
|
||||
# Here is how to properly update those images:
|
||||
# - new Rust stable version: update GST_RS_IMG_TAG and update Rust version
|
||||
# - add dependencies: update FDO_DISTRIBUTION_PACKAGES and update GST_RS_IMG_TAG
|
||||
# - update GStreamer version: update the tag in ci/install-gst.sh and update GST_RS_IMG_TAG
|
||||
#
|
||||
|
@ -11,13 +11,13 @@
|
|||
# - setting it to the current date and the version suffix to 0
|
||||
# - incrementing the version suffix
|
||||
#
|
||||
# After each update commit your changes and push to your personnal repo.
|
||||
# After each update commit your changes and push to your personal repo.
|
||||
# After review and ci approval merge the branch as usual.
|
||||
#
|
||||
# Updating the nightly image should be done by simply running a scheduled ci
|
||||
# pipeline on the upstream repo with the $UPDATE_NIGHTLY variable defined.
|
||||
|
||||
.templates_sha: &templates_sha 322bf2b8f29b6491caeb13861201e96969ddc169
|
||||
.templates_sha: &templates_sha b2e24205598dc1d80b5f2c88cf7618051e30e9fd
|
||||
|
||||
include:
|
||||
- project: 'freedesktop/ci-templates'
|
||||
|
@ -26,10 +26,40 @@ include:
|
|||
|
||||
- local: "ci/images_template.yml"
|
||||
|
||||
workflow:
|
||||
rules:
|
||||
- if: $CI_PIPELINE_SOURCE == "schedule"
|
||||
- if: $CI_MERGE_REQUEST_IID
|
||||
# don't create a pipeline if its a commit pipeline, on a branch and that branch has
|
||||
# open merge requests (bc we will get a MR build instead)
|
||||
- if: $CI_OPEN_MERGE_REQUESTS
|
||||
when: never
|
||||
- if: $CI_COMMIT_TAG
|
||||
- if: $CI_COMMIT_BRANCH
|
||||
|
||||
default:
|
||||
interruptible: true
|
||||
|
||||
variables:
|
||||
FDO_UPSTREAM_REPO: gstreamer/gstreamer-rs
|
||||
|
||||
# DIY CI-templates like setup for windows
|
||||
WINDOWS_RUST_MINIMUM_IMAGE: "$CI_REGISTRY_IMAGE/windows:$GST_RS_IMG_TAG-main-$GST_RS_MSRV"
|
||||
WINDOWS_RUST_MINIMUM_UPSTREAM_IMAGE: "$CI_REGISTRY/$FDO_UPSTREAM_REPO/windows:$GST_RS_IMG_TAG-main-$GST_RS_MSRV"
|
||||
WINDOWS_RUST_STABLE_IMAGE: "$CI_REGISTRY_IMAGE/windows:$GST_RS_IMG_TAG-main-$GST_RS_STABLE"
|
||||
WINDOWS_RUST_STABLE_UPSTREAM_IMAGE: "$CI_REGISTRY/$FDO_UPSTREAM_REPO/windows:$GST_RS_IMG_TAG-main-$GST_RS_STABLE"
|
||||
|
||||
RUST_DOCS_FLAGS: "--cfg docsrs --extern-html-root-url=muldiv=https://docs.rs/muldiv/1.0.0/muldiv/ -Z unstable-options --generate-link-to-definition"
|
||||
NAMESPACE: gstreamer
|
||||
# format is <branch>=<name>
|
||||
# the name is used in the URL
|
||||
# latest release must be at the top
|
||||
# (only relevant on main branch)
|
||||
RELEASES:
|
||||
0.22=0.22
|
||||
|
||||
stages:
|
||||
- "trigger"
|
||||
- "container-base"
|
||||
- "container-final"
|
||||
- "lint"
|
||||
|
@ -37,318 +67,517 @@ stages:
|
|||
- "extras"
|
||||
- "deploy"
|
||||
|
||||
.debian:10:
|
||||
# This is an empty job that is used to trigger the pipeline.
|
||||
trigger:
|
||||
image: alpine:latest
|
||||
stage: 'trigger'
|
||||
variables:
|
||||
FDO_DISTRIBUTION_VERSION: 10
|
||||
FDO_DISTRIBUTION_TAG: '$RUST_VERSION-$GST_RS_IMG_TAG'
|
||||
# Only stuff inside the repo directory can be cached
|
||||
# Override the CARGO_HOME variable to force its location
|
||||
CARGO_HOME: "${CI_PROJECT_DIR}/.cargo_home"
|
||||
GIT_STRATEGY: none
|
||||
tags: [ 'placeholder-job' ]
|
||||
script:
|
||||
- echo "Trigger job done, now running the pipeline."
|
||||
rules:
|
||||
- if: $CI_PIPELINE_SOURCE == "schedule"
|
||||
# If the MR is assigned to the Merge bot, trigger the pipeline automatically
|
||||
- if: '$CI_MERGE_REQUEST_ASSIGNEES == "gstreamer-merge-bot"'
|
||||
# Require explicit action to trigger tests post merge
|
||||
- if: '$CI_PROJECT_NAMESPACE == "gstreamer" && $CI_COMMIT_BRANCH == "main"'
|
||||
when: 'manual'
|
||||
# When the assignee isn't the merge bot, require an explicit action to trigger the pipeline
|
||||
# to avoid wasting CI resources
|
||||
- if: '$CI_MERGE_REQUEST_ASSIGNEES != "gstreamer-merge-bot"'
|
||||
when: 'manual'
|
||||
allow_failure: false
|
||||
|
||||
.debian:12:
|
||||
needs: []
|
||||
variables:
|
||||
FDO_DISTRIBUTION_VERSION: 'bookworm-slim'
|
||||
before_script:
|
||||
- source ./ci/env.sh
|
||||
- mkdir .cargo && echo -e "[net]\ngit-fetch-with-cli = true" > .cargo/config
|
||||
|
||||
.debian:10-base:
|
||||
extends: .debian:10
|
||||
.debian:12-base:
|
||||
extends: .debian:12
|
||||
variables:
|
||||
FDO_DISTRIBUTION_TAG: 'base-$GST_RS_IMG_TAG'
|
||||
|
||||
.debian:10-stable:
|
||||
extends: .debian:10
|
||||
.debian:12-stable:
|
||||
extends: .debian:12
|
||||
variables:
|
||||
RUST_VERSION: "stable"
|
||||
RUST_IMAGE_FULL: "1"
|
||||
FDO_DISTRIBUTION_TAG: '$GST_RS_STABLE-$GST_RS_IMG_TAG'
|
||||
FDO_DISTRIBUTION_EXEC: 'bash ci/install-rust.sh $GST_RS_STABLE $RUST_IMAGE_FULL'
|
||||
|
||||
.debian:10-1-40:
|
||||
extends: .debian:10
|
||||
.debian:12-msrv:
|
||||
extends: .debian:12
|
||||
variables:
|
||||
RUST_VERSION: "1.40.0"
|
||||
FDO_DISTRIBUTION_TAG: '$GST_RS_MSRV-$GST_RS_IMG_TAG'
|
||||
FDO_DISTRIBUTION_EXEC: 'bash ci/install-rust.sh $GST_RS_MSRV $RUST_IMAGE_FULL'
|
||||
|
||||
.debian:10-nightly:
|
||||
extends: .debian:10
|
||||
.debian:12-nightly:
|
||||
extends: .debian:12
|
||||
variables:
|
||||
RUST_VERSION: "nightly"
|
||||
FDO_DISTRIBUTION_TAG: 'nightly-$GST_RS_IMG_TAG'
|
||||
FDO_DISTRIBUTION_EXEC: 'bash ci/install-rust.sh nightly $RUST_IMAGE_FULL'
|
||||
|
||||
.build-base-image:
|
||||
extends:
|
||||
- .fdo.container-build@debian
|
||||
stage: container-base
|
||||
variables:
|
||||
FDO_DISTRIBUTION_PACKAGES: "build-essential curl python3-setuptools liborc-0.4-dev libglib2.0-dev libxml2-dev libgtk-3-dev libegl1-mesa libgles2-mesa libgl1-mesa-dri libgl1-mesa-glx libwayland-egl1-mesa xz-utils libssl-dev git wget ca-certificates ninja-build python3-pip flex bison libglib2.0-dev"
|
||||
FDO_DISTRIBUTION_EXEC: 'bash ci/install-gst.sh && pip3 install git+http://gitlab.freedesktop.org/freedesktop/ci-templates'
|
||||
FDO_DISTRIBUTION_PACKAGES: >-
|
||||
build-essential curl python3-setuptools libglib2.0-dev libxml2-dev
|
||||
libdrm-dev libegl1-mesa-dev libgl1-mesa-dev libgbm-dev libgles2-mesa-dev
|
||||
libgl1-mesa-dri libegl-dev libgl1-mesa-glx libwayland-egl1-mesa xz-utils
|
||||
libssl-dev git wget ca-certificates ninja-build python3-pip flex bison
|
||||
libglib2.0-dev libx11-dev libx11-xcb-dev libsoup2.4-dev libvorbis-dev
|
||||
libogg-dev libtheora-dev libmatroska-dev libvpx-dev libopus-dev
|
||||
libgraphene-1.0-dev libjpeg-dev libwayland-dev wayland-protocols
|
||||
python3-gi libavcodec-dev libavformat-dev libavutil-dev libavfilter-dev
|
||||
libswscale-dev yasm libx264-dev libfontconfig-dev libfreetype-dev
|
||||
libxkbcommon-dev libxi-dev libxcb-render0-dev libxcb-shm0-dev
|
||||
libxcb1-dev libxext-dev libxrender-dev libxrandr-dev libxcursor-dev
|
||||
libxdamage-dev libxfixes-dev libxinerama-dev libgudev-1.0-dev
|
||||
libpango1.0-dev libcairo2-dev libjson-glib-dev libgdk-pixbuf-2.0-dev
|
||||
libtiff-dev libpng-dev libjpeg-dev libepoxy-dev libsass-dev sassc
|
||||
libcsound64-dev llvm clang nasm libsodium-dev libwebp-dev
|
||||
libflac-dev
|
||||
FDO_DISTRIBUTION_EXEC: >-
|
||||
bash ci/install-gst.sh &&
|
||||
bash ci/install-dav1d.sh &&
|
||||
pip3 install --break-system-packages git+http://gitlab.freedesktop.org/freedesktop/ci-templates &&
|
||||
pip3 install --break-system-packages tomli
|
||||
|
||||
.build-final-image:
|
||||
extends:
|
||||
- .fdo.container-build@debian
|
||||
stage: container-final
|
||||
variables:
|
||||
FDO_BASE_IMAGE: '$CI_REGISTRY_IMAGE/debian/10:base-$GST_RS_IMG_TAG'
|
||||
FDO_DISTRIBUTION_EXEC: 'bash ci/install-rust.sh $RUST_VERSION'
|
||||
FDO_BASE_IMAGE: '$CI_REGISTRY_IMAGE/debian/bookworm-slim:base-$GST_RS_IMG_TAG'
|
||||
|
||||
build-base:
|
||||
extends:
|
||||
- .build-base-image
|
||||
- .debian:10-base
|
||||
- .debian:12-base
|
||||
|
||||
build-stable:
|
||||
needs: ["build-base"]
|
||||
extends:
|
||||
- .build-final-image
|
||||
- .debian:10-stable
|
||||
- .debian:12-stable
|
||||
|
||||
build-1-40:
|
||||
build-msrv:
|
||||
needs: ["build-base"]
|
||||
extends:
|
||||
- .build-final-image
|
||||
- .debian:10-1-40
|
||||
- .debian:12-msrv
|
||||
|
||||
build-nightly:
|
||||
needs: ["build-base"]
|
||||
extends:
|
||||
- .build-final-image
|
||||
- .debian:10-nightly
|
||||
- .debian:12-nightly
|
||||
|
||||
update-nightly:
|
||||
extends: build-nightly
|
||||
only:
|
||||
variables:
|
||||
- $UPDATE_NIGHTLY == "1"
|
||||
rules:
|
||||
- if: $UPDATE_NIGHTLY == "1"
|
||||
variables:
|
||||
FDO_FORCE_REBUILD: 1
|
||||
|
||||
.dist-debian-container:
|
||||
extends:
|
||||
- .fdo.distribution-image@debian
|
||||
cache:
|
||||
key: "gst"
|
||||
paths:
|
||||
- "${CARGO_HOME}"
|
||||
after_script:
|
||||
- rm -rf target
|
||||
|
||||
.img-stable:
|
||||
extends:
|
||||
- .debian:12-stable
|
||||
- .dist-debian-container
|
||||
- .debian:10-stable
|
||||
|
||||
.img-1-40:
|
||||
.img-msrv:
|
||||
extends:
|
||||
- .debian:12-msrv
|
||||
- .dist-debian-container
|
||||
- .debian:10-1-40
|
||||
|
||||
.img-nightly:
|
||||
extends:
|
||||
- .debian:12-nightly
|
||||
- .dist-debian-container
|
||||
- .debian:10-nightly
|
||||
|
||||
# GST_PLUGINS_RS_TOKEN is a variable of type 'Var' defined in gstreamer-rs CI
|
||||
# settings and containing a gst-plugins-rs pipeline trigger token
|
||||
.plugins-update:
|
||||
stage: deploy
|
||||
script:
|
||||
- |
|
||||
# FDO_DISTRIBUTION_IMAGE still has indirections
|
||||
- echo $FDO_DISTRIBUTION_IMAGE
|
||||
- DISTRO_IMAGE=$(eval echo ${FDO_DISTRIBUTION_IMAGE})
|
||||
- echo $DISTRO_IMAGE
|
||||
# retrieve the infos from the registry
|
||||
- JSON_IMAGE=$(skopeo inspect docker://$DISTRO_IMAGE)
|
||||
- IMAGE_PIPELINE_ID=$(echo $JSON_IMAGE | jq -r '.Labels["fdo.pipeline_id"]')
|
||||
- echo $IMAGE_PIPELINE_ID
|
||||
- echo $CI_PIPELINE_ID
|
||||
- |
|
||||
if [[ x"$IMAGE_PIPELINE_ID" == x"$CI_PIPELINE_ID" ]]; then
|
||||
echo "Image has been updated, notify gst-plugins-rs"
|
||||
curl -X POST -F "token=$GST_PLUGINS_RS_TOKEN" -F "ref=master" -F "variables[UPDATE_IMG]=$UPDATE_IMG" https://gitlab.freedesktop.org/api/v4/projects/1400/trigger/pipeline
|
||||
else
|
||||
echo "Image has not been updated, ignore"
|
||||
fi
|
||||
rules:
|
||||
- if: '$CI_COMMIT_REF_NAME == "master" && $CI_PROJECT_PATH == "gstreamer/gstreamer-rs"'
|
||||
|
||||
# Those jobs need to use another image as ours doesn't have 'skopeo'
|
||||
# and it's not easily installable in Debian stable for now.
|
||||
plugins-update-stable:
|
||||
extends:
|
||||
- .plugins-update
|
||||
- .img-stable
|
||||
image: registry.freedesktop.org/freedesktop/ci-templates/buildah:2020-03-04
|
||||
variables:
|
||||
UPDATE_IMG: "stable"
|
||||
|
||||
plugins-update-1-40:
|
||||
extends:
|
||||
- .plugins-update
|
||||
- .img-1-40
|
||||
image: registry.freedesktop.org/freedesktop/ci-templates/buildah:2020-03-04
|
||||
variables:
|
||||
UPDATE_IMG: "1-40"
|
||||
|
||||
plugins-update-nightly:
|
||||
extends:
|
||||
- .plugins-update
|
||||
- .img-nightly
|
||||
image: registry.freedesktop.org/freedesktop/ci-templates/buildah:2020-03-04
|
||||
variables:
|
||||
UPDATE_IMG: "nightly"
|
||||
.cargo_test_var: &cargo_test
|
||||
- ./ci/run-cargo-test.sh
|
||||
|
||||
.cargo test:
|
||||
stage: "test"
|
||||
script:
|
||||
- rustc --version
|
||||
# First build and test all the crates with their relevant features
|
||||
# Keep features in sync with below
|
||||
- |
|
||||
for crate in gstreamer*; do
|
||||
if [ -n "$ALL_FEATURES" ]; then
|
||||
if [ $crate = "gstreamer" ]; then
|
||||
FEATURES=ser_de,v1_18
|
||||
elif [ $crate = "gstreamer-gl" ]; then
|
||||
FEATURES=egl,x11,wayland,v1_18
|
||||
else
|
||||
FEATURES=v1_18
|
||||
fi
|
||||
- *cargo_test
|
||||
|
||||
cargo build --color=always --manifest-path $crate/Cargo.toml --features=$FEATURES
|
||||
G_DEBUG=fatal_warnings cargo test --color=always --manifest-path $crate/Cargo.toml --features=$FEATURES
|
||||
else
|
||||
cargo build --color=always --manifest-path $crate/Cargo.toml
|
||||
G_DEBUG=fatal_warnings cargo test --color=always --manifest-path $crate/Cargo.toml
|
||||
fi
|
||||
done
|
||||
|
||||
# If we do a build with all features then also build the
|
||||
# tutorials/examples with all features
|
||||
- |
|
||||
if [ -n "$ALL_FEATURES" ]; then
|
||||
cargo build --color=always --manifest-path examples/Cargo.toml --bins --examples --all-features
|
||||
cargo build --color=always --manifest-path tutorials/Cargo.toml --bins --examples --all-features
|
||||
fi
|
||||
|
||||
test 1.40:
|
||||
test msrv:
|
||||
extends:
|
||||
- '.cargo test'
|
||||
- .img-1-40
|
||||
- .img-msrv
|
||||
needs:
|
||||
- job: 'trigger'
|
||||
artifacts: false
|
||||
- job: 'build-msrv'
|
||||
artifacts: false
|
||||
|
||||
test stable:
|
||||
extends:
|
||||
- '.cargo test'
|
||||
- .img-stable
|
||||
needs:
|
||||
- job: 'trigger'
|
||||
artifacts: false
|
||||
- job: 'build-stable'
|
||||
artifacts: false
|
||||
|
||||
test stable all-features:
|
||||
variables:
|
||||
ALL_FEATURES: 'yes'
|
||||
EXAMPLES_TUTORIALS: 'yes'
|
||||
extends:
|
||||
- '.cargo test'
|
||||
- .img-stable
|
||||
needs:
|
||||
- job: 'trigger'
|
||||
artifacts: false
|
||||
- job: 'build-stable'
|
||||
artifacts: false
|
||||
|
||||
|
||||
test nightly:
|
||||
allow_failure: true
|
||||
extends:
|
||||
- '.cargo test'
|
||||
- .img-nightly
|
||||
needs:
|
||||
- job: 'trigger'
|
||||
artifacts: false
|
||||
- job: 'build-nightly'
|
||||
artifacts: false
|
||||
|
||||
|
||||
test nightly all-features:
|
||||
allow_failure: true
|
||||
variables:
|
||||
ALL_FEATURES: 'yes'
|
||||
EXAMPLES_TUTORIALS: 'yes'
|
||||
extends:
|
||||
- '.cargo test'
|
||||
- .img-nightly
|
||||
needs:
|
||||
- job: 'trigger'
|
||||
artifacts: false
|
||||
- job: 'build-nightly'
|
||||
artifacts: false
|
||||
|
||||
.cargo test sys:
|
||||
stage: "test"
|
||||
script:
|
||||
- ./ci/run-sys-cargo-test.sh
|
||||
|
||||
test stable sys:
|
||||
extends:
|
||||
- '.cargo test sys'
|
||||
- .img-stable
|
||||
needs:
|
||||
- job: 'trigger'
|
||||
artifacts: false
|
||||
- job: 'build-stable'
|
||||
artifacts: false
|
||||
|
||||
test msrv sys:
|
||||
extends:
|
||||
- '.cargo test sys'
|
||||
- .img-msrv
|
||||
needs:
|
||||
- job: 'trigger'
|
||||
artifacts: false
|
||||
- job: 'build-msrv'
|
||||
artifacts: false
|
||||
|
||||
test nightly sys:
|
||||
extends:
|
||||
- '.cargo test sys'
|
||||
- .img-nightly
|
||||
needs:
|
||||
- job: 'trigger'
|
||||
artifacts: false
|
||||
- job: 'build-nightly'
|
||||
artifacts: false
|
||||
|
||||
rustfmt:
|
||||
extends: .img-stable
|
||||
stage: "lint"
|
||||
tags: [ 'placeholder-job' ]
|
||||
script:
|
||||
- cargo fmt --version
|
||||
- cargo fmt -- --color=always --check
|
||||
needs:
|
||||
- job: 'build-stable'
|
||||
artifacts: false
|
||||
|
||||
check commits:
|
||||
extends: .img-stable
|
||||
stage: "lint"
|
||||
tags: [ 'placeholder-job' ]
|
||||
script:
|
||||
- ci-fairy check-commits --textwidth 0 --no-signed-off-by
|
||||
needs:
|
||||
- job: 'build-stable'
|
||||
artifacts: false
|
||||
|
||||
typos:
|
||||
extends: .img-stable
|
||||
stage: "lint"
|
||||
tags: [ 'placeholder-job' ]
|
||||
script:
|
||||
- typos
|
||||
needs:
|
||||
- job: 'build-stable'
|
||||
artifacts: false
|
||||
|
||||
clippy:
|
||||
extends: .img-stable
|
||||
stage: 'extras'
|
||||
variables:
|
||||
CLIPPY_LINTS: -D warnings -W unknown-lints
|
||||
needs:
|
||||
- job: 'trigger'
|
||||
artifacts: false
|
||||
- job: 'build-stable'
|
||||
artifacts: false
|
||||
script:
|
||||
- cargo clippy --version
|
||||
# Keep features in sync with above
|
||||
- |
|
||||
for crate in gstreamer*; do
|
||||
if [ $crate = "gstreamer" ]; then
|
||||
FEATURES=ser_de,v1_18
|
||||
elif [ $crate = "gstreamer-gl" ]; then
|
||||
FEATURES=egl,x11,wayland,v1_18
|
||||
else
|
||||
FEATURES=v1_18
|
||||
fi
|
||||
|
||||
cargo clippy --color=always --manifest-path $crate/Cargo.toml --features=$FEATURES --all-targets -- -A clippy::redundant_pattern_matching -A clippy::single_match -A clippy::cast_lossless -A clippy::missing_safety_doc -D warnings
|
||||
done
|
||||
# And also run over all the examples/tutorials
|
||||
- |
|
||||
cargo clippy --color=always --manifest-path examples/Cargo.toml --all-targets --all-features -- -A clippy::redundant_pattern_matching -A clippy::single_match -A clippy::cast_lossless -A clippy::missing_safety_doc -D warnings
|
||||
cargo clippy --color=always --manifest-path tutorials/Cargo.toml --all-targets --all-features -- -A clippy::redundant_pattern_matching -A clippy::single_match -A clippy::cast_lossless -A clippy::missing_safety_doc -D warnings
|
||||
- ./ci/run-clippy.sh
|
||||
|
||||
deny:
|
||||
extends: .img-stable
|
||||
stage: 'extras'
|
||||
only:
|
||||
- schedules
|
||||
needs:
|
||||
- job: 'build-stable'
|
||||
artifacts: false
|
||||
rules:
|
||||
- if: $CI_PIPELINE_SOURCE == "schedule"
|
||||
script:
|
||||
- cargo deny check
|
||||
- cargo update --color=always
|
||||
- cargo deny --color=always --workspace --all-features check all
|
||||
|
||||
gir-checks:
|
||||
variables:
|
||||
GIT_SUBMODULE_STRATEGY: recursive
|
||||
extends: .img-stable
|
||||
stage: 'extras'
|
||||
tags: [ 'placeholder-job' ]
|
||||
needs:
|
||||
- job: 'build-stable'
|
||||
artifacts: false
|
||||
script:
|
||||
- git clone --depth 1 https://github.com/gtk-rs/checker
|
||||
- cd checker && echo '[workspace]' >> Cargo.toml
|
||||
- cargo build --release
|
||||
- |
|
||||
cargo run --release -- \
|
||||
--gir-file ../Gir_GstApp.toml ../gstreamer-app \
|
||||
--gir-file ../Gir_GstAudio.toml ../gstreamer-audio/ \
|
||||
--gir-file ../Gir_GstBase.toml ../gstreamer-base \
|
||||
--gir-file ../Gir_GstCheck.toml ../gstreamer-check/ \
|
||||
--gir-file ../Gir_GstEditingServices.toml ../gstreamer-editing-services/ \
|
||||
--gir-file ../Gir_GstGL.toml ../gstreamer-gl/ \
|
||||
--gir-file ../Gir_GstNet.toml ../gstreamer-net/ \
|
||||
--gir-file ../Gir_GstPbutils.toml ../gstreamer-pbutils/ \
|
||||
--gir-file ../Gir_GstPlayer.toml ../gstreamer-player/ \
|
||||
--gir-file ../Gir_GstRtp.toml ../gstreamer-rtp/ \
|
||||
--gir-file ../Gir_GstRtspServer.toml ../gstreamer-rtsp-server/ \
|
||||
--gir-file ../Gir_GstRtsp.toml ../gstreamer-rtsp/ \
|
||||
--gir-file ../Gir_GstSdp.toml ../gstreamer-sdp/ \
|
||||
--gir-file ../Gir_Gst.toml ../gstreamer/ \
|
||||
--gir-file ../Gir_GstVideo.toml ../gstreamer-video/ \
|
||||
--gir-file ../Gir_GstWebRTC.toml ../gstreamer-webrtc/
|
||||
- cd ..
|
||||
- |
|
||||
for crate in gstreamer*; do
|
||||
echo '-->' $crate
|
||||
(cd $crate && ../checker/check_init_asserts)
|
||||
done
|
||||
- git submodule update --checkout
|
||||
- python3 ci/gir-checks.py
|
||||
|
||||
outdated:
|
||||
extends: .img-stable
|
||||
allow_failure: true
|
||||
stage: 'extras'
|
||||
only:
|
||||
- schedules
|
||||
needs:
|
||||
- job: 'build-stable'
|
||||
artifacts: false
|
||||
rules:
|
||||
- if: $CI_PIPELINE_SOURCE == "schedule"
|
||||
script:
|
||||
- cargo outdated --root-deps-only --exit-code 1 -v
|
||||
- cargo update --color=always
|
||||
- cargo outdated --color=always --root-deps-only --exit-code 1 -v
|
||||
|
||||
pages:
|
||||
extends: .img-stable
|
||||
stage: 'deploy'
|
||||
coverage:
|
||||
allow_failure: true
|
||||
extends:
|
||||
- '.cargo test'
|
||||
- .img-stable
|
||||
stage: 'extras'
|
||||
needs:
|
||||
- job: 'trigger'
|
||||
artifacts: false
|
||||
- job: 'build-stable'
|
||||
artifacts: false
|
||||
variables:
|
||||
ALL_FEATURES: 'yes'
|
||||
RUSTFLAGS: "-Cinstrument-coverage"
|
||||
LLVM_PROFILE_FILE: "gstreamer-rs-%p-%m.profraw"
|
||||
script:
|
||||
- *cargo_test
|
||||
# generate html report
|
||||
- grcov . --binary-path ./target/debug/ -s . -t html --branch --ignore-not-existing --ignore "*target*" --ignore "*/sys/*" --ignore "examples/*" --ignore "tutorials/*" --ignore "*/build.rs" -o ./coverage/
|
||||
# generate cobertura report for gitlab integration
|
||||
- grcov . --binary-path ./target/debug/ -s . -t cobertura --branch --ignore-not-existing --ignore "*target*" --ignore "*/sys/*" --ignore "examples/*" --ignore "tutorials/*" --ignore "*/build.rs" -o coverage.xml
|
||||
# output coverage summary for gitlab parsing.
|
||||
# TODO: use grcov once https://github.com/mozilla/grcov/issues/556 is fixed
|
||||
- grep % coverage/index.html | head -1 ; true
|
||||
artifacts:
|
||||
paths:
|
||||
- 'coverage'
|
||||
reports:
|
||||
coverage_report:
|
||||
coverage_format: cobertura
|
||||
path: coverage.xml
|
||||
|
||||
doc-stripping:
|
||||
variables:
|
||||
GIT_SUBMODULE_STRATEGY: recursive
|
||||
extends: .img-nightly
|
||||
stage: 'extras'
|
||||
needs:
|
||||
- job: 'build-nightly'
|
||||
artifacts: false
|
||||
script:
|
||||
- git submodule update --checkout
|
||||
- PATH=~/.cargo/bin/:$PATH ./generator.py --gir-files-directories gir-files gst-gir-files --embed-docs
|
||||
- PATH=~/.cargo/bin/:$PATH ./generator.py --gir-files-directories gir-files gst-gir-files --strip-docs
|
||||
- git diff --quiet || (echo 'Files changed after running `rustdoc-stripper -s`, make sure all documentation is protected with `// rustdoc-stripper-ignore-next`!'; git diff; false)
|
||||
|
||||
regen-check:
|
||||
variables:
|
||||
GIT_SUBMODULE_STRATEGY: recursive
|
||||
extends: .img-nightly
|
||||
stage: 'extras'
|
||||
needs:
|
||||
- job: 'build-nightly'
|
||||
artifacts: false
|
||||
script:
|
||||
- git submodule update --checkout
|
||||
- PATH=~/.cargo/bin/:$PATH ./generator.py --gir-files-directories gir-files gst-gir-files --yes
|
||||
- git diff --quiet || (echo 'Files changed after running `generator.py`, make sure all submodules and generated files are in the correct version!'; git diff; false)
|
||||
|
||||
docs:
|
||||
variables:
|
||||
GIT_SUBMODULE_STRATEGY: recursive
|
||||
extends: .img-nightly
|
||||
stage: 'extras'
|
||||
needs:
|
||||
- job: 'build-nightly'
|
||||
artifacts: false
|
||||
script:
|
||||
- git submodule update --checkout
|
||||
- curl --proto '=https' --tlsv1.2 -sSf -o gir-rustdoc.py
|
||||
https://gitlab.gnome.org/World/Rust/gir-rustdoc/-/raw/main/gir-rustdoc.py
|
||||
- chmod +x gir-rustdoc.py
|
||||
- PATH=~/.cargo/bin/:$PATH ./generator.py --gir-files-directories gir-files gst-gir-files --embed-docs --no-fmt
|
||||
- |
|
||||
for crate in gstreamer*; do
|
||||
cd $crate
|
||||
cargo doc --features --features=dox,embed-lgpl-docs
|
||||
cd ..
|
||||
done
|
||||
- mv target/doc public/
|
||||
when: 'manual'
|
||||
RUSTDOCFLAGS="$RUST_DOCS_FLAGS"
|
||||
RUSTFLAGS="--cfg docsrs"
|
||||
eval $(./gir-rustdoc.py pre-docs)
|
||||
cargo +nightly doc --workspace --exclude examples --exclude tutorials --all-features --color=always --no-deps
|
||||
- mv target/doc docs
|
||||
artifacts:
|
||||
paths:
|
||||
- 'docs'
|
||||
|
||||
# https://docs.gitlab.com/ee/user/project/pages/#how-it-works
|
||||
# GitLab automatically deploys the `public/` folder from an
|
||||
# artifact generated by the job named `pages`. This step
|
||||
# re-uses the docs from the build-test `docs` step above.
|
||||
pages:
|
||||
extends: .img-nightly
|
||||
stage: 'deploy'
|
||||
needs: [ 'docs' ]
|
||||
interruptible: false
|
||||
script:
|
||||
- curl --proto '=https' --tlsv1.2 -sSf -o gir-rustdoc.py
|
||||
https://gitlab.gnome.org/World/Rust/gir-rustdoc/-/raw/main/gir-rustdoc.py
|
||||
- chmod +x gir-rustdoc.py
|
||||
- ./gir-rustdoc.py html-index
|
||||
# development docs
|
||||
- mkdir public/git
|
||||
- mv docs public/git/docs
|
||||
# stable docs
|
||||
- ./gir-rustdoc.py docs-from-artifacts
|
||||
- ls public/
|
||||
artifacts:
|
||||
paths:
|
||||
- 'public'
|
||||
rules:
|
||||
- if: ($CI_DEFAULT_BRANCH == $CI_COMMIT_BRANCH) && ($CI_PROJECT_NAMESPACE == $NAMESPACE)
|
||||
when: 'manual'
|
||||
|
||||
|
||||
.windows rust docker build:
|
||||
stage: 'container-final'
|
||||
timeout: '2h'
|
||||
needs: []
|
||||
variables:
|
||||
# Unlike the buildah/linux jobs, this file
|
||||
# needs to be relative to windows-docker/ subdir
|
||||
# as it makes life easier in the powershell script
|
||||
#
|
||||
# We also don't need a CONTEXT_DIR var as its also
|
||||
# hardcoded to be windows-docker/
|
||||
DOCKERFILE: 'ci/windows-docker/Dockerfile'
|
||||
tags:
|
||||
- 'windows'
|
||||
- 'shell'
|
||||
- '2022'
|
||||
script:
|
||||
# We need to pass an array and to resolve the env vars, so we can't use a variable:
|
||||
- $DOCKER_BUILD_ARGS = @("--build-arg", "DEFAULT_BRANCH=$GST_UPSTREAM_BRANCH", "--build-arg", "RUST_VERSION=$RUST_VERSION")
|
||||
|
||||
- "& ci/windows-docker/container.ps1 $CI_REGISTRY $CI_REGISTRY_USER $CI_REGISTRY_PASSWORD $RUST_IMAGE $RUST_UPSTREAM_IMAGE $DOCKERFILE"
|
||||
- |
|
||||
if (!($?)) {
|
||||
echo "Failed to build the image"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
windows rust docker stable:
|
||||
extends: '.windows rust docker build'
|
||||
variables:
|
||||
RUST_IMAGE: !reference [variables, "WINDOWS_RUST_STABLE_IMAGE"]
|
||||
RUST_UPSTREAM_IMAGE: !reference [variables, "WINDOWS_RUST_STABLE_UPSTREAM_IMAGE"]
|
||||
RUST_VERSION: !reference [variables, "GST_RS_STABLE"]
|
||||
|
||||
windows rust docker msrv:
|
||||
extends: '.windows rust docker build'
|
||||
when: 'manual'
|
||||
variables:
|
||||
RUST_IMAGE: !reference [variables, "WINDOWS_RUST_MINIMUM_IMAGE"]
|
||||
RUST_UPSTREAM_IMAGE: !reference [variables, "WINDOWS_RUST_MINIMUM_UPSTREAM_IMAGE"]
|
||||
RUST_VERSION: !reference [variables, "GST_RS_MSRV"]
|
||||
|
||||
.msvc2019 build:
|
||||
stage: 'test'
|
||||
tags:
|
||||
- 'docker'
|
||||
- 'windows'
|
||||
- '2022'
|
||||
script:
|
||||
# Skip -sys tests as they don't work
|
||||
# https://github.com/gtk-rs/gtk3-rs/issues/54
|
||||
#
|
||||
# We need to build each crate separately to avoid crates like -egl,-wayland etc on windows
|
||||
- cmd.exe /C "C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 &&
|
||||
powershell ./ci/run_windows_tests.ps1"
|
||||
|
||||
- |
|
||||
if (!$?) {
|
||||
Write-Host "Tests Failed!"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
test windows msrv:
|
||||
image: $WINDOWS_RUST_MINIMUM_IMAGE
|
||||
needs:
|
||||
- job: 'trigger'
|
||||
artifacts: false
|
||||
- job: 'windows rust docker msrv'
|
||||
artifacts: false
|
||||
extends: '.msvc2019 build'
|
||||
|
||||
test windows stable:
|
||||
needs:
|
||||
- job: 'trigger'
|
||||
artifacts: false
|
||||
- job: 'windows rust docker stable'
|
||||
artifacts: false
|
||||
image: "$WINDOWS_RUST_STABLE_IMAGE"
|
||||
extends: '.msvc2019 build'
|
||||
|
|
33
.gitlab/issue_templates/Bug.md
Normal file
33
.gitlab/issue_templates/Bug.md
Normal file
|
@ -0,0 +1,33 @@
|
|||
### Describe your issue
|
||||
<!-- a clear and concise summary of the bug. -->
|
||||
<!-- For any GStreamer usage question, please contact the community using the #gstreamer channel on IRC https://www.oftc.net/ or the mailing list on https://gstreamer.freedesktop.org/lists/ -->
|
||||
|
||||
#### Expected Behavior
|
||||
<!-- What did you expect to happen -->
|
||||
|
||||
#### Observed Behavior
|
||||
<!-- What actually happened -->
|
||||
|
||||
#### Setup
|
||||
- **Operating System:**
|
||||
- **Device:** Computer / Tablet / Mobile / Virtual Machine <!-- Delete as appropriate !-->
|
||||
- **gstreamer-rs Version:**
|
||||
- **GStreamer Version:**
|
||||
- **Command line:**
|
||||
|
||||
### Steps to reproduce the bug
|
||||
<!-- please fill in exact steps which reproduce the bug on your system, for example: -->
|
||||
1. open terminal
|
||||
2. type `command`
|
||||
|
||||
### How reproducible is the bug?
|
||||
<!-- The reproducibility of the bug is Always/Intermittent/Only once after doing a very specific set of steps-->
|
||||
|
||||
### Screenshots if relevant
|
||||
|
||||
### Solutions you have tried
|
||||
|
||||
### Related non-duplicate issues
|
||||
|
||||
### Additional Information
|
||||
<!-- Any other information such as logs. Make use of <details> for long output -->
|
9
.gitmodules
vendored
9
.gitmodules
vendored
|
@ -1,3 +1,12 @@
|
|||
[submodule "gir"]
|
||||
path = gir
|
||||
url = https://github.com/gtk-rs/gir
|
||||
update = none
|
||||
[submodule "gir-files"]
|
||||
path = gir-files
|
||||
url = https://github.com/gtk-rs/gir-files
|
||||
update = none
|
||||
[submodule "gst-gir-files"]
|
||||
path = gst-gir-files
|
||||
url = https://gitlab.freedesktop.org/gstreamer/gir-files-rs.git
|
||||
update = none
|
||||
|
|
23
COPYRIGHT
Normal file
23
COPYRIGHT
Normal file
|
@ -0,0 +1,23 @@
|
|||
The gstreamer-rs project is dual-licensed under Apache 2.0 and MIT terms, with
|
||||
the exception of the sys crates which are licensed only under the terms of the
|
||||
MIT license.
|
||||
|
||||
Copyrights in the gstreamer-rs project are retained by their contributors. No
|
||||
copyright assignment is required to contribute to the gstreamer-rs project.
|
||||
|
||||
Some files include explicit copyright notices and/or license notices. For full
|
||||
authorship information, see the version control history.
|
||||
|
||||
Except as otherwise noted (below and/or in individual files), gstreamer-rs is
|
||||
licensed under the Apache License, Version 2.0 <LICENSE-APACHE> or
|
||||
<http://www.apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT>
|
||||
or <http://opensource.org/licenses/MIT>, at your option.
|
||||
|
||||
All the sys crates (e.g. gstreamer/sys and gstreamer-base/sys) are licensed
|
||||
only under the terms of the MIT license.
|
||||
|
||||
This project provides interoperability with various GStreamer libraries but
|
||||
doesn't distribute any parts of them. Distributing compiled libraries and
|
||||
executables that link to those libraries may be subject to terms of the GNU
|
||||
LGPL or other licenses. For more information check the license of each
|
||||
GStreamer library.
|
3067
Cargo.lock
generated
Normal file
3067
Cargo.lock
generated
Normal file
File diff suppressed because it is too large
Load diff
158
Cargo.toml
158
Cargo.toml
|
@ -1,25 +1,169 @@
|
|||
[workspace]
|
||||
resolver = "2"
|
||||
|
||||
members = [
|
||||
default-members = [
|
||||
"gstreamer/sys",
|
||||
"gstreamer-analytics/sys",
|
||||
"gstreamer-app/sys",
|
||||
"gstreamer-audio/sys",
|
||||
"gstreamer-base/sys",
|
||||
"gstreamer-check/sys",
|
||||
"gstreamer-controller/sys",
|
||||
"gstreamer-editing-services/sys",
|
||||
"gstreamer-mpegts/sys",
|
||||
"gstreamer-net/sys",
|
||||
"gstreamer-pbutils/sys",
|
||||
"gstreamer-play/sys",
|
||||
"gstreamer-player/sys",
|
||||
"gstreamer-rtp/sys",
|
||||
"gstreamer-rtsp/sys",
|
||||
"gstreamer-rtsp-server/sys",
|
||||
"gstreamer-sdp/sys",
|
||||
"gstreamer-tag/sys",
|
||||
"gstreamer-video/sys",
|
||||
"gstreamer-webrtc/sys",
|
||||
"gstreamer",
|
||||
"gstreamer-analytics",
|
||||
"gstreamer-app",
|
||||
"gstreamer-audio",
|
||||
"gstreamer-base",
|
||||
"gstreamer-check",
|
||||
"gstreamer-controller",
|
||||
"gstreamer-editing-services",
|
||||
"gstreamer-mpegts",
|
||||
"gstreamer-net",
|
||||
"gstreamer-pbutils",
|
||||
"gstreamer-play",
|
||||
"gstreamer-player",
|
||||
"gstreamer-rtp",
|
||||
"gstreamer-rtsp",
|
||||
"gstreamer-rtsp-server",
|
||||
"gstreamer-sdp",
|
||||
"gstreamer-tag",
|
||||
"gstreamer-validate",
|
||||
"gstreamer-video",
|
||||
"gstreamer-pbutils",
|
||||
"gstreamer-webrtc",
|
||||
"gstreamer-check",
|
||||
"gstreamer-editing-services",
|
||||
"gstreamer-gl",
|
||||
"gstreamer-rtp",
|
||||
"examples",
|
||||
"tutorials",
|
||||
"docs",
|
||||
]
|
||||
|
||||
members = [
|
||||
"gstreamer/sys",
|
||||
"gstreamer-analytics/sys",
|
||||
"gstreamer-app/sys",
|
||||
"gstreamer-audio/sys",
|
||||
"gstreamer-base/sys",
|
||||
"gstreamer-check/sys",
|
||||
"gstreamer-controller/sys",
|
||||
"gstreamer-editing-services/sys",
|
||||
"gstreamer-gl/sys",
|
||||
"gstreamer-gl/egl/sys",
|
||||
"gstreamer-gl/wayland/sys",
|
||||
"gstreamer-gl/x11/sys",
|
||||
"gstreamer-mpegts/sys",
|
||||
"gstreamer-net/sys",
|
||||
"gstreamer-pbutils/sys",
|
||||
"gstreamer-play/sys",
|
||||
"gstreamer-player/sys",
|
||||
"gstreamer-rtp/sys",
|
||||
"gstreamer-rtsp/sys",
|
||||
"gstreamer-rtsp-server/sys",
|
||||
"gstreamer-sdp/sys",
|
||||
"gstreamer-tag/sys",
|
||||
"gstreamer-video/sys",
|
||||
"gstreamer-webrtc/sys",
|
||||
"gstreamer-allocators/sys",
|
||||
"gstreamer",
|
||||
"gstreamer-analytics",
|
||||
"gstreamer-app",
|
||||
"gstreamer-audio",
|
||||
"gstreamer-base",
|
||||
"gstreamer-check",
|
||||
"gstreamer-controller",
|
||||
"gstreamer-editing-services",
|
||||
"gstreamer-gl",
|
||||
"gstreamer-gl/egl",
|
||||
"gstreamer-gl/wayland",
|
||||
"gstreamer-gl/x11",
|
||||
"gstreamer-mpegts",
|
||||
"gstreamer-net",
|
||||
"gstreamer-pbutils",
|
||||
"gstreamer-play",
|
||||
"gstreamer-player",
|
||||
"gstreamer-rtp",
|
||||
"gstreamer-rtsp",
|
||||
"gstreamer-rtsp-server",
|
||||
"gstreamer-sdp",
|
||||
"gstreamer-tag",
|
||||
"gstreamer-validate",
|
||||
"gstreamer-video",
|
||||
"gstreamer-webrtc",
|
||||
"gstreamer-allocators",
|
||||
"gstreamer-utils",
|
||||
"examples",
|
||||
"tutorials",
|
||||
]
|
||||
|
||||
exclude = ["gir"]
|
||||
|
||||
[workspace.package]
|
||||
version = "0.23.0"
|
||||
categories = ["api-bindings", "multimedia"]
|
||||
repository = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs"
|
||||
homepage = "https://gstreamer.freedesktop.org"
|
||||
edition = "2021"
|
||||
rust-version = "1.70"
|
||||
|
||||
[workspace.dependencies]
|
||||
gio = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
|
||||
gio-sys = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
|
||||
glib = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
|
||||
glib-sys = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
|
||||
gobject-sys = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
|
||||
cairo-rs = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
|
||||
pango = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
|
||||
pangocairo = { git = "https://github.com/gtk-rs/gtk-rs-core", branch = "master" }
|
||||
gstreamer-allocators-sys = { path = "gstreamer-allocators/sys" }
|
||||
gstreamer-analytics-sys = { path = "gstreamer-analytics/sys" }
|
||||
gstreamer-app-sys = { path = "gstreamer-app/sys" }
|
||||
gstreamer-audio-sys = { path = "./gstreamer-audio/sys"}
|
||||
gstreamer-base-sys = { path = "./gstreamer-base/sys"}
|
||||
gstreamer-check-sys = { path = "./gstreamer-check/sys" }
|
||||
gstreamer-controller-sys = { path = "./gstreamer-controller/sys" }
|
||||
gstreamer-editing-services-sys = { path = "./gstreamer-editing-services/sys"}
|
||||
gstreamer-gl-egl-sys = { path = "./gstreamer-gl/egl/sys"}
|
||||
gstreamer-gl-wayland-sys = { path = "./gstreamer-gl/wayland/sys"}
|
||||
gstreamer-gl-x11-sys = { path = "./gstreamer-gl/x11/sys"}
|
||||
gstreamer-gl-sys = { path = "./gstreamer-gl/sys"}
|
||||
gstreamer-mpegts-sys = { path = "./gstreamer-mpegts/sys"}
|
||||
gstreamer-net-sys = { path = "./gstreamer-net/sys"}
|
||||
gstreamer-pbutils-sys = { path = "./gstreamer-pbutils/sys"}
|
||||
gstreamer-play-sys = { path = "./gstreamer-play/sys" }
|
||||
gstreamer-player-sys = { path = "./gstreamer-player/sys" }
|
||||
gstreamer-rtp-sys = { path = "./gstreamer-rtp/sys" }
|
||||
gstreamer-rtsp-sys = { path = "./gstreamer-rtsp/sys"}
|
||||
gstreamer-rtsp-server-sys = { path = "./gstreamer-rtsp-server/sys" }
|
||||
gstreamer-sdp-sys = { path = "./gstreamer-sdp/sys"}
|
||||
gstreamer-tag-sys = { path = "./gstreamer-tag/sys" }
|
||||
gstreamer-sys = { path = "./gstreamer/sys"}
|
||||
gstreamer-validate-sys = { path = "./gstreamer-validate/sys" }
|
||||
gstreamer-video-sys = { path = "./gstreamer-video/sys"}
|
||||
gstreamer-webrtc-sys = { path = "./gstreamer-webrtc/sys" }
|
||||
ges = { package = "gstreamer-editing-services", path = "./gstreamer-editing-services" }
|
||||
gst = { package = "gstreamer", path = "./gstreamer" }
|
||||
gst-allocators = { package = "gstreamer-allocators", path = "./gstreamer-allocators" }
|
||||
gst-app = { package = "gstreamer-app", path = "./gstreamer-app" }
|
||||
gst-audio = { package = "gstreamer-audio", path = "./gstreamer-audio" }
|
||||
gst-base = { package = "gstreamer-base", path = "./gstreamer-base" }
|
||||
gst-check = { package = "gstreamer-check", path = "./gstreamer-check" }
|
||||
gst-gl = { package = "gstreamer-gl", path = "./gstreamer-gl" }
|
||||
gst-gl-egl = { package = "gstreamer-gl-egl", path = "./gstreamer-gl/egl" }
|
||||
gst-gl-x11 = { package = "gstreamer-gl-x11", path = "./gstreamer-gl/x11" }
|
||||
gst-net = { package = "gstreamer-net", path = "./gstreamer-net" }
|
||||
gst-pbutils = { package = "gstreamer-pbutils", path = "./gstreamer-pbutils" }
|
||||
gst-play = { package = "gstreamer-play", path = "./gstreamer-play" }
|
||||
gst-player = { package = "gstreamer-player", path = "./gstreamer-player" }
|
||||
gst-rtsp = { package = "gstreamer-rtsp", path = "./gstreamer-rtsp" }
|
||||
gst-rtsp-server = { package = "gstreamer-rtsp-server", path = "./gstreamer-rtsp-server" }
|
||||
gst-sdp = { package = "gstreamer-sdp", path = "./gstreamer-sdp" }
|
||||
gst-video = { package = "gstreamer-video", path = "./gstreamer-video" }
|
||||
|
|
|
@ -1,7 +1,8 @@
|
|||
# How to update the bindings
|
||||
|
||||
* Make sure gstreamer-rs-sys is up to date
|
||||
* Take the updated .gir files from gstreamer-rs-sys and copy them over
|
||||
* Take the updated .gir files (e.g. from your gst-build checkout) and put
|
||||
them in the gir-files directory
|
||||
* In the gir-files directory, run ./fix.sh
|
||||
* If there is a new GStreamer version: Manually update `gst*/Cargo.toml`
|
||||
* Run generator.py
|
||||
* Investigate the diff, fix any mess-ups, look at commented functions and
|
||||
|
|
|
@ -1,255 +0,0 @@
|
|||
[options]
|
||||
girs_dir = "gir-files"
|
||||
library = "GstAudio"
|
||||
version = "1.0"
|
||||
min_cfg_version = "1.8"
|
||||
target_path = "gstreamer-audio"
|
||||
work_mode = "normal"
|
||||
concurrency = "send+sync"
|
||||
generate_safety_asserts = true
|
||||
single_version_file = true
|
||||
generate_display_trait = false
|
||||
|
||||
external_libraries = [
|
||||
"GLib",
|
||||
"GObject",
|
||||
"Gst",
|
||||
"GstBase",
|
||||
]
|
||||
|
||||
generate = [
|
||||
"GstAudio.AudioFormatFlags",
|
||||
"GstAudio.AudioLayout",
|
||||
"GstAudio.AudioChannelPosition",
|
||||
"GstAudio.StreamVolume",
|
||||
"GstAudio.StreamVolumeFormat",
|
||||
"GstAudio.AudioSink",
|
||||
"GstAudio.AudioSrc",
|
||||
"GstAudio.AudioBaseSink",
|
||||
"GstAudio.AudioBaseSrc",
|
||||
]
|
||||
|
||||
manual = [
|
||||
"GObject.Object",
|
||||
"Gst.Object",
|
||||
"Gst.Element",
|
||||
"Gst.Allocator",
|
||||
"Gst.AllocationParams",
|
||||
"Gst.TagList",
|
||||
"Gst.TagMergeMode",
|
||||
"GstBase.BaseSink",
|
||||
"GstBase.BaseSrc",
|
||||
"GstAudio.AudioInfo",
|
||||
"GstAudio.AudioFormatInfo",
|
||||
]
|
||||
|
||||
[[object]]
|
||||
name = "Gst.Caps"
|
||||
status = "manual"
|
||||
ref_mode = "ref"
|
||||
|
||||
[[object]]
|
||||
name = "Gst.Buffer"
|
||||
status = "manual"
|
||||
ref_mode = "ref"
|
||||
|
||||
[[object]]
|
||||
name = "Gst.ClockTime"
|
||||
status = "manual"
|
||||
conversion_type = "scalar"
|
||||
|
||||
[[object]]
|
||||
name = "GstAudio.AudioFormat"
|
||||
status = "generate"
|
||||
|
||||
[[object.derive]]
|
||||
name = "Debug, Eq, PartialEq, Hash"
|
||||
|
||||
[[object.member]]
|
||||
name = "s16"
|
||||
# Platform dependant
|
||||
ignore = true
|
||||
|
||||
[[object.member]]
|
||||
name = "u16"
|
||||
# Platform dependant
|
||||
ignore = true
|
||||
|
||||
[[object.member]]
|
||||
name = "s24_32"
|
||||
# Platform dependant
|
||||
ignore = true
|
||||
|
||||
[[object.member]]
|
||||
name = "u24_32"
|
||||
# Platform dependant
|
||||
ignore = true
|
||||
|
||||
[[object.member]]
|
||||
name = "s32"
|
||||
# Platform dependant
|
||||
ignore = true
|
||||
|
||||
[[object.member]]
|
||||
name = "u32"
|
||||
# Platform dependant
|
||||
ignore = true
|
||||
|
||||
[[object.member]]
|
||||
name = "s24"
|
||||
# Platform dependant
|
||||
ignore = true
|
||||
|
||||
[[object.member]]
|
||||
name = "u24"
|
||||
# Platform dependant
|
||||
ignore = true
|
||||
|
||||
[[object.member]]
|
||||
name = "s20"
|
||||
# Platform dependant
|
||||
ignore = true
|
||||
|
||||
[[object.member]]
|
||||
name = "u20"
|
||||
# Platform dependant
|
||||
ignore = true
|
||||
|
||||
[[object.member]]
|
||||
name = "s18"
|
||||
# Platform dependant
|
||||
ignore = true
|
||||
|
||||
[[object.member]]
|
||||
name = "u18"
|
||||
# Platform dependant
|
||||
ignore = true
|
||||
|
||||
[[object.member]]
|
||||
name = "f32"
|
||||
# Platform dependant
|
||||
ignore = true
|
||||
|
||||
[[object.member]]
|
||||
name = "f64"
|
||||
# Platform dependant
|
||||
ignore = true
|
||||
|
||||
[[object]]
|
||||
name = "GstAudio.AudioStreamAlign"
|
||||
status = "generate"
|
||||
|
||||
[[object.function]]
|
||||
name = "process"
|
||||
# bool does not signal error
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
pattern = "get_.*"
|
||||
[[object.function.parameter]]
|
||||
name = "align"
|
||||
const = true
|
||||
|
||||
[[object]]
|
||||
name = "GstAudio.AudioDecoder"
|
||||
status = "generate"
|
||||
manual_traits = ["AudioDecoderExtManual"]
|
||||
|
||||
[[object.function]]
|
||||
name = "finish_frame"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "finish_subframe"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "negotiate"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "set_output_caps"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "set_output_format"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "get_allocator"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "proxy_getcaps"
|
||||
[object.function.return]
|
||||
nullable = false
|
||||
|
||||
[[object.function]]
|
||||
name = "allocate_output_buffer"
|
||||
[object.function.return]
|
||||
nullable_return_is_error = "Failed to allocate output buffer"
|
||||
|
||||
[[object]]
|
||||
name = "GstAudio.AudioEncoder"
|
||||
status = "generate"
|
||||
manual_traits = ["AudioEncoderExtManual"]
|
||||
|
||||
[[object.function]]
|
||||
name = "finish_frame"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "negotiate"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "set_output_format"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "get_allocator"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "get_latency"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "proxy_getcaps"
|
||||
[object.function.return]
|
||||
nullable = false
|
||||
|
||||
[[object.function]]
|
||||
name = "allocate_output_buffer"
|
||||
[object.function.return]
|
||||
nullable_return_is_error = "Failed to allocate output buffer"
|
||||
|
||||
[[object]]
|
||||
name = "GstAudio.AudioRingBufferFormatType"
|
||||
status = "generate"
|
||||
|
||||
[[object.member]]
|
||||
name = "mpeg2_aac_raw"
|
||||
version = "1.12"
|
||||
|
||||
[[object.member]]
|
||||
name = "mpeg4_aac_raw"
|
||||
version = "1.12"
|
||||
|
||||
[[object.member]]
|
||||
name = "flac"
|
||||
version = "1.12"
|
||||
|
||||
[[object]]
|
||||
name = "GstAudio.AudioFlags"
|
||||
status = "generate"
|
||||
[[object.member]]
|
||||
name = "none"
|
||||
ignore = true
|
||||
|
||||
[[object]]
|
||||
name = "GstAudio.AudioPackFlags"
|
||||
status = "generate"
|
||||
[[object.member]]
|
||||
name = "none"
|
||||
ignore = true
|
|
@ -1,39 +0,0 @@
|
|||
[options]
|
||||
girs_dir = "gir-files"
|
||||
library = "GstCheck"
|
||||
version = "1.0"
|
||||
min_cfg_version = "1.8"
|
||||
target_path = "gstreamer-check"
|
||||
work_mode = "normal"
|
||||
concurrency = "send+sync"
|
||||
generate_safety_asserts = true
|
||||
single_version_file = true
|
||||
generate_display_trait = false
|
||||
|
||||
external_libraries = [
|
||||
"GLib",
|
||||
"GObject",
|
||||
"Gst",
|
||||
]
|
||||
|
||||
generate = [
|
||||
]
|
||||
|
||||
manual = [
|
||||
"GObject.Object",
|
||||
"Gst.Object",
|
||||
"Gst.Clock",
|
||||
"Gst.ClockTimeDiff",
|
||||
"Gst.ClockType",
|
||||
"GstCheck.Harness",
|
||||
]
|
||||
|
||||
[[object]]
|
||||
name = "Gst.ClockTime"
|
||||
status = "manual"
|
||||
conversion_type = "scalar"
|
||||
|
||||
[[object]]
|
||||
name = "GstCheck.TestClock"
|
||||
status = "generate"
|
||||
final_type = true
|
|
@ -1,64 +0,0 @@
|
|||
[options]
|
||||
girs_dir = "gir-files"
|
||||
library = "GstRtp"
|
||||
version = "1.0"
|
||||
min_cfg_version = "1.8"
|
||||
target_path = "gstreamer-rtp"
|
||||
work_mode = "normal"
|
||||
concurrency = "send+sync"
|
||||
generate_safety_asserts = true
|
||||
single_version_file = true
|
||||
doc_target_path = "docs/gstreamer-rtp/docs.md"
|
||||
generate_display_trait = false
|
||||
|
||||
external_libraries = [
|
||||
"GLib",
|
||||
"GObject",
|
||||
"Gst",
|
||||
]
|
||||
|
||||
generate = [
|
||||
"GstRtp.RTCPFBType",
|
||||
"GstRtp.RTCPSDESType",
|
||||
"GstRtp.RTCPType",
|
||||
"GstRtp.RTCPXRType",
|
||||
"GstRtp.RTPPayload",
|
||||
"GstRtp.RTPProfile",
|
||||
]
|
||||
|
||||
[[object]]
|
||||
name = "Gst.Buffer"
|
||||
status = "manual"
|
||||
ref_mode = "ref"
|
||||
|
||||
[[object]]
|
||||
name = "GstRtp.*"
|
||||
status = "generate"
|
||||
# expected enum `std::ffi::c_void`, found u8
|
||||
[[object.function]]
|
||||
name = "rtp_hdrext_get_ntp_56"
|
||||
ignore = true
|
||||
|
||||
# expected enum `std::ffi::c_void`, found u8
|
||||
[[object.function]]
|
||||
name = "rtp_hdrext_get_ntp_64"
|
||||
ignore = true
|
||||
|
||||
# manual bindings are needed for GstMeta
|
||||
[[object.function]]
|
||||
name = "rtp_source_meta_api_get_type"
|
||||
ignore = true
|
||||
|
||||
[[object]]
|
||||
name = "GstRtp.RTPBufferFlags"
|
||||
status = "generate"
|
||||
[[object.member]]
|
||||
name = "last"
|
||||
ignore = true
|
||||
|
||||
[[object]]
|
||||
name = "GstRtp.RTPBufferMapFlags"
|
||||
status = "generate"
|
||||
[[object.member]]
|
||||
name = "last"
|
||||
ignore = true
|
|
@ -1,510 +0,0 @@
|
|||
[options]
|
||||
girs_dir = "gir-files"
|
||||
library = "GstVideo"
|
||||
version = "1.0"
|
||||
min_cfg_version = "1.8"
|
||||
target_path = "gstreamer-video"
|
||||
work_mode = "normal"
|
||||
concurrency = "send+sync"
|
||||
generate_safety_asserts = true
|
||||
single_version_file = true
|
||||
generate_display_trait = false
|
||||
|
||||
external_libraries = [
|
||||
"GLib",
|
||||
"GObject",
|
||||
"Gst",
|
||||
"GstBase",
|
||||
]
|
||||
|
||||
generate = [
|
||||
"GstVideo.VideoCodecFrameFlags",
|
||||
"GstVideo.VideoFormatFlags",
|
||||
"GstVideo.VideoTileMode",
|
||||
"GstVideo.VideoColorMatrix",
|
||||
"GstVideo.VideoMultiviewMode",
|
||||
"GstVideo.VideoFieldOrder",
|
||||
"GstVideo.VideoMultiviewFramePacking",
|
||||
"GstVideo.VideoFilter",
|
||||
"GstVideo.VideoCaptionType",
|
||||
"GstVideo.VideoBufferPool",
|
||||
"GstVideo.VideoAlphaMode",
|
||||
"GstVideo.VideoChromaMode",
|
||||
"GstVideo.VideoMatrixMode",
|
||||
"GstVideo.VideoGammaMode",
|
||||
"GstVideo.VideoPrimariesMode",
|
||||
"GstVideo.VideoResamplerMethod",
|
||||
"GstVideo.VideoDitherMethod",
|
||||
"GstVideo.VideoAFDValue",
|
||||
"GstVideo.VideoAFDSpec",
|
||||
]
|
||||
|
||||
manual = [
|
||||
"GLib.DateTime",
|
||||
"GObject.Object",
|
||||
"Gst.Object",
|
||||
"Gst.Element",
|
||||
"Gst.Buffer",
|
||||
"Gst.BufferPool",
|
||||
"Gst.BufferPoolAcquireParams",
|
||||
"Gst.Allocator",
|
||||
"Gst.AllocationParams",
|
||||
"Gst.ClockTimeDiff",
|
||||
"Gst.FlowReturn",
|
||||
"Gst.TagList",
|
||||
"Gst.TagMergeMode",
|
||||
"GstBase.BaseSink",
|
||||
"GstBase.BaseTransform",
|
||||
"GstVideo.VideoCodecState",
|
||||
"GstVideo.VideoCodecFrame",
|
||||
"GstVideo.VideoInfo",
|
||||
"GstVideo.VideoFormatInfo",
|
||||
"GstVideo.VideoColorimetry",
|
||||
"GstVideo.VideoColorRange",
|
||||
"GstVideo.VideoFrame",
|
||||
"GstVideo.VideoTimeCode",
|
||||
"GstVideo.VideoTimeCodeInterval",
|
||||
]
|
||||
|
||||
[[object]]
|
||||
name = "Gst.ClockTime"
|
||||
status = "manual"
|
||||
conversion_type = "scalar"
|
||||
|
||||
[[object]]
|
||||
name = "Gst.Caps"
|
||||
status = "manual"
|
||||
ref_mode = "ref"
|
||||
|
||||
[[object]]
|
||||
name = "GstVideo.VideoOverlay"
|
||||
status = "generate"
|
||||
manual_traits = ["VideoOverlayExtManual"]
|
||||
|
||||
[[object.function]]
|
||||
name = "set_render_rectangle"
|
||||
[object.function.return]
|
||||
bool_return_is_error = "Failed to set render rectangle"
|
||||
|
||||
[[object]]
|
||||
name = "GstVideo.VideoDecoder"
|
||||
status = "generate"
|
||||
manual_traits = ["VideoDecoderExtManual"]
|
||||
|
||||
[[object.function]]
|
||||
name = "allocate_output_frame"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "allocate_output_frame_with_params"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "finish_frame"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "release_frame"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "drop_frame"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "have_frame"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "set_latency"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "get_latency"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "get_frame"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "get_frames"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "get_oldest_frame"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "get_output_state"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "set_output_state"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "set_interlaced_output_state"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "negotiate"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "get_allocator"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "proxy_getcaps"
|
||||
[object.function.return]
|
||||
nullable = false
|
||||
|
||||
[[object.function]]
|
||||
name = "allocate_output_buffer"
|
||||
[object.function.return]
|
||||
nullable_return_is_error = "Failed to allocate output buffer"
|
||||
|
||||
[[object]]
|
||||
name = "GstVideo.VideoEncoder"
|
||||
status = "generate"
|
||||
manual_traits = ["VideoEncoderExtManual"]
|
||||
|
||||
[[object.function]]
|
||||
name = "allocate_output_frame"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "allocate_output_frame_with_params"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "finish_frame"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "finish_subframe"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "set_latency"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "get_latency"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "get_frame"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "get_frames"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "get_oldest_frame"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "get_output_state"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "set_output_state"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "negotiate"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "get_allocator"
|
||||
ignore = true
|
||||
|
||||
[[object.function]]
|
||||
name = "proxy_getcaps"
|
||||
[object.function.return]
|
||||
nullable = false
|
||||
|
||||
[[object.function]]
|
||||
name = "allocate_output_buffer"
|
||||
[object.function.return]
|
||||
nullable_return_is_error = "Failed to allocate output buffer"
|
||||
|
||||
[[object]]
|
||||
name = "GstVideo.VideoFormat"
|
||||
status = "generate"
|
||||
|
||||
[[object.derive]]
|
||||
name = "Debug, Eq, PartialEq, Hash"
|
||||
|
||||
[[object.member]]
|
||||
name = "p010_10be"
|
||||
version = "1.10"
|
||||
|
||||
[[object.member]]
|
||||
name = "p010_10le"
|
||||
version = "1.10"
|
||||
|
||||
[[object.member]]
|
||||
name = "iyu2"
|
||||
version = "1.10"
|
||||
|
||||
[[object.member]]
|
||||
name = "vyuy"
|
||||
version = "1.12"
|
||||
|
||||
[[object.member]]
|
||||
name = "gbra"
|
||||
version = "1.12"
|
||||
|
||||
[[object.member]]
|
||||
name = "gbra_10be"
|
||||
version = "1.12"
|
||||
|
||||
[[object.member]]
|
||||
name = "gbra_10le"
|
||||
version = "1.12"
|
||||
|
||||
[[object.member]]
|
||||
name = "gbr_12be"
|
||||
version = "1.12"
|
||||
|
||||
[[object.member]]
|
||||
name = "gbr_12le"
|
||||
version = "1.12"
|
||||
|
||||
[[object.member]]
|
||||
name = "gbra_12be"
|
||||
version = "1.12"
|
||||
|
||||
[[object.member]]
|
||||
name = "gbra_12le"
|
||||
version = "1.12"
|
||||
|
||||
[[object.member]]
|
||||
name = "i420_12be"
|
||||
version = "1.12"
|
||||
|
||||
[[object.member]]
|
||||
name = "i420_12le"
|
||||
version = "1.12"
|
||||
|
||||
[[object.member]]
|
||||
name = "i422_12be"
|
||||
version = "1.12"
|
||||
|
||||
[[object.member]]
|
||||
name = "i422_12le"
|
||||
version = "1.12"
|
||||
|
||||
[[object.member]]
|
||||
name = "y444_12be"
|
||||
version = "1.12"
|
||||
|
||||
[[object.member]]
|
||||
name = "y444_12le"
|
||||
version = "1.12"
|
||||
|
||||
[[object.member]]
|
||||
name = "gray10_le32"
|
||||
version = "1.14"
|
||||
|
||||
[[object.member]]
|
||||
name = "nv12_10le32"
|
||||
version = "1.14"
|
||||
|
||||
[[object.member]]
|
||||
name = "nv16_10le32"
|
||||
version = "1.14"
|
||||
|
||||
[[object.member]]
|
||||
name = "nv12_10le40"
|
||||
version = "1.16"
|
||||
|
||||
[[object.member]]
|
||||
name = "y210"
|
||||
version = "1.16"
|
||||
|
||||
[[object.member]]
|
||||
name = "y410"
|
||||
version = "1.16"
|
||||
|
||||
[[object.member]]
|
||||
name = "vuya"
|
||||
version = "1.16"
|
||||
|
||||
[[object.member]]
|
||||
name = "bgr10a2_le"
|
||||
version = "1.16"
|
||||
|
||||
[[object.member]]
|
||||
name = "rgb10a2_le"
|
||||
version = "1.18"
|
||||
|
||||
[[object.member]]
|
||||
name = "y444_16be"
|
||||
version = "1.18"
|
||||
|
||||
[[object.member]]
|
||||
name = "y444_16le"
|
||||
version = "1.18"
|
||||
|
||||
[[object.member]]
|
||||
name = "p016_be"
|
||||
version = "1.18"
|
||||
|
||||
[[object.member]]
|
||||
name = "p016_le"
|
||||
version = "1.18"
|
||||
|
||||
[[object.member]]
|
||||
name = "p012_be"
|
||||
version = "1.18"
|
||||
|
||||
[[object.member]]
|
||||
name = "p012_le"
|
||||
version = "1.18"
|
||||
|
||||
[[object.member]]
|
||||
name = "y212_be"
|
||||
version = "1.18"
|
||||
|
||||
[[object.member]]
|
||||
name = "y212_le"
|
||||
version = "1.18"
|
||||
|
||||
[[object.member]]
|
||||
name = "y412_be"
|
||||
version = "1.18"
|
||||
|
||||
[[object.member]]
|
||||
name = "y412_le"
|
||||
version = "1.18"
|
||||
|
||||
[[object]]
|
||||
name = "GstVideo.VideoSink"
|
||||
status = "generate"
|
||||
|
||||
[[object.function]]
|
||||
name = "center_rect"
|
||||
# Implemented in video_rectangle
|
||||
ignore = true
|
||||
|
||||
[[object]]
|
||||
name = "GstVideo.VideoOverlayFormatFlags"
|
||||
status = "generate"
|
||||
[[object.function]]
|
||||
name = "get_type"
|
||||
version = "1.16"
|
||||
[[object.member]]
|
||||
name = "none"
|
||||
ignore = true
|
||||
|
||||
[[object]]
|
||||
name = "GstVideo.VideoTimeCodeFlags"
|
||||
status = "generate"
|
||||
[[object.function]]
|
||||
name = "get_type"
|
||||
version = "1.18"
|
||||
[[object.member]]
|
||||
name = "none"
|
||||
ignore = true
|
||||
|
||||
[[object]]
|
||||
name = "GstVideo.VideoFrameFlags"
|
||||
status = "generate"
|
||||
[[object.member]]
|
||||
name = "top_field"
|
||||
version = "1.16"
|
||||
[[object.member]]
|
||||
name = "bottom_field"
|
||||
version = "1.16"
|
||||
|
||||
[[object]]
|
||||
name = "GstVideo.VideoBufferFlags"
|
||||
status = "generate"
|
||||
[[object.member]]
|
||||
name = "top_field"
|
||||
version = "1.16"
|
||||
[[object.member]]
|
||||
name = "bottom_field"
|
||||
version = "1.16"
|
||||
[[object.member]]
|
||||
name = "marker"
|
||||
version = "1.18"
|
||||
[[object.member]]
|
||||
name = "last"
|
||||
ignore = true
|
||||
|
||||
[[object]]
|
||||
name = "GstVideo.VideoInterlaceMode"
|
||||
status = "generate"
|
||||
[[object.member]]
|
||||
name = "alternate"
|
||||
version = "1.16"
|
||||
|
||||
[[object]]
|
||||
name = "GstVideo.VideoChromaSite"
|
||||
status = "generate"
|
||||
[[object.member]]
|
||||
name = "unknown"
|
||||
ignore = true
|
||||
|
||||
[[object]]
|
||||
name = "GstVideo.VideoFlags"
|
||||
status = "generate"
|
||||
[[object.member]]
|
||||
name = "none"
|
||||
ignore = true
|
||||
|
||||
[[object]]
|
||||
name = "GstVideo.VideoFrameFlags"
|
||||
status = "generate"
|
||||
[[object.member]]
|
||||
name = "none"
|
||||
ignore = true
|
||||
|
||||
[[object]]
|
||||
name = "GstVideo.VideoMultiviewFlags"
|
||||
status = "generate"
|
||||
[[object.member]]
|
||||
name = "none"
|
||||
ignore = true
|
||||
|
||||
[[object]]
|
||||
name = "GstVideo.VideoPackFlags"
|
||||
status = "generate"
|
||||
[[object.member]]
|
||||
name = "none"
|
||||
ignore = true
|
||||
|
||||
[[object]]
|
||||
name = "GstVideo.VideoTransferFunction"
|
||||
status = "generate"
|
||||
[[object.member]]
|
||||
name = "bt2020_10"
|
||||
version = "1.18"
|
||||
[[object.member]]
|
||||
name = "smpte2084"
|
||||
version = "1.18"
|
||||
[[object.member]]
|
||||
name = "arib_std_b67"
|
||||
version = "1.18"
|
||||
|
||||
[[object]]
|
||||
name = "GstVideo.VideoColorPrimaries"
|
||||
status = "generate"
|
||||
[[object.member]]
|
||||
name = "smptest428"
|
||||
version = "1.16"
|
||||
[[object.member]]
|
||||
name = "smpterp431"
|
||||
version = "1.16"
|
||||
[[object.member]]
|
||||
name = "smpteeg432"
|
||||
version = "1.16"
|
||||
[[object.member]]
|
||||
name = "ebu3213"
|
||||
version = "1.16"
|
118
README.md
118
README.md
|
@ -1,7 +1,7 @@
|
|||
# gstreamer-rs [![crates.io](https://img.shields.io/crates/v/gstreamer.svg)](https://crates.io/crates/gstreamer) [![pipeline status](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/badges/master/pipeline.svg)](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/commits/master)
|
||||
# gstreamer-rs [![crates.io](https://img.shields.io/crates/v/gstreamer.svg)](https://crates.io/crates/gstreamer) [![pipeline status](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/badges/main/pipeline.svg)](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/commits/main)
|
||||
|
||||
[GStreamer](https://gstreamer.freedesktop.org/) bindings for Rust.
|
||||
Documentation can be found [here](https://slomo.pages.freedesktop.org/rustdocs/gstreamer/gstreamer/).
|
||||
Documentation can be found [here](https://gstreamer.pages.freedesktop.org/gstreamer-rs/stable/latest/docs/gstreamer/).
|
||||
|
||||
These bindings are providing a safe API that can be used to interface with
|
||||
GStreamer, e.g. for writing GStreamer-based applications and GStreamer plugins.
|
||||
|
@ -24,7 +24,7 @@ API metadata provided by the GStreamer project.
|
|||
## Installation
|
||||
|
||||
To build the GStreamer bindings or anything depending on them, you need to
|
||||
have at least GStreamer 1.8 and gst-plugins-base 1.8 installed. In addition,
|
||||
have at least GStreamer 1.14 and gst-plugins-base 1.14 installed. In addition,
|
||||
some of the examples/tutorials require various GStreamer plugins to be
|
||||
available, which can be found in gst-plugins-base, gst-plugins-good,
|
||||
gst-plugins-bad, gst-plugins-ugly and/or gst-libav.
|
||||
|
@ -38,23 +38,20 @@ package manager, or build them from source.
|
|||
|
||||
On Debian/Ubuntu they can be installed with
|
||||
|
||||
```
|
||||
```console
|
||||
$ apt-get install libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev \
|
||||
gstreamer1.0-plugins-base gstreamer1.0-plugins-good \
|
||||
gstreamer1.0-plugins-bad gstreamer1.0-plugins-ugly \
|
||||
gstreamer1.0-libav libgstrtspserver-1.0-dev libges-1.0-dev
|
||||
```
|
||||
|
||||
The minimum required version of the above libraries is >= 1.8. If you
|
||||
The minimum required version of the above libraries is >= 1.14. If you
|
||||
build the gstreamer-player sub-crate, or any of the examples that
|
||||
depend on gstreamer-player, you must ensure that in addition to the
|
||||
above packages, `libgstreamer-plugins-bad1.0-dev` is installed and
|
||||
that the version is >= 1.12. See the `Cargo.toml` files for the full
|
||||
details,
|
||||
depend on gstreamer-player, you must ensure that in addition to the above
|
||||
packages, `libgstreamer-plugins-bad1.0-dev` is installed. See the `Cargo.toml`
|
||||
files for the full details,
|
||||
|
||||
```
|
||||
# Only if you wish to install gstreamer-player, make sure the version
|
||||
# of this package is >= 1.12.
|
||||
```console
|
||||
$ apt-get install libgstreamer-plugins-bad1.0-dev
|
||||
```
|
||||
|
||||
|
@ -69,31 +66,41 @@ You can install GStreamer and the plugins via [Homebrew](https://brew.sh/) or
|
|||
by installing the [binaries](https://gstreamer.freedesktop.org/data/pkg/osx/)
|
||||
provided by the GStreamer project.
|
||||
|
||||
#### Homebrew
|
||||
|
||||
```
|
||||
$ brew install gstreamer gst-plugins-base gst-plugins-good \
|
||||
gst-plugins-bad gst-plugins-ugly gst-libav gst-rtsp-server \
|
||||
gst-editing-services
|
||||
```
|
||||
|
||||
If you wish to install the gstreamer-player sub-crate, make sure the
|
||||
version of these libraries is >= 1.12. Otherwise, a version >= 1.8 is
|
||||
sufficient.
|
||||
We recommend using the official GStreamer binaries over Homebrew, especially
|
||||
as GStreamer in Homebrew is [currently broken](https://github.com/orgs/Homebrew/discussions/3740#discussioncomment-3804964).
|
||||
|
||||
#### GStreamer Binaries
|
||||
|
||||
You need to download the *two* `.pkg` files from the GStreamer website and
|
||||
install them, e.g. `gstreamer-1.0-1.12.3-x86_64.pkg` and
|
||||
`gstreamer-1.0-devel-1.12.3-x86_64.pkg`.
|
||||
install them, e.g. `gstreamer-1.0-1.20.4-universal.pkg` and
|
||||
`gstreamer-1.0-devel-1.20.4-universal.pkg`.
|
||||
|
||||
After installation, you also need to install `pkg-config` (e.g. via Homebrew)
|
||||
and set the `PKG_CONFIG_PATH` environment variable
|
||||
After installation, you also need to set the `PATH` environment variable as
|
||||
follows
|
||||
|
||||
```console
|
||||
$ export PATH="/Library/Frameworks/GStreamer.framework/Versions/1.0/bin${PATH:+:$PATH}"
|
||||
```
|
||||
$ export PKG_CONFIG_PATH="/Library/Frameworks/GStreamer.framework/Versions/Current/lib/pkgconfig${PKG_CONFIG_PATH:+:$PKG_CONFIG_PATH}"
|
||||
|
||||
Also note that the `pkg-config` from GStreamer should be the first one in
|
||||
the `PATH` as other versions have all kinds of quirks that will cause
|
||||
problems.
|
||||
|
||||
#### Homebrew
|
||||
|
||||
Homebrew only installs various plugins if explicitly enabled, so some extra
|
||||
`--with-*` flags may be required.
|
||||
|
||||
```console
|
||||
$ brew install gstreamer gst-plugins-base gst-plugins-good \
|
||||
gst-plugins-bad gst-plugins-ugly gst-libav gst-rtsp-server \
|
||||
gst-editing-services --with-orc --with-libogg --with-opus \
|
||||
--with-pango --with-theora --with-libvorbis --with-libvpx \
|
||||
--enable-gtk3
|
||||
```
|
||||
|
||||
Make sure the version of these libraries is >= 1.14.
|
||||
|
||||
<a name="installation-windows"/>
|
||||
|
||||
### Windows
|
||||
|
@ -103,44 +110,55 @@ with `pacman` or by installing the
|
|||
[binaries](https://gstreamer.freedesktop.org/data/pkg/windows/) provided by
|
||||
the GStreamer project.
|
||||
|
||||
We recommend using the official GStreamer binaries over MSYS2.
|
||||
|
||||
#### GStreamer Binaries
|
||||
|
||||
You need to download the *two* `.msi` files for your platform from the
|
||||
GStreamer website and install them, e.g. `gstreamer-1.0-x86_64-1.20.4.msi` and
|
||||
`gstreamer-1.0-devel-x86_64-1.20.4.msi`. Make sure to select the version that
|
||||
matches your Rust toolchain, i.e. MinGW or MSVC.
|
||||
|
||||
After installation set the ``PATH` environment variable as follows:
|
||||
|
||||
```console
|
||||
# For a UNIX-style shell:
|
||||
$ export PATH="c:/gstreamer/1.0/msvc_x86_64/bin${PATH:+:$PATH}"
|
||||
|
||||
# For cmd.exe:
|
||||
$ set PATH=C:\gstreamer\1.0\msvc_x86_64\bin;%PATH%
|
||||
```
|
||||
|
||||
Make sure to update the path to where you have actually installed GStreamer
|
||||
and for the corresponding toolchain.
|
||||
|
||||
Also note that the `pkg-config.exe` from GStreamer should be the first one in
|
||||
the `PATH` as other versions have all kinds of quirks that will cause
|
||||
problems.
|
||||
|
||||
#### MSYS2 / pacman
|
||||
|
||||
```
|
||||
$ pacman -S pkg-config mingw-w64-x86_64-gstreamer mingw-w64-x86_64-gst-plugins-base \
|
||||
```console
|
||||
$ pacman -S glib2-devel pkg-config \
|
||||
mingw-w64-x86_64-gstreamer mingw-w64-x86_64-gst-plugins-base \
|
||||
mingw-w64-x86_64-gst-plugins-good mingw-w64-x86_64-gst-plugins-bad \
|
||||
mingw-w64-x86_64-gst-plugins-ugly mingw-w64-x86_64-gst-libav \
|
||||
mingw-w64-x86_64-gst-rtsp-server
|
||||
```
|
||||
|
||||
If you wish to install the gstreamer-player sub-crate, make sure the
|
||||
version of these libraries is >= 1.12. Otherwise, a version >= 1.8 is
|
||||
sufficient.
|
||||
Make sure the version of these libraries is >= 1.14.
|
||||
|
||||
Note that the version of `pkg-config` included in `MSYS2` is
|
||||
[known to have problems](https://github.com/rust-lang/pkg-config-rs/issues/51#issuecomment-346300858)
|
||||
compiling GStreamer, so you may need to install another version. One option
|
||||
would be [`pkg-config-lite`](https://sourceforge.net/projects/pkgconfiglite/).
|
||||
|
||||
#### GStreamer Binaries
|
||||
|
||||
You need to download the *two* `.msi` files for your platform from the
|
||||
GStreamer website and install them, e.g. `gstreamer-1.0-x86_64-1.12.3.msi` and
|
||||
`gstreamer-1.0-devel-x86_64-1.12.3.msi`.
|
||||
|
||||
After installation, you also need to install `pkg-config` (e.g. via MSYS2 or
|
||||
from [here](https://sourceforge.net/projects/pkgconfiglite/))
|
||||
and set the `PKG_CONFIG_PATH` environment variable
|
||||
|
||||
```
|
||||
$ export PKG_CONFIG_PATH="c:\\gstreamer\\1.0\\x86_64\\lib\\pkgconfig${PKG_CONFIG_PATH:+:$PKG_CONFIG_PATH}"
|
||||
```
|
||||
|
||||
<a name="getting-started"/>
|
||||
|
||||
## Getting Started
|
||||
|
||||
The API reference can be found
|
||||
[here](https://slomo.pages.freedesktop.org/rustdocs/gstreamer/gstreamer/), however it is
|
||||
[here](https://gstreamer.pages.freedesktop.org/gstreamer-rs/stable/latest/docs/gstreamer/), however it is
|
||||
only the Rust API reference and does not explain any of the concepts.
|
||||
|
||||
For getting started with GStreamer development, the best would be to follow
|
||||
|
@ -156,12 +174,12 @@ In addition there are
|
|||
[tutorials](https://gstreamer.freedesktop.org/documentation/tutorials/) on the
|
||||
GStreamer website. Many of them were ported to Rust already and the code can
|
||||
be found in the
|
||||
[tutorials](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/tree/master/tutorials)
|
||||
[tutorials](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/tree/main/tutorials)
|
||||
directory.
|
||||
|
||||
Some further examples for various aspects of GStreamer and how to use it from
|
||||
Rust can be found in the
|
||||
[examples](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/tree/master/examples)
|
||||
[examples](https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/tree/main/examples)
|
||||
directory.
|
||||
|
||||
Various GStreamer plugins written in Rust can be found in the
|
||||
|
|
42
ci/gir-checks.py
Normal file
42
ci/gir-checks.py
Normal file
|
@ -0,0 +1,42 @@
|
|||
from itertools import chain
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path as P
|
||||
from subprocess import check_call as exec
|
||||
|
||||
NATIVE_CRATES = ["gstreamer-utils"]
|
||||
|
||||
def git(*args):
|
||||
exec(["git"] + list(args))
|
||||
|
||||
def check_no_git_diff():
|
||||
git("diff", "--exit-code")
|
||||
|
||||
check_no_git_diff()
|
||||
git("clone", "--depth", "1", "https://github.com/gtk-rs/checker")
|
||||
check_no_git_diff()
|
||||
|
||||
rootdir = P(".")
|
||||
checker_dir = P("checker")
|
||||
with (checker_dir / "Cargo.toml").open("a") as f:
|
||||
f.write("[workspace]\n")
|
||||
|
||||
check_no_git_diff()
|
||||
exec(['cargo', 'build', '--locked', '--color=always', '--release'], cwd=checker_dir)
|
||||
check_no_git_diff()
|
||||
|
||||
exec('cargo run --color=always --release -- ../gstreamer* ../gstreamer-gl/{egl,wayland,x11}', cwd=checker_dir, shell=True)
|
||||
|
||||
gl_dir = rootdir / 'gstreamer-gl'
|
||||
for crate in chain(rootdir.glob('gstreamer*'), [gl_dir / 'egl', gl_dir / 'wayland', gl_dir / 'x11']):
|
||||
# Ignore "native" crates
|
||||
if crate.name in NATIVE_CRATES:
|
||||
continue
|
||||
|
||||
print(f'--> Checking doc aliases in {crate.absolute()}')
|
||||
exec(['python3', 'doc_aliases.py', crate.absolute()], cwd=checker_dir)
|
||||
|
||||
print(f'--> {crate.absolute()}')
|
||||
exec(['./checker/check_init_asserts', crate.absolute()])
|
||||
|
||||
check_no_git_diff()
|
|
@ -1,2 +1,7 @@
|
|||
variables:
|
||||
GST_RS_IMG_TAG: '2020-07-05.0'
|
||||
GST_RS_IMG_TAG: "2024-05-10.0"
|
||||
GST_RS_STABLE: "1.78.0"
|
||||
GST_RS_MSRV: "1.70.0"
|
||||
# The branch we use to build GStreamer from in the docker images
|
||||
# Ex. main, 1.24, my-test-branch
|
||||
GST_UPSTREAM_BRANCH: 'main'
|
||||
|
|
11
ci/install-dav1d.sh
Normal file
11
ci/install-dav1d.sh
Normal file
|
@ -0,0 +1,11 @@
|
|||
set -e
|
||||
|
||||
RELEASE=1.4.1
|
||||
|
||||
git clone https://code.videolan.org/videolan/dav1d.git --branch $RELEASE
|
||||
cd dav1d
|
||||
meson build -D prefix=/usr/local
|
||||
ninja -C build
|
||||
ninja -C build install
|
||||
cd ..
|
||||
rm -rf dav1d
|
|
@ -1,8 +1,49 @@
|
|||
pip3 install meson==0.54.3
|
||||
#! /bin/bash
|
||||
|
||||
git clone --depth 1 https://gitlab.freedesktop.org/gstreamer/gst-build.git --branch master
|
||||
cd gst-build
|
||||
set -e
|
||||
|
||||
meson build -D prefix=/usr/local -D devtools=disabled -D examples=disabled -D gtk_doc=disabled -D introspection=disabled -D libav=disabled -D libnice=disabled -D python=disabled -D ugly=disabled -D vaapi=disabled
|
||||
ninja -C build
|
||||
ninja -C build install
|
||||
DEFAULT_BRANCH="$GST_UPSTREAM_BRANCH"
|
||||
|
||||
pip3 install meson==1.1.1 --break-system-packages
|
||||
|
||||
# gstreamer-rs already has a 'gstreamer' directory so don't clone there
|
||||
pushd .
|
||||
cd ..
|
||||
git clone https://gitlab.freedesktop.org/gstreamer/gstreamer.git \
|
||||
--depth 1 \
|
||||
--branch "$DEFAULT_BRANCH"
|
||||
|
||||
cd gstreamer
|
||||
|
||||
# plugins required by tests
|
||||
PLUGINS="-D gst-plugins-base:ogg=enabled \
|
||||
-D gst-plugins-base:vorbis=enabled \
|
||||
-D gst-plugins-base:theora=enabled \
|
||||
-D gst-plugins-good:matroska=enabled \
|
||||
-D gst-plugins-good:vpx=enabled \
|
||||
-D gst-plugins-bad:opus=enabled \
|
||||
-D gst-plugins-ugly:x264=enabled"
|
||||
|
||||
echo "subproject('gtk')" >> meson.build
|
||||
meson setup build \
|
||||
-D prefix=/usr/local \
|
||||
-D gpl=enabled \
|
||||
-D ugly=enabled \
|
||||
-D examples=disabled \
|
||||
-D gtk_doc=disabled \
|
||||
-D introspection=disabled \
|
||||
-D libav=disabled \
|
||||
-D python=disabled \
|
||||
-D vaapi=disabled \
|
||||
$PLUGINS
|
||||
meson compile -C build
|
||||
meson install -C build
|
||||
ldconfig
|
||||
|
||||
cd ..
|
||||
rm -rf gstreamer/
|
||||
|
||||
# Check what plugins we installed
|
||||
gst-inspect-1.0
|
||||
|
||||
popd
|
||||
|
|
|
@ -1,8 +1,13 @@
|
|||
#! /bin/bash
|
||||
|
||||
source ./ci/env.sh
|
||||
|
||||
set -e
|
||||
export CARGO_HOME='/usr/local/cargo'
|
||||
|
||||
RUSTUP_VERSION=1.21.1
|
||||
RUSTUP_VERSION=1.27.1
|
||||
RUST_VERSION=$1
|
||||
RUST_IMAGE_FULL=$2
|
||||
RUST_ARCH="x86_64-unknown-linux-gnu"
|
||||
|
||||
RUSTUP_URL=https://static.rust-lang.org/rustup/archive/$RUSTUP_VERSION/$RUST_ARCH/rustup-init
|
||||
|
@ -17,9 +22,30 @@ rustup --version
|
|||
cargo --version
|
||||
rustc --version
|
||||
|
||||
if [ "$RUST_VERSION" = "stable" ]; then
|
||||
if [ "$RUST_IMAGE_FULL" = "1" ]; then
|
||||
rustup component add clippy-preview
|
||||
rustup component add rustfmt
|
||||
cargo install --force cargo-deny
|
||||
cargo install --force --git https://github.com/kbknapp/cargo-outdated
|
||||
|
||||
cargo install --locked --force cargo-deny
|
||||
cargo install --locked --force cargo-outdated
|
||||
cargo install --locked --force typos-cli --version "1.19.0"
|
||||
|
||||
# Coverage tools
|
||||
rustup component add llvm-tools-preview
|
||||
cargo install --locked --force grcov
|
||||
fi
|
||||
|
||||
if [ "$RUST_VERSION" = "nightly" ]; then
|
||||
# FIXME: Don't build cargo-c with --locked for now because otherwise a
|
||||
# version of ahash is used that doesn't build on nightly anymore
|
||||
cargo install cargo-c --version 0.9.22+cargo-0.72
|
||||
else
|
||||
cargo install --locked cargo-c --version 0.9.22+cargo-0.72
|
||||
fi
|
||||
|
||||
if [ "$RUST_VERSION" = "nightly" ]; then
|
||||
rustup component add rustfmt --toolchain nightly
|
||||
|
||||
# Documentation tools
|
||||
cargo install --locked --force rustdoc-stripper
|
||||
fi
|
||||
|
|
30
ci/run-cargo-test.sh
Executable file
30
ci/run-cargo-test.sh
Executable file
|
@ -0,0 +1,30 @@
|
|||
#! /bin/bash
|
||||
|
||||
set -ex
|
||||
|
||||
rustc --version
|
||||
cargo --version
|
||||
|
||||
for crate in gstreamer* gstreamer-gl/{egl,wayland,x11}; do
|
||||
if [ -e "$crate/Cargo.toml" ]; then
|
||||
if [ -n "$ALL_FEATURES" ]; then
|
||||
FEATURES="--all-features"
|
||||
else
|
||||
FEATURES=""
|
||||
fi
|
||||
|
||||
echo "Building and testing $crate with $FEATURES"
|
||||
|
||||
cargo build --locked --color=always --manifest-path "$crate/Cargo.toml" $FEATURES
|
||||
RUST_BACKTRACE=1 G_DEBUG=fatal_warnings cargo test --color=always --manifest-path "$crate/Cargo.toml" $FEATURES
|
||||
fi
|
||||
done
|
||||
|
||||
if [ -n "$EXAMPLES_TUTORIALS" ]; then
|
||||
# Keep in sync with examples/Cargo.toml
|
||||
# List all features except windows/win32
|
||||
EXAMPLES_FEATURES="--features=rtsp-server,rtsp-server-record,pango-cairo,overlay-composition,gl,gst-gl-x11,gst-gl-egl,allocators,gst-play,gst-player,ges,image,cairo-rs,gst-video/v1_18"
|
||||
|
||||
cargo build --locked --color=always --manifest-path examples/Cargo.toml --bins --examples "$EXAMPLES_FEATURES"
|
||||
cargo build --locked --color=always --manifest-path tutorials/Cargo.toml --bins --examples --all-features
|
||||
fi
|
38
ci/run-clippy.sh
Executable file
38
ci/run-clippy.sh
Executable file
|
@ -0,0 +1,38 @@
|
|||
#! /bin/bash
|
||||
|
||||
set -ex
|
||||
|
||||
rustc --version
|
||||
cargo --version
|
||||
cargo clippy --version
|
||||
|
||||
# Keep features in sync with run-cargo-test.sh
|
||||
get_features() {
|
||||
crate=$1
|
||||
case "$crate" in
|
||||
gstreamer-audio|gstreamer-editing-services|gstreamer-gl|gstreamer-pbutils|gstreamer-rtp|gstreamer-rtsp|gstreamer-video|gstreamer)
|
||||
echo "--features=serde,v1_26"
|
||||
;;
|
||||
*)
|
||||
echo "--features=v1_26"
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
for crate in gstreamer* gstreamer-gl/{egl,wayland,x11}; do
|
||||
if [ -e "$crate/Cargo.toml" ]; then
|
||||
FEATURES=$(get_features "$crate")
|
||||
|
||||
echo "Running clippy on $crate with $FEATURES"
|
||||
|
||||
cargo clippy --locked --color=always --manifest-path "$crate/Cargo.toml" $FEATURES --all-targets -- $CLIPPY_LINTS
|
||||
fi
|
||||
done
|
||||
|
||||
# Keep in sync with examples/Cargo.toml
|
||||
# List all features except windows/win32
|
||||
EXAMPLES_FEATURES="--features=rtsp-server,rtsp-server-record,pango-cairo,overlay-composition,gl,gst-gl-x11,gst-gl-egl,allocators,gst-play,gst-player,ges,image,cairo-rs,gst-video/v1_18"
|
||||
|
||||
# And also run over all the examples/tutorials
|
||||
cargo clippy --locked --color=always --manifest-path examples/Cargo.toml --all-targets "$EXAMPLES_FEATURES" -- $CLIPPY_LINTS
|
||||
cargo clippy --locked --color=always --manifest-path tutorials/Cargo.toml --all-targets --all-features -- $CLIPPY_LINTS
|
43
ci/run-sys-cargo-test.sh
Executable file
43
ci/run-sys-cargo-test.sh
Executable file
|
@ -0,0 +1,43 @@
|
|||
#! /bin/bash
|
||||
|
||||
set -ex
|
||||
|
||||
rustc --version
|
||||
cargo --version
|
||||
|
||||
for crate in gstreamer*/sys gstreamer-gl/*/sys; do
|
||||
if [ -e "$crate/Cargo.toml" ]; then
|
||||
echo "Building $crate with --all-features"
|
||||
cargo build --locked --color=always --manifest-path "$crate/Cargo.toml" --all-features
|
||||
fi
|
||||
done
|
||||
|
||||
for crate in gstreamer/sys \
|
||||
gstreamer-allocators/sys \
|
||||
gstreamer-analytics/sys \
|
||||
gstreamer-app/sys \
|
||||
gstreamer-audio/sys \
|
||||
gstreamer-base/sys \
|
||||
gstreamer-check/sys \
|
||||
gstreamer-controller/sys \
|
||||
gstreamer-editing-services/sys \
|
||||
gstreamer-gl/sys \
|
||||
gstreamer-gl/egl/sys \
|
||||
gstreamer-gl/wayland/sys \
|
||||
gstreamer-gl/x11/sys \
|
||||
gstreamer-mpegts/sys \
|
||||
gstreamer-net/sys \
|
||||
gstreamer-pbutils/sys \
|
||||
gstreamer-play/sys \
|
||||
gstreamer-player/sys \
|
||||
gstreamer-rtp/sys \
|
||||
gstreamer-rtsp-server/sys \
|
||||
gstreamer-rtsp/sys \
|
||||
gstreamer-sdp/sys \
|
||||
gstreamer-tag/sys \
|
||||
gstreamer-validate/sys \
|
||||
gstreamer-video/sys \
|
||||
gstreamer-webrtc/sys; do
|
||||
echo "Testing $crate with --all-features)"
|
||||
RUST_BACKTRACE=1 cargo test --locked --color=always --manifest-path $crate/Cargo.toml --all-features
|
||||
done
|
87
ci/run_windows_tests.ps1
Normal file
87
ci/run_windows_tests.ps1
Normal file
|
@ -0,0 +1,87 @@
|
|||
# List of all the crates we want to build
|
||||
# We need to do this manually to avoid trying
|
||||
# to build egl,wayland,x11 etc, which can't
|
||||
# work on windows
|
||||
[string[]] $crates = @(
|
||||
'gstreamer',
|
||||
# Unix specific atm
|
||||
# 'gstreamer-allocators'
|
||||
'gstreamer-app',
|
||||
'gstreamer-audio',
|
||||
'gstreamer-base',
|
||||
'gstreamer-check',
|
||||
'gstreamer-controller',
|
||||
'gstreamer-editing-services',
|
||||
'gstreamer-gl',
|
||||
# 'gstreamer-gl/egl',
|
||||
# 'gstreamer-gl/wayland',
|
||||
# 'gstreamer-gl/x11',
|
||||
'gstreamer-mpegts',
|
||||
'gstreamer-mpegts/sys',
|
||||
'gstreamer-net',
|
||||
'gstreamer-pbutils',
|
||||
'gstreamer-player',
|
||||
'gstreamer-rtp',
|
||||
'gstreamer-rtsp',
|
||||
'gstreamer-rtsp-server',
|
||||
'gstreamer-sdp',
|
||||
'gstreamer-tag',
|
||||
'gstreamer-tag/sys',
|
||||
'gstreamer-video',
|
||||
'gstreamer-webrtc',
|
||||
'tutorials',
|
||||
'examples'
|
||||
)
|
||||
|
||||
# "" is the default build, no flags appended
|
||||
[string[]] $features_matrix = @(
|
||||
# "--no-default-features",
|
||||
# "--features=v1_18",
|
||||
# "--features=v1_20",
|
||||
"",
|
||||
"--all-features"
|
||||
)
|
||||
|
||||
foreach($features in $features_matrix) {
|
||||
foreach($crate in $crates)
|
||||
{
|
||||
Write-Host "Building crate: $crate"
|
||||
Write-Host "Features: $features"
|
||||
$env:LocalFeatures = $features
|
||||
|
||||
# Don't append feature flags if the string is null/empty
|
||||
# Or when we want to build without default features
|
||||
if ($env:LocalFeatures -and ($env:LocalFeatures -ne '--no-default-features')) {
|
||||
if ($crate -eq 'examples') {
|
||||
# FIXME: We can do --all-features for examples once we have gtk3 installed in the image
|
||||
$env:LocalFeatures = "--features=rtsp-server,rtsp-server-record,pango-cairo,overlay-composition,gst-play,gst-player,ges,image,cairo-rs,gst-video/v1_18,windows"
|
||||
}
|
||||
|
||||
if ($crate -eq 'tutorials') {
|
||||
$env:LocalFeatures = ''
|
||||
}
|
||||
}
|
||||
|
||||
Write-Host "with features: $env:LocalFeatures"
|
||||
cargo build --color=always --manifest-path $crate/Cargo.toml --all-targets $env:LocalFeatures
|
||||
|
||||
if (!$?) {
|
||||
Write-Host "Failed to build crate: $crate"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
if (($crate -eq "gstreamer-tag/sys") -or ($crate -eq "gstreamer-mpegts/sys")) {
|
||||
Write-Host "Skipping tests for $crate"
|
||||
continue
|
||||
}
|
||||
|
||||
$env:G_DEBUG="fatal_warnings"
|
||||
$env:RUST_BACKTRACE="1"
|
||||
cargo test --no-fail-fast --color=always --manifest-path $crate/Cargo.toml $env:LocalFeatures
|
||||
|
||||
if (!$?) {
|
||||
Write-Host "Tests failed to for crate: $crate"
|
||||
Exit 1
|
||||
}
|
||||
}
|
||||
}
|
22
ci/windows-docker/Dockerfile
Normal file
22
ci/windows-docker/Dockerfile
Normal file
|
@ -0,0 +1,22 @@
|
|||
# escape=`
|
||||
|
||||
FROM "registry.freedesktop.org/gstreamer/gstreamer/amd64/windows:2023-07-17.0-main"
|
||||
|
||||
SHELL ["powershell", "-Command", "$ErrorActionPreference = 'Stop'; $ProgressPreference = 'SilentlyContinue';"]
|
||||
|
||||
ARG DEFAULT_BRANCH="1.24"
|
||||
ARG RUST_VERSION="invalid"
|
||||
|
||||
RUN choco install -y pkgconfiglite nasm llvm openssl
|
||||
|
||||
# https://stackoverflow.com/a/50716450
|
||||
RUN setx PATH '%PATH%;C:\Program Files\NASM;C:\gst-install\bin'
|
||||
ENV PKG_CONFIG_PATH="C:\gst-install\lib\pkgconfig"
|
||||
|
||||
COPY install_gst.ps1 install_dav1d.ps1 C:\
|
||||
RUN C:\install_gst.ps1
|
||||
RUN C:\install_dav1d.ps1
|
||||
|
||||
RUN Invoke-WebRequest -Uri https://win.rustup.rs/x86_64 -OutFile C:\rustup-init.exe
|
||||
RUN C:\rustup-init.exe -y --profile minimal --default-toolchain $env:RUST_VERSION
|
||||
RUN cargo install --locked cargo-c --version 0.9.22+cargo-0.72
|
60
ci/windows-docker/container.ps1
Normal file
60
ci/windows-docker/container.ps1
Normal file
|
@ -0,0 +1,60 @@
|
|||
# Copied from mesa, big kudos
|
||||
#
|
||||
# https://gitlab.freedesktop.org/mesa/mesa/-/blob/master/.gitlab-ci/windows/mesa_container.ps1
|
||||
# https://gitlab.freedesktop.org/mesa/mesa/-/blob/34e3e164936d1d3cef267da7780e87f062fedf39/.gitlab-ci/windows/mesa_container.ps1
|
||||
|
||||
# Implements the equivalent of ci-templates container-ifnot-exists, using
|
||||
# Docker directly as we don't have buildah/podman/skopeo available under
|
||||
# Windows, nor can we execute Docker-in-Docker
|
||||
$registry_uri = $args[0]
|
||||
$registry_username = $args[1]
|
||||
$registry_password = $args[2]
|
||||
$registry_user_image = $args[3]
|
||||
$registry_central_image = $args[4]
|
||||
$dockerfile = $args[5]
|
||||
|
||||
docker --config "windows-docker.conf" login -u "$registry_username" -p "$registry_password" "$registry_uri"
|
||||
if (!$?) {
|
||||
Write-Host "docker login failed to $registry_uri"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
# if the image already exists, don't rebuild it
|
||||
docker --config "windows-docker.conf" pull "$registry_user_image"
|
||||
if ($?) {
|
||||
Write-Host "User image $registry_user_image already exists; not rebuilding"
|
||||
docker --config "windows-docker.conf" logout "$registry_uri"
|
||||
Exit 0
|
||||
}
|
||||
|
||||
# if the image already exists upstream, copy it
|
||||
docker --config "windows-docker.conf" pull "$registry_central_image"
|
||||
if ($?) {
|
||||
Write-Host "Copying central image $registry_central_image to user image $registry_user_image"
|
||||
docker --config "windows-docker.conf" tag "$registry_central_image" "$registry_user_image"
|
||||
docker --config "windows-docker.conf" push "$registry_user_image"
|
||||
$pushstatus = $?
|
||||
docker --config "windows-docker.conf" logout "$registry_uri"
|
||||
if (!$pushstatus) {
|
||||
Write-Host "Pushing image to $registry_user_image failed"
|
||||
Exit 1
|
||||
}
|
||||
Exit 0
|
||||
}
|
||||
|
||||
Write-Host "No image found at $registry_user_image or $registry_central_image; rebuilding"
|
||||
docker --config "windows-docker.conf" build $DOCKER_BUILD_ARGS --no-cache -t "$registry_user_image" -f "$dockerfile" "./ci/windows-docker"
|
||||
if (!$?) {
|
||||
Write-Host "Container build failed"
|
||||
docker --config "windows-docker.conf" logout "$registry_uri"
|
||||
Exit 1
|
||||
}
|
||||
Get-Date
|
||||
|
||||
docker --config "windows-docker.conf" push "$registry_user_image"
|
||||
$pushstatus = $?
|
||||
docker --config "windows-docker.conf" logout "$registry_uri"
|
||||
if (!$pushstatus) {
|
||||
Write-Host "Pushing image to $registry_user_image failed"
|
||||
Exit 1
|
||||
}
|
28
ci/windows-docker/install_dav1d.ps1
Normal file
28
ci/windows-docker/install_dav1d.ps1
Normal file
|
@ -0,0 +1,28 @@
|
|||
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;
|
||||
|
||||
# Download gstreamer and all its subprojects
|
||||
git clone -b 1.4.1 --depth 1 https://code.videolan.org/videolan/dav1d.git C:\dav1d
|
||||
if (!$?) {
|
||||
Write-Host "Failed to clone dav1d"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
Set-Location C:\dav1d
|
||||
|
||||
# This is fine, we are not going to use the GtkMedia* apis
|
||||
$env:MESON_ARGS = "--prefix=C:\gst-install\"
|
||||
|
||||
Write-Output "Building dav1d"
|
||||
cmd.exe /C "C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=amd64 && meson _build $env:MESON_ARGS && meson compile -C _build && ninja -C _build install"
|
||||
|
||||
if (!$?) {
|
||||
Write-Host "Failed to build and install dav1d"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
cd C:\
|
||||
cmd /c rmdir /s /q C:\dav1d
|
||||
if (!$?) {
|
||||
Write-Host "Failed to remove dav1d checkout"
|
||||
Exit 1
|
||||
}
|
71
ci/windows-docker/install_gst.ps1
Normal file
71
ci/windows-docker/install_gst.ps1
Normal file
|
@ -0,0 +1,71 @@
|
|||
[Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12;
|
||||
|
||||
# Download gstreamer and all its subprojects
|
||||
git clone -b $env:DEFAULT_BRANCH --depth 1 https://gitlab.freedesktop.org/gstreamer/gstreamer.git C:\gstreamer
|
||||
if (!$?) {
|
||||
Write-Host "Failed to clone gstreamer"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
Set-Location C:\gstreamer
|
||||
|
||||
# Copy the cache we already have in the image to avoid massive redownloads
|
||||
Move-Item C:/subprojects/* C:\gstreamer\subprojects
|
||||
|
||||
# Update the subprojects cache
|
||||
Write-Output "Running meson subproject reset"
|
||||
meson subprojects update --reset
|
||||
if (!$?) {
|
||||
Write-Host "Failed to update gstreamer subprojects"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
$MESON_ARGS = @(`
|
||||
"--prefix=C:\gst-install", `
|
||||
"-Dglib:installed_tests=false", `
|
||||
"-Dlibnice:tests=disabled", `
|
||||
"-Dlibnice:examples=disabled", `
|
||||
"-Dffmpeg:tests=disabled", `
|
||||
"-Dopenh264:tests=disabled", `
|
||||
"-Dpygobject:tests=false", `
|
||||
"-Dgpl=enabled", `
|
||||
"-Dugly=enabled", `
|
||||
"-Dbad=enabled", `
|
||||
"-Dges=enabled", `
|
||||
"-Drtsp_server=enabled", `
|
||||
"-Ddevtools=enabled", `
|
||||
"-Dsharp=disabled", `
|
||||
"-Dpython=disabled", `
|
||||
"-Dlibav=disabled", `
|
||||
"-Dvaapi=disabled", `
|
||||
"-Dgst-plugins-base:pango=enabled", `
|
||||
"-Dgst-plugins-good:cairo=enabled", `
|
||||
"-Dgst-plugins-good:lame=disabled"
|
||||
)
|
||||
|
||||
$PSDefaultParameterValues['Out-File:Encoding'] = 'utf8'
|
||||
echo "subproject('gtk')" >> meson.build
|
||||
|
||||
Write-Output "Building gstreamer"
|
||||
meson setup --vsenv $MESON_ARGS _build
|
||||
if (!$?) {
|
||||
type "_build\meson-logs\meson-log.txt"
|
||||
Write-Host "Failed to run meson setup, see log above"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
Write-Output "Compiling gstreamer"
|
||||
meson compile -C _build
|
||||
if (!$?) {
|
||||
Write-Host "Failed to run meson compile"
|
||||
Exit 1
|
||||
}
|
||||
# meson install does a spurious rebuild sometimes that then fails
|
||||
meson install --no-rebuild -C _build
|
||||
if (!$?) {
|
||||
Write-Host "Failed to run meson install"
|
||||
Exit 1
|
||||
}
|
||||
|
||||
cd c:\
|
||||
Remove-Item -LiteralPath "C:\gstreamer" -Force -Recurse
|
58
deny.toml
58
deny.toml
|
@ -1,6 +1,11 @@
|
|||
exclude = [
|
||||
"examples",
|
||||
"tutorials",
|
||||
]
|
||||
|
||||
[advisories]
|
||||
db-path = "~/.cargo/advisory-db"
|
||||
db-url = "https://github.com/rustsec/advisory-db"
|
||||
db-urls = ["https://github.com/rustsec/advisory-db"]
|
||||
vulnerability = "deny"
|
||||
unmaintained = "warn"
|
||||
notice = "warn"
|
||||
|
@ -8,62 +13,25 @@ ignore = []
|
|||
|
||||
[licenses]
|
||||
unlicensed = "deny"
|
||||
allow = [
|
||||
"Apache-2.0",
|
||||
]
|
||||
deny = [
|
||||
"GPL-1.0",
|
||||
"GPL-2.0",
|
||||
"GPL-3.0",
|
||||
"AGPL-1.0",
|
||||
"AGPL-3.0",
|
||||
]
|
||||
default = "deny"
|
||||
copyleft = "deny"
|
||||
allow-osi-fsf-free = "either"
|
||||
confidence-threshold = 0.8
|
||||
|
||||
[[licenses.exceptions]]
|
||||
allow = ["LGPL-2.0"]
|
||||
name = "gstreamer-rs-lgpl-docs"
|
||||
|
||||
[bans]
|
||||
multiple-versions = "deny"
|
||||
wildcards = "allow"
|
||||
highlight = "all"
|
||||
|
||||
# ignore duplicated deps because of outdated glutin
|
||||
# https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/-/merge_requests/409
|
||||
# proc-macro-crate depends on an older version of toml_edit
|
||||
# https://github.com/bkchr/proc-macro-crate/pull/50
|
||||
[[bans.skip]]
|
||||
name = "unicode-xid"
|
||||
version = "0.1.0"
|
||||
[[bans.skip]]
|
||||
name = "rusttype"
|
||||
version = "0.7.9"
|
||||
[[bans.skip]]
|
||||
name = "quote"
|
||||
version = "0.6.13"
|
||||
[[bans.skip]]
|
||||
name = "proc-macro2"
|
||||
version = "0.4.30"
|
||||
[[bans.skip]]
|
||||
name = "gl_generator"
|
||||
version = "0.13.1"
|
||||
[[bans.skip]]
|
||||
name = "libloading"
|
||||
version = "0.5.2"
|
||||
name = "toml_edit"
|
||||
version = "0.21"
|
||||
|
||||
[sources]
|
||||
unknown-registry = "deny"
|
||||
unknown-git = "deny"
|
||||
allow-git = [
|
||||
"https://gitlab.freedesktop.org/gstreamer/gstreamer-rs-sys",
|
||||
"https://github.com/gtk-rs/sys",
|
||||
"https://github.com/gtk-rs/glib",
|
||||
"https://github.com/gtk-rs/gio",
|
||||
"https://github.com/gtk-rs/cairo",
|
||||
"https://github.com/gtk-rs/pango",
|
||||
"https://github.com/gtk-rs/pangocairo",
|
||||
"https://github.com/gtk-rs/atk",
|
||||
"https://github.com/gtk-rs/gdk-pixbuf",
|
||||
"https://github.com/gtk-rs/gdk",
|
||||
"https://github.com/gtk-rs/gtk",
|
||||
"https://github.com/gtk-rs/gtk-rs-core",
|
||||
]
|
||||
|
|
|
@ -1,14 +0,0 @@
|
|||
[package]
|
||||
name = "gstreamer-rs-lgpl-docs"
|
||||
version = "0.16.0"
|
||||
authors = ["Sebastian Dröge <sebastian@centricular.com>"]
|
||||
license = "LGPL-2.0"
|
||||
description = "LGPL-licensed docs for gstreamer-rs crates"
|
||||
repository = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs"
|
||||
homepage = "https://gstreamer.freedesktop.org"
|
||||
|
||||
[lib]
|
||||
name = "lgpl_docs"
|
||||
|
||||
[dependencies]
|
||||
rustdoc-stripper = "0.1.6"
|
|
@ -1,688 +0,0 @@
|
|||
<!-- file * -->
|
||||
<!-- struct AppSink -->
|
||||
Appsink is a sink plugin that supports many different methods for making
|
||||
the application get a handle on the GStreamer data in a pipeline. Unlike
|
||||
most GStreamer elements, Appsink provides external API functions.
|
||||
|
||||
appsink can be used by linking to the gstappsink.h header file to access the
|
||||
methods or by using the appsink action signals and properties.
|
||||
|
||||
The normal way of retrieving samples from appsink is by using the
|
||||
`AppSink::pull_sample` and `AppSink::pull_preroll` methods.
|
||||
These methods block until a sample becomes available in the sink or when the
|
||||
sink is shut down or reaches EOS. There are also timed variants of these
|
||||
methods, `AppSink::try_pull_sample` and `AppSink::try_pull_preroll`,
|
||||
which accept a timeout parameter to limit the amount of time to wait.
|
||||
|
||||
Appsink will internally use a queue to collect buffers from the streaming
|
||||
thread. If the application is not pulling samples fast enough, this queue
|
||||
will consume a lot of memory over time. The "max-buffers" property can be
|
||||
used to limit the queue size. The "drop" property controls whether the
|
||||
streaming thread blocks or if older buffers are dropped when the maximum
|
||||
queue size is reached. Note that blocking the streaming thread can negatively
|
||||
affect real-time performance and should be avoided.
|
||||
|
||||
If a blocking behaviour is not desirable, setting the "emit-signals" property
|
||||
to `true` will make appsink emit the "new-sample" and "new-preroll" signals
|
||||
when a sample can be pulled without blocking.
|
||||
|
||||
The "caps" property on appsink can be used to control the formats that
|
||||
appsink can receive. This property can contain non-fixed caps, the format of
|
||||
the pulled samples can be obtained by getting the sample caps.
|
||||
|
||||
If one of the pull-preroll or pull-sample methods return `None`, the appsink
|
||||
is stopped or in the EOS state. You can check for the EOS state with the
|
||||
"eos" property or with the `AppSink::is_eos` method.
|
||||
|
||||
The eos signal can also be used to be informed when the EOS state is reached
|
||||
to avoid polling.
|
||||
|
||||
# Implements
|
||||
|
||||
[`gst_base::BaseSinkExt`](../gst_base/trait.BaseSinkExt.html), [`gst::ElementExt`](../gst/trait.ElementExt.html), [`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html), [`gst::URIHandlerExt`](../gst/trait.URIHandlerExt.html)
|
||||
<!-- impl AppSink::fn get_buffer_list_support -->
|
||||
Check if `self` supports buffer lists.
|
||||
|
||||
Feature: `v1_12`
|
||||
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if `self` supports buffer lists.
|
||||
<!-- impl AppSink::fn get_caps -->
|
||||
Get the configured caps on `self`.
|
||||
|
||||
# Returns
|
||||
|
||||
the `gst::Caps` accepted by the sink. `gst::Caps::unref` after usage.
|
||||
<!-- impl AppSink::fn get_drop -->
|
||||
Check if `self` will drop old buffers when the maximum amount of queued
|
||||
buffers is reached.
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if `self` is dropping old buffers when the queue is
|
||||
filled.
|
||||
<!-- impl AppSink::fn get_emit_signals -->
|
||||
Check if appsink will emit the "new-preroll" and "new-sample" signals.
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if `self` is emitting the "new-preroll" and "new-sample"
|
||||
signals.
|
||||
<!-- impl AppSink::fn get_max_buffers -->
|
||||
Get the maximum amount of buffers that can be queued in `self`.
|
||||
|
||||
# Returns
|
||||
|
||||
The maximum amount of buffers that can be queued.
|
||||
<!-- impl AppSink::fn get_wait_on_eos -->
|
||||
Check if `self` will wait for all buffers to be consumed when an EOS is
|
||||
received.
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if `self` will wait for all buffers to be consumed when an
|
||||
EOS is received.
|
||||
<!-- impl AppSink::fn is_eos -->
|
||||
Check if `self` is EOS, which is when no more samples can be pulled because
|
||||
an EOS event was received.
|
||||
|
||||
This function also returns `true` when the appsink is not in the PAUSED or
|
||||
PLAYING state.
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if no more samples can be pulled and the appsink is EOS.
|
||||
<!-- impl AppSink::fn pull_preroll -->
|
||||
Get the last preroll sample in `self`. This was the sample that caused the
|
||||
appsink to preroll in the PAUSED state.
|
||||
|
||||
This function is typically used when dealing with a pipeline in the PAUSED
|
||||
state. Calling this function after doing a seek will give the sample right
|
||||
after the seek position.
|
||||
|
||||
Calling this function will clear the internal reference to the preroll
|
||||
buffer.
|
||||
|
||||
Note that the preroll sample will also be returned as the first sample
|
||||
when calling `AppSink::pull_sample`.
|
||||
|
||||
If an EOS event was received before any buffers, this function returns
|
||||
`None`. Use gst_app_sink_is_eos () to check for the EOS condition.
|
||||
|
||||
This function blocks until a preroll sample or EOS is received or the appsink
|
||||
element is set to the READY/NULL state.
|
||||
|
||||
# Returns
|
||||
|
||||
a `gst::Sample` or NULL when the appsink is stopped or EOS.
|
||||
Call `gst::Sample::unref` after usage.
|
||||
<!-- impl AppSink::fn pull_sample -->
|
||||
This function blocks until a sample or EOS becomes available or the appsink
|
||||
element is set to the READY/NULL state.
|
||||
|
||||
This function will only return samples when the appsink is in the PLAYING
|
||||
state. All rendered buffers will be put in a queue so that the application
|
||||
can pull samples at its own rate. Note that when the application does not
|
||||
pull samples fast enough, the queued buffers could consume a lot of memory,
|
||||
especially when dealing with raw video frames.
|
||||
|
||||
If an EOS event was received before any buffers, this function returns
|
||||
`None`. Use gst_app_sink_is_eos () to check for the EOS condition.
|
||||
|
||||
# Returns
|
||||
|
||||
a `gst::Sample` or NULL when the appsink is stopped or EOS.
|
||||
Call `gst::Sample::unref` after usage.
|
||||
<!-- impl AppSink::fn set_buffer_list_support -->
|
||||
Instruct `self` to enable or disable buffer list support.
|
||||
|
||||
For backwards-compatibility reasons applications need to opt in
|
||||
to indicate that they will be able to handle buffer lists.
|
||||
|
||||
Feature: `v1_12`
|
||||
|
||||
## `enable_lists`
|
||||
enable or disable buffer list support
|
||||
<!-- impl AppSink::fn set_callbacks -->
|
||||
Set callbacks which will be executed for each new preroll, new sample and eos.
|
||||
This is an alternative to using the signals, it has lower overhead and is thus
|
||||
less expensive, but also less flexible.
|
||||
|
||||
If callbacks are installed, no signals will be emitted for performance
|
||||
reasons.
|
||||
|
||||
Before 1.16.3 it was not possible to change the callbacks in a thread-safe
|
||||
way.
|
||||
## `callbacks`
|
||||
the callbacks
|
||||
## `user_data`
|
||||
a user_data argument for the callbacks
|
||||
## `notify`
|
||||
a destroy notify function
|
||||
<!-- impl AppSink::fn set_caps -->
|
||||
Set the capabilities on the appsink element. This function takes
|
||||
a copy of the caps structure. After calling this method, the sink will only
|
||||
accept caps that match `caps`. If `caps` is non-fixed, or incomplete,
|
||||
you must check the caps on the samples to get the actual used caps.
|
||||
## `caps`
|
||||
caps to set
|
||||
<!-- impl AppSink::fn set_drop -->
|
||||
Instruct `self` to drop old buffers when the maximum amount of queued
|
||||
buffers is reached.
|
||||
## `drop`
|
||||
the new state
|
||||
<!-- impl AppSink::fn set_emit_signals -->
|
||||
Make appsink emit the "new-preroll" and "new-sample" signals. This option is
|
||||
by default disabled because signal emission is expensive and unneeded when
|
||||
the application prefers to operate in pull mode.
|
||||
## `emit`
|
||||
the new state
|
||||
<!-- impl AppSink::fn set_max_buffers -->
|
||||
Set the maximum amount of buffers that can be queued in `self`. After this
|
||||
amount of buffers are queued in appsink, any more buffers will block upstream
|
||||
elements until a sample is pulled from `self`.
|
||||
## `max`
|
||||
the maximum number of buffers to queue
|
||||
<!-- impl AppSink::fn set_wait_on_eos -->
|
||||
Instruct `self` to wait for all buffers to be consumed when an EOS is received.
|
||||
## `wait`
|
||||
the new state
|
||||
<!-- impl AppSink::fn try_pull_preroll -->
|
||||
Get the last preroll sample in `self`. This was the sample that caused the
|
||||
appsink to preroll in the PAUSED state.
|
||||
|
||||
This function is typically used when dealing with a pipeline in the PAUSED
|
||||
state. Calling this function after doing a seek will give the sample right
|
||||
after the seek position.
|
||||
|
||||
Calling this function will clear the internal reference to the preroll
|
||||
buffer.
|
||||
|
||||
Note that the preroll sample will also be returned as the first sample
|
||||
when calling `AppSink::pull_sample`.
|
||||
|
||||
If an EOS event was received before any buffers or the timeout expires,
|
||||
this function returns `None`. Use gst_app_sink_is_eos () to check for the EOS
|
||||
condition.
|
||||
|
||||
This function blocks until a preroll sample or EOS is received, the appsink
|
||||
element is set to the READY/NULL state, or the timeout expires.
|
||||
|
||||
Feature: `v1_10`
|
||||
|
||||
## `timeout`
|
||||
the maximum amount of time to wait for the preroll sample
|
||||
|
||||
# Returns
|
||||
|
||||
a `gst::Sample` or NULL when the appsink is stopped or EOS or the timeout expires.
|
||||
Call `gst::Sample::unref` after usage.
|
||||
<!-- impl AppSink::fn try_pull_sample -->
|
||||
This function blocks until a sample or EOS becomes available or the appsink
|
||||
element is set to the READY/NULL state or the timeout expires.
|
||||
|
||||
This function will only return samples when the appsink is in the PLAYING
|
||||
state. All rendered buffers will be put in a queue so that the application
|
||||
can pull samples at its own rate. Note that when the application does not
|
||||
pull samples fast enough, the queued buffers could consume a lot of memory,
|
||||
especially when dealing with raw video frames.
|
||||
|
||||
If an EOS event was received before any buffers or the timeout expires,
|
||||
this function returns `None`. Use gst_app_sink_is_eos () to check for the EOS
|
||||
condition.
|
||||
|
||||
Feature: `v1_10`
|
||||
|
||||
## `timeout`
|
||||
the maximum amount of time to wait for a sample
|
||||
|
||||
# Returns
|
||||
|
||||
a `gst::Sample` or NULL when the appsink is stopped or EOS or the timeout expires.
|
||||
Call `gst::Sample::unref` after usage.
|
||||
<!-- impl AppSink::fn connect_eos -->
|
||||
Signal that the end-of-stream has been reached. This signal is emitted from
|
||||
the streaming thread.
|
||||
<!-- impl AppSink::fn connect_new_preroll -->
|
||||
Signal that a new preroll sample is available.
|
||||
|
||||
This signal is emitted from the streaming thread and only when the
|
||||
"emit-signals" property is `true`.
|
||||
|
||||
The new preroll sample can be retrieved with the "pull-preroll" action
|
||||
signal or `AppSink::pull_preroll` either from this signal callback
|
||||
or from any other thread.
|
||||
|
||||
Note that this signal is only emitted when the "emit-signals" property is
|
||||
set to `true`, which it is not by default for performance reasons.
|
||||
<!-- impl AppSink::fn connect_new_sample -->
|
||||
Signal that a new sample is available.
|
||||
|
||||
This signal is emitted from the streaming thread and only when the
|
||||
"emit-signals" property is `true`.
|
||||
|
||||
The new sample can be retrieved with the "pull-sample" action
|
||||
signal or `AppSink::pull_sample` either from this signal callback
|
||||
or from any other thread.
|
||||
|
||||
Note that this signal is only emitted when the "emit-signals" property is
|
||||
set to `true`, which it is not by default for performance reasons.
|
||||
<!-- impl AppSink::fn connect_pull_preroll -->
|
||||
Get the last preroll sample in `appsink`. This was the sample that caused the
|
||||
appsink to preroll in the PAUSED state.
|
||||
|
||||
This function is typically used when dealing with a pipeline in the PAUSED
|
||||
state. Calling this function after doing a seek will give the sample right
|
||||
after the seek position.
|
||||
|
||||
Calling this function will clear the internal reference to the preroll
|
||||
buffer.
|
||||
|
||||
Note that the preroll sample will also be returned as the first sample
|
||||
when calling `AppSink::pull_sample` or the "pull-sample" action signal.
|
||||
|
||||
If an EOS event was received before any buffers, this function returns
|
||||
`None`. Use gst_app_sink_is_eos () to check for the EOS condition.
|
||||
|
||||
This function blocks until a preroll sample or EOS is received or the appsink
|
||||
element is set to the READY/NULL state.
|
||||
|
||||
# Returns
|
||||
|
||||
a `gst::Sample` or NULL when the appsink is stopped or EOS.
|
||||
<!-- impl AppSink::fn connect_pull_sample -->
|
||||
This function blocks until a sample or EOS becomes available or the appsink
|
||||
element is set to the READY/NULL state.
|
||||
|
||||
This function will only return samples when the appsink is in the PLAYING
|
||||
state. All rendered samples will be put in a queue so that the application
|
||||
can pull samples at its own rate.
|
||||
|
||||
Note that when the application does not pull samples fast enough, the
|
||||
queued samples could consume a lot of memory, especially when dealing with
|
||||
raw video frames. It's possible to control the behaviour of the queue with
|
||||
the "drop" and "max-buffers" properties.
|
||||
|
||||
If an EOS event was received before any buffers, this function returns
|
||||
`None`. Use gst_app_sink_is_eos () to check for the EOS condition.
|
||||
|
||||
# Returns
|
||||
|
||||
a `gst::Sample` or NULL when the appsink is stopped or EOS.
|
||||
<!-- impl AppSink::fn connect_try_pull_preroll -->
|
||||
Get the last preroll sample in `appsink`. This was the sample that caused the
|
||||
appsink to preroll in the PAUSED state.
|
||||
|
||||
This function is typically used when dealing with a pipeline in the PAUSED
|
||||
state. Calling this function after doing a seek will give the sample right
|
||||
after the seek position.
|
||||
|
||||
Calling this function will clear the internal reference to the preroll
|
||||
buffer.
|
||||
|
||||
Note that the preroll sample will also be returned as the first sample
|
||||
when calling `AppSink::pull_sample` or the "pull-sample" action signal.
|
||||
|
||||
If an EOS event was received before any buffers or the timeout expires,
|
||||
this function returns `None`. Use gst_app_sink_is_eos () to check for the EOS
|
||||
condition.
|
||||
|
||||
This function blocks until a preroll sample or EOS is received, the appsink
|
||||
element is set to the READY/NULL state, or the timeout expires.
|
||||
|
||||
Feature: `v1_10`
|
||||
|
||||
## `timeout`
|
||||
the maximum amount of time to wait for the preroll sample
|
||||
|
||||
# Returns
|
||||
|
||||
a `gst::Sample` or NULL when the appsink is stopped or EOS or the timeout expires.
|
||||
<!-- impl AppSink::fn connect_try_pull_sample -->
|
||||
This function blocks until a sample or EOS becomes available or the appsink
|
||||
element is set to the READY/NULL state or the timeout expires.
|
||||
|
||||
This function will only return samples when the appsink is in the PLAYING
|
||||
state. All rendered samples will be put in a queue so that the application
|
||||
can pull samples at its own rate.
|
||||
|
||||
Note that when the application does not pull samples fast enough, the
|
||||
queued samples could consume a lot of memory, especially when dealing with
|
||||
raw video frames. It's possible to control the behaviour of the queue with
|
||||
the "drop" and "max-buffers" properties.
|
||||
|
||||
If an EOS event was received before any buffers or the timeout expires,
|
||||
this function returns `None`. Use gst_app_sink_is_eos () to check
|
||||
for the EOS condition.
|
||||
|
||||
Feature: `v1_10`
|
||||
|
||||
## `timeout`
|
||||
the maximum amount of time to wait for a sample
|
||||
|
||||
# Returns
|
||||
|
||||
a `gst::Sample` or NULL when the appsink is stopped or EOS or the timeout expires.
|
||||
<!-- struct AppSrc -->
|
||||
The appsrc element can be used by applications to insert data into a
|
||||
GStreamer pipeline. Unlike most GStreamer elements, appsrc provides
|
||||
external API functions.
|
||||
|
||||
appsrc can be used by linking with the libgstapp library to access the
|
||||
methods directly or by using the appsrc action signals.
|
||||
|
||||
Before operating appsrc, the caps property must be set to fixed caps
|
||||
describing the format of the data that will be pushed with appsrc. An
|
||||
exception to this is when pushing buffers with unknown caps, in which case no
|
||||
caps should be set. This is typically true of file-like sources that push raw
|
||||
byte buffers. If you don't want to explicitly set the caps, you can use
|
||||
gst_app_src_push_sample. This method gets the caps associated with the
|
||||
sample and sets them on the appsrc replacing any previously set caps (if
|
||||
different from sample's caps).
|
||||
|
||||
The main way of handing data to the appsrc element is by calling the
|
||||
`AppSrc::push_buffer` method or by emitting the push-buffer action signal.
|
||||
This will put the buffer onto a queue from which appsrc will read from in its
|
||||
streaming thread. It is important to note that data transport will not happen
|
||||
from the thread that performed the push-buffer call.
|
||||
|
||||
The "max-bytes" property controls how much data can be queued in appsrc
|
||||
before appsrc considers the queue full. A filled internal queue will always
|
||||
signal the "enough-data" signal, which signals the application that it should
|
||||
stop pushing data into appsrc. The "block" property will cause appsrc to
|
||||
block the push-buffer method until free data becomes available again.
|
||||
|
||||
When the internal queue is running out of data, the "need-data" signal is
|
||||
emitted, which signals the application that it should start pushing more data
|
||||
into appsrc.
|
||||
|
||||
In addition to the "need-data" and "enough-data" signals, appsrc can emit the
|
||||
"seek-data" signal when the "stream-mode" property is set to "seekable" or
|
||||
"random-access". The signal argument will contain the new desired position in
|
||||
the stream expressed in the unit set with the "format" property. After
|
||||
receiving the seek-data signal, the application should push-buffers from the
|
||||
new position.
|
||||
|
||||
These signals allow the application to operate the appsrc in two different
|
||||
ways:
|
||||
|
||||
The push mode, in which the application repeatedly calls the push-buffer/push-sample
|
||||
method with a new buffer/sample. Optionally, the queue size in the appsrc
|
||||
can be controlled with the enough-data and need-data signals by respectively
|
||||
stopping/starting the push-buffer/push-sample calls. This is a typical
|
||||
mode of operation for the stream-type "stream" and "seekable". Use this
|
||||
mode when implementing various network protocols or hardware devices.
|
||||
|
||||
The pull mode, in which the need-data signal triggers the next push-buffer call.
|
||||
This mode is typically used in the "random-access" stream-type. Use this
|
||||
mode for file access or other randomly accessible sources. In this mode, a
|
||||
buffer of exactly the amount of bytes given by the need-data signal should be
|
||||
pushed into appsrc.
|
||||
|
||||
In all modes, the size property on appsrc should contain the total stream
|
||||
size in bytes. Setting this property is mandatory in the random-access mode.
|
||||
For the stream and seekable modes, setting this property is optional but
|
||||
recommended.
|
||||
|
||||
When the application has finished pushing data into appsrc, it should call
|
||||
`AppSrc::end_of_stream` or emit the end-of-stream action signal. After
|
||||
this call, no more buffers can be pushed into appsrc until a flushing seek
|
||||
occurs or the state of the appsrc has gone through READY.
|
||||
|
||||
# Implements
|
||||
|
||||
[`gst_base::BaseSrcExt`](../gst_base/trait.BaseSrcExt.html), [`gst::ElementExt`](../gst/trait.ElementExt.html), [`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html), [`gst::URIHandlerExt`](../gst/trait.URIHandlerExt.html)
|
||||
<!-- impl AppSrc::fn end_of_stream -->
|
||||
Indicates to the appsrc element that the last buffer queued in the
|
||||
element is the last buffer of the stream.
|
||||
|
||||
# Returns
|
||||
|
||||
`gst::FlowReturn::Ok` when the EOS was successfully queued.
|
||||
`gst::FlowReturn::Flushing` when `self` is not PAUSED or PLAYING.
|
||||
<!-- impl AppSrc::fn get_caps -->
|
||||
Get the configured caps on `self`.
|
||||
|
||||
# Returns
|
||||
|
||||
the `gst::Caps` produced by the source. `gst::Caps::unref` after usage.
|
||||
<!-- impl AppSrc::fn get_current_level_bytes -->
|
||||
Get the number of currently queued bytes inside `self`.
|
||||
|
||||
# Returns
|
||||
|
||||
The number of currently queued bytes.
|
||||
<!-- impl AppSrc::fn get_duration -->
|
||||
Get the duration of the stream in nanoseconds. A value of GST_CLOCK_TIME_NONE means that the duration is
|
||||
not known.
|
||||
|
||||
Feature: `v1_10`
|
||||
|
||||
|
||||
# Returns
|
||||
|
||||
the duration of the stream previously set with `AppSrc::set_duration`;
|
||||
<!-- impl AppSrc::fn get_emit_signals -->
|
||||
Check if appsrc will emit the "new-preroll" and "new-buffer" signals.
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if `self` is emitting the "new-preroll" and "new-buffer"
|
||||
signals.
|
||||
<!-- impl AppSrc::fn get_latency -->
|
||||
Retrieve the min and max latencies in `min` and `max` respectively.
|
||||
## `min`
|
||||
the min latency
|
||||
## `max`
|
||||
the max latency
|
||||
<!-- impl AppSrc::fn get_max_bytes -->
|
||||
Get the maximum amount of bytes that can be queued in `self`.
|
||||
|
||||
# Returns
|
||||
|
||||
The maximum amount of bytes that can be queued.
|
||||
<!-- impl AppSrc::fn get_size -->
|
||||
Get the size of the stream in bytes. A value of -1 means that the size is
|
||||
not known.
|
||||
|
||||
# Returns
|
||||
|
||||
the size of the stream previously set with `AppSrc::set_size`;
|
||||
<!-- impl AppSrc::fn get_stream_type -->
|
||||
Get the stream type. Control the stream type of `self`
|
||||
with `AppSrc::set_stream_type`.
|
||||
|
||||
# Returns
|
||||
|
||||
the stream type.
|
||||
<!-- impl AppSrc::fn push_buffer -->
|
||||
Adds a buffer to the queue of buffers that the appsrc element will
|
||||
push to its source pad. This function takes ownership of the buffer.
|
||||
|
||||
When the block property is TRUE, this function can block until free
|
||||
space becomes available in the queue.
|
||||
## `buffer`
|
||||
a `gst::Buffer` to push
|
||||
|
||||
# Returns
|
||||
|
||||
`gst::FlowReturn::Ok` when the buffer was successfully queued.
|
||||
`gst::FlowReturn::Flushing` when `self` is not PAUSED or PLAYING.
|
||||
`gst::FlowReturn::Eos` when EOS occurred.
|
||||
<!-- impl AppSrc::fn push_buffer_list -->
|
||||
Adds a buffer list to the queue of buffers and buffer lists that the
|
||||
appsrc element will push to its source pad. This function takes ownership
|
||||
of `buffer_list`.
|
||||
|
||||
When the block property is TRUE, this function can block until free
|
||||
space becomes available in the queue.
|
||||
|
||||
Feature: `v1_14`
|
||||
|
||||
## `buffer_list`
|
||||
a `gst::BufferList` to push
|
||||
|
||||
# Returns
|
||||
|
||||
`gst::FlowReturn::Ok` when the buffer list was successfully queued.
|
||||
`gst::FlowReturn::Flushing` when `self` is not PAUSED or PLAYING.
|
||||
`gst::FlowReturn::Eos` when EOS occurred.
|
||||
<!-- impl AppSrc::fn push_sample -->
|
||||
Extract a buffer from the provided sample and adds it to the queue of
|
||||
buffers that the appsrc element will push to its source pad. Any
|
||||
previous caps that were set on appsrc will be replaced by the caps
|
||||
associated with the sample if not equal.
|
||||
|
||||
This function does not take ownership of the
|
||||
sample so the sample needs to be unreffed after calling this function.
|
||||
|
||||
When the block property is TRUE, this function can block until free
|
||||
space becomes available in the queue.
|
||||
## `sample`
|
||||
a `gst::Sample` from which buffer and caps may be
|
||||
extracted
|
||||
|
||||
# Returns
|
||||
|
||||
`gst::FlowReturn::Ok` when the buffer was successfully queued.
|
||||
`gst::FlowReturn::Flushing` when `self` is not PAUSED or PLAYING.
|
||||
`gst::FlowReturn::Eos` when EOS occurred.
|
||||
<!-- impl AppSrc::fn set_callbacks -->
|
||||
Set callbacks which will be executed when data is needed, enough data has
|
||||
been collected or when a seek should be performed.
|
||||
This is an alternative to using the signals, it has lower overhead and is thus
|
||||
less expensive, but also less flexible.
|
||||
|
||||
If callbacks are installed, no signals will be emitted for performance
|
||||
reasons.
|
||||
|
||||
Before 1.16.3 it was not possible to change the callbacks in a thread-safe
|
||||
way.
|
||||
## `callbacks`
|
||||
the callbacks
|
||||
## `user_data`
|
||||
a user_data argument for the callbacks
|
||||
## `notify`
|
||||
a destroy notify function
|
||||
<!-- impl AppSrc::fn set_caps -->
|
||||
Set the capabilities on the appsrc element. This function takes
|
||||
a copy of the caps structure. After calling this method, the source will
|
||||
only produce caps that match `caps`. `caps` must be fixed and the caps on the
|
||||
buffers must match the caps or left NULL.
|
||||
## `caps`
|
||||
caps to set
|
||||
<!-- impl AppSrc::fn set_duration -->
|
||||
Set the duration of the stream in nanoseconds. A value of GST_CLOCK_TIME_NONE means that the duration is
|
||||
not known.
|
||||
|
||||
Feature: `v1_10`
|
||||
|
||||
## `duration`
|
||||
the duration to set
|
||||
<!-- impl AppSrc::fn set_emit_signals -->
|
||||
Make appsrc emit the "new-preroll" and "new-buffer" signals. This option is
|
||||
by default disabled because signal emission is expensive and unneeded when
|
||||
the application prefers to operate in pull mode.
|
||||
## `emit`
|
||||
the new state
|
||||
<!-- impl AppSrc::fn set_latency -->
|
||||
Configure the `min` and `max` latency in `src`. If `min` is set to -1, the
|
||||
default latency calculations for pseudo-live sources will be used.
|
||||
## `min`
|
||||
the min latency
|
||||
## `max`
|
||||
the max latency
|
||||
<!-- impl AppSrc::fn set_max_bytes -->
|
||||
Set the maximum amount of bytes that can be queued in `self`.
|
||||
After the maximum amount of bytes are queued, `self` will emit the
|
||||
"enough-data" signal.
|
||||
## `max`
|
||||
the maximum number of bytes to queue
|
||||
<!-- impl AppSrc::fn set_size -->
|
||||
Set the size of the stream in bytes. A value of -1 means that the size is
|
||||
not known.
|
||||
## `size`
|
||||
the size to set
|
||||
<!-- impl AppSrc::fn set_stream_type -->
|
||||
Set the stream type on `self`. For seekable streams, the "seek" signal must
|
||||
be connected to.
|
||||
|
||||
A stream_type stream
|
||||
## `type_`
|
||||
the new state
|
||||
<!-- impl AppSrc::fn connect_end_of_stream -->
|
||||
Notify `appsrc` that no more buffer are available.
|
||||
<!-- impl AppSrc::fn connect_enough_data -->
|
||||
Signal that the source has enough data. It is recommended that the
|
||||
application stops calling push-buffer until the need-data signal is
|
||||
emitted again to avoid excessive buffer queueing.
|
||||
<!-- impl AppSrc::fn connect_need_data -->
|
||||
Signal that the source needs more data. In the callback or from another
|
||||
thread you should call push-buffer or end-of-stream.
|
||||
|
||||
`length` is just a hint and when it is set to -1, any number of bytes can be
|
||||
pushed into `appsrc`.
|
||||
|
||||
You can call push-buffer multiple times until the enough-data signal is
|
||||
fired.
|
||||
## `length`
|
||||
the amount of bytes needed.
|
||||
<!-- impl AppSrc::fn connect_push_buffer -->
|
||||
Adds a buffer to the queue of buffers that the appsrc element will
|
||||
push to its source pad. This function does not take ownership of the
|
||||
buffer so the buffer needs to be unreffed after calling this function.
|
||||
|
||||
When the block property is TRUE, this function can block until free space
|
||||
becomes available in the queue.
|
||||
## `buffer`
|
||||
a buffer to push
|
||||
<!-- impl AppSrc::fn connect_push_buffer_list -->
|
||||
Adds a buffer list to the queue of buffers and buffer lists that the
|
||||
appsrc element will push to its source pad. This function does not take
|
||||
ownership of the buffer list so the buffer list needs to be unreffed
|
||||
after calling this function.
|
||||
|
||||
When the block property is TRUE, this function can block until free space
|
||||
becomes available in the queue.
|
||||
|
||||
Feature: `v1_14`
|
||||
|
||||
## `buffer_list`
|
||||
a buffer list to push
|
||||
<!-- impl AppSrc::fn connect_push_sample -->
|
||||
Extract a buffer from the provided sample and adds the extracted buffer
|
||||
to the queue of buffers that the appsrc element will
|
||||
push to its source pad. This function set the appsrc caps based on the caps
|
||||
in the sample and reset the caps if they change.
|
||||
Only the caps and the buffer of the provided sample are used and not
|
||||
for example the segment in the sample.
|
||||
This function does not take ownership of the
|
||||
sample so the sample needs to be unreffed after calling this function.
|
||||
|
||||
When the block property is TRUE, this function can block until free space
|
||||
becomes available in the queue.
|
||||
## `sample`
|
||||
a sample from which extract buffer to push
|
||||
<!-- impl AppSrc::fn connect_seek_data -->
|
||||
Seek to the given offset. The next push-buffer should produce buffers from
|
||||
the new `offset`.
|
||||
This callback is only called for seekable stream types.
|
||||
## `offset`
|
||||
the offset to seek to
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if the seek succeeded.
|
||||
<!-- enum AppStreamType -->
|
||||
The stream type.
|
||||
<!-- enum AppStreamType::variant Stream -->
|
||||
No seeking is supported in the stream, such as a
|
||||
live stream.
|
||||
<!-- enum AppStreamType::variant Seekable -->
|
||||
The stream is seekable but seeking might not
|
||||
be very fast, such as data from a webserver.
|
||||
<!-- enum AppStreamType::variant RandomAccess -->
|
||||
The stream is seekable and seeking is fast,
|
||||
such as in a local file.
|
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
|
@ -1,339 +0,0 @@
|
|||
<!-- file * -->
|
||||
<!-- struct TestClock -->
|
||||
GstTestClock is an implementation of `gst::Clock` which has different
|
||||
behaviour compared to `gst::SystemClock`. Time for `gst::SystemClock` advances
|
||||
according to the system time, while time for `TestClock` changes only
|
||||
when `TestClock::set_time` or `TestClock::advance_time` are
|
||||
called. `TestClock` provides unit tests with the possibility to
|
||||
precisely advance the time in a deterministic manner, independent of the
|
||||
system time or any other external factors.
|
||||
|
||||
## Advancing the time of a `TestClock`
|
||||
|
||||
|
||||
```C
|
||||
#include <gst/gst.h>
|
||||
#include <gst/check/gsttestclock.h>
|
||||
|
||||
GstClock *clock;
|
||||
GstTestClock *test_clock;
|
||||
|
||||
clock = gst_test_clock_new ();
|
||||
test_clock = GST_TEST_CLOCK (clock);
|
||||
GST_INFO ("Time: %" GST_TIME_FORMAT, GST_TIME_ARGS (gst_clock_get_time (clock)));
|
||||
gst_test_clock_advance_time ( test_clock, 1 * GST_SECOND);
|
||||
GST_INFO ("Time: %" GST_TIME_FORMAT, GST_TIME_ARGS (gst_clock_get_time (clock)));
|
||||
g_usleep (10 * G_USEC_PER_SEC);
|
||||
GST_INFO ("Time: %" GST_TIME_FORMAT, GST_TIME_ARGS (gst_clock_get_time (clock)));
|
||||
gst_test_clock_set_time (test_clock, 42 * GST_SECOND);
|
||||
GST_INFO ("Time: %" GST_TIME_FORMAT, GST_TIME_ARGS (gst_clock_get_time (clock)));
|
||||
...
|
||||
```
|
||||
|
||||
`gst::Clock` allows for setting up single shot or periodic clock notifications
|
||||
as well as waiting for these notifications synchronously (using
|
||||
`gst::Clock::id_wait`) or asynchronously (using
|
||||
`gst::Clock::id_wait_async`). This is used by many GStreamer elements,
|
||||
among them `GstBaseSrc` and `GstBaseSink`.
|
||||
|
||||
`TestClock` keeps track of these clock notifications. By calling
|
||||
`TestClock::wait_for_next_pending_id` or
|
||||
`TestClock::wait_for_multiple_pending_ids` a unit tests may wait for the
|
||||
next one or several clock notifications to be requested. Additionally unit
|
||||
tests may release blocked waits in a controlled fashion by calling
|
||||
`TestClock::process_next_clock_id`. This way a unit test can control the
|
||||
inaccuracy (jitter) of clock notifications, since the test can decide to
|
||||
release blocked waits when the clock time has advanced exactly to, or past,
|
||||
the requested clock notification time.
|
||||
|
||||
There are also interfaces for determining if a notification belongs to a
|
||||
`TestClock` or not, as well as getting the number of requested clock
|
||||
notifications so far.
|
||||
|
||||
N.B.: When a unit test waits for a certain amount of clock notifications to
|
||||
be requested in `TestClock::wait_for_next_pending_id` or
|
||||
`TestClock::wait_for_multiple_pending_ids` then these functions may block
|
||||
for a long time. If they block forever then the expected clock notifications
|
||||
were never requested from `TestClock`, and so the assumptions in the code
|
||||
of the unit test are wrong. The unit test case runner in gstcheck is
|
||||
expected to catch these cases either by the default test case timeout or the
|
||||
one set for the unit test by calling tcase_set_timeout\(\).
|
||||
|
||||
The sample code below assumes that the element under test will delay a
|
||||
buffer pushed on the source pad by some latency until it arrives on the sink
|
||||
pad. Moreover it is assumed that the element will at some point call
|
||||
`gst::Clock::id_wait` to synchronously wait for a specific time. The first
|
||||
buffer sent will arrive exactly on time only delayed by the latency. The
|
||||
second buffer will arrive a little late (7ms) due to simulated jitter in the
|
||||
clock notification.
|
||||
|
||||
## Demonstration of how to work with clock notifications and `TestClock`
|
||||
|
||||
|
||||
```C
|
||||
#include <gst/gst.h>
|
||||
#include <gst/check/gstcheck.h>
|
||||
#include <gst/check/gsttestclock.h>
|
||||
|
||||
GstClockTime latency;
|
||||
GstElement *element;
|
||||
GstPad *srcpad;
|
||||
GstClock *clock;
|
||||
GstTestClock *test_clock;
|
||||
GstBuffer *buf;
|
||||
GstClockID pending_id;
|
||||
GstClockID processed_id;
|
||||
|
||||
latency = 42 * GST_MSECOND;
|
||||
element = create_element (latency, ...);
|
||||
srcpad = get_source_pad (element);
|
||||
|
||||
clock = gst_test_clock_new ();
|
||||
test_clock = GST_TEST_CLOCK (clock);
|
||||
gst_element_set_clock (element, clock);
|
||||
|
||||
GST_INFO ("Set time, create and push the first buffer\n");
|
||||
gst_test_clock_set_time (test_clock, 0);
|
||||
buf = create_test_buffer (gst_clock_get_time (clock), ...);
|
||||
g_assert_cmpint (gst_pad_push (srcpad, buf), ==, GST_FLOW_OK);
|
||||
|
||||
GST_INFO ("Block until element is waiting for a clock notification\n");
|
||||
gst_test_clock_wait_for_next_pending_id (test_clock, &pending_id);
|
||||
GST_INFO ("Advance to the requested time of the clock notification\n");
|
||||
gst_test_clock_advance_time (test_clock, latency);
|
||||
GST_INFO ("Release the next blocking wait and make sure it is the one from element\n");
|
||||
processed_id = gst_test_clock_process_next_clock_id (test_clock);
|
||||
g_assert (processed_id == pending_id);
|
||||
g_assert_cmpint (GST_CLOCK_ENTRY_STATUS (processed_id), ==, GST_CLOCK_OK);
|
||||
gst_clock_id_unref (pending_id);
|
||||
gst_clock_id_unref (processed_id);
|
||||
|
||||
GST_INFO ("Validate that element produced an output buffer and check its timestamp\n");
|
||||
g_assert_cmpint (get_number_of_output_buffer (...), ==, 1);
|
||||
buf = get_buffer_pushed_by_element (element, ...);
|
||||
g_assert_cmpint (GST_BUFFER_TIMESTAMP (buf), ==, latency);
|
||||
gst_buffer_unref (buf);
|
||||
GST_INFO ("Check that element does not wait for any clock notification\n");
|
||||
g_assert (!gst_test_clock_peek_next_pending_id (test_clock, NULL));
|
||||
|
||||
GST_INFO ("Set time, create and push the second buffer\n");
|
||||
gst_test_clock_advance_time (test_clock, 10 * GST_SECOND);
|
||||
buf = create_test_buffer (gst_clock_get_time (clock), ...);
|
||||
g_assert_cmpint (gst_pad_push (srcpad, buf), ==, GST_FLOW_OK);
|
||||
|
||||
GST_INFO ("Block until element is waiting for a new clock notification\n");
|
||||
gst_test_clock_wait_for_next_pending_id (test_clock, &pending_id);
|
||||
GST_INFO ("Advance past 7ms beyond the requested time of the clock notification\n");
|
||||
gst_test_clock_advance_time (test_clock, latency + 7 * GST_MSECOND);
|
||||
GST_INFO ("Release the next blocking wait and make sure it is the one from element\n");
|
||||
processed_id = gst_test_clock_process_next_clock_id (test_clock);
|
||||
g_assert (processed_id == pending_id);
|
||||
g_assert_cmpint (GST_CLOCK_ENTRY_STATUS (processed_id), ==, GST_CLOCK_OK);
|
||||
gst_clock_id_unref (pending_id);
|
||||
gst_clock_id_unref (processed_id);
|
||||
|
||||
GST_INFO ("Validate that element produced an output buffer and check its timestamp\n");
|
||||
g_assert_cmpint (get_number_of_output_buffer (...), ==, 1);
|
||||
buf = get_buffer_pushed_by_element (element, ...);
|
||||
g_assert_cmpint (GST_BUFFER_TIMESTAMP (buf), ==,
|
||||
10 * GST_SECOND + latency + 7 * GST_MSECOND);
|
||||
gst_buffer_unref (buf);
|
||||
GST_INFO ("Check that element does not wait for any clock notification\n");
|
||||
g_assert (!gst_test_clock_peek_next_pending_id (test_clock, NULL));
|
||||
...
|
||||
```
|
||||
|
||||
Since `TestClock` is only supposed to be used in unit tests it calls
|
||||
`g_assert`, `g_assert_cmpint` or `g_assert_cmpuint` to validate all function
|
||||
arguments. This will highlight any issues with the unit test code itself.
|
||||
|
||||
# Implements
|
||||
|
||||
[`gst::ClockExt`](../gst/trait.ClockExt.html), [`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- impl TestClock::fn new -->
|
||||
Creates a new test clock with its time set to zero.
|
||||
|
||||
MT safe.
|
||||
|
||||
# Returns
|
||||
|
||||
a `TestClock` cast to `gst::Clock`.
|
||||
<!-- impl TestClock::fn new_with_start_time -->
|
||||
Creates a new test clock with its time set to the specified time.
|
||||
|
||||
MT safe.
|
||||
## `start_time`
|
||||
a `gst::ClockTime` set to the desired start time of the clock.
|
||||
|
||||
# Returns
|
||||
|
||||
a `TestClock` cast to `gst::Clock`.
|
||||
<!-- impl TestClock::fn id_list_get_latest_time -->
|
||||
Finds the latest time inside the list.
|
||||
|
||||
MT safe.
|
||||
## `pending_list`
|
||||
List
|
||||
of pending `GstClockIDs`
|
||||
<!-- impl TestClock::fn advance_time -->
|
||||
Advances the time of the `self` by the amount given by `delta`. The
|
||||
time of `self` is monotonically increasing, therefore providing a
|
||||
`delta` which is negative or zero is a programming error.
|
||||
|
||||
MT safe.
|
||||
## `delta`
|
||||
a positive `gst::ClockTimeDiff` to be added to the time of the clock
|
||||
<!-- impl TestClock::fn crank -->
|
||||
A "crank" consists of three steps:
|
||||
1: Wait for a `gst::ClockID` to be registered with the `TestClock`.
|
||||
2: Advance the `TestClock` to the time the `gst::ClockID` is waiting, unless
|
||||
the clock time is already past the clock id (Since: 1.18).
|
||||
3: Release the `gst::ClockID` wait.
|
||||
A "crank" can be thought of as the notion of
|
||||
manually driving the clock forward to its next logical step.
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if the crank was successful, `false` otherwise.
|
||||
|
||||
MT safe.
|
||||
<!-- impl TestClock::fn get_next_entry_time -->
|
||||
Retrieve the requested time for the next pending clock notification.
|
||||
|
||||
MT safe.
|
||||
|
||||
# Returns
|
||||
|
||||
a `gst::ClockTime` set to the time of the next pending clock
|
||||
notification. If no clock notifications have been requested
|
||||
`GST_CLOCK_TIME_NONE` will be returned.
|
||||
<!-- impl TestClock::fn has_id -->
|
||||
Checks whether `self` was requested to provide the clock notification
|
||||
given by `id`.
|
||||
|
||||
MT safe.
|
||||
## `id`
|
||||
a `gst::ClockID` clock notification
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if the clock has been asked to provide the given clock
|
||||
notification, `false` otherwise.
|
||||
<!-- impl TestClock::fn peek_id_count -->
|
||||
Determine the number of pending clock notifications that have been
|
||||
requested from the `self`.
|
||||
|
||||
MT safe.
|
||||
|
||||
# Returns
|
||||
|
||||
the number of pending clock notifications.
|
||||
<!-- impl TestClock::fn peek_next_pending_id -->
|
||||
Determines if the `pending_id` is the next clock notification scheduled to
|
||||
be triggered given the current time of the `self`.
|
||||
|
||||
MT safe.
|
||||
## `pending_id`
|
||||
a `gst::ClockID` clock
|
||||
notification to look for
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if `pending_id` is the next clock notification to be
|
||||
triggered, `false` otherwise.
|
||||
<!-- impl TestClock::fn process_id -->
|
||||
Processes and releases the pending ID.
|
||||
|
||||
MT safe.
|
||||
|
||||
Feature: `v1_18`
|
||||
|
||||
## `pending_id`
|
||||
`gst::ClockID`
|
||||
<!-- impl TestClock::fn process_id_list -->
|
||||
Processes and releases the pending IDs in the list.
|
||||
|
||||
MT safe.
|
||||
## `pending_list`
|
||||
List
|
||||
of pending `GstClockIDs`
|
||||
<!-- impl TestClock::fn process_next_clock_id -->
|
||||
MT safe.
|
||||
|
||||
# Returns
|
||||
|
||||
a `gst::ClockID` containing the next pending clock
|
||||
notification.
|
||||
<!-- impl TestClock::fn set_time -->
|
||||
Sets the time of `self` to the time given by `new_time`. The time of
|
||||
`self` is monotonically increasing, therefore providing a `new_time`
|
||||
which is earlier or equal to the time of the clock as given by
|
||||
`gst::ClockExt::get_time` is a programming error.
|
||||
|
||||
MT safe.
|
||||
## `new_time`
|
||||
a `gst::ClockTime` later than that returned by `gst::ClockExt::get_time`
|
||||
<!-- impl TestClock::fn timed_wait_for_multiple_pending_ids -->
|
||||
Blocks until at least `count` clock notifications have been requested from
|
||||
`self`, or the timeout expires.
|
||||
|
||||
MT safe.
|
||||
|
||||
Feature: `v1_16`
|
||||
|
||||
## `count`
|
||||
the number of pending clock notifications to wait for
|
||||
## `timeout_ms`
|
||||
the timeout in milliseconds
|
||||
## `pending_list`
|
||||
Address
|
||||
of a `glib::List` pointer variable to store the list of pending `GstClockIDs`
|
||||
that expired, or `None`
|
||||
|
||||
# Returns
|
||||
|
||||
a `gboolean` `true` if the waits have been registered, `false` if not.
|
||||
(It may have timed out waiting, or more pending waits than expected were found.)
|
||||
<!-- impl TestClock::fn wait_for_multiple_pending_ids -->
|
||||
Blocks until at least `count` clock notifications have been requested from
|
||||
`self`. There is no timeout for this wait, see the main description of
|
||||
`TestClock`.
|
||||
|
||||
MT safe.
|
||||
## `count`
|
||||
the number of pending clock notifications to wait for
|
||||
## `pending_list`
|
||||
Address
|
||||
of a `glib::List` pointer variable to store the list of pending `GstClockIDs`
|
||||
that expired, or `None`
|
||||
<!-- impl TestClock::fn wait_for_next_pending_id -->
|
||||
Waits until a clock notification is requested from `self`. There is no
|
||||
timeout for this wait, see the main description of `TestClock`. A reference
|
||||
to the pending clock notification is stored in `pending_id`.
|
||||
|
||||
MT safe.
|
||||
## `pending_id`
|
||||
`gst::ClockID`
|
||||
with information about the pending clock notification
|
||||
<!-- impl TestClock::fn wait_for_pending_id_count -->
|
||||
Blocks until at least `count` clock notifications have been requested from
|
||||
`self`. There is no timeout for this wait, see the main description of
|
||||
`TestClock`.
|
||||
|
||||
# Deprecated
|
||||
|
||||
use `TestClock::wait_for_multiple_pending_ids` instead.
|
||||
## `count`
|
||||
the number of pending clock notifications to wait for
|
||||
<!-- impl TestClock::fn get_property_start_time -->
|
||||
When a `TestClock` is constructed it will have a certain start time set.
|
||||
If the clock was created using `TestClock::new_with_start_time` then
|
||||
this property contains the value of the `start_time` argument. If
|
||||
`TestClock::new` was called the clock started at time zero, and thus
|
||||
this property contains the value 0.
|
||||
<!-- impl TestClock::fn set_property_start_time -->
|
||||
When a `TestClock` is constructed it will have a certain start time set.
|
||||
If the clock was created using `TestClock::new_with_start_time` then
|
||||
this property contains the value of the `start_time` argument. If
|
||||
`TestClock::new` was called the clock started at time zero, and thus
|
||||
this property contains the value 0.
|
File diff suppressed because it is too large
Load diff
File diff suppressed because it is too large
Load diff
|
@ -1,140 +0,0 @@
|
|||
<!-- file * -->
|
||||
<!-- struct NetClientClock -->
|
||||
`NetClientClock` implements a custom `gst::Clock` that synchronizes its time
|
||||
to a remote time provider such as `NetTimeProvider`. `NtpClock`
|
||||
implements a `gst::Clock` that synchronizes its time to a remote NTPv4 server.
|
||||
|
||||
A new clock is created with `NetClientClock::new` or
|
||||
`NtpClock::new`, which takes the address and port of the remote time
|
||||
provider along with a name and an initial time.
|
||||
|
||||
This clock will poll the time provider and will update its calibration
|
||||
parameters based on the local and remote observations.
|
||||
|
||||
The "round-trip" property limits the maximum round trip packets can take.
|
||||
|
||||
Various parameters of the clock can be configured with the parent `gst::Clock`
|
||||
"timeout", "window-size" and "window-threshold" object properties.
|
||||
|
||||
A `NetClientClock` and `NtpClock` is typically set on a `gst::Pipeline` with
|
||||
`gst::Pipeline::use_clock`.
|
||||
|
||||
If you set a `gst::Bus` on the clock via the "bus" object property, it will
|
||||
send `gst::MessageType::Element` messages with an attached `gst::Structure` containing
|
||||
statistics about clock accuracy and network traffic.
|
||||
|
||||
# Implements
|
||||
|
||||
[`gst::ClockExt`](../gst/trait.ClockExt.html), [`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- impl NetClientClock::fn new -->
|
||||
Create a new `NetClientClock` that will report the time
|
||||
provided by the `NetTimeProvider` on `remote_address` and
|
||||
`remote_port`.
|
||||
## `name`
|
||||
a name for the clock
|
||||
## `remote_address`
|
||||
the address or hostname of the remote clock provider
|
||||
## `remote_port`
|
||||
the port of the remote clock provider
|
||||
## `base_time`
|
||||
initial time of the clock
|
||||
|
||||
# Returns
|
||||
|
||||
a new `gst::Clock` that receives a time from the remote
|
||||
clock.
|
||||
<!-- struct NetTimeProvider -->
|
||||
This object exposes the time of a `gst::Clock` on the network.
|
||||
|
||||
A `NetTimeProvider` is created with `NetTimeProvider::new` which
|
||||
takes a `gst::Clock`, an address and a port number as arguments.
|
||||
|
||||
After creating the object, a client clock such as `NetClientClock` can
|
||||
query the exposed clock over the network for its values.
|
||||
|
||||
The `NetTimeProvider` typically wraps the clock used by a `gst::Pipeline`.
|
||||
|
||||
# Implements
|
||||
|
||||
[`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- impl NetTimeProvider::fn new -->
|
||||
Allows network clients to get the current time of `clock`.
|
||||
## `clock`
|
||||
a `gst::Clock` to export over the network
|
||||
## `address`
|
||||
an address to bind on as a dotted quad
|
||||
(xxx.xxx.xxx.xxx), IPv6 address, or NULL to bind to all addresses
|
||||
## `port`
|
||||
a port to bind on, or 0 to let the kernel choose
|
||||
|
||||
# Returns
|
||||
|
||||
the new `NetTimeProvider`, or NULL on error
|
||||
<!-- struct NtpClock -->
|
||||
|
||||
|
||||
# Implements
|
||||
|
||||
[`NetClientClockExt`](trait.NetClientClockExt.html), [`gst::ClockExt`](../gst/trait.ClockExt.html), [`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- impl NtpClock::fn new -->
|
||||
Create a new `NtpClock` that will report the time provided by
|
||||
the NTPv4 server on `remote_address` and `remote_port`.
|
||||
## `name`
|
||||
a name for the clock
|
||||
## `remote_address`
|
||||
the address or hostname of the remote clock provider
|
||||
## `remote_port`
|
||||
the port of the remote clock provider
|
||||
## `base_time`
|
||||
initial time of the clock
|
||||
|
||||
# Returns
|
||||
|
||||
a new `gst::Clock` that receives a time from the remote
|
||||
clock.
|
||||
<!-- struct PtpClock -->
|
||||
GstPtpClock implements a PTP (IEEE1588:2008) ordinary clock in slave-only
|
||||
mode, that allows a GStreamer pipeline to synchronize to a PTP network
|
||||
clock in some specific domain.
|
||||
|
||||
The PTP subsystem can be initialized with `gst_ptp_init`, which then starts
|
||||
a helper process to do the actual communication via the PTP ports. This is
|
||||
required as PTP listens on ports < 1024 and thus requires special
|
||||
privileges. Once this helper process is started, the main process will
|
||||
synchronize to all PTP domains that are detected on the selected
|
||||
interfaces.
|
||||
|
||||
`PtpClock::new` then allows to create a GstClock that provides the PTP
|
||||
time from a master clock inside a specific PTP domain. This clock will only
|
||||
return valid timestamps once the timestamps in the PTP domain are known. To
|
||||
check this, you can use `gst::ClockExt::wait_for_sync`, the GstClock::synced
|
||||
signal and `gst::ClockExt::is_synced`.
|
||||
|
||||
To gather statistics about the PTP clock synchronization,
|
||||
`gst_ptp_statistics_callback_add` can be used. This gives the application
|
||||
the possibility to collect all kinds of statistics from the clock
|
||||
synchronization.
|
||||
|
||||
# Implements
|
||||
|
||||
[`gst::ClockExt`](../gst/trait.ClockExt.html), [`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- impl PtpClock::fn new -->
|
||||
Creates a new PTP clock instance that exports the PTP time of the master
|
||||
clock in `domain`. This clock can be slaved to other clocks as needed.
|
||||
|
||||
If `gst_ptp_init` was not called before, this will call `gst_ptp_init` with
|
||||
default parameters.
|
||||
|
||||
This clock only returns valid timestamps after it received the first
|
||||
times from the PTP master clock on the network. Once this happens the
|
||||
GstPtpClock::internal-clock property will become non-NULL. You can
|
||||
check this with `gst::ClockExt::wait_for_sync`, the GstClock::synced signal and
|
||||
`gst::ClockExt::is_synced`.
|
||||
## `name`
|
||||
Name of the clock
|
||||
## `domain`
|
||||
PTP domain
|
||||
|
||||
# Returns
|
||||
|
||||
A new `gst::Clock`
|
|
@ -1,921 +0,0 @@
|
|||
<!-- file * -->
|
||||
<!-- struct Discoverer -->
|
||||
The `Discoverer` is a utility object which allows to get as much
|
||||
information as possible from one or many URIs.
|
||||
|
||||
It provides two APIs, allowing usage in blocking or non-blocking mode.
|
||||
|
||||
The blocking mode just requires calling `Discoverer::discover_uri`
|
||||
with the URI one wishes to discover.
|
||||
|
||||
The non-blocking mode requires a running `glib::MainLoop` iterating a
|
||||
`glib::MainContext`, where one connects to the various signals, appends the
|
||||
URIs to be processed (through `Discoverer::discover_uri_async`) and then
|
||||
asks for the discovery to begin (through `Discoverer::start`).
|
||||
By default this will use the GLib default main context unless you have
|
||||
set a custom context using `glib::MainContext::push_thread_default`.
|
||||
|
||||
All the information is returned in a `DiscovererInfo` structure.
|
||||
|
||||
# Implements
|
||||
|
||||
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- impl Discoverer::fn new -->
|
||||
Creates a new `Discoverer` with the provided timeout.
|
||||
## `timeout`
|
||||
timeout per file, in nanoseconds. Allowed are values between
|
||||
one second (`GST_SECOND`) and one hour (3600 * `GST_SECOND`)
|
||||
|
||||
# Returns
|
||||
|
||||
The new `Discoverer`.
|
||||
If an error occurred when creating the discoverer, `err` will be set
|
||||
accordingly and `None` will be returned. If `err` is set, the caller must
|
||||
free it when no longer needed using `glib::Error::free`.
|
||||
<!-- impl Discoverer::fn discover_uri -->
|
||||
Synchronously discovers the given `uri`.
|
||||
|
||||
A copy of `uri` will be made internally, so the caller can safely `g_free`
|
||||
afterwards.
|
||||
## `uri`
|
||||
The URI to run on.
|
||||
|
||||
# Returns
|
||||
|
||||
the result of the scanning. Can be `None` if an
|
||||
error occurred.
|
||||
<!-- impl Discoverer::fn discover_uri_async -->
|
||||
Appends the given `uri` to the list of URIs to discover. The actual
|
||||
discovery of the `uri` will only take place if `Discoverer::start` has
|
||||
been called.
|
||||
|
||||
A copy of `uri` will be made internally, so the caller can safely `g_free`
|
||||
afterwards.
|
||||
## `uri`
|
||||
the URI to add.
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if the `uri` was successfully appended to the list of pending
|
||||
uris, else `false`
|
||||
<!-- impl Discoverer::fn start -->
|
||||
Allow asynchronous discovering of URIs to take place.
|
||||
A `glib::MainLoop` must be available for `Discoverer` to properly work in
|
||||
asynchronous mode.
|
||||
<!-- impl Discoverer::fn stop -->
|
||||
Stop the discovery of any pending URIs and clears the list of
|
||||
pending URIS (if any).
|
||||
<!-- impl Discoverer::fn connect_discovered -->
|
||||
Will be emitted in async mode when all information on a URI could be
|
||||
discovered, or an error occurred.
|
||||
|
||||
When an error occurs, `info` might still contain some partial information,
|
||||
depending on the circumstances of the error.
|
||||
## `info`
|
||||
the results `DiscovererInfo`
|
||||
## `error`
|
||||
`glib::Error`, which will be non-NULL
|
||||
if an error occurred during
|
||||
discovery. You must not free
|
||||
this `glib::Error`, it will be freed by
|
||||
the discoverer.
|
||||
<!-- impl Discoverer::fn connect_finished -->
|
||||
Will be emitted in async mode when all pending URIs have been processed.
|
||||
<!-- impl Discoverer::fn connect_source_setup -->
|
||||
This signal is emitted after the source element has been created for
|
||||
the URI being discovered, so it can be configured by setting additional
|
||||
properties (e.g. set a proxy server for an http source, or set the device
|
||||
and read speed for an audio cd source).
|
||||
|
||||
This signal is usually emitted from the context of a GStreamer streaming
|
||||
thread.
|
||||
## `source`
|
||||
source element
|
||||
<!-- impl Discoverer::fn connect_starting -->
|
||||
Will be emitted when the discoverer starts analyzing the pending URIs
|
||||
<!-- impl Discoverer::fn get_property_timeout -->
|
||||
The duration (in nanoseconds) after which the discovery of an individual
|
||||
URI will timeout.
|
||||
|
||||
If the discovery of a URI times out, the `DiscovererResult::Timeout` will be
|
||||
set on the result flags.
|
||||
<!-- impl Discoverer::fn set_property_timeout -->
|
||||
The duration (in nanoseconds) after which the discovery of an individual
|
||||
URI will timeout.
|
||||
|
||||
If the discovery of a URI times out, the `DiscovererResult::Timeout` will be
|
||||
set on the result flags.
|
||||
<!-- struct DiscovererAudioInfo -->
|
||||
`DiscovererStreamInfo` specific to audio streams.
|
||||
|
||||
# Implements
|
||||
|
||||
[`DiscovererStreamInfoExt`](trait.DiscovererStreamInfoExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- impl DiscovererAudioInfo::fn get_bitrate -->
|
||||
|
||||
# Returns
|
||||
|
||||
the average or nominal bitrate of the stream in bits/second.
|
||||
<!-- impl DiscovererAudioInfo::fn get_channel_mask -->
|
||||
|
||||
Feature: `v1_14`
|
||||
|
||||
|
||||
# Returns
|
||||
|
||||
the channel-mask of the stream, refer to
|
||||
`gst_audio_channel_positions_from_mask` for more
|
||||
information.
|
||||
<!-- impl DiscovererAudioInfo::fn get_channels -->
|
||||
|
||||
# Returns
|
||||
|
||||
the number of channels in the stream.
|
||||
<!-- impl DiscovererAudioInfo::fn get_depth -->
|
||||
|
||||
# Returns
|
||||
|
||||
the number of bits used per sample in each channel.
|
||||
<!-- impl DiscovererAudioInfo::fn get_language -->
|
||||
|
||||
# Returns
|
||||
|
||||
the language of the stream, or NULL if unknown.
|
||||
<!-- impl DiscovererAudioInfo::fn get_max_bitrate -->
|
||||
|
||||
# Returns
|
||||
|
||||
the maximum bitrate of the stream in bits/second.
|
||||
<!-- impl DiscovererAudioInfo::fn get_sample_rate -->
|
||||
|
||||
# Returns
|
||||
|
||||
the sample rate of the stream in Hertz.
|
||||
<!-- struct DiscovererContainerInfo -->
|
||||
`DiscovererStreamInfo` specific to container streams.
|
||||
|
||||
# Implements
|
||||
|
||||
[`DiscovererStreamInfoExt`](trait.DiscovererStreamInfoExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- impl DiscovererContainerInfo::fn get_streams -->
|
||||
|
||||
# Returns
|
||||
|
||||
the list of
|
||||
`DiscovererStreamInfo` this container stream offers.
|
||||
Free with `DiscovererStreamInfo::list_free` after usage.
|
||||
<!-- struct DiscovererInfo -->
|
||||
Structure containing the information of a URI analyzed by `Discoverer`.
|
||||
|
||||
# Implements
|
||||
|
||||
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- impl DiscovererInfo::fn from_variant -->
|
||||
Parses a `glib::Variant` as produced by `DiscovererInfo::to_variant`
|
||||
back to a `DiscovererInfo`.
|
||||
## `variant`
|
||||
A `glib::Variant` to deserialize into a `DiscovererInfo`.
|
||||
|
||||
# Returns
|
||||
|
||||
A newly-allocated `DiscovererInfo`.
|
||||
<!-- impl DiscovererInfo::fn copy -->
|
||||
|
||||
# Returns
|
||||
|
||||
A copy of the `DiscovererInfo`
|
||||
<!-- impl DiscovererInfo::fn get_audio_streams -->
|
||||
Finds all the `DiscovererAudioInfo` contained in `self`
|
||||
|
||||
# Returns
|
||||
|
||||
A `glib::List` of
|
||||
matching `DiscovererStreamInfo`. The caller should free it with
|
||||
`DiscovererStreamInfo::list_free`.
|
||||
<!-- impl DiscovererInfo::fn get_container_streams -->
|
||||
Finds all the `DiscovererContainerInfo` contained in `self`
|
||||
|
||||
# Returns
|
||||
|
||||
A `glib::List` of
|
||||
matching `DiscovererStreamInfo`. The caller should free it with
|
||||
`DiscovererStreamInfo::list_free`.
|
||||
<!-- impl DiscovererInfo::fn get_duration -->
|
||||
|
||||
# Returns
|
||||
|
||||
the duration of the URI in `gst::ClockTime` (nanoseconds).
|
||||
<!-- impl DiscovererInfo::fn get_live -->
|
||||
|
||||
Feature: `v1_14`
|
||||
|
||||
|
||||
# Returns
|
||||
|
||||
whether the URI is live.
|
||||
<!-- impl DiscovererInfo::fn get_misc -->
|
||||
|
||||
# Deprecated
|
||||
|
||||
This functions is deprecated since version 1.4, use
|
||||
`DiscovererInfo::get_missing_elements_installer_details`
|
||||
|
||||
# Returns
|
||||
|
||||
Miscellaneous information stored as a `gst::Structure`
|
||||
(for example: information about missing plugins). If you wish to use the
|
||||
`gst::Structure` after the life-time of `self`, you will need to copy it.
|
||||
<!-- impl DiscovererInfo::fn get_missing_elements_installer_details -->
|
||||
Get the installer details for missing elements
|
||||
|
||||
# Returns
|
||||
|
||||
An array of strings
|
||||
containing information about how to install the various missing elements
|
||||
for `self` to be usable. If you wish to use the strings after the life-time
|
||||
of `self`, you will need to copy them.
|
||||
<!-- impl DiscovererInfo::fn get_result -->
|
||||
|
||||
# Returns
|
||||
|
||||
the result of the discovery as a `DiscovererResult`.
|
||||
<!-- impl DiscovererInfo::fn get_seekable -->
|
||||
|
||||
# Returns
|
||||
|
||||
whether the URI is seekable.
|
||||
<!-- impl DiscovererInfo::fn get_stream_info -->
|
||||
|
||||
# Returns
|
||||
|
||||
the structure (or topology) of the URI as a
|
||||
`DiscovererStreamInfo`.
|
||||
This structure can be traversed to see the original hierarchy. Unref with
|
||||
`gst_discoverer_stream_info_unref` after usage.
|
||||
<!-- impl DiscovererInfo::fn get_stream_list -->
|
||||
|
||||
# Returns
|
||||
|
||||
the list of
|
||||
all streams contained in the `info`. Free after usage
|
||||
with `DiscovererStreamInfo::list_free`.
|
||||
<!-- impl DiscovererInfo::fn get_streams -->
|
||||
Finds the `DiscovererStreamInfo` contained in `self` that match the
|
||||
given `streamtype`.
|
||||
## `streamtype`
|
||||
a `glib::Type` derived from `DiscovererStreamInfo`
|
||||
|
||||
# Returns
|
||||
|
||||
A `glib::List` of
|
||||
matching `DiscovererStreamInfo`. The caller should free it with
|
||||
`DiscovererStreamInfo::list_free`.
|
||||
<!-- impl DiscovererInfo::fn get_subtitle_streams -->
|
||||
Finds all the `DiscovererSubtitleInfo` contained in `self`
|
||||
|
||||
# Returns
|
||||
|
||||
A `glib::List` of
|
||||
matching `DiscovererStreamInfo`. The caller should free it with
|
||||
`DiscovererStreamInfo::list_free`.
|
||||
<!-- impl DiscovererInfo::fn get_tags -->
|
||||
|
||||
# Returns
|
||||
|
||||
all tags contained in the URI. If you wish to use
|
||||
the tags after the life-time of `self`, you will need to copy them.
|
||||
<!-- impl DiscovererInfo::fn get_toc -->
|
||||
|
||||
# Returns
|
||||
|
||||
TOC contained in the URI. If you wish to use
|
||||
the TOC after the life-time of `self`, you will need to copy it.
|
||||
<!-- impl DiscovererInfo::fn get_uri -->
|
||||
|
||||
# Returns
|
||||
|
||||
the URI to which this information corresponds to.
|
||||
Copy it if you wish to use it after the life-time of `self`.
|
||||
<!-- impl DiscovererInfo::fn get_video_streams -->
|
||||
Finds all the `DiscovererVideoInfo` contained in `self`
|
||||
|
||||
# Returns
|
||||
|
||||
A `glib::List` of
|
||||
matching `DiscovererStreamInfo`. The caller should free it with
|
||||
`DiscovererStreamInfo::list_free`.
|
||||
<!-- impl DiscovererInfo::fn to_variant -->
|
||||
Serializes `self` to a `glib::Variant` that can be parsed again
|
||||
through `DiscovererInfo::from_variant`.
|
||||
|
||||
Note that any `gst::Toc` (s) that might have been discovered will not be serialized
|
||||
for now.
|
||||
## `flags`
|
||||
A combination of `DiscovererSerializeFlags` to specify
|
||||
what needs to be serialized.
|
||||
|
||||
# Returns
|
||||
|
||||
A newly-allocated `glib::Variant` representing `self`.
|
||||
<!-- enum DiscovererResult -->
|
||||
Result values for the discovery process.
|
||||
<!-- enum DiscovererResult::variant Ok -->
|
||||
The discovery was successful
|
||||
<!-- enum DiscovererResult::variant UriInvalid -->
|
||||
the URI is invalid
|
||||
<!-- enum DiscovererResult::variant Error -->
|
||||
an error happened and the GError is set
|
||||
<!-- enum DiscovererResult::variant Timeout -->
|
||||
the discovery timed-out
|
||||
<!-- enum DiscovererResult::variant Busy -->
|
||||
the discoverer was already discovering a file
|
||||
<!-- enum DiscovererResult::variant MissingPlugins -->
|
||||
Some plugins are missing for full discovery
|
||||
<!-- struct DiscovererStreamInfo -->
|
||||
Base structure for information concerning a media stream. Depending on the
|
||||
stream type, one can find more media-specific information in
|
||||
`DiscovererAudioInfo`, `DiscovererVideoInfo`, and
|
||||
`DiscovererContainerInfo`.
|
||||
|
||||
The `DiscovererStreamInfo` represents the topology of the stream. Siblings
|
||||
can be iterated over with `DiscovererStreamInfoExt::get_next` and
|
||||
`DiscovererStreamInfoExt::get_previous`. Children (sub-streams) of a
|
||||
stream can be accessed using the `DiscovererContainerInfo` API.
|
||||
|
||||
As a simple example, if you run `Discoverer` on an AVI file with one audio
|
||||
and one video stream, you will get a `DiscovererContainerInfo`
|
||||
corresponding to the AVI container, which in turn will have a
|
||||
`DiscovererAudioInfo` sub-stream and a `DiscovererVideoInfo` sub-stream
|
||||
for the audio and video streams respectively.
|
||||
|
||||
# Implements
|
||||
|
||||
[`DiscovererStreamInfoExt`](trait.DiscovererStreamInfoExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- trait DiscovererStreamInfoExt -->
|
||||
Trait containing all `DiscovererStreamInfo` methods.
|
||||
|
||||
# Implementors
|
||||
|
||||
[`DiscovererAudioInfo`](struct.DiscovererAudioInfo.html), [`DiscovererContainerInfo`](struct.DiscovererContainerInfo.html), [`DiscovererStreamInfo`](struct.DiscovererStreamInfo.html), [`DiscovererSubtitleInfo`](struct.DiscovererSubtitleInfo.html), [`DiscovererVideoInfo`](struct.DiscovererVideoInfo.html)
|
||||
<!-- impl DiscovererStreamInfo::fn list_free -->
|
||||
Decrements the reference count of all contained `DiscovererStreamInfo`
|
||||
and frees the `glib::List`.
|
||||
## `infos`
|
||||
a `glib::List` of `DiscovererStreamInfo`
|
||||
<!-- trait DiscovererStreamInfoExt::fn get_caps -->
|
||||
|
||||
# Returns
|
||||
|
||||
the `gst::Caps` of the stream. Unref with
|
||||
`gst::Caps::unref` after usage.
|
||||
<!-- trait DiscovererStreamInfoExt::fn get_misc -->
|
||||
|
||||
# Deprecated
|
||||
|
||||
This function is deprecated since version 1.4, use
|
||||
`DiscovererInfo::get_missing_elements_installer_details`
|
||||
|
||||
# Returns
|
||||
|
||||
additional information regarding the stream (for
|
||||
example codec version, profile, etc..). If you wish to use the `gst::Structure`
|
||||
after the life-time of `self` you will need to copy it.
|
||||
<!-- trait DiscovererStreamInfoExt::fn get_next -->
|
||||
|
||||
# Returns
|
||||
|
||||
the next `DiscovererStreamInfo` in a chain. `None`
|
||||
for final streams.
|
||||
Unref with `gst_discoverer_stream_info_unref` after usage.
|
||||
<!-- trait DiscovererStreamInfoExt::fn get_previous -->
|
||||
|
||||
# Returns
|
||||
|
||||
the previous `DiscovererStreamInfo` in a chain.
|
||||
`None` for starting points. Unref with `gst_discoverer_stream_info_unref`
|
||||
after usage.
|
||||
<!-- trait DiscovererStreamInfoExt::fn get_stream_id -->
|
||||
|
||||
# Returns
|
||||
|
||||
the stream ID of this stream. If you wish to
|
||||
use the stream ID after the life-time of `self` you will need to copy it.
|
||||
<!-- trait DiscovererStreamInfoExt::fn get_stream_type_nick -->
|
||||
|
||||
# Returns
|
||||
|
||||
a human readable name for the stream type of the given `self` (ex : "audio",
|
||||
"container",...).
|
||||
<!-- trait DiscovererStreamInfoExt::fn get_tags -->
|
||||
|
||||
# Returns
|
||||
|
||||
the tags contained in this stream. If you wish to
|
||||
use the tags after the life-time of `self` you will need to copy them.
|
||||
<!-- trait DiscovererStreamInfoExt::fn get_toc -->
|
||||
|
||||
# Returns
|
||||
|
||||
the TOC contained in this stream. If you wish to
|
||||
use the TOC after the life-time of `self` you will need to copy it.
|
||||
<!-- struct DiscovererSubtitleInfo -->
|
||||
`DiscovererStreamInfo` specific to subtitle streams (this includes text and
|
||||
image based ones).
|
||||
|
||||
# Implements
|
||||
|
||||
[`DiscovererStreamInfoExt`](trait.DiscovererStreamInfoExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- impl DiscovererSubtitleInfo::fn get_language -->
|
||||
|
||||
# Returns
|
||||
|
||||
the language of the stream, or NULL if unknown.
|
||||
<!-- struct DiscovererVideoInfo -->
|
||||
`DiscovererStreamInfo` specific to video streams (this includes images).
|
||||
|
||||
# Implements
|
||||
|
||||
[`DiscovererStreamInfoExt`](trait.DiscovererStreamInfoExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- impl DiscovererVideoInfo::fn get_bitrate -->
|
||||
|
||||
# Returns
|
||||
|
||||
the average or nominal bitrate of the video stream in bits/second.
|
||||
<!-- impl DiscovererVideoInfo::fn get_depth -->
|
||||
|
||||
# Returns
|
||||
|
||||
the depth in bits of the video stream.
|
||||
<!-- impl DiscovererVideoInfo::fn get_framerate_denom -->
|
||||
|
||||
# Returns
|
||||
|
||||
the framerate of the video stream (denominator).
|
||||
<!-- impl DiscovererVideoInfo::fn get_framerate_num -->
|
||||
|
||||
# Returns
|
||||
|
||||
the framerate of the video stream (numerator).
|
||||
<!-- impl DiscovererVideoInfo::fn get_height -->
|
||||
|
||||
# Returns
|
||||
|
||||
the height of the video stream in pixels.
|
||||
<!-- impl DiscovererVideoInfo::fn get_max_bitrate -->
|
||||
|
||||
# Returns
|
||||
|
||||
the maximum bitrate of the video stream in bits/second.
|
||||
<!-- impl DiscovererVideoInfo::fn get_par_denom -->
|
||||
|
||||
# Returns
|
||||
|
||||
the Pixel Aspect Ratio (PAR) of the video stream (denominator).
|
||||
<!-- impl DiscovererVideoInfo::fn get_par_num -->
|
||||
|
||||
# Returns
|
||||
|
||||
the Pixel Aspect Ratio (PAR) of the video stream (numerator).
|
||||
<!-- impl DiscovererVideoInfo::fn get_width -->
|
||||
|
||||
# Returns
|
||||
|
||||
the width of the video stream in pixels.
|
||||
<!-- impl DiscovererVideoInfo::fn is_image -->
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if the video stream corresponds to an image (i.e. only contains
|
||||
one frame).
|
||||
<!-- impl DiscovererVideoInfo::fn is_interlaced -->
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if the stream is interlaced, else `false`.
|
||||
<!-- struct EncodingAudioProfile -->
|
||||
Variant of `EncodingProfile` for audio streams.
|
||||
|
||||
# Implements
|
||||
|
||||
[`EncodingProfileExt`](trait.EncodingProfileExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- impl EncodingAudioProfile::fn new -->
|
||||
Creates a new `EncodingAudioProfile`
|
||||
|
||||
All provided allocatable arguments will be internally copied, so can be
|
||||
safely freed/unreferenced after calling this method.
|
||||
## `format`
|
||||
the `gst::Caps`
|
||||
## `preset`
|
||||
the preset(s) to use on the encoder, can be `None`
|
||||
## `restriction`
|
||||
the `gst::Caps` used to restrict the input to the encoder, can be
|
||||
NULL. See `EncodingProfile::get_restriction` for more details.
|
||||
## `presence`
|
||||
the number of times this stream must be used. 0 means any number of
|
||||
times (including never)
|
||||
|
||||
# Returns
|
||||
|
||||
the newly created `EncodingAudioProfile`.
|
||||
<!-- struct EncodingContainerProfile -->
|
||||
Encoding profiles for containers. Keeps track of a list of `EncodingProfile`
|
||||
|
||||
# Implements
|
||||
|
||||
[`EncodingProfileExt`](trait.EncodingProfileExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- impl EncodingContainerProfile::fn new -->
|
||||
Creates a new `EncodingContainerProfile`.
|
||||
## `name`
|
||||
The name of the container profile, can be `None`
|
||||
## `description`
|
||||
The description of the container profile,
|
||||
can be `None`
|
||||
## `format`
|
||||
The format to use for this profile
|
||||
## `preset`
|
||||
The preset to use for this profile.
|
||||
|
||||
# Returns
|
||||
|
||||
The newly created `EncodingContainerProfile`.
|
||||
<!-- impl EncodingContainerProfile::fn add_profile -->
|
||||
Add a `EncodingProfile` to the list of profiles handled by `self`.
|
||||
|
||||
No copy of `profile` will be made, if you wish to use it elsewhere after this
|
||||
method you should increment its reference count.
|
||||
## `profile`
|
||||
the `EncodingProfile` to add.
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if the `stream` was properly added, else `false`.
|
||||
<!-- impl EncodingContainerProfile::fn contains_profile -->
|
||||
Checks if `self` contains a `EncodingProfile` identical to
|
||||
`profile`.
|
||||
## `profile`
|
||||
a `EncodingProfile`
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if `self` contains a `EncodingProfile` identical
|
||||
to `profile`, else `false`.
|
||||
<!-- impl EncodingContainerProfile::fn get_profiles -->
|
||||
|
||||
# Returns
|
||||
|
||||
|
||||
the list of contained `EncodingProfile`.
|
||||
<!-- struct EncodingProfile -->
|
||||
The opaque base class object for all encoding profiles. This contains generic
|
||||
information like name, description, format and preset.
|
||||
|
||||
# Implements
|
||||
|
||||
[`EncodingProfileExt`](trait.EncodingProfileExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- trait EncodingProfileExt -->
|
||||
Trait containing all `EncodingProfile` methods.
|
||||
|
||||
# Implementors
|
||||
|
||||
[`EncodingAudioProfile`](struct.EncodingAudioProfile.html), [`EncodingContainerProfile`](struct.EncodingContainerProfile.html), [`EncodingProfile`](struct.EncodingProfile.html), [`EncodingVideoProfile`](struct.EncodingVideoProfile.html)
|
||||
<!-- impl EncodingProfile::fn find -->
|
||||
Find the `EncodingProfile` with the specified name and category.
|
||||
## `targetname`
|
||||
The name of the target
|
||||
## `profilename`
|
||||
The name of the profile, if `None`
|
||||
provided, it will default to the encoding profile called `default`.
|
||||
## `category`
|
||||
The target category. Can be `None`
|
||||
|
||||
# Returns
|
||||
|
||||
The matching `EncodingProfile` or `None`.
|
||||
<!-- impl EncodingProfile::fn from_discoverer -->
|
||||
Creates a `EncodingProfile` matching the formats from the given
|
||||
`DiscovererInfo`. Streams other than audio or video (eg,
|
||||
subtitles), are currently ignored.
|
||||
## `info`
|
||||
The `DiscovererInfo` to read from
|
||||
|
||||
# Returns
|
||||
|
||||
The new `EncodingProfile` or `None`.
|
||||
<!-- trait EncodingProfileExt::fn copy -->
|
||||
Makes a deep copy of `self`
|
||||
|
||||
Feature: `v1_12`
|
||||
|
||||
|
||||
# Returns
|
||||
|
||||
The copy of `self`
|
||||
<!-- trait EncodingProfileExt::fn get_allow_dynamic_output -->
|
||||
Get whether the format that has been negotiated at some point can be renegotiated
|
||||
later during the encoding.
|
||||
<!-- trait EncodingProfileExt::fn get_description -->
|
||||
|
||||
# Returns
|
||||
|
||||
the description of the profile, can be `None`.
|
||||
<!-- trait EncodingProfileExt::fn get_file_extension -->
|
||||
|
||||
# Returns
|
||||
|
||||
a suitable file extension for `self`, or NULL.
|
||||
<!-- trait EncodingProfileExt::fn get_format -->
|
||||
|
||||
# Returns
|
||||
|
||||
the `gst::Caps` corresponding to the media format used
|
||||
in the profile. Unref after usage.
|
||||
<!-- trait EncodingProfileExt::fn get_input_caps -->
|
||||
Computes the full output caps that this `self` will be able to consume.
|
||||
|
||||
# Returns
|
||||
|
||||
The full caps the given `self` can consume. Call
|
||||
`gst::Caps::unref` when you are done with the caps.
|
||||
<!-- trait EncodingProfileExt::fn get_name -->
|
||||
|
||||
# Returns
|
||||
|
||||
the name of the profile, can be `None`.
|
||||
<!-- trait EncodingProfileExt::fn get_presence -->
|
||||
|
||||
# Returns
|
||||
|
||||
The number of times the profile is used in its parent
|
||||
container profile. If 0, it is not a mandatory stream.
|
||||
<!-- trait EncodingProfileExt::fn get_preset -->
|
||||
|
||||
# Returns
|
||||
|
||||
the name of the `gst::Preset` to be used in the profile.
|
||||
This is the name that has been set when saving the preset.
|
||||
<!-- trait EncodingProfileExt::fn get_preset_name -->
|
||||
|
||||
# Returns
|
||||
|
||||
the name of the `gst::Preset` factory to be used in the profile.
|
||||
<!-- trait EncodingProfileExt::fn get_restriction -->
|
||||
|
||||
# Returns
|
||||
|
||||
The restriction `gst::Caps` to apply before the encoder
|
||||
that will be used in the profile. The fields present in restriction caps are
|
||||
properties of the raw stream (that is before encoding), such as height and
|
||||
width for video and depth and sampling rate for audio. Does not apply to
|
||||
`EncodingContainerProfile` (since there is no corresponding raw stream).
|
||||
Can be `None`. Unref after usage.
|
||||
<!-- trait EncodingProfileExt::fn get_single_segment -->
|
||||
|
||||
Feature: `v1_18`
|
||||
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if the stream represented by `self` should use a single
|
||||
segment before the encoder, `false` otherwise. This means that buffers will be retimestamped
|
||||
and segments will be collapsed so as to appear as a single segment.
|
||||
<!-- trait EncodingProfileExt::fn get_type_nick -->
|
||||
|
||||
# Returns
|
||||
|
||||
the human-readable name of the type of `self`.
|
||||
<!-- trait EncodingProfileExt::fn is_equal -->
|
||||
Checks whether the two `EncodingProfile` are equal
|
||||
## `b`
|
||||
a `EncodingProfile`
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if `self` and `b` are equal, else `false`.
|
||||
<!-- trait EncodingProfileExt::fn set_allow_dynamic_output -->
|
||||
Sets whether the format that has been negotiated at some point can be renegotiated
|
||||
later during the encoding.
|
||||
## `allow_dynamic_output`
|
||||
Whether the format that has been negotiated first can be renegotiated
|
||||
during the encoding
|
||||
<!-- trait EncodingProfileExt::fn set_description -->
|
||||
Set `description` as the given description for the `self`. A copy of
|
||||
`description` will be made internally.
|
||||
## `description`
|
||||
the description to set on the profile
|
||||
<!-- trait EncodingProfileExt::fn set_enabled -->
|
||||
Set whether the profile should be used or not.
|
||||
## `enabled`
|
||||
`false` to disable `self`, `true` to enable it
|
||||
<!-- trait EncodingProfileExt::fn set_format -->
|
||||
Sets the media format used in the profile.
|
||||
## `format`
|
||||
the media format to use in the profile.
|
||||
<!-- trait EncodingProfileExt::fn set_name -->
|
||||
Set `name` as the given name for the `self`. A copy of `name` will be made
|
||||
internally.
|
||||
## `name`
|
||||
the name to set on the profile
|
||||
<!-- trait EncodingProfileExt::fn set_presence -->
|
||||
Set the number of times the profile is used in its parent
|
||||
container profile. If 0, it is not a mandatory stream
|
||||
## `presence`
|
||||
the number of times the profile can be used
|
||||
<!-- trait EncodingProfileExt::fn set_preset -->
|
||||
Sets the name of the `gst::Element` that implements the `gst::Preset` interface
|
||||
to use for the profile.
|
||||
This is the name that has been set when saving the preset.
|
||||
## `preset`
|
||||
the element preset to use
|
||||
<!-- trait EncodingProfileExt::fn set_preset_name -->
|
||||
Sets the name of the `gst::Preset`'s factory to be used in the profile.
|
||||
## `preset_name`
|
||||
The name of the preset to use in this `self`.
|
||||
<!-- trait EncodingProfileExt::fn set_restriction -->
|
||||
Set the restriction `gst::Caps` to apply before the encoder
|
||||
that will be used in the profile. See `EncodingProfile::get_restriction`
|
||||
for more about restrictions. Does not apply to `EncodingContainerProfile`.
|
||||
## `restriction`
|
||||
the restriction to apply
|
||||
<!-- trait EncodingProfileExt::fn set_single_segment -->
|
||||
If using a single segment, buffers will be retimestamped
|
||||
and segments will be collapsed so as to appear as a single segment.
|
||||
|
||||
Feature: `v1_18`
|
||||
|
||||
## `single_segment`
|
||||
`true` if the stream represented by `self` should use a single
|
||||
segment before the encoder `false` otherwise.
|
||||
<!-- struct EncodingTarget -->
|
||||
Collection of `EncodingProfile` for a specific target or use-case.
|
||||
|
||||
When being stored/loaded, targets come from a specific category, like
|
||||
`GST_ENCODING_CATEGORY_DEVICE`.
|
||||
|
||||
# Implements
|
||||
|
||||
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- impl EncodingTarget::fn new -->
|
||||
Creates a new `EncodingTarget`.
|
||||
|
||||
The name and category can only consist of lowercase ASCII letters for the
|
||||
first character, followed by either lowercase ASCII letters, digits or
|
||||
hyphens ('-').
|
||||
|
||||
The `category` *should* be one of the existing
|
||||
well-defined categories, like `GST_ENCODING_CATEGORY_DEVICE`, but it
|
||||
*can* be an application- or user-specific category if
|
||||
needed.
|
||||
## `name`
|
||||
The name of the target.
|
||||
## `category`
|
||||
The name of the category to which this `target`
|
||||
belongs. For example: `GST_ENCODING_CATEGORY_DEVICE`.
|
||||
## `description`
|
||||
A description of `EncodingTarget` in the
|
||||
current locale.
|
||||
## `profiles`
|
||||
A `glib::List` of
|
||||
`EncodingProfile`.
|
||||
|
||||
# Returns
|
||||
|
||||
The newly created `EncodingTarget` or `None` if
|
||||
there was an error.
|
||||
<!-- impl EncodingTarget::fn load -->
|
||||
Searches for the `EncodingTarget` with the given name, loads it
|
||||
and returns it.
|
||||
|
||||
If the category name is specified only targets from that category will be
|
||||
searched for.
|
||||
## `name`
|
||||
the name of the `EncodingTarget` to load (automatically
|
||||
converted to lower case internally as capital letters are not
|
||||
valid for target names).
|
||||
## `category`
|
||||
the name of the target category, like
|
||||
`GST_ENCODING_CATEGORY_DEVICE`. Can be `None`
|
||||
|
||||
# Returns
|
||||
|
||||
The `EncodingTarget` if available, else `None`.
|
||||
<!-- impl EncodingTarget::fn load_from_file -->
|
||||
Opens the provided file and returns the contained `EncodingTarget`.
|
||||
## `filepath`
|
||||
The file location to load the `EncodingTarget` from
|
||||
|
||||
# Returns
|
||||
|
||||
The `EncodingTarget` contained in the file, else
|
||||
`None`
|
||||
<!-- impl EncodingTarget::fn add_profile -->
|
||||
Adds the given `profile` to the `self`. Each added profile must have
|
||||
a unique name within the profile.
|
||||
|
||||
The `self` will steal a reference to the `profile`. If you wish to use
|
||||
the profile after calling this method, you should increase its reference
|
||||
count.
|
||||
## `profile`
|
||||
the `EncodingProfile` to add
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if the profile was added, else `false`.
|
||||
<!-- impl EncodingTarget::fn get_category -->
|
||||
|
||||
# Returns
|
||||
|
||||
The category of the `self`. For example:
|
||||
`GST_ENCODING_CATEGORY_DEVICE`.
|
||||
<!-- impl EncodingTarget::fn get_description -->
|
||||
|
||||
# Returns
|
||||
|
||||
The description of the `self`.
|
||||
<!-- impl EncodingTarget::fn get_name -->
|
||||
|
||||
# Returns
|
||||
|
||||
The name of the `self`.
|
||||
<!-- impl EncodingTarget::fn get_profile -->
|
||||
## `name`
|
||||
the name of the profile to retrieve
|
||||
|
||||
# Returns
|
||||
|
||||
The matching `EncodingProfile`, or `None`.
|
||||
<!-- impl EncodingTarget::fn get_profiles -->
|
||||
|
||||
# Returns
|
||||
|
||||
A list of
|
||||
`EncodingProfile`(s) this `self` handles.
|
||||
<!-- impl EncodingTarget::fn save -->
|
||||
Saves the `self` to a default user-local directory.
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if the target was correctly saved, else `false`.
|
||||
<!-- impl EncodingTarget::fn save_to_file -->
|
||||
Saves the `self` to the provided file location.
|
||||
## `filepath`
|
||||
the location to store the `self` at.
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if the target was correctly saved, else `false`.
|
||||
<!-- struct EncodingVideoProfile -->
|
||||
Variant of `EncodingProfile` for video streams, allows specifying the `pass`.
|
||||
|
||||
# Implements
|
||||
|
||||
[`EncodingProfileExt`](trait.EncodingProfileExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- impl EncodingVideoProfile::fn new -->
|
||||
Creates a new `EncodingVideoProfile`
|
||||
|
||||
All provided allocatable arguments will be internally copied, so can be
|
||||
safely freed/unreferenced after calling this method.
|
||||
|
||||
If you wish to control the pass number (in case of multi-pass scenarios),
|
||||
please refer to the `EncodingVideoProfile::set_pass` documentation.
|
||||
|
||||
If you wish to use/force a constant framerate please refer to the
|
||||
`EncodingVideoProfile::set_variableframerate` documentation.
|
||||
## `format`
|
||||
the `gst::Caps`
|
||||
## `preset`
|
||||
the preset(s) to use on the encoder, can be `None`
|
||||
## `restriction`
|
||||
the `gst::Caps` used to restrict the input to the encoder, can be
|
||||
NULL. See `EncodingProfile::get_restriction` for more details.
|
||||
## `presence`
|
||||
the number of times this stream must be used. 0 means any number of
|
||||
times (including never)
|
||||
|
||||
# Returns
|
||||
|
||||
the newly created `EncodingVideoProfile`.
|
||||
<!-- impl EncodingVideoProfile::fn get_pass -->
|
||||
Get the pass number if this is part of a multi-pass profile.
|
||||
|
||||
# Returns
|
||||
|
||||
The pass number. Starts at 1 for multi-pass. 0 if this is
|
||||
not a multi-pass profile
|
||||
<!-- impl EncodingVideoProfile::fn get_variableframerate -->
|
||||
|
||||
# Returns
|
||||
|
||||
Whether non-constant video framerate is allowed for encoding.
|
||||
<!-- impl EncodingVideoProfile::fn set_pass -->
|
||||
Sets the pass number of this video profile. The first pass profile should have
|
||||
this value set to 1. If this video profile isn't part of a multi-pass profile,
|
||||
you may set it to 0 (the default value).
|
||||
## `pass`
|
||||
the pass number for this profile
|
||||
<!-- impl EncodingVideoProfile::fn set_variableframerate -->
|
||||
If set to `true`, then the incoming stream will be allowed to have non-constant
|
||||
framerate. If set to `false` (default value), then the incoming stream will
|
||||
be normalized by dropping/duplicating frames in order to produce a
|
||||
constant framerate.
|
||||
## `variableframerate`
|
||||
a boolean
|
|
@ -1,740 +0,0 @@
|
|||
<!-- file * -->
|
||||
<!-- struct Player -->
|
||||
|
||||
|
||||
# Implements
|
||||
|
||||
[`gst::ObjectExt`](../gst/trait.ObjectExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- impl Player::fn new -->
|
||||
Creates a new `Player` instance that uses `signal_dispatcher` to dispatch
|
||||
signals to some event loop system, or emits signals directly if NULL is
|
||||
passed. See `PlayerGMainContextSignalDispatcher::new`.
|
||||
|
||||
Video is going to be rendered by `video_renderer`, or if `None` is provided
|
||||
no special video set up will be done and some default handling will be
|
||||
performed.
|
||||
## `video_renderer`
|
||||
GstPlayerVideoRenderer to use
|
||||
## `signal_dispatcher`
|
||||
GstPlayerSignalDispatcher to use
|
||||
|
||||
# Returns
|
||||
|
||||
a new `Player` instance
|
||||
<!-- impl Player::fn config_get_position_update_interval -->
|
||||
## `config`
|
||||
a `Player` configuration
|
||||
|
||||
# Returns
|
||||
|
||||
current position update interval in milliseconds
|
||||
<!-- impl Player::fn config_get_seek_accurate -->
|
||||
## `config`
|
||||
a `Player` configuration
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if accurate seeking is enabled
|
||||
<!-- impl Player::fn config_get_user_agent -->
|
||||
Return the user agent which has been configured using
|
||||
`Player::config_set_user_agent` if any.
|
||||
## `config`
|
||||
a `Player` configuration
|
||||
|
||||
# Returns
|
||||
|
||||
the configured agent, or `None`
|
||||
<!-- impl Player::fn config_set_position_update_interval -->
|
||||
set interval in milliseconds between two position-updated signals.
|
||||
pass 0 to stop updating the position.
|
||||
## `config`
|
||||
a `Player` configuration
|
||||
## `interval`
|
||||
interval in ms
|
||||
<!-- impl Player::fn config_set_seek_accurate -->
|
||||
Enable or disable accurate seeking. When enabled, elements will try harder
|
||||
to seek as accurately as possible to the requested seek position. Generally
|
||||
it will be slower especially for formats that don't have any indexes or
|
||||
timestamp markers in the stream.
|
||||
|
||||
If accurate seeking is disabled, elements will seek as close as possible to the requested
|
||||
position without slowing down seeking too much.
|
||||
|
||||
Accurate seeking is disabled by default.
|
||||
## `config`
|
||||
a `Player` configuration
|
||||
## `accurate`
|
||||
accurate seek or not
|
||||
<!-- impl Player::fn config_set_user_agent -->
|
||||
Set the user agent to pass to the server if `player` needs to connect
|
||||
to a server during playback. This is typically used when playing HTTP
|
||||
or RTSP streams.
|
||||
## `config`
|
||||
a `Player` configuration
|
||||
## `agent`
|
||||
the string to use as user agent
|
||||
<!-- impl Player::fn get_audio_streams -->
|
||||
## `info`
|
||||
a `PlayerMediaInfo`
|
||||
|
||||
# Returns
|
||||
|
||||
A `glib::List` of
|
||||
matching `PlayerAudioInfo`.
|
||||
<!-- impl Player::fn get_subtitle_streams -->
|
||||
## `info`
|
||||
a `PlayerMediaInfo`
|
||||
|
||||
# Returns
|
||||
|
||||
A `glib::List` of
|
||||
matching `PlayerSubtitleInfo`.
|
||||
<!-- impl Player::fn get_video_streams -->
|
||||
## `info`
|
||||
a `PlayerMediaInfo`
|
||||
|
||||
# Returns
|
||||
|
||||
A `glib::List` of
|
||||
matching `PlayerVideoInfo`.
|
||||
<!-- impl Player::fn visualizations_free -->
|
||||
Frees a `None` terminated array of `PlayerVisualization`.
|
||||
## `viss`
|
||||
a `None` terminated array of `PlayerVisualization` to free
|
||||
<!-- impl Player::fn visualizations_get -->
|
||||
|
||||
# Returns
|
||||
|
||||
|
||||
a `None` terminated array containing all available
|
||||
visualizations. Use `Player::visualizations_free` after
|
||||
usage.
|
||||
<!-- impl Player::fn get_audio_video_offset -->
|
||||
Retrieve the current value of audio-video-offset property
|
||||
|
||||
# Returns
|
||||
|
||||
The current value of audio-video-offset in nanoseconds
|
||||
<!-- impl Player::fn get_color_balance -->
|
||||
Retrieve the current value of the indicated `type_`.
|
||||
## `type_`
|
||||
`PlayerColorBalanceType`
|
||||
|
||||
# Returns
|
||||
|
||||
The current value of `type_`, between [0,1]. In case of
|
||||
error -1 is returned.
|
||||
<!-- impl Player::fn get_config -->
|
||||
Get a copy of the current configuration of the player. This configuration
|
||||
can either be modified and used for the `Player::set_config` call
|
||||
or it must be freed after usage.
|
||||
|
||||
# Returns
|
||||
|
||||
a copy of the current configuration of `self`. Use
|
||||
`gst::Structure::free` after usage or `Player::set_config`.
|
||||
<!-- impl Player::fn get_current_audio_track -->
|
||||
A function to get the current audio `PlayerAudioInfo` instance.
|
||||
|
||||
# Returns
|
||||
|
||||
current audio track.
|
||||
|
||||
The caller should free it with `gobject::ObjectExt::unref`
|
||||
<!-- impl Player::fn get_current_subtitle_track -->
|
||||
A function to get the current subtitle `PlayerSubtitleInfo` instance.
|
||||
|
||||
# Returns
|
||||
|
||||
current subtitle track.
|
||||
|
||||
The caller should free it with `gobject::ObjectExt::unref`
|
||||
<!-- impl Player::fn get_current_video_track -->
|
||||
A function to get the current video `PlayerVideoInfo` instance.
|
||||
|
||||
# Returns
|
||||
|
||||
current video track.
|
||||
|
||||
The caller should free it with `gobject::ObjectExt::unref`
|
||||
<!-- impl Player::fn get_current_visualization -->
|
||||
|
||||
# Returns
|
||||
|
||||
Name of the currently enabled visualization.
|
||||
`g_free` after usage.
|
||||
<!-- impl Player::fn get_duration -->
|
||||
Retrieves the duration of the media stream that `self` represents.
|
||||
|
||||
# Returns
|
||||
|
||||
the duration of the currently-playing media stream, in
|
||||
nanoseconds.
|
||||
<!-- impl Player::fn get_media_info -->
|
||||
A function to get the current media info `PlayerMediaInfo` instance.
|
||||
|
||||
# Returns
|
||||
|
||||
media info instance.
|
||||
|
||||
The caller should free it with `gobject::ObjectExt::unref`
|
||||
<!-- impl Player::fn get_multiview_flags -->
|
||||
Retrieve the current value of the indicated `type_`.
|
||||
|
||||
# Returns
|
||||
|
||||
The current value of `type_`, Default: 0x00000000 "none"
|
||||
<!-- impl Player::fn get_multiview_mode -->
|
||||
Retrieve the current value of the indicated `type_`.
|
||||
|
||||
# Returns
|
||||
|
||||
The current value of `type_`, Default: -1 "none"
|
||||
<!-- impl Player::fn get_mute -->
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if the currently-playing stream is muted.
|
||||
<!-- impl Player::fn get_pipeline -->
|
||||
|
||||
# Returns
|
||||
|
||||
The internal playbin instance
|
||||
<!-- impl Player::fn get_position -->
|
||||
|
||||
# Returns
|
||||
|
||||
the absolute position time, in nanoseconds, of the
|
||||
currently-playing stream.
|
||||
<!-- impl Player::fn get_rate -->
|
||||
|
||||
# Returns
|
||||
|
||||
current playback rate
|
||||
<!-- impl Player::fn get_subtitle_uri -->
|
||||
current subtitle URI
|
||||
|
||||
# Returns
|
||||
|
||||
URI of the current external subtitle.
|
||||
`g_free` after usage.
|
||||
<!-- impl Player::fn get_subtitle_video_offset -->
|
||||
Retrieve the current value of subtitle-video-offset property
|
||||
|
||||
Feature: `v1_16`
|
||||
|
||||
|
||||
# Returns
|
||||
|
||||
The current value of subtitle-video-offset in nanoseconds
|
||||
<!-- impl Player::fn get_uri -->
|
||||
Gets the URI of the currently-playing stream.
|
||||
|
||||
# Returns
|
||||
|
||||
a string containing the URI of the
|
||||
currently-playing stream. `g_free` after usage.
|
||||
<!-- impl Player::fn get_video_snapshot -->
|
||||
Get a snapshot of the currently selected video stream, if any. The format can be
|
||||
selected with `format` and optional configuration is possible with `config`
|
||||
Currently supported settings are:
|
||||
- width, height of type G_TYPE_INT
|
||||
- pixel-aspect-ratio of type GST_TYPE_FRACTION
|
||||
Except for GST_PLAYER_THUMBNAIL_RAW_NATIVE format, if no config is set, pixel-aspect-ratio would be 1/1
|
||||
## `format`
|
||||
output format of the video snapshot
|
||||
## `config`
|
||||
Additional configuration
|
||||
|
||||
# Returns
|
||||
|
||||
Current video snapshot sample or `None` on failure
|
||||
<!-- impl Player::fn get_volume -->
|
||||
Returns the current volume level, as a percentage between 0 and 1.
|
||||
|
||||
# Returns
|
||||
|
||||
the volume as percentage between 0 and 1.
|
||||
<!-- impl Player::fn has_color_balance -->
|
||||
Checks whether the `self` has color balance support available.
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if `self` has color balance support. Otherwise,
|
||||
`false`.
|
||||
<!-- impl Player::fn pause -->
|
||||
Pauses the current stream.
|
||||
<!-- impl Player::fn play -->
|
||||
Request to play the loaded stream.
|
||||
<!-- impl Player::fn seek -->
|
||||
Seeks the currently-playing stream to the absolute `position` time
|
||||
in nanoseconds.
|
||||
## `position`
|
||||
position to seek in nanoseconds
|
||||
<!-- impl Player::fn set_audio_track -->
|
||||
## `stream_index`
|
||||
stream index
|
||||
|
||||
# Returns
|
||||
|
||||
`true` or `false`
|
||||
|
||||
Sets the audio track `stream_index`.
|
||||
<!-- impl Player::fn set_audio_track_enabled -->
|
||||
Enable or disable the current audio track.
|
||||
## `enabled`
|
||||
TRUE or FALSE
|
||||
<!-- impl Player::fn set_audio_video_offset -->
|
||||
Sets audio-video-offset property by value of `offset`
|
||||
## `offset`
|
||||
`gint64` in nanoseconds
|
||||
<!-- impl Player::fn set_color_balance -->
|
||||
Sets the current value of the indicated channel `type_` to the passed
|
||||
value.
|
||||
## `type_`
|
||||
`PlayerColorBalanceType`
|
||||
## `value`
|
||||
The new value for the `type_`, ranged [0,1]
|
||||
<!-- impl Player::fn set_config -->
|
||||
Set the configuration of the player. If the player is already configured, and
|
||||
the configuration hasn't changed, this function will return `true`. If the
|
||||
player is not in the GST_PLAYER_STATE_STOPPED, this method will return `false`
|
||||
and active configuration will remain.
|
||||
|
||||
`config` is a `gst::Structure` that contains the configuration parameters for
|
||||
the player.
|
||||
|
||||
This function takes ownership of `config`.
|
||||
## `config`
|
||||
a `gst::Structure`
|
||||
|
||||
# Returns
|
||||
|
||||
`true` when the configuration could be set.
|
||||
<!-- impl Player::fn set_multiview_flags -->
|
||||
Sets the current value of the indicated mode `type_` to the passed
|
||||
value.
|
||||
## `flags`
|
||||
The new value for the `type_`
|
||||
<!-- impl Player::fn set_multiview_mode -->
|
||||
Sets the current value of the indicated mode `type_` to the passed
|
||||
value.
|
||||
## `mode`
|
||||
The new value for the `type_`
|
||||
<!-- impl Player::fn set_mute -->
|
||||
`true` if the currently-playing stream should be muted.
|
||||
## `val`
|
||||
Mute state that should be set
|
||||
<!-- impl Player::fn set_rate -->
|
||||
Playback at specified rate
|
||||
## `rate`
|
||||
playback rate
|
||||
<!-- impl Player::fn set_subtitle_track -->
|
||||
## `stream_index`
|
||||
stream index
|
||||
|
||||
# Returns
|
||||
|
||||
`true` or `false`
|
||||
|
||||
Sets the subtitle track `stream_index`.
|
||||
<!-- impl Player::fn set_subtitle_track_enabled -->
|
||||
Enable or disable the current subtitle track.
|
||||
## `enabled`
|
||||
TRUE or FALSE
|
||||
<!-- impl Player::fn set_subtitle_uri -->
|
||||
Sets the external subtitle URI. This should be combined with a call to
|
||||
gst_player_set_subtitle_track_enabled(`self`, TRUE) so the subtitles are actually
|
||||
rendered.
|
||||
## `uri`
|
||||
subtitle URI
|
||||
<!-- impl Player::fn set_subtitle_video_offset -->
|
||||
Sets subtitle-video-offset property by value of `offset`
|
||||
|
||||
Feature: `v1_16`
|
||||
|
||||
## `offset`
|
||||
`gint64` in nanoseconds
|
||||
<!-- impl Player::fn set_uri -->
|
||||
Sets the next URI to play.
|
||||
## `uri`
|
||||
next URI to play.
|
||||
<!-- impl Player::fn set_video_track -->
|
||||
## `stream_index`
|
||||
stream index
|
||||
|
||||
# Returns
|
||||
|
||||
`true` or `false`
|
||||
|
||||
Sets the video track `stream_index`.
|
||||
<!-- impl Player::fn set_video_track_enabled -->
|
||||
Enable or disable the current video track.
|
||||
## `enabled`
|
||||
TRUE or FALSE
|
||||
<!-- impl Player::fn set_visualization -->
|
||||
## `name`
|
||||
visualization element obtained from
|
||||
`Player::visualizations_get`()
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if the visualization was set correctly. Otherwise,
|
||||
`false`.
|
||||
<!-- impl Player::fn set_visualization_enabled -->
|
||||
Enable or disable the visualization.
|
||||
## `enabled`
|
||||
TRUE or FALSE
|
||||
<!-- impl Player::fn set_volume -->
|
||||
Sets the volume level of the stream as a percentage between 0 and 1.
|
||||
## `val`
|
||||
the new volume level, as a percentage between 0 and 1
|
||||
<!-- impl Player::fn stop -->
|
||||
Stops playing the current stream and resets to the first position
|
||||
in the stream.
|
||||
<!-- struct PlayerAudioInfo -->
|
||||
`PlayerStreamInfo` specific to audio streams.
|
||||
|
||||
# Implements
|
||||
|
||||
[`PlayerStreamInfoExt`](trait.PlayerStreamInfoExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- impl PlayerAudioInfo::fn get_bitrate -->
|
||||
|
||||
# Returns
|
||||
|
||||
the audio bitrate in `PlayerAudioInfo`.
|
||||
<!-- impl PlayerAudioInfo::fn get_channels -->
|
||||
|
||||
# Returns
|
||||
|
||||
the number of audio channels in `PlayerAudioInfo`.
|
||||
<!-- impl PlayerAudioInfo::fn get_language -->
|
||||
|
||||
# Returns
|
||||
|
||||
the language of the stream, or NULL if unknown.
|
||||
<!-- impl PlayerAudioInfo::fn get_max_bitrate -->
|
||||
|
||||
# Returns
|
||||
|
||||
the audio maximum bitrate in `PlayerAudioInfo`.
|
||||
<!-- impl PlayerAudioInfo::fn get_sample_rate -->
|
||||
|
||||
# Returns
|
||||
|
||||
the audio sample rate in `PlayerAudioInfo`.
|
||||
<!-- enum PlayerColorBalanceType -->
|
||||
<!-- enum PlayerColorBalanceType::variant Hue -->
|
||||
hue or color balance.
|
||||
<!-- enum PlayerColorBalanceType::variant Brightness -->
|
||||
brightness or black level.
|
||||
<!-- enum PlayerColorBalanceType::variant Saturation -->
|
||||
color saturation or chroma
|
||||
gain.
|
||||
<!-- enum PlayerColorBalanceType::variant Contrast -->
|
||||
contrast or luma gain.
|
||||
<!-- enum PlayerError -->
|
||||
<!-- enum PlayerError::variant Failed -->
|
||||
generic error.
|
||||
<!-- struct PlayerGMainContextSignalDispatcher -->
|
||||
|
||||
|
||||
# Implements
|
||||
|
||||
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html), [`PlayerSignalDispatcherExt`](trait.PlayerSignalDispatcherExt.html)
|
||||
<!-- impl PlayerGMainContextSignalDispatcher::fn new -->
|
||||
Creates a new GstPlayerSignalDispatcher that uses `application_context`,
|
||||
or the thread default one if `None` is used. See `Player::new`.
|
||||
## `application_context`
|
||||
GMainContext to use or `None`
|
||||
|
||||
# Returns
|
||||
|
||||
the new GstPlayerSignalDispatcher
|
||||
<!-- struct PlayerMediaInfo -->
|
||||
Structure containing the media information of a URI.
|
||||
|
||||
# Implements
|
||||
|
||||
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- impl PlayerMediaInfo::fn get_audio_streams -->
|
||||
|
||||
# Returns
|
||||
|
||||
A `glib::List` of
|
||||
matching `PlayerAudioInfo`.
|
||||
<!-- impl PlayerMediaInfo::fn get_container_format -->
|
||||
|
||||
# Returns
|
||||
|
||||
the container format.
|
||||
<!-- impl PlayerMediaInfo::fn get_duration -->
|
||||
|
||||
# Returns
|
||||
|
||||
duration of the media.
|
||||
<!-- impl PlayerMediaInfo::fn get_image_sample -->
|
||||
Function to get the image (or preview-image) stored in taglist.
|
||||
Application can use `gst_sample_*_()` API's to get caps, buffer etc.
|
||||
|
||||
# Returns
|
||||
|
||||
GstSample or NULL.
|
||||
<!-- impl PlayerMediaInfo::fn get_number_of_audio_streams -->
|
||||
|
||||
# Returns
|
||||
|
||||
number of audio streams.
|
||||
<!-- impl PlayerMediaInfo::fn get_number_of_streams -->
|
||||
|
||||
# Returns
|
||||
|
||||
number of total streams.
|
||||
<!-- impl PlayerMediaInfo::fn get_number_of_subtitle_streams -->
|
||||
|
||||
# Returns
|
||||
|
||||
number of subtitle streams.
|
||||
<!-- impl PlayerMediaInfo::fn get_number_of_video_streams -->
|
||||
|
||||
# Returns
|
||||
|
||||
number of video streams.
|
||||
<!-- impl PlayerMediaInfo::fn get_stream_list -->
|
||||
|
||||
# Returns
|
||||
|
||||
A `glib::List` of
|
||||
matching `PlayerStreamInfo`.
|
||||
<!-- impl PlayerMediaInfo::fn get_subtitle_streams -->
|
||||
|
||||
# Returns
|
||||
|
||||
A `glib::List` of
|
||||
matching `PlayerSubtitleInfo`.
|
||||
<!-- impl PlayerMediaInfo::fn get_tags -->
|
||||
|
||||
# Returns
|
||||
|
||||
the tags contained in media info.
|
||||
<!-- impl PlayerMediaInfo::fn get_title -->
|
||||
|
||||
# Returns
|
||||
|
||||
the media title.
|
||||
<!-- impl PlayerMediaInfo::fn get_uri -->
|
||||
|
||||
# Returns
|
||||
|
||||
the URI associated with `PlayerMediaInfo`.
|
||||
<!-- impl PlayerMediaInfo::fn get_video_streams -->
|
||||
|
||||
# Returns
|
||||
|
||||
A `glib::List` of
|
||||
matching `PlayerVideoInfo`.
|
||||
<!-- impl PlayerMediaInfo::fn is_live -->
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if the media is live.
|
||||
<!-- impl PlayerMediaInfo::fn is_seekable -->
|
||||
|
||||
# Returns
|
||||
|
||||
`true` if the media is seekable.
|
||||
<!-- struct PlayerSignalDispatcher -->
|
||||
|
||||
|
||||
# Implements
|
||||
|
||||
[`PlayerSignalDispatcherExt`](trait.PlayerSignalDispatcherExt.html)
|
||||
<!-- trait PlayerSignalDispatcherExt -->
|
||||
Trait containing all `PlayerSignalDispatcher` methods.
|
||||
|
||||
# Implementors
|
||||
|
||||
[`PlayerGMainContextSignalDispatcher`](struct.PlayerGMainContextSignalDispatcher.html), [`PlayerSignalDispatcher`](struct.PlayerSignalDispatcher.html)
|
||||
<!-- enum PlayerSnapshotFormat -->
|
||||
<!-- enum PlayerState -->
|
||||
<!-- enum PlayerState::variant Stopped -->
|
||||
the player is stopped.
|
||||
<!-- enum PlayerState::variant Buffering -->
|
||||
the player is buffering.
|
||||
<!-- enum PlayerState::variant Paused -->
|
||||
the player is paused.
|
||||
<!-- enum PlayerState::variant Playing -->
|
||||
the player is currently playing a
|
||||
stream.
|
||||
<!-- struct PlayerStreamInfo -->
|
||||
Base structure for information concerning a media stream. Depending on
|
||||
the stream type, one can find more media-specific information in
|
||||
`PlayerVideoInfo`, `PlayerAudioInfo`, `PlayerSubtitleInfo`.
|
||||
|
||||
# Implements
|
||||
|
||||
[`PlayerStreamInfoExt`](trait.PlayerStreamInfoExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- trait PlayerStreamInfoExt -->
|
||||
Trait containing all `PlayerStreamInfo` methods.
|
||||
|
||||
# Implementors
|
||||
|
||||
[`PlayerAudioInfo`](struct.PlayerAudioInfo.html), [`PlayerStreamInfo`](struct.PlayerStreamInfo.html), [`PlayerSubtitleInfo`](struct.PlayerSubtitleInfo.html), [`PlayerVideoInfo`](struct.PlayerVideoInfo.html)
|
||||
<!-- trait PlayerStreamInfoExt::fn get_caps -->
|
||||
|
||||
# Returns
|
||||
|
||||
the `gst::Caps` of the stream.
|
||||
<!-- trait PlayerStreamInfoExt::fn get_codec -->
|
||||
A string describing codec used in `PlayerStreamInfo`.
|
||||
|
||||
# Returns
|
||||
|
||||
codec string or NULL on unknown.
|
||||
<!-- trait PlayerStreamInfoExt::fn get_index -->
|
||||
Function to get stream index from `PlayerStreamInfo` instance.
|
||||
|
||||
# Returns
|
||||
|
||||
the stream index of this stream.
|
||||
<!-- trait PlayerStreamInfoExt::fn get_stream_type -->
|
||||
Function to return human readable name for the stream type
|
||||
of the given `self` (ex: "audio", "video", "subtitle")
|
||||
|
||||
# Returns
|
||||
|
||||
a human readable name
|
||||
<!-- trait PlayerStreamInfoExt::fn get_tags -->
|
||||
|
||||
# Returns
|
||||
|
||||
the tags contained in this stream.
|
||||
<!-- struct PlayerSubtitleInfo -->
|
||||
`PlayerStreamInfo` specific to subtitle streams.
|
||||
|
||||
# Implements
|
||||
|
||||
[`PlayerStreamInfoExt`](trait.PlayerStreamInfoExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- impl PlayerSubtitleInfo::fn get_language -->
|
||||
|
||||
# Returns
|
||||
|
||||
the language of the stream, or NULL if unknown.
|
||||
<!-- struct PlayerVideoInfo -->
|
||||
`PlayerStreamInfo` specific to video streams.
|
||||
|
||||
# Implements
|
||||
|
||||
[`PlayerStreamInfoExt`](trait.PlayerStreamInfoExt.html), [`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- impl PlayerVideoInfo::fn get_bitrate -->
|
||||
|
||||
# Returns
|
||||
|
||||
the current bitrate of video in `PlayerVideoInfo`.
|
||||
<!-- impl PlayerVideoInfo::fn get_framerate -->
|
||||
## `fps_n`
|
||||
Numerator of frame rate
|
||||
## `fps_d`
|
||||
Denominator of frame rate
|
||||
<!-- impl PlayerVideoInfo::fn get_height -->
|
||||
|
||||
# Returns
|
||||
|
||||
the height of video in `PlayerVideoInfo`.
|
||||
<!-- impl PlayerVideoInfo::fn get_max_bitrate -->
|
||||
|
||||
# Returns
|
||||
|
||||
the maximum bitrate of video in `PlayerVideoInfo`.
|
||||
<!-- impl PlayerVideoInfo::fn get_pixel_aspect_ratio -->
|
||||
Returns the pixel aspect ratio in `par_n` and `par_d`
|
||||
## `par_n`
|
||||
numerator
|
||||
## `par_d`
|
||||
denominator
|
||||
<!-- impl PlayerVideoInfo::fn get_width -->
|
||||
|
||||
# Returns
|
||||
|
||||
the width of video in `PlayerVideoInfo`.
|
||||
<!-- struct PlayerVideoOverlayVideoRenderer -->
|
||||
|
||||
|
||||
# Implements
|
||||
|
||||
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html), [`PlayerVideoRendererExt`](trait.PlayerVideoRendererExt.html)
|
||||
<!-- impl PlayerVideoOverlayVideoRenderer::fn new -->
|
||||
## `window_handle`
|
||||
Window handle to use or `None`
|
||||
<!-- impl PlayerVideoOverlayVideoRenderer::fn new_with_sink -->
|
||||
## `window_handle`
|
||||
Window handle to use or `None`
|
||||
## `video_sink`
|
||||
the custom video_sink element to be set for the video renderer
|
||||
<!-- impl PlayerVideoOverlayVideoRenderer::fn expose -->
|
||||
Tell an overlay that it has been exposed. This will redraw the current frame
|
||||
in the drawable even if the pipeline is PAUSED.
|
||||
<!-- impl PlayerVideoOverlayVideoRenderer::fn get_render_rectangle -->
|
||||
Return the currently configured render rectangle. See `PlayerVideoOverlayVideoRenderer::set_render_rectangle`
|
||||
for details.
|
||||
## `x`
|
||||
the horizontal offset of the render area inside the window
|
||||
## `y`
|
||||
the vertical offset of the render area inside the window
|
||||
## `width`
|
||||
the width of the render area inside the window
|
||||
## `height`
|
||||
the height of the render area inside the window
|
||||
<!-- impl PlayerVideoOverlayVideoRenderer::fn get_window_handle -->
|
||||
|
||||
# Returns
|
||||
|
||||
The currently set, platform specific window
|
||||
handle
|
||||
<!-- impl PlayerVideoOverlayVideoRenderer::fn set_render_rectangle -->
|
||||
Configure a subregion as a video target within the window set by
|
||||
`PlayerVideoOverlayVideoRenderer::set_window_handle`. If this is not
|
||||
used or not supported the video will fill the area of the window set as the
|
||||
overlay to 100%. By specifying the rectangle, the video can be overlaid to
|
||||
a specific region of that window only. After setting the new rectangle one
|
||||
should call `PlayerVideoOverlayVideoRenderer::expose` to force a
|
||||
redraw. To unset the region pass -1 for the `width` and `height` parameters.
|
||||
|
||||
This method is needed for non fullscreen video overlay in UI toolkits that
|
||||
do not support subwindows.
|
||||
## `x`
|
||||
the horizontal offset of the render area inside the window
|
||||
## `y`
|
||||
the vertical offset of the render area inside the window
|
||||
## `width`
|
||||
the width of the render area inside the window
|
||||
## `height`
|
||||
the height of the render area inside the window
|
||||
<!-- impl PlayerVideoOverlayVideoRenderer::fn set_window_handle -->
|
||||
Sets the platform specific window handle into which the video
|
||||
should be rendered
|
||||
## `window_handle`
|
||||
handle referencing to the platform specific window
|
||||
<!-- struct PlayerVideoRenderer -->
|
||||
|
||||
|
||||
# Implements
|
||||
|
||||
[`PlayerVideoRendererExt`](trait.PlayerVideoRendererExt.html)
|
||||
<!-- trait PlayerVideoRendererExt -->
|
||||
Trait containing all `PlayerVideoRenderer` methods.
|
||||
|
||||
# Implementors
|
||||
|
||||
[`PlayerVideoOverlayVideoRenderer`](struct.PlayerVideoOverlayVideoRenderer.html), [`PlayerVideoRenderer`](struct.PlayerVideoRenderer.html)
|
||||
<!-- struct PlayerVisualization -->
|
||||
A `PlayerVisualization` descriptor.
|
||||
<!-- impl PlayerVisualization::fn copy -->
|
||||
Makes a copy of the `PlayerVisualization`. The result must be
|
||||
freed using `PlayerVisualization::free`.
|
||||
|
||||
# Returns
|
||||
|
||||
an allocated copy of `self`.
|
||||
<!-- impl PlayerVisualization::fn free -->
|
||||
Frees a `PlayerVisualization`.
|
|
@ -1,172 +0,0 @@
|
|||
<!-- file * -->
|
||||
<!-- enum RTCPFBType -->
|
||||
Different types of feedback messages.
|
||||
<!-- enum RTCPFBType::variant FbTypeInvalid -->
|
||||
Invalid type
|
||||
<!-- enum RTCPFBType::variant RtpfbTypeNack -->
|
||||
Generic NACK
|
||||
<!-- enum RTCPFBType::variant RtpfbTypeTmmbr -->
|
||||
Temporary Maximum Media Stream Bit Rate Request
|
||||
<!-- enum RTCPFBType::variant RtpfbTypeTmmbn -->
|
||||
Temporary Maximum Media Stream Bit Rate
|
||||
Notification
|
||||
<!-- enum RTCPFBType::variant RtpfbTypeRtcpSrReq -->
|
||||
Request an SR packet for early
|
||||
synchronization
|
||||
<!-- enum RTCPFBType::variant PsfbTypePli -->
|
||||
Picture Loss Indication
|
||||
<!-- enum RTCPFBType::variant PsfbTypeSli -->
|
||||
Slice Loss Indication
|
||||
<!-- enum RTCPFBType::variant PsfbTypeRpsi -->
|
||||
Reference Picture Selection Indication
|
||||
<!-- enum RTCPFBType::variant PsfbTypeAfb -->
|
||||
Application layer Feedback
|
||||
<!-- enum RTCPFBType::variant PsfbTypeFir -->
|
||||
Full Intra Request Command
|
||||
<!-- enum RTCPFBType::variant PsfbTypeTstr -->
|
||||
Temporal-Spatial Trade-off Request
|
||||
<!-- enum RTCPFBType::variant PsfbTypeTstn -->
|
||||
Temporal-Spatial Trade-off Notification
|
||||
<!-- enum RTCPFBType::variant PsfbTypeVbcn -->
|
||||
Video Back Channel Message
|
||||
<!-- enum RTCPSDESType -->
|
||||
Different types of SDES content.
|
||||
<!-- enum RTCPSDESType::variant Invalid -->
|
||||
Invalid SDES entry
|
||||
<!-- enum RTCPSDESType::variant End -->
|
||||
End of SDES list
|
||||
<!-- enum RTCPSDESType::variant Cname -->
|
||||
Canonical name
|
||||
<!-- enum RTCPSDESType::variant Name -->
|
||||
User name
|
||||
<!-- enum RTCPSDESType::variant Email -->
|
||||
User's electronic mail address
|
||||
<!-- enum RTCPSDESType::variant Phone -->
|
||||
User's phone number
|
||||
<!-- enum RTCPSDESType::variant Loc -->
|
||||
Geographic user location
|
||||
<!-- enum RTCPSDESType::variant Tool -->
|
||||
Name of application or tool
|
||||
<!-- enum RTCPSDESType::variant Note -->
|
||||
Notice about the source
|
||||
<!-- enum RTCPSDESType::variant Priv -->
|
||||
Private extensions
|
||||
<!-- enum RTCPType -->
|
||||
Different RTCP packet types.
|
||||
<!-- enum RTCPType::variant Invalid -->
|
||||
Invalid type
|
||||
<!-- enum RTCPType::variant Sr -->
|
||||
Sender report
|
||||
<!-- enum RTCPType::variant Rr -->
|
||||
Receiver report
|
||||
<!-- enum RTCPType::variant Sdes -->
|
||||
Source description
|
||||
<!-- enum RTCPType::variant Bye -->
|
||||
Goodbye
|
||||
<!-- enum RTCPType::variant App -->
|
||||
Application defined
|
||||
<!-- enum RTCPType::variant Rtpfb -->
|
||||
Transport layer feedback.
|
||||
<!-- enum RTCPType::variant Psfb -->
|
||||
Payload-specific feedback.
|
||||
<!-- enum RTCPType::variant Xr -->
|
||||
Extended report.
|
||||
<!-- enum RTCPXRType -->
|
||||
Types of RTCP Extended Reports, those are defined in RFC 3611 and other RFCs
|
||||
according to the [IANA registry](https://www.iana.org/assignments/rtcp-xr-block-types/rtcp-xr-block-types.xhtml).
|
||||
<!-- enum RTCPXRType::variant Invalid -->
|
||||
Invalid XR Report Block
|
||||
<!-- enum RTCPXRType::variant Lrle -->
|
||||
Loss RLE Report Block
|
||||
<!-- enum RTCPXRType::variant Drle -->
|
||||
Duplicate RLE Report Block
|
||||
<!-- enum RTCPXRType::variant Prt -->
|
||||
Packet Receipt Times Report Block
|
||||
<!-- enum RTCPXRType::variant Rrt -->
|
||||
Receiver Reference Time Report Block
|
||||
<!-- enum RTCPXRType::variant Dlrr -->
|
||||
Delay since the last Receiver Report
|
||||
<!-- enum RTCPXRType::variant Ssumm -->
|
||||
Statistics Summary Report Block
|
||||
<!-- enum RTCPXRType::variant VoipMetrics -->
|
||||
VoIP Metrics Report Block
|
||||
|
||||
Feature: `v1_16`
|
||||
|
||||
<!-- enum RTPPayload -->
|
||||
Standard predefined fixed payload types.
|
||||
|
||||
The official list is at:
|
||||
http://www.iana.org/assignments/rtp-parameters
|
||||
|
||||
Audio:
|
||||
reserved: 19
|
||||
unassigned: 20-23,
|
||||
|
||||
Video:
|
||||
unassigned: 24, 27, 29, 30, 35-71, 77-95
|
||||
Reserved for RTCP conflict avoidance: 72-76
|
||||
<!-- enum RTPPayload::variant Pcmu -->
|
||||
ITU-T G.711. mu-law audio (RFC 3551)
|
||||
<!-- enum RTPPayload::variant 1016 -->
|
||||
RFC 3551 says reserved
|
||||
<!-- enum RTPPayload::variant G721 -->
|
||||
RFC 3551 says reserved
|
||||
<!-- enum RTPPayload::variant Gsm -->
|
||||
GSM audio
|
||||
<!-- enum RTPPayload::variant G723 -->
|
||||
ITU G.723.1 audio
|
||||
<!-- enum RTPPayload::variant Dvi48000 -->
|
||||
IMA ADPCM wave type (RFC 3551)
|
||||
<!-- enum RTPPayload::variant Dvi416000 -->
|
||||
IMA ADPCM wave type (RFC 3551)
|
||||
<!-- enum RTPPayload::variant Lpc -->
|
||||
experimental linear predictive encoding
|
||||
<!-- enum RTPPayload::variant Pcma -->
|
||||
ITU-T G.711 A-law audio (RFC 3551)
|
||||
<!-- enum RTPPayload::variant G722 -->
|
||||
ITU-T G.722 (RFC 3551)
|
||||
<!-- enum RTPPayload::variant L16Stereo -->
|
||||
stereo PCM
|
||||
<!-- enum RTPPayload::variant L16Mono -->
|
||||
mono PCM
|
||||
<!-- enum RTPPayload::variant Qcelp -->
|
||||
EIA & TIA standard IS-733
|
||||
<!-- enum RTPPayload::variant Cn -->
|
||||
Comfort Noise (RFC 3389)
|
||||
<!-- enum RTPPayload::variant Mpa -->
|
||||
Audio MPEG 1-3.
|
||||
<!-- enum RTPPayload::variant G728 -->
|
||||
ITU-T G.728 Speech coder (RFC 3551)
|
||||
<!-- enum RTPPayload::variant Dvi411025 -->
|
||||
IMA ADPCM wave type (RFC 3551)
|
||||
<!-- enum RTPPayload::variant Dvi422050 -->
|
||||
IMA ADPCM wave type (RFC 3551)
|
||||
<!-- enum RTPPayload::variant G729 -->
|
||||
ITU-T G.729 Speech coder (RFC 3551)
|
||||
<!-- enum RTPPayload::variant Cellb -->
|
||||
See RFC 2029
|
||||
<!-- enum RTPPayload::variant Jpeg -->
|
||||
ISO Standards 10918-1 and 10918-2 (RFC 2435)
|
||||
<!-- enum RTPPayload::variant Nv -->
|
||||
nv encoding by Ron Frederick
|
||||
<!-- enum RTPPayload::variant H261 -->
|
||||
ITU-T Recommendation H.261 (RFC 2032)
|
||||
<!-- enum RTPPayload::variant Mpv -->
|
||||
Video MPEG 1 & 2 (RFC 2250)
|
||||
<!-- enum RTPPayload::variant Mp2t -->
|
||||
MPEG-2 transport stream (RFC 2250)
|
||||
<!-- enum RTPPayload::variant H263 -->
|
||||
Video H263 (RFC 2190)
|
||||
<!-- enum RTPProfile -->
|
||||
The transfer profile to use.
|
||||
<!-- enum RTPProfile::variant Unknown -->
|
||||
invalid profile
|
||||
<!-- enum RTPProfile::variant Avp -->
|
||||
the Audio/Visual profile (RFC 3551)
|
||||
<!-- enum RTPProfile::variant Savp -->
|
||||
the secure Audio/Visual profile (RFC 3711)
|
||||
<!-- enum RTPProfile::variant Avpf -->
|
||||
the Audio/Visual profile with feedback (RFC 4585)
|
||||
<!-- enum RTPProfile::variant Savpf -->
|
||||
the secure Audio/Visual profile with feedback (RFC 5124)
|
File diff suppressed because it is too large
Load diff
|
@ -1,188 +0,0 @@
|
|||
<!-- file * -->
|
||||
<!-- enum RTSPAuthMethod -->
|
||||
Authentication methods, ordered by strength
|
||||
<!-- enum RTSPAuthMethod::variant None -->
|
||||
no authentication
|
||||
<!-- enum RTSPAuthMethod::variant Basic -->
|
||||
basic authentication
|
||||
<!-- enum RTSPAuthMethod::variant Digest -->
|
||||
digest authentication
|
||||
<!-- struct RTSPAuthParam -->
|
||||
RTSP Authentication parameter
|
||||
|
||||
Feature: `v1_12`
|
||||
<!-- enum RTSPFamily -->
|
||||
The possible network families.
|
||||
<!-- enum RTSPFamily::variant None -->
|
||||
unknown network family
|
||||
<!-- enum RTSPFamily::variant Inet -->
|
||||
internet
|
||||
<!-- enum RTSPFamily::variant Inet6 -->
|
||||
internet V6
|
||||
<!-- enum RTSPHeaderField -->
|
||||
Enumeration of rtsp header fields
|
||||
<!-- enum RTSPMsgType -->
|
||||
The type of a message.
|
||||
<!-- enum RTSPMsgType::variant Invalid -->
|
||||
invalid message type
|
||||
<!-- enum RTSPMsgType::variant Request -->
|
||||
RTSP request message
|
||||
<!-- enum RTSPMsgType::variant Response -->
|
||||
RTSP response message
|
||||
<!-- enum RTSPMsgType::variant HttpRequest -->
|
||||
HTTP request message.
|
||||
<!-- enum RTSPMsgType::variant HttpResponse -->
|
||||
HTTP response message.
|
||||
<!-- enum RTSPMsgType::variant Data -->
|
||||
data message
|
||||
<!-- enum RTSPRangeUnit -->
|
||||
Different possible time range units.
|
||||
<!-- enum RTSPRangeUnit::variant Smpte -->
|
||||
SMPTE timecode
|
||||
<!-- enum RTSPRangeUnit::variant Smpte30Drop -->
|
||||
29.97 frames per second
|
||||
<!-- enum RTSPRangeUnit::variant Smpte25 -->
|
||||
25 frames per second
|
||||
<!-- enum RTSPRangeUnit::variant Npt -->
|
||||
Normal play time
|
||||
<!-- enum RTSPRangeUnit::variant Clock -->
|
||||
Absolute time expressed as ISO 8601 timestamps
|
||||
<!-- enum RTSPResult -->
|
||||
Result codes from the RTSP functions.
|
||||
<!-- enum RTSPResult::variant Ok -->
|
||||
no error
|
||||
<!-- enum RTSPResult::variant Error -->
|
||||
some unspecified error occurred
|
||||
<!-- enum RTSPResult::variant Einval -->
|
||||
invalid arguments were provided to a function
|
||||
<!-- enum RTSPResult::variant Eintr -->
|
||||
an operation was canceled
|
||||
<!-- enum RTSPResult::variant Enomem -->
|
||||
no memory was available for the operation
|
||||
<!-- enum RTSPResult::variant Eresolv -->
|
||||
a host resolve error occurred
|
||||
<!-- enum RTSPResult::variant Enotimpl -->
|
||||
function not implemented
|
||||
<!-- enum RTSPResult::variant Esys -->
|
||||
a system error occurred, errno contains more details
|
||||
<!-- enum RTSPResult::variant Eparse -->
|
||||
a parsing error occurred
|
||||
<!-- enum RTSPResult::variant Ewsastart -->
|
||||
windows networking could not start
|
||||
<!-- enum RTSPResult::variant Ewsaversion -->
|
||||
windows networking stack has wrong version
|
||||
<!-- enum RTSPResult::variant Eeof -->
|
||||
end-of-file was reached
|
||||
<!-- enum RTSPResult::variant Enet -->
|
||||
a network problem occurred, h_errno contains more details
|
||||
<!-- enum RTSPResult::variant Enotip -->
|
||||
the host is not an IP host
|
||||
<!-- enum RTSPResult::variant Etimeout -->
|
||||
a timeout occurred
|
||||
<!-- enum RTSPResult::variant Etget -->
|
||||
the tunnel GET request has been performed
|
||||
<!-- enum RTSPResult::variant Etpost -->
|
||||
the tunnel POST request has been performed
|
||||
<!-- enum RTSPResult::variant Elast -->
|
||||
last error
|
||||
<!-- enum RTSPState -->
|
||||
The different RTSP states.
|
||||
<!-- enum RTSPState::variant Invalid -->
|
||||
invalid state
|
||||
<!-- enum RTSPState::variant Init -->
|
||||
initializing
|
||||
<!-- enum RTSPState::variant Ready -->
|
||||
ready for operation
|
||||
<!-- enum RTSPState::variant Seeking -->
|
||||
seeking in progress
|
||||
<!-- enum RTSPState::variant Playing -->
|
||||
playing
|
||||
<!-- enum RTSPState::variant Recording -->
|
||||
recording
|
||||
<!-- enum RTSPStatusCode -->
|
||||
Enumeration of rtsp status codes
|
||||
<!-- enum RTSPTimeType -->
|
||||
Possible time types.
|
||||
<!-- enum RTSPTimeType::variant Seconds -->
|
||||
seconds
|
||||
<!-- enum RTSPTimeType::variant Now -->
|
||||
now
|
||||
<!-- enum RTSPTimeType::variant End -->
|
||||
end
|
||||
<!-- enum RTSPTimeType::variant Frames -->
|
||||
frames and subframes
|
||||
<!-- enum RTSPTimeType::variant Utc -->
|
||||
UTC time
|
||||
<!-- struct RTSPUrl -->
|
||||
Provides helper functions to handle RTSP urls.
|
||||
<!-- impl RTSPUrl::fn copy -->
|
||||
Make a copy of `self`.
|
||||
|
||||
# Returns
|
||||
|
||||
a copy of `self`. Free with gst_rtsp_url_free () after usage.
|
||||
<!-- impl RTSPUrl::fn decode_path_components -->
|
||||
Splits the path of `self` on '/' boundaries, decoding the resulting components.
|
||||
|
||||
The decoding performed by this routine is "URI decoding", as defined in RFC
|
||||
3986, commonly known as percent-decoding. For example, a string "foo\%2fbar"
|
||||
will decode to "foo/bar" -- the \%2f being replaced by the corresponding byte
|
||||
with hex value 0x2f. Note that there is no guarantee that the resulting byte
|
||||
sequence is valid in any given encoding. As a special case, \%00 is not
|
||||
unescaped to NUL, as that would prematurely terminate the string.
|
||||
|
||||
Also note that since paths usually start with a slash, the first component
|
||||
will usually be the empty string.
|
||||
|
||||
# Returns
|
||||
|
||||
`None`-terminated array of URL components. Free with
|
||||
`g_strfreev` when no longer needed.
|
||||
<!-- impl RTSPUrl::fn free -->
|
||||
Free the memory used by `self`.
|
||||
<!-- impl RTSPUrl::fn get_port -->
|
||||
Get the port number of `self`.
|
||||
## `port`
|
||||
location to hold the port
|
||||
|
||||
# Returns
|
||||
|
||||
`RTSPResult::Ok`.
|
||||
<!-- impl RTSPUrl::fn get_request_uri -->
|
||||
Get a newly allocated string describing the request URI for `self`.
|
||||
|
||||
# Returns
|
||||
|
||||
a string with the request URI. `g_free` after usage.
|
||||
<!-- impl RTSPUrl::fn get_request_uri_with_control -->
|
||||
Get a newly allocated string describing the request URI for `self`
|
||||
combined with the control path for `control_path`
|
||||
|
||||
Feature: `v1_18`
|
||||
|
||||
## `control_path`
|
||||
an RTSP aggregate control path
|
||||
|
||||
# Returns
|
||||
|
||||
a string with the request URI combined with the control path.
|
||||
`g_free` after usage.
|
||||
<!-- impl RTSPUrl::fn set_port -->
|
||||
Set the port number in `self` to `port`.
|
||||
## `port`
|
||||
the port
|
||||
|
||||
# Returns
|
||||
|
||||
`RTSPResult::Ok`.
|
||||
<!-- impl RTSPUrl::fn parse -->
|
||||
Parse the RTSP `urlstr` into a newly allocated `RTSPUrl`. Free after usage
|
||||
with `RTSPUrl::free`.
|
||||
## `urlstr`
|
||||
the url string to parse
|
||||
## `url`
|
||||
location to hold the result.
|
||||
|
||||
# Returns
|
||||
|
||||
a `RTSPResult`.
|
|
@ -1 +0,0 @@
|
|||
<!-- file * -->
|
File diff suppressed because it is too large
Load diff
|
@ -1,326 +0,0 @@
|
|||
<!-- file * -->
|
||||
<!-- enum WebRTCBundlePolicy -->
|
||||
GST_WEBRTC_BUNDLE_POLICY_NONE: none
|
||||
GST_WEBRTC_BUNDLE_POLICY_BALANCED: balanced
|
||||
GST_WEBRTC_BUNDLE_POLICY_MAX_COMPAT: max-compat
|
||||
GST_WEBRTC_BUNDLE_POLICY_MAX_BUNDLE: max-bundle
|
||||
See https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-24`section`-4.1.1
|
||||
for more information.
|
||||
|
||||
Feature: `v1_16`
|
||||
|
||||
<!-- enum WebRTCDTLSSetup -->
|
||||
<!-- enum WebRTCDTLSSetup::variant None -->
|
||||
none
|
||||
<!-- enum WebRTCDTLSSetup::variant Actpass -->
|
||||
actpass
|
||||
<!-- enum WebRTCDTLSSetup::variant Active -->
|
||||
sendonly
|
||||
<!-- enum WebRTCDTLSSetup::variant Passive -->
|
||||
recvonly
|
||||
<!-- struct WebRTCDTLSTransport -->
|
||||
|
||||
|
||||
# Implements
|
||||
|
||||
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- enum WebRTCDTLSTransportState -->
|
||||
<!-- enum WebRTCDTLSTransportState::variant New -->
|
||||
new
|
||||
<!-- enum WebRTCDTLSTransportState::variant Closed -->
|
||||
closed
|
||||
<!-- enum WebRTCDTLSTransportState::variant Failed -->
|
||||
failed
|
||||
<!-- enum WebRTCDTLSTransportState::variant Connecting -->
|
||||
connecting
|
||||
<!-- enum WebRTCDTLSTransportState::variant Connected -->
|
||||
connected
|
||||
<!-- struct WebRTCDataChannel -->
|
||||
|
||||
|
||||
Feature: `v1_18`
|
||||
|
||||
# Implements
|
||||
|
||||
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- impl WebRTCDataChannel::fn close -->
|
||||
Close the `self`.
|
||||
|
||||
Feature: `v1_18`
|
||||
|
||||
<!-- impl WebRTCDataChannel::fn on_buffered_amount_low -->
|
||||
Signal that the data channel reached a low buffered amount. Should only be used by subclasses.
|
||||
|
||||
Feature: `v1_18`
|
||||
|
||||
<!-- impl WebRTCDataChannel::fn on_close -->
|
||||
Signal that the data channel was closed. Should only be used by subclasses.
|
||||
|
||||
Feature: `v1_18`
|
||||
|
||||
<!-- impl WebRTCDataChannel::fn on_error -->
|
||||
Signal that the data channel had an error. Should only be used by subclasses.
|
||||
|
||||
Feature: `v1_18`
|
||||
|
||||
## `error`
|
||||
a `glib::Error`
|
||||
<!-- impl WebRTCDataChannel::fn on_message_data -->
|
||||
Signal that the data channel received a data message. Should only be used by subclasses.
|
||||
|
||||
Feature: `v1_18`
|
||||
|
||||
## `data`
|
||||
a `glib::Bytes` or `None`
|
||||
<!-- impl WebRTCDataChannel::fn on_message_string -->
|
||||
Signal that the data channel received a string message. Should only be used by subclasses.
|
||||
|
||||
Feature: `v1_18`
|
||||
|
||||
## `str`
|
||||
a string or `None`
|
||||
<!-- impl WebRTCDataChannel::fn on_open -->
|
||||
Signal that the data channel was opened. Should only be used by subclasses.
|
||||
|
||||
Feature: `v1_18`
|
||||
|
||||
<!-- impl WebRTCDataChannel::fn send_data -->
|
||||
Send `data` as a data message over `self`.
|
||||
|
||||
Feature: `v1_18`
|
||||
|
||||
## `data`
|
||||
a `glib::Bytes` or `None`
|
||||
<!-- impl WebRTCDataChannel::fn send_string -->
|
||||
Send `str` as a string message over `self`.
|
||||
|
||||
Feature: `v1_18`
|
||||
|
||||
## `str`
|
||||
a string or `None`
|
||||
<!-- impl WebRTCDataChannel::fn connect_close -->
|
||||
Close the data channel
|
||||
<!-- impl WebRTCDataChannel::fn connect_on_error -->
|
||||
## `error`
|
||||
the `glib::Error` thrown
|
||||
<!-- impl WebRTCDataChannel::fn connect_on_message_data -->
|
||||
## `data`
|
||||
a `glib::Bytes` of the data received
|
||||
<!-- impl WebRTCDataChannel::fn connect_on_message_string -->
|
||||
## `data`
|
||||
the data received as a string
|
||||
<!-- impl WebRTCDataChannel::fn connect_send_data -->
|
||||
## `data`
|
||||
a `glib::Bytes` with the data
|
||||
<!-- impl WebRTCDataChannel::fn connect_send_string -->
|
||||
## `data`
|
||||
the data to send as a string
|
||||
<!-- enum WebRTCDataChannelState -->
|
||||
GST_WEBRTC_DATA_CHANNEL_STATE_NEW: new
|
||||
GST_WEBRTC_DATA_CHANNEL_STATE_CONNECTING: connection
|
||||
GST_WEBRTC_DATA_CHANNEL_STATE_OPEN: open
|
||||
GST_WEBRTC_DATA_CHANNEL_STATE_CLOSING: closing
|
||||
GST_WEBRTC_DATA_CHANNEL_STATE_CLOSED: closed
|
||||
See <http://w3c.github.io/webrtc-pc/`dom`-rtcdatachannelstate>
|
||||
|
||||
Feature: `v1_16`
|
||||
|
||||
<!-- enum WebRTCFECType -->
|
||||
<!-- enum WebRTCFECType::variant None -->
|
||||
none
|
||||
<!-- enum WebRTCFECType::variant UlpRed -->
|
||||
ulpfec + red
|
||||
|
||||
Feature: `v1_14_1`
|
||||
|
||||
<!-- enum WebRTCICEComponent -->
|
||||
<!-- enum WebRTCICEComponent::variant Rtp -->
|
||||
RTP component
|
||||
<!-- enum WebRTCICEComponent::variant Rtcp -->
|
||||
RTCP component
|
||||
<!-- enum WebRTCICEConnectionState -->
|
||||
See <http://w3c.github.io/webrtc-pc/`dom`-rtciceconnectionstate>
|
||||
<!-- enum WebRTCICEConnectionState::variant New -->
|
||||
new
|
||||
<!-- enum WebRTCICEConnectionState::variant Checking -->
|
||||
checking
|
||||
<!-- enum WebRTCICEConnectionState::variant Connected -->
|
||||
connected
|
||||
<!-- enum WebRTCICEConnectionState::variant Completed -->
|
||||
completed
|
||||
<!-- enum WebRTCICEConnectionState::variant Failed -->
|
||||
failed
|
||||
<!-- enum WebRTCICEConnectionState::variant Disconnected -->
|
||||
disconnected
|
||||
<!-- enum WebRTCICEConnectionState::variant Closed -->
|
||||
closed
|
||||
<!-- enum WebRTCICEGatheringState -->
|
||||
See <http://w3c.github.io/webrtc-pc/`dom`-rtcicegatheringstate>
|
||||
<!-- enum WebRTCICEGatheringState::variant New -->
|
||||
new
|
||||
<!-- enum WebRTCICEGatheringState::variant Gathering -->
|
||||
gathering
|
||||
<!-- enum WebRTCICEGatheringState::variant Complete -->
|
||||
complete
|
||||
<!-- enum WebRTCICERole -->
|
||||
<!-- enum WebRTCICERole::variant Controlled -->
|
||||
controlled
|
||||
<!-- enum WebRTCICERole::variant Controlling -->
|
||||
controlling
|
||||
<!-- struct WebRTCICETransport -->
|
||||
|
||||
|
||||
# Implements
|
||||
|
||||
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- enum WebRTCICETransportPolicy -->
|
||||
GST_WEBRTC_ICE_TRANSPORT_POLICY_ALL: all
|
||||
GST_WEBRTC_ICE_TRANSPORT_POLICY_RELAY: relay
|
||||
See https://tools.ietf.org/html/draft-ietf-rtcweb-jsep-24`section`-4.1.1
|
||||
for more information.
|
||||
|
||||
Feature: `v1_16`
|
||||
|
||||
<!-- enum WebRTCPeerConnectionState -->
|
||||
See <http://w3c.github.io/webrtc-pc/`dom`-rtcpeerconnectionstate>
|
||||
<!-- enum WebRTCPeerConnectionState::variant New -->
|
||||
new
|
||||
<!-- enum WebRTCPeerConnectionState::variant Connecting -->
|
||||
connecting
|
||||
<!-- enum WebRTCPeerConnectionState::variant Connected -->
|
||||
connected
|
||||
<!-- enum WebRTCPeerConnectionState::variant Disconnected -->
|
||||
disconnected
|
||||
<!-- enum WebRTCPeerConnectionState::variant Failed -->
|
||||
failed
|
||||
<!-- enum WebRTCPeerConnectionState::variant Closed -->
|
||||
closed
|
||||
<!-- enum WebRTCPriorityType -->
|
||||
GST_WEBRTC_PRIORITY_TYPE_VERY_LOW: very-low
|
||||
GST_WEBRTC_PRIORITY_TYPE_LOW: low
|
||||
GST_WEBRTC_PRIORITY_TYPE_MEDIUM: medium
|
||||
GST_WEBRTC_PRIORITY_TYPE_HIGH: high
|
||||
See <http://w3c.github.io/webrtc-pc/`dom`-rtcprioritytype>
|
||||
|
||||
Feature: `v1_16`
|
||||
|
||||
<!-- struct WebRTCRTPReceiver -->
|
||||
|
||||
|
||||
# Implements
|
||||
|
||||
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- struct WebRTCRTPSender -->
|
||||
|
||||
|
||||
# Implements
|
||||
|
||||
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- struct WebRTCRTPTransceiver -->
|
||||
|
||||
|
||||
# Implements
|
||||
|
||||
[`glib::object::ObjectExt`](../glib/object/trait.ObjectExt.html)
|
||||
<!-- impl WebRTCRTPTransceiver::fn get_property_direction -->
|
||||
Direction of the transceiver.
|
||||
|
||||
Feature: `v1_18`
|
||||
|
||||
<!-- impl WebRTCRTPTransceiver::fn set_property_direction -->
|
||||
Direction of the transceiver.
|
||||
|
||||
Feature: `v1_18`
|
||||
|
||||
<!-- enum WebRTCRTPTransceiverDirection -->
|
||||
<!-- enum WebRTCRTPTransceiverDirection::variant None -->
|
||||
none
|
||||
<!-- enum WebRTCRTPTransceiverDirection::variant Inactive -->
|
||||
inactive
|
||||
<!-- enum WebRTCRTPTransceiverDirection::variant Sendonly -->
|
||||
sendonly
|
||||
<!-- enum WebRTCRTPTransceiverDirection::variant Recvonly -->
|
||||
recvonly
|
||||
<!-- enum WebRTCRTPTransceiverDirection::variant Sendrecv -->
|
||||
sendrecv
|
||||
<!-- enum WebRTCSCTPTransportState -->
|
||||
GST_WEBRTC_SCTP_TRANSPORT_STATE_NEW: new
|
||||
GST_WEBRTC_SCTP_TRANSPORT_STATE_CONNECTING: connecting
|
||||
GST_WEBRTC_SCTP_TRANSPORT_STATE_CONNECTED: connected
|
||||
GST_WEBRTC_SCTP_TRANSPORT_STATE_CLOSED: closed
|
||||
See <http://w3c.github.io/webrtc-pc/`dom`-rtcsctptransportstate>
|
||||
|
||||
Feature: `v1_16`
|
||||
|
||||
<!-- enum WebRTCSDPType -->
|
||||
See <http://w3c.github.io/webrtc-pc/`rtcsdptype`>
|
||||
<!-- enum WebRTCSDPType::variant Offer -->
|
||||
offer
|
||||
<!-- enum WebRTCSDPType::variant Pranswer -->
|
||||
pranswer
|
||||
<!-- enum WebRTCSDPType::variant Answer -->
|
||||
answer
|
||||
<!-- enum WebRTCSDPType::variant Rollback -->
|
||||
rollback
|
||||
<!-- struct WebRTCSessionDescription -->
|
||||
See <https://www.w3.org/TR/webrtc/`rtcsessiondescription`-class>
|
||||
<!-- impl WebRTCSessionDescription::fn new -->
|
||||
## `type_`
|
||||
a `WebRTCSDPType`
|
||||
## `sdp`
|
||||
a `gst_sdp::SDPMessage`
|
||||
|
||||
# Returns
|
||||
|
||||
a new `WebRTCSessionDescription` from `type_`
|
||||
and `sdp`
|
||||
<!-- impl WebRTCSessionDescription::fn copy -->
|
||||
|
||||
# Returns
|
||||
|
||||
a new copy of `self`
|
||||
<!-- impl WebRTCSessionDescription::fn free -->
|
||||
Free `self` and all associated resources
|
||||
<!-- enum WebRTCSignalingState -->
|
||||
See <http://w3c.github.io/webrtc-pc/`dom`-rtcsignalingstate>
|
||||
<!-- enum WebRTCSignalingState::variant Stable -->
|
||||
stable
|
||||
<!-- enum WebRTCSignalingState::variant Closed -->
|
||||
closed
|
||||
<!-- enum WebRTCSignalingState::variant HaveLocalOffer -->
|
||||
have-local-offer
|
||||
<!-- enum WebRTCSignalingState::variant HaveRemoteOffer -->
|
||||
have-remote-offer
|
||||
<!-- enum WebRTCSignalingState::variant HaveLocalPranswer -->
|
||||
have-local-pranswer
|
||||
<!-- enum WebRTCSignalingState::variant HaveRemotePranswer -->
|
||||
have-remote-pranswer
|
||||
<!-- enum WebRTCStatsType -->
|
||||
<!-- enum WebRTCStatsType::variant Codec -->
|
||||
codec
|
||||
<!-- enum WebRTCStatsType::variant InboundRtp -->
|
||||
inbound-rtp
|
||||
<!-- enum WebRTCStatsType::variant OutboundRtp -->
|
||||
outbound-rtp
|
||||
<!-- enum WebRTCStatsType::variant RemoteInboundRtp -->
|
||||
remote-inbound-rtp
|
||||
<!-- enum WebRTCStatsType::variant RemoteOutboundRtp -->
|
||||
remote-outbound-rtp
|
||||
<!-- enum WebRTCStatsType::variant Csrc -->
|
||||
csrc
|
||||
<!-- enum WebRTCStatsType::variant PeerConnection -->
|
||||
peer-connectiion
|
||||
<!-- enum WebRTCStatsType::variant DataChannel -->
|
||||
data-channel
|
||||
<!-- enum WebRTCStatsType::variant Stream -->
|
||||
stream
|
||||
<!-- enum WebRTCStatsType::variant Transport -->
|
||||
transport
|
||||
<!-- enum WebRTCStatsType::variant CandidatePair -->
|
||||
candidate-pair
|
||||
<!-- enum WebRTCStatsType::variant LocalCandidate -->
|
||||
local-candidate
|
||||
<!-- enum WebRTCStatsType::variant RemoteCandidate -->
|
||||
remote-candidate
|
||||
<!-- enum WebRTCStatsType::variant Certificate -->
|
||||
certificate
|
15566
docs/gstreamer/docs.md
15566
docs/gstreamer/docs.md
File diff suppressed because it is too large
Load diff
|
@ -1,88 +0,0 @@
|
|||
extern crate stripper_lib;
|
||||
|
||||
use std::io;
|
||||
use std::path::Path;
|
||||
use stripper_lib::{loop_over_files, parse_cmts, regenerate_comments, strip_comments};
|
||||
|
||||
#[derive(Clone, Copy, Debug)]
|
||||
pub enum Library {
|
||||
GstWebRTC,
|
||||
GstVideo,
|
||||
GstSdp,
|
||||
GstRtspServer,
|
||||
GstRtsp,
|
||||
GstRtp,
|
||||
GstPlayer,
|
||||
GstNet,
|
||||
GstGL,
|
||||
GES,
|
||||
GstCheck,
|
||||
GstPbutils,
|
||||
GstBase,
|
||||
GstAudio,
|
||||
GstApp,
|
||||
Gst,
|
||||
}
|
||||
|
||||
fn docs(lib: Library) -> Option<&'static str> {
|
||||
match lib {
|
||||
Library::GstWebRTC => Some(include_str!("../gstreamer-webrtc/docs.md")),
|
||||
Library::GstVideo => Some(include_str!("../gstreamer-video/docs.md")),
|
||||
Library::GstSdp => Some(include_str!("../gstreamer-sdp/docs.md")),
|
||||
Library::GstRtspServer => Some(include_str!("../gstreamer-rtsp-server/docs.md")),
|
||||
Library::GstRtsp => Some(include_str!("../gstreamer-rtsp/docs.md")),
|
||||
Library::GstRtp => Some(include_str!("../gstreamer-rtp/docs.md")),
|
||||
Library::GstPlayer => Some(include_str!("../gstreamer-player/docs.md")),
|
||||
Library::GstNet => Some(include_str!("../gstreamer-net/docs.md")),
|
||||
Library::GstGL => Some(include_str!("../gstreamer-gl/docs.md")),
|
||||
Library::GES => Some(include_str!("../gstreamer-editing-services/docs.md")),
|
||||
Library::GstCheck => Some(include_str!("../gstreamer-check/docs.md")),
|
||||
Library::GstPbutils => Some(include_str!("../gstreamer-pbutils/docs.md")),
|
||||
Library::GstBase => Some(include_str!("../gstreamer-base/docs.md")),
|
||||
Library::GstAudio => Some(include_str!("../gstreamer-audio/docs.md")),
|
||||
Library::GstApp => Some(include_str!("../gstreamer-app/docs.md")),
|
||||
Library::Gst => Some(include_str!("../gstreamer/docs.md")),
|
||||
}
|
||||
}
|
||||
|
||||
fn vendor_docs(_lib: Library) -> Option<&'static str> {
|
||||
None
|
||||
}
|
||||
|
||||
/// Embeds the docs.
|
||||
///
|
||||
/// `path` is the root directory to process.
|
||||
///
|
||||
/// `ignores` is the list of files to skip (relative to `path`).
|
||||
pub fn embed<P: AsRef<Path>>(library: Library, path: P, ignores: &[&str]) {
|
||||
if let Some(docs) = docs(library) {
|
||||
do_embed(docs, path.as_ref(), ignores);
|
||||
}
|
||||
if let Some(docs) = vendor_docs(library) {
|
||||
do_embed(docs, path.as_ref(), ignores);
|
||||
}
|
||||
}
|
||||
|
||||
fn do_embed(docs: &str, path: &Path, ignores: &[&str]) {
|
||||
let mut infos = parse_cmts(docs.lines(), true);
|
||||
loop_over_files(
|
||||
path,
|
||||
&mut |w, s| regenerate_comments(w, s, &mut infos, true, true),
|
||||
&ignores,
|
||||
false,
|
||||
);
|
||||
}
|
||||
|
||||
/// Remove any doc comments.
|
||||
///
|
||||
/// `path` is the root directory to process.
|
||||
///
|
||||
/// `ignores` is the list of files to skip (relative to `path`).
|
||||
pub fn purge<P: AsRef<Path>>(path: P, ignores: &[&str]) {
|
||||
loop_over_files(
|
||||
path.as_ref(),
|
||||
&mut |w, s| strip_comments(w, s, &mut io::sink(), true),
|
||||
&ignores,
|
||||
false,
|
||||
);
|
||||
}
|
|
@ -1,60 +1,73 @@
|
|||
[package]
|
||||
name = "examples"
|
||||
version = "0.16.0"
|
||||
version.workspace = true
|
||||
license = "MIT"
|
||||
authors = ["Sebastian Dröge <sebastian@centricular.com>"]
|
||||
edition = "2018"
|
||||
edition.workspace = true
|
||||
rust-version.workspace = true
|
||||
|
||||
[dependencies]
|
||||
glib = { git = "https://github.com/gtk-rs/glib" }
|
||||
gstreamer = { path = "../gstreamer" }
|
||||
gstreamer-gl = { path = "../gstreamer-gl", optional = true }
|
||||
gstreamer-app = { path = "../gstreamer-app" }
|
||||
gstreamer-audio = { path = "../gstreamer-audio" }
|
||||
gstreamer-base = { path = "../gstreamer-base" }
|
||||
gstreamer-video = { path = "../gstreamer-video" }
|
||||
gstreamer-pbutils = { path = "../gstreamer-pbutils" }
|
||||
gstreamer-player = { path = "../gstreamer-player", optional = true }
|
||||
gstreamer-editing-services = { path = "../gstreamer-editing-services", optional = true }
|
||||
gstreamer-sdp = { path = "../gstreamer-sdp", optional = true }
|
||||
gstreamer-rtsp = { path = "../gstreamer-rtsp", optional = true }
|
||||
gstreamer-rtsp-server = { path = "../gstreamer-rtsp-server", optional = true }
|
||||
gstreamer-rtsp-server-sys = { git = "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs-sys", features = ["v1_8"], optional = true }
|
||||
gtk = { git = "https://github.com/gtk-rs/gtk", optional = true }
|
||||
gdk = { git = "https://github.com/gtk-rs/gdk", optional = true }
|
||||
gio = { git = "https://github.com/gtk-rs/gio", optional = true }
|
||||
glib.workspace = true
|
||||
gst.workspace = true
|
||||
gst-gl = { workspace = true, optional = true }
|
||||
gst-gl-egl = { workspace = true, optional = true }
|
||||
gst-gl-x11 = { workspace = true, optional = true }
|
||||
gst-app.workspace = true
|
||||
gst-audio.workspace = true
|
||||
gst-base.workspace = true
|
||||
gst-video.workspace = true
|
||||
gst-pbutils.workspace = true
|
||||
gst-play = { workspace = true, optional = true }
|
||||
gst-player = { workspace = true, optional = true }
|
||||
ges = { workspace = true, optional = true }
|
||||
gst-sdp = { workspace = true, optional = true }
|
||||
gst-rtsp = { workspace = true, optional = true }
|
||||
gst-rtsp-server = { workspace = true, optional = true }
|
||||
gst-allocators = { workspace = true, optional = true }
|
||||
gio = { workspace = true, optional = true }
|
||||
anyhow = "1.0"
|
||||
byte-slice-cast = "1"
|
||||
cairo-rs = { workspace = true, features=["use_glib"], optional = true }
|
||||
derive_more = "0.99.5"
|
||||
futures = "0.3"
|
||||
byte-slice-cast = "0.3"
|
||||
cairo-rs = { git = "https://github.com/gtk-rs/cairo", features=["use_glib"], optional = true }
|
||||
cairo-sys-rs = { git = "https://github.com/gtk-rs/cairo", features=["use_glib"], optional = true }
|
||||
pango = { git = "https://github.com/gtk-rs/pango", optional = true }
|
||||
pangocairo = { git = "https://github.com/gtk-rs/pangocairo", optional = true }
|
||||
glutin = { version = "0.21", optional = true }
|
||||
winit = { version = "0.19", optional = true }
|
||||
once_cell = "1.0"
|
||||
glutin = { version = "0.31", optional = true, default-features = false }
|
||||
glutin-winit = { version = "0.4", optional = true, default-features = false }
|
||||
image = { version = "0.24", optional = true, default-features = false, features = ["png", "jpeg"] }
|
||||
memfd = { version = "0.6", optional = true }
|
||||
memmap2 = { version = "0.9", optional = true }
|
||||
pango = { workspace = true, optional = true }
|
||||
pangocairo = { workspace = true, optional = true }
|
||||
raw-window-handle = { version = "0.5", optional = true }
|
||||
uds = { version = "0.4", optional = true }
|
||||
winit = { version = "0.29", optional = true, default-features = false, features = ["rwh_05"] }
|
||||
atomic_refcell = "0.1"
|
||||
data-encoding = "2.0"
|
||||
once_cell = "1"
|
||||
|
||||
[target.'cfg(windows)'.dependencies]
|
||||
windows = { version = "0.56", features=["Win32_Graphics_Direct3D11",
|
||||
"Win32_Foundation", "Win32_Graphics_Direct3D", "Win32_Graphics_Dxgi",
|
||||
"Win32_Graphics_Dxgi_Common", "Win32_Graphics_Direct2D",
|
||||
"Win32_Graphics_Direct2D_Common", "Win32_Graphics_DirectWrite",
|
||||
"Win32_Graphics_Imaging", "Win32_System_Com", "Foundation_Numerics"], optional = true }
|
||||
|
||||
[target.'cfg(target_os = "macos")'.dependencies]
|
||||
cocoa = "0.25"
|
||||
objc = "0.2.7"
|
||||
|
||||
[build-dependencies]
|
||||
gl_generator = { version = "0.14", optional = true }
|
||||
|
||||
[features]
|
||||
default = []
|
||||
gst-player = ["gstreamer-player"]
|
||||
ges = ["gstreamer-editing-services"]
|
||||
gtksink = ["gtk", "gio"]
|
||||
gtkvideooverlay = ["gtk", "gdk", "gio"]
|
||||
gtkvideooverlay-x11 = ["gtkvideooverlay"]
|
||||
gtkvideooverlay-quartz = ["gtkvideooverlay"]
|
||||
gst-rtsp-server = ["gstreamer-rtsp-server", "gstreamer-rtsp", "gstreamer-sdp"]
|
||||
gst-rtsp-server-record = ["gstreamer-rtsp-server-sys", "gstreamer-rtsp-server", "gstreamer-rtsp", "gio"]
|
||||
v1_10 = ["gstreamer/v1_10"]
|
||||
rtsp-server = ["gst-rtsp-server", "gst-rtsp", "gst-sdp"]
|
||||
rtsp-server-record = ["gst-rtsp-server", "gst-rtsp", "gio"]
|
||||
pango-cairo = ["pango", "pangocairo", "cairo-rs"]
|
||||
overlay-composition = ["pango", "pangocairo", "cairo-rs", "cairo-sys-rs" ]
|
||||
gl = ["gstreamer-gl", "gl_generator", "glutin"]
|
||||
gl-egl = ["gstreamer-gl/egl"]
|
||||
gl-x11 = ["gstreamer-gl/x11"]
|
||||
gl-wayland = ["gstreamer-gl/wayland"]
|
||||
overlay-composition = ["pango", "pangocairo", "cairo-rs"]
|
||||
gl = ["dep:gst-gl", "dep:gl_generator", "dep:glutin", "dep:glutin-winit", "dep:winit", "dep:raw-window-handle"]
|
||||
gst-gl-x11 = ["dep:gst-gl-x11", "glutin-winit?/glx"] # glx turns on x11
|
||||
gst-gl-egl = ["dep:gst-gl-egl", "glutin-winit?/egl", "glutin-winit?/x11", "glutin-winit?/wayland"] # Use X11 or Wayland via EGL
|
||||
allocators = ["gst-allocators", "memmap2", "memfd", "uds"]
|
||||
|
||||
[[bin]]
|
||||
name = "appsink"
|
||||
|
@ -64,7 +77,6 @@ name = "appsrc"
|
|||
|
||||
[[bin]]
|
||||
name = "custom_events"
|
||||
required-features = ["v1_10"]
|
||||
|
||||
[[bin]]
|
||||
name = "custom_meta"
|
||||
|
@ -72,20 +84,15 @@ name = "custom_meta"
|
|||
[[bin]]
|
||||
name = "decodebin"
|
||||
|
||||
[[bin]]
|
||||
name = "debug_ringbuffer"
|
||||
|
||||
[[bin]]
|
||||
name = "encodebin"
|
||||
|
||||
[[bin]]
|
||||
name = "events"
|
||||
|
||||
[[bin]]
|
||||
name = "gtksink"
|
||||
required-features = ["gtksink"]
|
||||
|
||||
[[bin]]
|
||||
name = "gtkvideooverlay"
|
||||
required-features = ["gtkvideooverlay"]
|
||||
|
||||
[[bin]]
|
||||
name = "iterator"
|
||||
|
||||
|
@ -101,6 +108,10 @@ name = "transmux"
|
|||
[[bin]]
|
||||
name = "pad_probes"
|
||||
|
||||
[[bin]]
|
||||
name = "play"
|
||||
required-features = ["gst-play"]
|
||||
|
||||
[[bin]]
|
||||
name = "playbin"
|
||||
|
||||
|
@ -119,11 +130,15 @@ name = "rtpfecserver"
|
|||
|
||||
[[bin]]
|
||||
name = "rtsp-server"
|
||||
required-features = ["gst-rtsp-server"]
|
||||
required-features = ["rtsp-server"]
|
||||
|
||||
[[bin]]
|
||||
name = "rtsp-server-subclass"
|
||||
required-features = ["gst-rtsp-server"]
|
||||
required-features = ["rtsp-server"]
|
||||
|
||||
[[bin]]
|
||||
name = "rtsp-server-custom-auth"
|
||||
required-features = ["rtsp-server", "gst-rtsp-server/v1_22"]
|
||||
|
||||
[[bin]]
|
||||
name = "tagsetter"
|
||||
|
@ -139,7 +154,7 @@ name = "glib-futures"
|
|||
|
||||
[[bin]]
|
||||
name = "rtsp-server-record"
|
||||
required-features = ["gst-rtsp-server-record"]
|
||||
required-features = ["rtsp-server-record"]
|
||||
|
||||
[[bin]]
|
||||
name = "discoverer"
|
||||
|
@ -152,17 +167,43 @@ required-features = ["pango-cairo"]
|
|||
name = "overlay-composition"
|
||||
required-features = ["overlay-composition"]
|
||||
|
||||
[[bin]]
|
||||
name = "overlay-composition-d2d"
|
||||
required-features = ["windows"]
|
||||
|
||||
[[bin]]
|
||||
name = "ges"
|
||||
required-features = ["ges"]
|
||||
|
||||
[[bin]]
|
||||
name = "glupload"
|
||||
name = "glwindow"
|
||||
required-features = ["gl"]
|
||||
|
||||
[[bin]]
|
||||
name = "glfilter"
|
||||
required-features = ["gl"]
|
||||
features = ["gl-egl", "gl-x11", "gl-wayland"]
|
||||
|
||||
[[bin]]
|
||||
name = "subclass"
|
||||
|
||||
[[bin]]
|
||||
name = "video_converter"
|
||||
|
||||
[[bin]]
|
||||
name = "thumbnail"
|
||||
required-features = ["image"]
|
||||
|
||||
[[bin]]
|
||||
name = "fd_allocator"
|
||||
required-features = ["allocators"]
|
||||
|
||||
[[bin]]
|
||||
name = "cairo_compositor"
|
||||
required-features = ["cairo-rs", "gst-video/v1_18"]
|
||||
|
||||
[[bin]]
|
||||
name = "d3d11videosink"
|
||||
required-features = ["windows"]
|
||||
|
||||
[[bin]]
|
||||
name = "audio_multichannel_interleave"
|
||||
|
|
|
@ -1,10 +1,7 @@
|
|||
#[cfg(feature = "gl")]
|
||||
extern crate gl_generator;
|
||||
|
||||
#[cfg(feature = "gl")]
|
||||
fn generate_gl_bindings() {
|
||||
let dest = std::path::PathBuf::from(&std::env::var("OUT_DIR").unwrap());
|
||||
let mut file = std::fs::File::create(&dest.join("test_gl_bindings.rs")).unwrap();
|
||||
let mut file = std::fs::File::create(dest.join("test_gl_bindings.rs")).unwrap();
|
||||
gl_generator::Registry::new(
|
||||
gl_generator::Api::Gles2,
|
||||
(3, 0),
|
||||
|
|
|
@ -10,64 +10,44 @@
|
|||
// This is the format we request:
|
||||
// Audio / Signed 16bit / 1 channel / arbitrary sample rate
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::gst_element_error;
|
||||
use gst::prelude::*;
|
||||
extern crate gstreamer_app as gst_app;
|
||||
extern crate gstreamer_audio as gst_audio;
|
||||
|
||||
use byte_slice_cast::*;
|
||||
|
||||
use std::i16;
|
||||
use std::i32;
|
||||
|
||||
use anyhow::Error;
|
||||
use byte_slice_cast::*;
|
||||
use derive_more::{Display, Error};
|
||||
use gst::{element_error, prelude::*};
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Missing element {}", _0)]
|
||||
struct MissingElement(#[error(not(source))] &'static str);
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
|
||||
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
|
||||
struct ErrorMessage {
|
||||
src: String,
|
||||
error: String,
|
||||
debug: Option<String>,
|
||||
source: glib::Error,
|
||||
src: glib::GString,
|
||||
error: glib::Error,
|
||||
debug: Option<glib::GString>,
|
||||
}
|
||||
|
||||
fn create_pipeline() -> Result<gst::Pipeline, Error> {
|
||||
gst::init()?;
|
||||
|
||||
let pipeline = gst::Pipeline::new(None);
|
||||
let src = gst::ElementFactory::make("audiotestsrc", None)
|
||||
.map_err(|_| MissingElement("audiotestsrc"))?;
|
||||
let sink = gst::ElementFactory::make("appsink", None).map_err(|_| MissingElement("appsink"))?;
|
||||
let pipeline = gst::Pipeline::default();
|
||||
let src = gst::ElementFactory::make("audiotestsrc").build()?;
|
||||
let appsink = gst_app::AppSink::builder()
|
||||
// Tell the appsink what format we want. It will then be the audiotestsrc's job to
|
||||
// provide the format we request.
|
||||
// This can be set after linking the two objects, because format negotiation between
|
||||
// both elements will happen during pre-rolling of the pipeline.
|
||||
.caps(
|
||||
&gst_audio::AudioCapsBuilder::new_interleaved()
|
||||
.format(gst_audio::AUDIO_FORMAT_S16)
|
||||
.channels(1)
|
||||
.build(),
|
||||
)
|
||||
.build();
|
||||
|
||||
pipeline.add_many(&[&src, &sink])?;
|
||||
src.link(&sink)?;
|
||||
|
||||
let appsink = sink
|
||||
.dynamic_cast::<gst_app::AppSink>()
|
||||
.expect("Sink element is expected to be an appsink!");
|
||||
|
||||
// Tell the appsink what format we want. It will then be the audiotestsrc's job to
|
||||
// provide the format we request.
|
||||
// This can be set after linking the two objects, because format negotiation between
|
||||
// both elements will happen during pre-rolling of the pipeline.
|
||||
appsink.set_caps(Some(&gst::Caps::new_simple(
|
||||
"audio/x-raw",
|
||||
&[
|
||||
("format", &gst_audio::AUDIO_FORMAT_S16.to_str()),
|
||||
("layout", &"interleaved"),
|
||||
("channels", &(1i32)),
|
||||
("rate", &gst::IntRange::<i32>::new(1, i32::MAX)),
|
||||
],
|
||||
)));
|
||||
pipeline.add_many([&src, appsink.upcast_ref()])?;
|
||||
src.link(&appsink)?;
|
||||
|
||||
// Getting data out of the appsink is done by setting callbacks on it.
|
||||
// The appsink will then call those handlers, as soon as data is available.
|
||||
|
@ -77,8 +57,8 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
|
|||
.new_sample(|appsink| {
|
||||
// Pull the sample in question out of the appsink's buffer.
|
||||
let sample = appsink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
|
||||
let buffer = sample.get_buffer().ok_or_else(|| {
|
||||
gst_element_error!(
|
||||
let buffer = sample.buffer().ok_or_else(|| {
|
||||
element_error!(
|
||||
appsink,
|
||||
gst::ResourceError::Failed,
|
||||
("Failed to get buffer from appsink")
|
||||
|
@ -95,7 +75,7 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
|
|||
// So mapping the buffer makes the underlying memory region accessible to us.
|
||||
// See: https://gstreamer.freedesktop.org/documentation/plugin-development/advanced/allocation.html
|
||||
let map = buffer.map_readable().map_err(|_| {
|
||||
gst_element_error!(
|
||||
element_error!(
|
||||
appsink,
|
||||
gst::ResourceError::Failed,
|
||||
("Failed to map buffer readable")
|
||||
|
@ -108,10 +88,10 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
|
|||
// it by setting the appsink's caps. So what we do here is interpret the
|
||||
// memory region we mapped as an array of signed 16 bit integers.
|
||||
let samples = map.as_slice_of::<i16>().map_err(|_| {
|
||||
gst_element_error!(
|
||||
element_error!(
|
||||
appsink,
|
||||
gst::ResourceError::Failed,
|
||||
("Failed to interprete buffer as S16 PCM")
|
||||
("Failed to interpret buffer as S16 PCM")
|
||||
);
|
||||
|
||||
gst::FlowError::Error
|
||||
|
@ -127,7 +107,7 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
|
|||
})
|
||||
.sum();
|
||||
let rms = (sum / (samples.len() as f64)).sqrt();
|
||||
println!("rms: {}", rms);
|
||||
println!("rms: {rms}");
|
||||
|
||||
Ok(gst::FlowSuccess::Ok)
|
||||
})
|
||||
|
@ -141,10 +121,10 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
|
|||
pipeline.set_state(gst::State::Playing)?;
|
||||
|
||||
let bus = pipeline
|
||||
.get_bus()
|
||||
.bus()
|
||||
.expect("Pipeline without bus. Shouldn't happen!");
|
||||
|
||||
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
|
@ -153,12 +133,11 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
|
|||
pipeline.set_state(gst::State::Null)?;
|
||||
return Err(ErrorMessage {
|
||||
src: msg
|
||||
.get_src()
|
||||
.map(|s| String::from(s.get_path_string()))
|
||||
.unwrap_or_else(|| String::from("None")),
|
||||
error: err.get_error().to_string(),
|
||||
debug: err.get_debug(),
|
||||
source: err.get_error(),
|
||||
.src()
|
||||
.map(|s| s.path_string())
|
||||
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
|
||||
error: err.error(),
|
||||
debug: err.debug(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
|
@ -174,12 +153,12 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
|
|||
fn example_main() {
|
||||
match create_pipeline().and_then(main_loop) {
|
||||
Ok(r) => r,
|
||||
Err(e) => eprintln!("Error! {}", e),
|
||||
Err(e) => eprintln!("Error! {e}"),
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
||||
|
|
|
@ -10,28 +10,20 @@
|
|||
// The application provides data of the following format:
|
||||
// Video / BGRx (4 bytes) / 2 fps
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::prelude::*;
|
||||
extern crate gstreamer_app as gst_app;
|
||||
extern crate gstreamer_video as gst_video;
|
||||
|
||||
use anyhow::Error;
|
||||
use derive_more::{Display, Error};
|
||||
use gst::prelude::*;
|
||||
use gst_video::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Missing element {}", _0)]
|
||||
struct MissingElement(#[error(not(source))] &'static str);
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
|
||||
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
|
||||
struct ErrorMessage {
|
||||
src: String,
|
||||
error: String,
|
||||
debug: Option<String>,
|
||||
source: glib::Error,
|
||||
src: glib::GString,
|
||||
error: glib::Error,
|
||||
debug: Option<glib::GString>,
|
||||
}
|
||||
|
||||
const WIDTH: usize = 320;
|
||||
|
@ -40,19 +32,7 @@ const HEIGHT: usize = 240;
|
|||
fn create_pipeline() -> Result<gst::Pipeline, Error> {
|
||||
gst::init()?;
|
||||
|
||||
let pipeline = gst::Pipeline::new(None);
|
||||
let src = gst::ElementFactory::make("appsrc", None).map_err(|_| MissingElement("appsrc"))?;
|
||||
let videoconvert = gst::ElementFactory::make("videoconvert", None)
|
||||
.map_err(|_| MissingElement("videoconvert"))?;
|
||||
let sink = gst::ElementFactory::make("autovideosink", None)
|
||||
.map_err(|_| MissingElement("autovideosink"))?;
|
||||
|
||||
pipeline.add_many(&[&src, &videoconvert, &sink])?;
|
||||
gst::Element::link_many(&[&src, &videoconvert, &sink])?;
|
||||
|
||||
let appsrc = src
|
||||
.dynamic_cast::<gst_app::AppSrc>()
|
||||
.expect("Source element is expected to be an appsrc!");
|
||||
let pipeline = gst::Pipeline::default();
|
||||
|
||||
// Specify the format we want to provide as application into the pipeline
|
||||
// by creating a video info with the given format and creating caps from it for the appsrc element.
|
||||
|
@ -62,8 +42,16 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
|
|||
.build()
|
||||
.expect("Failed to create video info");
|
||||
|
||||
appsrc.set_caps(Some(&video_info.to_caps().unwrap()));
|
||||
appsrc.set_property_format(gst::Format::Time);
|
||||
let appsrc = gst_app::AppSrc::builder()
|
||||
.caps(&video_info.to_caps().unwrap())
|
||||
.format(gst::Format::Time)
|
||||
.build();
|
||||
|
||||
let videoconvert = gst::ElementFactory::make("videoconvert").build()?;
|
||||
let sink = gst::ElementFactory::make("autovideosink").build()?;
|
||||
|
||||
pipeline.add_many([appsrc.upcast_ref(), &videoconvert, &sink])?;
|
||||
gst::Element::link_many([appsrc.upcast_ref(), &videoconvert, &sink])?;
|
||||
|
||||
// Our frame counter, that is stored in the mutable environment
|
||||
// of the closure of the need-data callback
|
||||
|
@ -88,7 +76,7 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
|
|||
return;
|
||||
}
|
||||
|
||||
println!("Producing frame {}", i);
|
||||
println!("Producing frame {i}");
|
||||
|
||||
let r = if i % 2 == 0 { 0 } else { 255 };
|
||||
let g = if i % 3 == 0 { 0 } else { 255 };
|
||||
|
@ -101,20 +89,37 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
|
|||
// For each frame we produce, we set the timestamp when it should be displayed
|
||||
// (pts = presentation time stamp)
|
||||
// The autovideosink will use this information to display the frame at the right time.
|
||||
buffer.set_pts(i * 500 * gst::MSECOND);
|
||||
buffer.set_pts(i * 500 * gst::ClockTime::MSECOND);
|
||||
|
||||
// At this point, buffer is only a reference to an existing memory region somewhere.
|
||||
// When we want to access its content, we have to map it while requesting the required
|
||||
// mode of access (read, read/write).
|
||||
// See: https://gstreamer.freedesktop.org/documentation/plugin-development/advanced/allocation.html
|
||||
let mut data = buffer.map_writable().unwrap();
|
||||
let mut vframe =
|
||||
gst_video::VideoFrameRef::from_buffer_ref_writable(buffer, &video_info)
|
||||
.unwrap();
|
||||
|
||||
for p in data.as_mut_slice().chunks_mut(4) {
|
||||
assert_eq!(p.len(), 4);
|
||||
p[0] = b;
|
||||
p[1] = g;
|
||||
p[2] = r;
|
||||
p[3] = 0;
|
||||
// Remember some values from the frame for later usage
|
||||
let width = vframe.width() as usize;
|
||||
let height = vframe.height() as usize;
|
||||
|
||||
// Each line of the first plane has this many bytes
|
||||
let stride = vframe.plane_stride()[0] as usize;
|
||||
|
||||
// Iterate over each of the height many lines of length stride
|
||||
for line in vframe
|
||||
.plane_data_mut(0)
|
||||
.unwrap()
|
||||
.chunks_exact_mut(stride)
|
||||
.take(height)
|
||||
{
|
||||
// Iterate over each pixel of 4 bytes in that line
|
||||
for pixel in line[..(4 * width)].chunks_exact_mut(4) {
|
||||
pixel[0] = b;
|
||||
pixel[1] = g;
|
||||
pixel[2] = r;
|
||||
pixel[3] = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -133,10 +138,10 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
|
|||
pipeline.set_state(gst::State::Playing)?;
|
||||
|
||||
let bus = pipeline
|
||||
.get_bus()
|
||||
.bus()
|
||||
.expect("Pipeline without bus. Shouldn't happen!");
|
||||
|
||||
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
|
@ -145,12 +150,11 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
|
|||
pipeline.set_state(gst::State::Null)?;
|
||||
return Err(ErrorMessage {
|
||||
src: msg
|
||||
.get_src()
|
||||
.map(|s| String::from(s.get_path_string()))
|
||||
.unwrap_or_else(|| String::from("None")),
|
||||
error: err.get_error().to_string(),
|
||||
debug: err.get_debug(),
|
||||
source: err.get_error(),
|
||||
.src()
|
||||
.map(|s| s.path_string())
|
||||
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
|
||||
error: err.error(),
|
||||
debug: err.debug(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
|
@ -166,12 +170,12 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
|
|||
fn example_main() {
|
||||
match create_pipeline().and_then(main_loop) {
|
||||
Ok(r) => r,
|
||||
Err(e) => eprintln!("Error! {}", e),
|
||||
Err(e) => eprintln!("Error! {e}"),
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
||||
|
|
153
examples/src/bin/audio_multichannel_interleave.rs
Normal file
153
examples/src/bin/audio_multichannel_interleave.rs
Normal file
|
@ -0,0 +1,153 @@
|
|||
// This example demonstrates how to mix multiple audio
|
||||
// streams into a single output using the audiomixer element.
|
||||
// In this case, we're mixing 4 stereo streams into a single 8 channel output.
|
||||
|
||||
use gst::prelude::*;
|
||||
use std::env;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
const TRACKS: i32 = 4;
|
||||
|
||||
fn create_source_and_link(pipeline: &gst::Pipeline, mixer: &gst::Element, track_number: i32) {
|
||||
let freq = ((track_number + 1) * 1000) as f64;
|
||||
let audiosrc = gst::ElementFactory::make("audiotestsrc")
|
||||
.property("freq", freq)
|
||||
.property("num-buffers", 2000)
|
||||
.build()
|
||||
.unwrap();
|
||||
let caps = gst_audio::AudioCapsBuilder::new().channels(2).build();
|
||||
let capsfilter = gst::ElementFactory::make("capsfilter")
|
||||
.property("caps", &caps)
|
||||
.build()
|
||||
.unwrap();
|
||||
|
||||
pipeline.add_many([&audiosrc, &capsfilter]).unwrap();
|
||||
gst::Element::link_many([&audiosrc, &capsfilter]).unwrap();
|
||||
|
||||
let src_pad = capsfilter.static_pad("src").unwrap();
|
||||
let mixer_pad = mixer.request_pad_simple("sink_%u").unwrap();
|
||||
|
||||
// audiomixer expects a mix-matrix set on each input pad,
|
||||
// indicating which output channels our input should appear in.
|
||||
// Rows => input channels, columns => output channels.
|
||||
// Here each input channel will appear in exactly one output channel.
|
||||
let mut mix_matrix: Vec<Vec<f32>> = vec![];
|
||||
for i in 0..TRACKS {
|
||||
if i == track_number {
|
||||
mix_matrix.push(vec![1.0, 0.0]);
|
||||
mix_matrix.push(vec![0.0, 1.0]);
|
||||
} else {
|
||||
mix_matrix.push(vec![0.0, 0.0]);
|
||||
mix_matrix.push(vec![0.0, 0.0]);
|
||||
}
|
||||
}
|
||||
let mut audiomixer_config = gst_audio::AudioConverterConfig::new();
|
||||
audiomixer_config.set_mix_matrix(&mix_matrix);
|
||||
mixer_pad.set_property("converter-config", audiomixer_config);
|
||||
|
||||
src_pad.link(&mixer_pad).unwrap();
|
||||
}
|
||||
|
||||
fn example_main() {
|
||||
gst::init().unwrap();
|
||||
|
||||
let args: Vec<_> = env::args().collect();
|
||||
let output_file = if args.len() == 2 {
|
||||
&args[1]
|
||||
} else {
|
||||
println!("Usage: audiomixer <output file>");
|
||||
std::process::exit(-1);
|
||||
};
|
||||
|
||||
let pipeline = gst::Pipeline::new();
|
||||
let audiomixer = gst::ElementFactory::make("audiomixer").build().unwrap();
|
||||
|
||||
// Using an arbitrary layout of 4 stereo pairs.
|
||||
let positions = [
|
||||
gst_audio::AudioChannelPosition::FrontLeft,
|
||||
gst_audio::AudioChannelPosition::FrontRight,
|
||||
gst_audio::AudioChannelPosition::RearLeft,
|
||||
gst_audio::AudioChannelPosition::RearRight,
|
||||
gst_audio::AudioChannelPosition::SideLeft,
|
||||
gst_audio::AudioChannelPosition::SideRight,
|
||||
gst_audio::AudioChannelPosition::TopFrontLeft,
|
||||
gst_audio::AudioChannelPosition::TopFrontRight,
|
||||
];
|
||||
|
||||
let mask = gst_audio::AudioChannelPosition::positions_to_mask(&positions, true).unwrap();
|
||||
let caps = gst_audio::AudioCapsBuilder::new()
|
||||
.channels(positions.len() as i32)
|
||||
.channel_mask(mask)
|
||||
.build();
|
||||
let capsfilter = gst::ElementFactory::make("capsfilter")
|
||||
.property("caps", &caps)
|
||||
.build()
|
||||
.unwrap();
|
||||
|
||||
let audioconvert = gst::ElementFactory::make("audioconvert").build().unwrap();
|
||||
let audioresample = gst::ElementFactory::make("audioresample").build().unwrap();
|
||||
let wavenc = gst::ElementFactory::make("wavenc").build().unwrap();
|
||||
let sink = gst::ElementFactory::make("filesink")
|
||||
.property("location", output_file)
|
||||
.build()
|
||||
.unwrap();
|
||||
|
||||
pipeline
|
||||
.add_many([
|
||||
&audiomixer,
|
||||
&capsfilter,
|
||||
&audioconvert,
|
||||
&audioresample,
|
||||
&wavenc,
|
||||
&sink,
|
||||
])
|
||||
.unwrap();
|
||||
gst::Element::link_many([
|
||||
&audiomixer,
|
||||
&capsfilter,
|
||||
&audioconvert,
|
||||
&audioresample,
|
||||
&wavenc,
|
||||
&sink,
|
||||
])
|
||||
.unwrap();
|
||||
|
||||
for i in 0..TRACKS {
|
||||
create_source_and_link(&pipeline, &audiomixer, i);
|
||||
}
|
||||
|
||||
let bus = pipeline.bus().expect("Pipeline without bus");
|
||||
|
||||
pipeline
|
||||
.set_state(gst::State::Playing)
|
||||
.expect("Unable to start pipeline");
|
||||
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
match msg.view() {
|
||||
MessageView::Eos(..) => break,
|
||||
MessageView::Error(err) => {
|
||||
eprintln!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
msg.src().map(|s| s.path_string()),
|
||||
err.error(),
|
||||
err.debug()
|
||||
);
|
||||
break;
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
|
||||
pipeline
|
||||
.set_state(gst::State::Null)
|
||||
.expect("Unable to change pipeline state to NULL");
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
719
examples/src/bin/cairo_compositor.rs
Normal file
719
examples/src/bin/cairo_compositor.rs
Normal file
|
@ -0,0 +1,719 @@
|
|||
// This example demonstrates how to implement a custom compositor based on cairo.
|
||||
#![allow(clippy::non_send_fields_in_send_ty)]
|
||||
|
||||
use anyhow::{Context, Error};
|
||||
use gst::prelude::*;
|
||||
use gst_base::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
// Our custom compositor element is defined in this module.
|
||||
mod cairo_compositor {
|
||||
use gst_base::subclass::prelude::*;
|
||||
use gst_video::{prelude::*, subclass::prelude::*};
|
||||
|
||||
// In the imp submodule we include the actual implementation of the compositor.
|
||||
mod imp {
|
||||
use std::sync::Mutex;
|
||||
|
||||
use super::*;
|
||||
|
||||
// Settings of the compositor.
|
||||
#[derive(Clone)]
|
||||
struct Settings {
|
||||
background_color: u32,
|
||||
}
|
||||
|
||||
impl Default for Settings {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
background_color: 0xff_00_00_00,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// This is the private data of our compositor.
|
||||
#[derive(Default)]
|
||||
pub struct CairoCompositor {
|
||||
settings: Mutex<Settings>,
|
||||
}
|
||||
|
||||
// This trait registers our type with the GObject object system and
|
||||
// provides the entry points for creating a new instance and setting
|
||||
// up the class data.
|
||||
#[glib::object_subclass]
|
||||
impl ObjectSubclass for CairoCompositor {
|
||||
const NAME: &'static str = "CairoCompositor";
|
||||
type Type = super::CairoCompositor;
|
||||
type ParentType = gst_video::VideoAggregator;
|
||||
type Interfaces = (gst::ChildProxy,);
|
||||
}
|
||||
|
||||
// Implementation of glib::Object virtual methods.
|
||||
impl ObjectImpl for CairoCompositor {
|
||||
// Specification of the compositor properties.
|
||||
// In this case a single property for configuring the background color of the
|
||||
// composition.
|
||||
fn properties() -> &'static [glib::ParamSpec] {
|
||||
static PROPERTIES: std::sync::OnceLock<Vec<glib::ParamSpec>> =
|
||||
std::sync::OnceLock::new();
|
||||
|
||||
PROPERTIES.get_or_init(|| {
|
||||
vec![glib::ParamSpecUInt::builder("background-color")
|
||||
.nick("Background Color")
|
||||
.blurb("Background color as 0xRRGGBB")
|
||||
.default_value(Settings::default().background_color)
|
||||
.build()]
|
||||
})
|
||||
}
|
||||
|
||||
// Called by the application whenever the value of a property should be changed.
|
||||
fn set_property(&self, _id: usize, value: &glib::Value, pspec: &glib::ParamSpec) {
|
||||
let mut settings = self.settings.lock().unwrap();
|
||||
|
||||
match pspec.name() {
|
||||
"background-color" => {
|
||||
settings.background_color = value.get().unwrap();
|
||||
}
|
||||
_ => unimplemented!(),
|
||||
};
|
||||
}
|
||||
|
||||
// Called by the application whenever the value of a property should be retrieved.
|
||||
fn property(&self, _id: usize, pspec: &glib::ParamSpec) -> glib::Value {
|
||||
let settings = self.settings.lock().unwrap();
|
||||
|
||||
match pspec.name() {
|
||||
"background-color" => settings.background_color.to_value(),
|
||||
_ => unimplemented!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Implementation of gst::Object virtual methods.
|
||||
impl GstObjectImpl for CairoCompositor {}
|
||||
|
||||
// Implementation of gst::Element virtual methods.
|
||||
impl ElementImpl for CairoCompositor {
|
||||
// The element specific metadata. This information is what is visible from
|
||||
// gst-inspect-1.0 and can also be programmatically retrieved from the gst::Registry
|
||||
// after initial registration without having to load the plugin in memory.
|
||||
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
|
||||
static ELEMENT_METADATA: std::sync::OnceLock<gst::subclass::ElementMetadata> =
|
||||
std::sync::OnceLock::new();
|
||||
|
||||
Some(ELEMENT_METADATA.get_or_init(|| {
|
||||
gst::subclass::ElementMetadata::new(
|
||||
"Cairo Compositor",
|
||||
"Compositor/Video",
|
||||
"Cairo based compositor",
|
||||
"Sebastian Dröge <sebastian@centricular.com>",
|
||||
)
|
||||
}))
|
||||
}
|
||||
|
||||
fn pad_templates() -> &'static [gst::PadTemplate] {
|
||||
static PAD_TEMPLATES: std::sync::OnceLock<Vec<gst::PadTemplate>> =
|
||||
std::sync::OnceLock::new();
|
||||
|
||||
PAD_TEMPLATES.get_or_init(|| {
|
||||
// Create pad templates for our sink and source pad. These are later used for
|
||||
// actually creating the pads and beforehand already provide information to
|
||||
// GStreamer about all possible pads that could exist for this type.
|
||||
|
||||
// On all pads we can only handle BGRx.
|
||||
let caps = gst_video::VideoCapsBuilder::new()
|
||||
.format(gst_video::VideoFormat::Bgrx)
|
||||
.pixel_aspect_ratio((1, 1).into())
|
||||
.build();
|
||||
|
||||
vec![
|
||||
// The src pad template must be named "src" for aggregator
|
||||
// and always be there.
|
||||
gst::PadTemplate::new(
|
||||
"src",
|
||||
gst::PadDirection::Src,
|
||||
gst::PadPresence::Always,
|
||||
&caps,
|
||||
)
|
||||
.unwrap(),
|
||||
// The sink pad template must be named "sink_%u" by default for aggregator
|
||||
// and be requested by the application.
|
||||
//
|
||||
// Also declare here that it should be a pad with our custom compositor pad
|
||||
// type that is defined further below.
|
||||
gst::PadTemplate::with_gtype(
|
||||
"sink_%u",
|
||||
gst::PadDirection::Sink,
|
||||
gst::PadPresence::Request,
|
||||
&caps,
|
||||
super::CairoCompositorPad::static_type(),
|
||||
)
|
||||
.unwrap(),
|
||||
]
|
||||
})
|
||||
}
|
||||
|
||||
// Notify via the child proxy interface whenever a new pad is added or removed.
|
||||
fn request_new_pad(
|
||||
&self,
|
||||
templ: &gst::PadTemplate,
|
||||
name: Option<&str>,
|
||||
caps: Option<&gst::Caps>,
|
||||
) -> Option<gst::Pad> {
|
||||
let element = self.obj();
|
||||
let pad = self.parent_request_new_pad(templ, name, caps)?;
|
||||
element.child_added(&pad, &pad.name());
|
||||
Some(pad)
|
||||
}
|
||||
|
||||
fn release_pad(&self, pad: &gst::Pad) {
|
||||
let element = self.obj();
|
||||
element.child_removed(pad, &pad.name());
|
||||
self.parent_release_pad(pad);
|
||||
}
|
||||
}
|
||||
|
||||
// Implementation of gst_base::Aggregator virtual methods.
|
||||
impl AggregatorImpl for CairoCompositor {
|
||||
// Called whenever a query arrives at the given sink pad of the compositor.
|
||||
fn sink_query(
|
||||
&self,
|
||||
aggregator_pad: &gst_base::AggregatorPad,
|
||||
query: &mut gst::QueryRef,
|
||||
) -> bool {
|
||||
use gst::QueryViewMut;
|
||||
|
||||
// We can accept any input caps that match the pad template. By default
|
||||
// videoaggregator only allows caps that have the same format as the output.
|
||||
match query.view_mut() {
|
||||
QueryViewMut::Caps(q) => {
|
||||
let caps = aggregator_pad.pad_template_caps();
|
||||
let filter = q.filter();
|
||||
|
||||
let caps = if let Some(filter) = filter {
|
||||
filter.intersect_with_mode(&caps, gst::CapsIntersectMode::First)
|
||||
} else {
|
||||
caps
|
||||
};
|
||||
|
||||
q.set_result(&caps);
|
||||
|
||||
true
|
||||
}
|
||||
QueryViewMut::AcceptCaps(q) => {
|
||||
let caps = q.caps();
|
||||
let template_caps = aggregator_pad.pad_template_caps();
|
||||
let res = caps.is_subset(&template_caps);
|
||||
q.set_result(res);
|
||||
|
||||
true
|
||||
}
|
||||
_ => self.parent_sink_query(aggregator_pad, query),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Implementation of gst_video::VideoAggregator virtual methods.
|
||||
impl VideoAggregatorImpl for CairoCompositor {
|
||||
// Called by videoaggregator whenever the output format should be determined.
|
||||
fn find_best_format(
|
||||
&self,
|
||||
_downstream_caps: &gst::Caps,
|
||||
) -> Option<(gst_video::VideoInfo, bool)> {
|
||||
// Let videoaggregator select whatever format downstream wants.
|
||||
//
|
||||
// By default videoaggregator doesn't allow a different format than the input
|
||||
// format.
|
||||
None
|
||||
}
|
||||
|
||||
// Called whenever a new output frame should be produced. At this point, each pad has
|
||||
// either no frame queued up at all or the frame that should be used for this output
|
||||
// time.
|
||||
fn aggregate_frames(
|
||||
&self,
|
||||
token: &gst_video::subclass::AggregateFramesToken,
|
||||
outbuf: &mut gst::BufferRef,
|
||||
) -> Result<gst::FlowSuccess, gst::FlowError> {
|
||||
let element = self.obj();
|
||||
let pads = element.sink_pads();
|
||||
|
||||
// Map the output frame writable.
|
||||
let out_info = element.video_info().unwrap();
|
||||
let mut out_frame =
|
||||
gst_video::VideoFrameRef::from_buffer_ref_writable(outbuf, &out_info).unwrap();
|
||||
|
||||
// And then create a cairo context for drawing on the output frame.
|
||||
with_frame(&mut out_frame, |ctx| {
|
||||
let settings = self.settings.lock().unwrap().clone();
|
||||
|
||||
// First of all, clear the background.
|
||||
let bg = (
|
||||
((settings.background_color >> 16) & 0xff) as f64 / 255.0,
|
||||
((settings.background_color >> 8) & 0xff) as f64 / 255.0,
|
||||
(settings.background_color & 0xff) as f64 / 255.0,
|
||||
);
|
||||
ctx.set_operator(cairo::Operator::Source);
|
||||
ctx.set_source_rgb(bg.0, bg.1, bg.2);
|
||||
ctx.paint().unwrap();
|
||||
|
||||
ctx.set_operator(cairo::Operator::Over);
|
||||
|
||||
// Then for each pad (in zorder), draw it according to the current settings.
|
||||
for pad in pads {
|
||||
let pad = pad.downcast_ref::<CairoCompositorPad>().unwrap();
|
||||
|
||||
let settings = pad.imp().settings.lock().unwrap().clone();
|
||||
|
||||
if settings.alpha <= 0.0 || settings.scale <= 0.0 {
|
||||
continue;
|
||||
}
|
||||
|
||||
let frame = match pad.prepared_frame(token) {
|
||||
Some(frame) => frame,
|
||||
None => continue,
|
||||
};
|
||||
|
||||
ctx.save().unwrap();
|
||||
|
||||
ctx.translate(settings.xpos, settings.ypos);
|
||||
|
||||
ctx.scale(settings.scale, settings.scale);
|
||||
|
||||
ctx.translate(frame.width() as f64 / 2.0, frame.height() as f64 / 2.0);
|
||||
ctx.rotate(settings.rotate / 360.0 * 2.0 * std::f64::consts::PI);
|
||||
ctx.translate(
|
||||
-(frame.width() as f64 / 2.0),
|
||||
-(frame.height() as f64 / 2.0),
|
||||
);
|
||||
|
||||
paint_frame(ctx, &frame, settings.alpha);
|
||||
|
||||
ctx.restore().unwrap();
|
||||
}
|
||||
});
|
||||
|
||||
Ok(gst::FlowSuccess::Ok)
|
||||
}
|
||||
}
|
||||
|
||||
// Implementation of gst::ChildProxy virtual methods.
|
||||
//
|
||||
// This allows accessing the pads and their properties from e.g. gst-launch.
|
||||
impl ChildProxyImpl for CairoCompositor {
|
||||
fn children_count(&self) -> u32 {
|
||||
let object = self.obj();
|
||||
object.num_pads() as u32
|
||||
}
|
||||
|
||||
fn child_by_name(&self, name: &str) -> Option<glib::Object> {
|
||||
let object = self.obj();
|
||||
object
|
||||
.pads()
|
||||
.into_iter()
|
||||
.find(|p| p.name() == name)
|
||||
.map(|p| p.upcast())
|
||||
}
|
||||
|
||||
fn child_by_index(&self, index: u32) -> Option<glib::Object> {
|
||||
let object = self.obj();
|
||||
object
|
||||
.pads()
|
||||
.into_iter()
|
||||
.nth(index as usize)
|
||||
.map(|p| p.upcast())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Creates a cairo context around the given video frame and then calls the closure to operate
|
||||
// on the cairo context. Ensures that no references to the video frame stay inside cairo.
|
||||
fn with_frame<F: FnOnce(&cairo::Context)>(
|
||||
frame: &mut gst_video::VideoFrameRef<&mut gst::BufferRef>,
|
||||
func: F,
|
||||
) {
|
||||
// SAFETY: This is the one and only surface reference and it is dropped at the end, meaning
|
||||
// nothing from cairo is referencing the frame data anymore.
|
||||
unsafe {
|
||||
use glib::translate::*;
|
||||
|
||||
let surface = cairo::ImageSurface::create_for_data_unsafe(
|
||||
frame.plane_data_mut(0).unwrap().as_mut_ptr(),
|
||||
cairo::Format::Rgb24,
|
||||
frame.width() as i32,
|
||||
frame.height() as i32,
|
||||
frame.plane_stride()[0],
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let ctx = cairo::Context::new(&surface).unwrap();
|
||||
func(&ctx);
|
||||
drop(ctx);
|
||||
surface.finish();
|
||||
assert_eq!(
|
||||
cairo::ffi::cairo_surface_get_reference_count(surface.to_glib_none().0),
|
||||
1,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Paints the frame with the given alpha on the cairo context at the current origin.
|
||||
// Ensures that no references to the video frame stay inside cairo.
|
||||
fn paint_frame(
|
||||
ctx: &cairo::Context,
|
||||
frame: &gst_video::VideoFrameRef<&gst::BufferRef>,
|
||||
alpha: f64,
|
||||
) {
|
||||
// SAFETY: This is the one and only surface reference and it is dropped at the end, meaning
|
||||
// nothing from cairo is referencing the frame data anymore.
|
||||
//
|
||||
// Also nothing is ever writing to the surface from here.
|
||||
unsafe {
|
||||
use glib::translate::*;
|
||||
|
||||
let surface = cairo::ImageSurface::create_for_data_unsafe(
|
||||
frame.plane_data(0).unwrap().as_ptr() as *mut u8,
|
||||
cairo::Format::Rgb24,
|
||||
frame.width() as i32,
|
||||
frame.height() as i32,
|
||||
frame.plane_stride()[0],
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
ctx.set_source_surface(&surface, 0.0, 0.0).unwrap();
|
||||
ctx.paint_with_alpha(alpha).unwrap();
|
||||
ctx.set_source_rgb(0.0, 0.0, 0.0);
|
||||
|
||||
assert_eq!(
|
||||
cairo::ffi::cairo_surface_get_reference_count(surface.to_glib_none().0),
|
||||
1,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// This here defines the public interface of our element and implements
|
||||
// the corresponding traits so that it behaves like any other gst::Element.
|
||||
glib::wrapper! {
|
||||
pub struct CairoCompositor(ObjectSubclass<imp::CairoCompositor>) @extends gst_video::VideoAggregator, gst_base::Aggregator, gst::Element, gst::Object, @implements gst::ChildProxy;
|
||||
}
|
||||
|
||||
impl CairoCompositor {
|
||||
// Creates a new instance of our compositor with the given name.
|
||||
pub fn new(name: Option<&str>) -> Self {
|
||||
glib::Object::builder().property("name", name).build()
|
||||
}
|
||||
}
|
||||
|
||||
// In the imp submodule we include the implementation of the pad subclass.
|
||||
//
|
||||
// This doesn't implement any additional logic but only provides properties for configuring the
|
||||
// appearance of the stream corresponding to this pad and the storage of the property values.
|
||||
mod imp_pad {
|
||||
use std::sync::Mutex;
|
||||
|
||||
use super::*;
|
||||
|
||||
// Settings of our pad.
|
||||
#[derive(Clone)]
|
||||
pub(super) struct Settings {
|
||||
pub(super) alpha: f64,
|
||||
pub(super) scale: f64,
|
||||
pub(super) rotate: f64,
|
||||
pub(super) xpos: f64,
|
||||
pub(super) ypos: f64,
|
||||
}
|
||||
|
||||
impl Default for Settings {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
alpha: 1.0,
|
||||
scale: 1.0,
|
||||
rotate: 0.0,
|
||||
xpos: 0.0,
|
||||
ypos: 0.0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// This is the private data of our pad.
|
||||
#[derive(Default)]
|
||||
pub struct CairoCompositorPad {
|
||||
pub(super) settings: Mutex<Settings>,
|
||||
}
|
||||
|
||||
// This trait registers our type with the GObject object system and
|
||||
// provides the entry points for creating a new instance and setting
|
||||
// up the class data.
|
||||
#[glib::object_subclass]
|
||||
impl ObjectSubclass for CairoCompositorPad {
|
||||
const NAME: &'static str = "CairoCompositorPad";
|
||||
type Type = super::CairoCompositorPad;
|
||||
type ParentType = gst_video::VideoAggregatorPad;
|
||||
}
|
||||
|
||||
// Implementation of glib::Object virtual methods.
|
||||
impl ObjectImpl for CairoCompositorPad {
|
||||
// Specification of the compositor pad properties.
|
||||
// In this case there are various properties for defining the position and otherwise
|
||||
// the appearance of the stream corresponding to this pad.
|
||||
fn properties() -> &'static [glib::ParamSpec] {
|
||||
static PROPERTIES: std::sync::OnceLock<Vec<glib::ParamSpec>> =
|
||||
std::sync::OnceLock::new();
|
||||
|
||||
PROPERTIES.get_or_init(|| {
|
||||
vec![
|
||||
glib::ParamSpecDouble::builder("alpha")
|
||||
.nick("Alpha")
|
||||
.blurb("Alpha value of the input")
|
||||
.minimum(0.0)
|
||||
.maximum(1.0)
|
||||
.default_value(Settings::default().alpha)
|
||||
.build(),
|
||||
glib::ParamSpecDouble::builder("scale")
|
||||
.nick("Scale")
|
||||
.blurb("Scale factor of the input")
|
||||
.minimum(0.0)
|
||||
.maximum(f64::MAX)
|
||||
.default_value(Settings::default().scale)
|
||||
.build(),
|
||||
glib::ParamSpecDouble::builder("rotate")
|
||||
.nick("Rotate")
|
||||
.blurb("Rotation of the input")
|
||||
.minimum(0.0)
|
||||
.maximum(360.0)
|
||||
.default_value(Settings::default().rotate)
|
||||
.build(),
|
||||
glib::ParamSpecDouble::builder("xpos")
|
||||
.nick("X Position")
|
||||
.blurb("Horizontal position of the input")
|
||||
.minimum(0.0)
|
||||
.maximum(f64::MAX)
|
||||
.default_value(Settings::default().xpos)
|
||||
.build(),
|
||||
glib::ParamSpecDouble::builder("ypos")
|
||||
.nick("Y Position")
|
||||
.blurb("Vertical position of the input")
|
||||
.minimum(0.0)
|
||||
.maximum(f64::MAX)
|
||||
.default_value(Settings::default().ypos)
|
||||
.build(),
|
||||
]
|
||||
})
|
||||
}
|
||||
|
||||
// Called by the application whenever the value of a property should be changed.
|
||||
fn set_property(&self, _id: usize, value: &glib::Value, pspec: &glib::ParamSpec) {
|
||||
let mut settings = self.settings.lock().unwrap();
|
||||
|
||||
match pspec.name() {
|
||||
"alpha" => {
|
||||
settings.alpha = value.get().unwrap();
|
||||
}
|
||||
"scale" => {
|
||||
settings.scale = value.get().unwrap();
|
||||
}
|
||||
"rotate" => {
|
||||
settings.rotate = value.get().unwrap();
|
||||
}
|
||||
"xpos" => {
|
||||
settings.xpos = value.get().unwrap();
|
||||
}
|
||||
"ypos" => {
|
||||
settings.ypos = value.get().unwrap();
|
||||
}
|
||||
_ => unimplemented!(),
|
||||
};
|
||||
}
|
||||
|
||||
// Called by the application whenever the value of a property should be retrieved.
|
||||
fn property(&self, _id: usize, pspec: &glib::ParamSpec) -> glib::Value {
|
||||
let settings = self.settings.lock().unwrap();
|
||||
|
||||
match pspec.name() {
|
||||
"alpha" => settings.alpha.to_value(),
|
||||
"scale" => settings.scale.to_value(),
|
||||
"rotate" => settings.rotate.to_value(),
|
||||
"xpos" => settings.xpos.to_value(),
|
||||
"ypos" => settings.ypos.to_value(),
|
||||
_ => unimplemented!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Implementation of gst::Object virtual methods.
|
||||
impl GstObjectImpl for CairoCompositorPad {}
|
||||
|
||||
// Implementation of gst::Pad virtual methods.
|
||||
impl PadImpl for CairoCompositorPad {}
|
||||
|
||||
// Implementation of gst_base::AggregatorPad virtual methods.
|
||||
impl AggregatorPadImpl for CairoCompositorPad {}
|
||||
|
||||
// Implementation of gst_video::VideoAggregatorPad virtual methods.
|
||||
impl VideoAggregatorPadImpl for CairoCompositorPad {}
|
||||
}
|
||||
|
||||
// This here defines the public interface of our element and implements
|
||||
// the corresponding traits so that it behaves like any other gst::Pad.
|
||||
glib::wrapper! {
|
||||
pub struct CairoCompositorPad(ObjectSubclass<imp_pad::CairoCompositorPad>) @extends gst_video::VideoAggregatorPad, gst_base::AggregatorPad, gst::Pad, gst::Object;
|
||||
}
|
||||
}
|
||||
|
||||
fn create_pipeline() -> Result<gst::Pipeline, Error> {
|
||||
gst::init()?;
|
||||
|
||||
// Create our pipeline with the compositor and two input streams.
|
||||
let pipeline = gst::Pipeline::default();
|
||||
let src1 = gst::ElementFactory::make("videotestsrc")
|
||||
.property_from_str("pattern", "ball")
|
||||
.build()?;
|
||||
let src2 = gst::ElementFactory::make("videotestsrc")
|
||||
.property_from_str("pattern", "smpte")
|
||||
.build()?;
|
||||
let comp = cairo_compositor::CairoCompositor::new(None);
|
||||
let conv = gst::ElementFactory::make("videoconvert").build()?;
|
||||
let sink = gst::ElementFactory::make("autovideosink").build()?;
|
||||
|
||||
comp.set_property("background-color", 0xff_33_33_33u32);
|
||||
|
||||
pipeline.add_many([&src1, &src2, comp.upcast_ref(), &conv, &sink])?;
|
||||
|
||||
// Link everything together.
|
||||
src1.link_filtered(
|
||||
&comp,
|
||||
&gst::Caps::builder("video/x-raw")
|
||||
.field("width", 320i32)
|
||||
.field("height", 240i32)
|
||||
.build(),
|
||||
)
|
||||
.context("Linking source 1")?;
|
||||
src2.link_filtered(
|
||||
&comp,
|
||||
&gst::Caps::builder("video/x-raw")
|
||||
.field("width", 320i32)
|
||||
.field("height", 240i32)
|
||||
.build(),
|
||||
)
|
||||
.context("Linking source 2")?;
|
||||
comp.link_filtered(
|
||||
&conv,
|
||||
&gst::Caps::builder("video/x-raw")
|
||||
.field("width", 1280i32)
|
||||
.field("height", 720i32)
|
||||
.build(),
|
||||
)
|
||||
.context("Linking converter")?;
|
||||
conv.link(&sink).context("Linking sink")?;
|
||||
|
||||
// Change positions etc of both inputs based on a timer
|
||||
let xmax = 1280.0 - 320.0f64;
|
||||
let ymax = 720.0 - 240.0f64;
|
||||
let sink_0 = comp.static_pad("sink_0").unwrap();
|
||||
sink_0.set_property("xpos", 0.0f64);
|
||||
sink_0.set_property("ypos", 0.0f64);
|
||||
let sink_1 = comp.static_pad("sink_1").unwrap();
|
||||
sink_1.set_property("xpos", xmax);
|
||||
sink_1.set_property("ypos", ymax);
|
||||
|
||||
comp.set_emit_signals(true);
|
||||
comp.connect_samples_selected(move |_agg, _seg, pts, _dts, _dur, _info| {
|
||||
// Position and rotation period is 10s.
|
||||
let pos = (pts.unwrap().nseconds() % gst::ClockTime::from_seconds(10).nseconds()) as f64
|
||||
/ gst::ClockTime::from_seconds(10).nseconds() as f64;
|
||||
|
||||
let xpos = (1.0 + f64::sin(2.0 * std::f64::consts::PI * pos)) * xmax / 2.0;
|
||||
let ypos = (1.0 + f64::cos(2.0 * std::f64::consts::PI * pos)) * ymax / 2.0;
|
||||
|
||||
sink_0.set_property("xpos", xpos);
|
||||
sink_0.set_property("ypos", ypos);
|
||||
|
||||
let xpos = (1.0 + f64::cos(2.0 * std::f64::consts::PI * pos)) * xmax / 2.0;
|
||||
let ypos = (1.0 + f64::sin(2.0 * std::f64::consts::PI * pos)) * ymax / 2.0;
|
||||
|
||||
sink_1.set_property("xpos", xpos);
|
||||
sink_1.set_property("ypos", ypos);
|
||||
|
||||
sink_0.set_property("rotate", pos * 360.0);
|
||||
sink_1.set_property("rotate", 360.0 - pos * 360.0);
|
||||
|
||||
// Alpha period is 2s.
|
||||
let pos = (pts.unwrap().nseconds() % gst::ClockTime::from_seconds(2).nseconds()) as f64
|
||||
/ gst::ClockTime::from_seconds(2).nseconds() as f64;
|
||||
sink_0.set_property(
|
||||
"alpha",
|
||||
(1.0 + f64::sin(2.0 * std::f64::consts::PI * pos)) / 2.0,
|
||||
);
|
||||
sink_1.set_property(
|
||||
"alpha",
|
||||
(1.0 + f64::cos(2.0 * std::f64::consts::PI * pos)) / 2.0,
|
||||
);
|
||||
|
||||
// Scale period is 20s.
|
||||
let pos = (pts.unwrap().nseconds() % gst::ClockTime::from_seconds(20).nseconds()) as f64
|
||||
/ gst::ClockTime::from_seconds(20).nseconds() as f64;
|
||||
sink_0.set_property("scale", pos);
|
||||
sink_1.set_property("scale", 1.0 - pos);
|
||||
});
|
||||
|
||||
Ok(pipeline)
|
||||
}
|
||||
|
||||
// Start the pipeline and collect messages from the bus until an error or EOS.
|
||||
fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
|
||||
pipeline.set_state(gst::State::Playing)?;
|
||||
|
||||
let bus = pipeline
|
||||
.bus()
|
||||
.expect("Pipeline without bus. Shouldn't happen!");
|
||||
let mut bus_stream = bus.stream();
|
||||
|
||||
let main_context = glib::MainContext::default();
|
||||
|
||||
// Storage for any error so we can report it later.
|
||||
let mut error = None;
|
||||
main_context.block_on(async {
|
||||
use futures::prelude::*;
|
||||
|
||||
while let Some(msg) = bus_stream.next().await {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
MessageView::Eos(..) => break,
|
||||
MessageView::Error(err) => {
|
||||
error = Some(anyhow::anyhow!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.src().map(|s| s.path_string()),
|
||||
err.error(),
|
||||
err.debug()
|
||||
));
|
||||
|
||||
break;
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// In case of error, report to the caller.
|
||||
if let Some(error) = error {
|
||||
let _ = pipeline.set_state(gst::State::Null);
|
||||
return Err(error);
|
||||
}
|
||||
|
||||
pipeline.set_state(gst::State::Null)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn example_main() -> Result<(), Error> {
|
||||
create_pipeline().and_then(main_loop)
|
||||
}
|
||||
|
||||
fn main() -> Result<(), Error> {
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically).
|
||||
examples_common::run(example_main)
|
||||
}
|
|
@ -5,7 +5,6 @@
|
|||
// is sent on the sink pad, we expect to see it emerge on the other side when
|
||||
// the data in front of it has exited.
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
|
@ -22,7 +21,7 @@ impl ExampleCustomEvent {
|
|||
#[allow(clippy::new_ret_no_self)]
|
||||
pub fn new(send_eos: bool) -> gst::Event {
|
||||
let s = gst::Structure::builder(Self::EVENT_NAME)
|
||||
.field("send_eos", &send_eos)
|
||||
.field("send_eos", send_eos)
|
||||
.build();
|
||||
gst::event::CustomDownstream::new(s)
|
||||
}
|
||||
|
@ -30,12 +29,12 @@ impl ExampleCustomEvent {
|
|||
pub fn parse(ev: &gst::EventRef) -> Option<ExampleCustomEvent> {
|
||||
match ev.view() {
|
||||
gst::EventView::CustomDownstream(e) => {
|
||||
let s = match e.get_structure() {
|
||||
Some(s) if s.get_name() == Self::EVENT_NAME => s,
|
||||
let s = match e.structure() {
|
||||
Some(s) if s.name() == Self::EVENT_NAME => s,
|
||||
_ => return None, // No structure in this event, or the name didn't match
|
||||
};
|
||||
|
||||
let send_eos = s.get_some::<bool>("send_eos").unwrap();
|
||||
let send_eos = s.get::<bool>("send_eos").unwrap();
|
||||
Some(ExampleCustomEvent { send_eos })
|
||||
}
|
||||
_ => None, // Not a custom event
|
||||
|
@ -49,19 +48,19 @@ fn example_main() {
|
|||
let main_loop = glib::MainLoop::new(None, false);
|
||||
|
||||
// This creates a pipeline by parsing the gst-launch pipeline syntax.
|
||||
let pipeline = gst::parse_launch(
|
||||
let pipeline = gst::parse::launch(
|
||||
"audiotestsrc name=src ! queue max-size-time=2000000000 ! fakesink name=sink sync=true",
|
||||
)
|
||||
.unwrap();
|
||||
let bus = pipeline.get_bus().unwrap();
|
||||
let bus = pipeline.bus().unwrap();
|
||||
|
||||
pipeline
|
||||
.set_state(gst::State::Playing)
|
||||
.expect("Unable to set the pipeline to the `Playing` state");
|
||||
let pipeline = pipeline.dynamic_cast::<gst::Pipeline>().unwrap();
|
||||
|
||||
let sink = pipeline.get_by_name("sink").unwrap();
|
||||
let sinkpad = sink.get_static_pad("sink").unwrap();
|
||||
let sink = pipeline.by_name("sink").unwrap();
|
||||
let sinkpad = sink.static_pad("sink").unwrap();
|
||||
|
||||
// Need to move a new reference into the closure.
|
||||
// !!ATTENTION!!:
|
||||
|
@ -76,31 +75,33 @@ fn example_main() {
|
|||
// Add a pad probe on the sink pad and catch the custom event we sent, then send
|
||||
// an EOS event on the pipeline.
|
||||
sinkpad.add_probe(gst::PadProbeType::EVENT_DOWNSTREAM, move |_, probe_info| {
|
||||
match probe_info.data {
|
||||
Some(gst::PadProbeData::Event(ref ev))
|
||||
if ev.get_type() == gst::EventType::CustomDownstream =>
|
||||
{
|
||||
if let Some(custom_event) = ExampleCustomEvent::parse(ev) {
|
||||
if let Some(pipeline) = pipeline_weak.upgrade() {
|
||||
if custom_event.send_eos {
|
||||
/* Send EOS event to shut down the pipeline, but from an async callback, as we're
|
||||
* in a pad probe blocking the stream thread here... */
|
||||
println!("Got custom event with send_eos=true. Sending EOS");
|
||||
let ev = gst::event::Eos::new();
|
||||
let pipeline_weak = pipeline_weak.clone();
|
||||
pipeline.call_async(move |_| {
|
||||
if let Some(pipeline) = pipeline_weak.upgrade() {
|
||||
pipeline.send_event(ev);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
println!("Got custom event, with send_eos=false. Ignoring");
|
||||
}
|
||||
}
|
||||
let Some(event) = probe_info.event() else {
|
||||
return gst::PadProbeReturn::Ok;
|
||||
};
|
||||
|
||||
let Some(custom_event) = ExampleCustomEvent::parse(event) else {
|
||||
return gst::PadProbeReturn::Ok;
|
||||
};
|
||||
|
||||
let Some(pipeline) = pipeline_weak.upgrade() else {
|
||||
return gst::PadProbeReturn::Ok;
|
||||
};
|
||||
|
||||
if custom_event.send_eos {
|
||||
/* Send EOS event to shut down the pipeline, but from an async callback, as we're
|
||||
* in a pad probe blocking the stream thread here... */
|
||||
println!("Got custom event with send_eos=true. Sending EOS");
|
||||
let ev = gst::event::Eos::new();
|
||||
let pipeline_weak = pipeline_weak.clone();
|
||||
pipeline.call_async(move |_| {
|
||||
if let Some(pipeline) = pipeline_weak.upgrade() {
|
||||
pipeline.send_event(ev);
|
||||
}
|
||||
}
|
||||
_ => (),
|
||||
});
|
||||
} else {
|
||||
println!("Got custom event, with send_eos=false. Ignoring");
|
||||
}
|
||||
|
||||
gst::PadProbeReturn::Ok
|
||||
});
|
||||
|
||||
|
@ -114,56 +115,53 @@ fn example_main() {
|
|||
glib::timeout_add_seconds(2 + i as u32, move || {
|
||||
// Here we temporarily retrieve a strong reference on the pipeline from the weak one
|
||||
// we moved into this callback.
|
||||
let pipeline = match pipeline_weak.upgrade() {
|
||||
Some(pipeline) => pipeline,
|
||||
None => return glib::Continue(false),
|
||||
let Some(pipeline) = pipeline_weak.upgrade() else {
|
||||
return glib::ControlFlow::Break;
|
||||
};
|
||||
println!(
|
||||
"Sending custom event to the pipeline with send_eos={}",
|
||||
send_eos
|
||||
);
|
||||
println!("Sending custom event to the pipeline with send_eos={send_eos}");
|
||||
let ev = ExampleCustomEvent::new(*send_eos);
|
||||
if !pipeline.send_event(ev) {
|
||||
println!("Warning: Failed to send custom event");
|
||||
}
|
||||
// Remove this handler, the pipeline will shutdown once our pad probe catches the custom
|
||||
// event and sends EOS
|
||||
glib::Continue(false)
|
||||
glib::ControlFlow::Break
|
||||
});
|
||||
}
|
||||
|
||||
let main_loop_clone = main_loop.clone();
|
||||
// This sets the bus's signal handler (don't be mislead by the "add", there can only be one).
|
||||
// Every message from the bus is passed through this function. Its returnvalue determines
|
||||
// whether the handler wants to be called again. If glib::Continue(false) is returned, the
|
||||
// whether the handler wants to be called again. If glib::ControlFlow::Break is returned, the
|
||||
// handler is removed and will never be called again. The mainloop still runs though.
|
||||
bus.add_watch(move |_, msg| {
|
||||
use gst::MessageView;
|
||||
let _bus_watch = bus
|
||||
.add_watch(move |_, msg| {
|
||||
use gst::MessageView;
|
||||
|
||||
let main_loop = &main_loop_clone;
|
||||
match msg.view() {
|
||||
MessageView::Eos(..) => {
|
||||
println!("received eos");
|
||||
// An EndOfStream event was sent to the pipeline, so we tell our main loop
|
||||
// to stop execution here.
|
||||
main_loop.quit()
|
||||
}
|
||||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.get_src().map(|s| s.get_path_string()),
|
||||
err.get_error(),
|
||||
err.get_debug()
|
||||
);
|
||||
main_loop.quit();
|
||||
}
|
||||
_ => (),
|
||||
};
|
||||
let main_loop = &main_loop_clone;
|
||||
match msg.view() {
|
||||
MessageView::Eos(..) => {
|
||||
println!("received eos");
|
||||
// An EndOfStream event was sent to the pipeline, so we tell our main loop
|
||||
// to stop execution here.
|
||||
main_loop.quit()
|
||||
}
|
||||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.src().map(|s| s.path_string()),
|
||||
err.error(),
|
||||
err.debug()
|
||||
);
|
||||
main_loop.quit();
|
||||
}
|
||||
_ => (),
|
||||
};
|
||||
|
||||
// Tell the mainloop to continue executing this callback.
|
||||
glib::Continue(true)
|
||||
})
|
||||
.expect("Failed to add bus watch");
|
||||
// Tell the mainloop to continue executing this callback.
|
||||
glib::ControlFlow::Continue
|
||||
})
|
||||
.expect("Failed to add bus watch");
|
||||
|
||||
// Operate GStreamer's bus, facilitating GLib's mainloop here.
|
||||
// This function call will block until you tell the mainloop to quit
|
||||
|
@ -173,15 +171,10 @@ fn example_main() {
|
|||
pipeline
|
||||
.set_state(gst::State::Null)
|
||||
.expect("Unable to set the pipeline to the `Null` state");
|
||||
|
||||
// Remove the watch function from the bus.
|
||||
// Again: There can always only be one watch function.
|
||||
// Thus we don't have to tell him which function to remove.
|
||||
bus.remove_watch().unwrap();
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
||||
|
|
|
@ -2,24 +2,20 @@
|
|||
//
|
||||
// It simply attaches a GstMeta with a Rust String to buffers that are passed into
|
||||
// an appsrc and retrieves them again from an appsink.
|
||||
#![allow(clippy::non_send_fields_in_send_ty)]
|
||||
|
||||
#[macro_use]
|
||||
extern crate gstreamer as gst;
|
||||
use gst::prelude::*;
|
||||
|
||||
extern crate gstreamer_app as gst_app;
|
||||
use gst::{element_error, prelude::*};
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
mod custom_meta {
|
||||
use gst::gst_sys;
|
||||
use std::{fmt, mem};
|
||||
|
||||
use gst::prelude::*;
|
||||
use std::fmt;
|
||||
use std::ptr;
|
||||
|
||||
// Public Rust type for the custom meta.
|
||||
#[repr(C)]
|
||||
#[repr(transparent)]
|
||||
pub struct CustomMeta(imp::CustomMeta);
|
||||
|
||||
// Metas must be Send+Sync.
|
||||
|
@ -33,25 +29,23 @@ mod custom_meta {
|
|||
label: String,
|
||||
) -> gst::MetaRefMut<Self, gst::meta::Standalone> {
|
||||
unsafe {
|
||||
// First add it: this will store an empty label via custom_meta_init().
|
||||
let meta = gst_sys::gst_buffer_add_meta(
|
||||
// Manually dropping because gst_buffer_add_meta() takes ownership of the
|
||||
// content of the struct.
|
||||
let mut params = mem::ManuallyDrop::new(imp::CustomMetaParams { label });
|
||||
|
||||
// The label is passed through via the params to custom_meta_init().
|
||||
let meta = gst::ffi::gst_buffer_add_meta(
|
||||
buffer.as_mut_ptr(),
|
||||
imp::custom_meta_get_info(),
|
||||
ptr::null_mut(),
|
||||
&mut *params as *mut imp::CustomMetaParams as glib::ffi::gpointer,
|
||||
) as *mut imp::CustomMeta;
|
||||
|
||||
// Then actually set the label.
|
||||
{
|
||||
let meta = &mut *meta;
|
||||
meta.label = label;
|
||||
}
|
||||
|
||||
Self::from_mut_ptr(buffer, meta)
|
||||
}
|
||||
}
|
||||
|
||||
// Retrieve the stored label.
|
||||
pub fn get_label(&self) -> &str {
|
||||
pub fn label(&self) -> &str {
|
||||
self.0.label.as_str()
|
||||
}
|
||||
}
|
||||
|
@ -60,7 +54,7 @@ mod custom_meta {
|
|||
unsafe impl MetaAPI for CustomMeta {
|
||||
type GstType = imp::CustomMeta;
|
||||
|
||||
fn get_meta_api() -> glib::Type {
|
||||
fn meta_api() -> glib::Type {
|
||||
imp::custom_meta_api_get_type()
|
||||
}
|
||||
}
|
||||
|
@ -68,64 +62,68 @@ mod custom_meta {
|
|||
impl fmt::Debug for CustomMeta {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
f.debug_struct("CustomMeta")
|
||||
.field("label", &self.get_label())
|
||||
.field("label", &self.label())
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
// Actual unsafe implementation of the meta.
|
||||
mod imp {
|
||||
use glib::glib_sys;
|
||||
use std::{mem, ptr};
|
||||
|
||||
use glib::translate::*;
|
||||
use gst::gst_sys;
|
||||
use once_cell::sync::Lazy;
|
||||
use std::mem;
|
||||
use std::ptr;
|
||||
|
||||
pub(super) struct CustomMetaParams {
|
||||
pub label: String,
|
||||
}
|
||||
|
||||
// This is the C type that is actually stored as meta inside the buffers.
|
||||
#[repr(C)]
|
||||
pub struct CustomMeta {
|
||||
parent: gst_sys::GstMeta,
|
||||
parent: gst::ffi::GstMeta,
|
||||
pub(super) label: String,
|
||||
}
|
||||
|
||||
// Function to register the meta API and get a type back.
|
||||
pub(super) fn custom_meta_api_get_type() -> glib::Type {
|
||||
static TYPE: Lazy<glib::Type> = Lazy::new(|| unsafe {
|
||||
let t = from_glib(gst_sys::gst_meta_api_type_register(
|
||||
static TYPE: std::sync::OnceLock<glib::Type> = std::sync::OnceLock::new();
|
||||
|
||||
*TYPE.get_or_init(|| unsafe {
|
||||
let t = glib::Type::from_glib(gst::ffi::gst_meta_api_type_register(
|
||||
b"MyCustomMetaAPI\0".as_ptr() as *const _,
|
||||
// We provide no tags here as our meta is just a label and does
|
||||
// not refer to any specific aspect of the buffer
|
||||
// not refer to any specific aspect of the buffer.
|
||||
[ptr::null::<std::os::raw::c_char>()].as_ptr() as *mut *const _,
|
||||
));
|
||||
|
||||
assert_ne!(t, glib::Type::Invalid);
|
||||
assert_ne!(t, glib::Type::INVALID);
|
||||
|
||||
t
|
||||
});
|
||||
|
||||
*TYPE
|
||||
})
|
||||
}
|
||||
|
||||
// Initialization function for our meta. This needs to ensure all fields are correctly
|
||||
// initialized. They will contain random memory before.
|
||||
unsafe extern "C" fn custom_meta_init(
|
||||
meta: *mut gst_sys::GstMeta,
|
||||
_params: glib_sys::gpointer,
|
||||
_buffer: *mut gst_sys::GstBuffer,
|
||||
) -> glib_sys::gboolean {
|
||||
meta: *mut gst::ffi::GstMeta,
|
||||
params: glib::ffi::gpointer,
|
||||
_buffer: *mut gst::ffi::GstBuffer,
|
||||
) -> glib::ffi::gboolean {
|
||||
assert!(!params.is_null());
|
||||
|
||||
let meta = &mut *(meta as *mut CustomMeta);
|
||||
let params = ptr::read(params as *const CustomMetaParams);
|
||||
|
||||
// Need to initialize all our fields correctly here
|
||||
ptr::write(&mut meta.label, String::new());
|
||||
// Need to initialize all our fields correctly here.
|
||||
ptr::write(&mut meta.label, params.label);
|
||||
|
||||
true.to_glib()
|
||||
true.into_glib()
|
||||
}
|
||||
|
||||
// Free function for our meta. This needs to free/drop all memory we allocated.
|
||||
unsafe extern "C" fn custom_meta_free(
|
||||
meta: *mut gst_sys::GstMeta,
|
||||
_buffer: *mut gst_sys::GstBuffer,
|
||||
meta: *mut gst::ffi::GstMeta,
|
||||
_buffer: *mut gst::ffi::GstBuffer,
|
||||
) {
|
||||
let meta = &mut *(meta as *mut CustomMeta);
|
||||
|
||||
|
@ -137,42 +135,45 @@ mod custom_meta {
|
|||
// in a way that is compatible with the transformation type. In this case we just always
|
||||
// copy it over.
|
||||
unsafe extern "C" fn custom_meta_transform(
|
||||
dest: *mut gst_sys::GstBuffer,
|
||||
meta: *mut gst_sys::GstMeta,
|
||||
_buffer: *mut gst_sys::GstBuffer,
|
||||
_type_: glib_sys::GQuark,
|
||||
_data: glib_sys::gpointer,
|
||||
) -> glib_sys::gboolean {
|
||||
let meta = &mut *(meta as *mut CustomMeta);
|
||||
dest: *mut gst::ffi::GstBuffer,
|
||||
meta: *mut gst::ffi::GstMeta,
|
||||
_buffer: *mut gst::ffi::GstBuffer,
|
||||
_type_: glib::ffi::GQuark,
|
||||
_data: glib::ffi::gpointer,
|
||||
) -> glib::ffi::gboolean {
|
||||
let meta = &*(meta as *mut CustomMeta);
|
||||
|
||||
// We simply copy over our meta here. Other metas might have to look at the type
|
||||
// and do things conditional on that, or even just drop the meta.
|
||||
super::CustomMeta::add(gst::BufferRef::from_mut_ptr(dest), meta.label.clone());
|
||||
|
||||
true.to_glib()
|
||||
true.into_glib()
|
||||
}
|
||||
|
||||
// Register the meta itself with its functions.
|
||||
pub(super) fn custom_meta_get_info() -> *const gst_sys::GstMetaInfo {
|
||||
struct MetaInfo(ptr::NonNull<gst_sys::GstMetaInfo>);
|
||||
pub(super) fn custom_meta_get_info() -> *const gst::ffi::GstMetaInfo {
|
||||
struct MetaInfo(ptr::NonNull<gst::ffi::GstMetaInfo>);
|
||||
unsafe impl Send for MetaInfo {}
|
||||
unsafe impl Sync for MetaInfo {}
|
||||
|
||||
static META_INFO: Lazy<MetaInfo> = Lazy::new(|| unsafe {
|
||||
MetaInfo(
|
||||
ptr::NonNull::new(gst_sys::gst_meta_register(
|
||||
custom_meta_api_get_type().to_glib(),
|
||||
b"MyCustomMeta\0".as_ptr() as *const _,
|
||||
mem::size_of::<CustomMeta>(),
|
||||
Some(custom_meta_init),
|
||||
Some(custom_meta_free),
|
||||
Some(custom_meta_transform),
|
||||
) as *mut gst_sys::GstMetaInfo)
|
||||
.expect("Failed to register meta API"),
|
||||
)
|
||||
});
|
||||
static META_INFO: std::sync::OnceLock<MetaInfo> = std::sync::OnceLock::new();
|
||||
|
||||
META_INFO.0.as_ptr()
|
||||
META_INFO
|
||||
.get_or_init(|| unsafe {
|
||||
MetaInfo(
|
||||
ptr::NonNull::new(gst::ffi::gst_meta_register(
|
||||
custom_meta_api_get_type().into_glib(),
|
||||
b"MyCustomMeta\0".as_ptr() as *const _,
|
||||
mem::size_of::<CustomMeta>(),
|
||||
Some(custom_meta_init),
|
||||
Some(custom_meta_free),
|
||||
Some(custom_meta_transform),
|
||||
) as *mut gst::ffi::GstMetaInfo)
|
||||
.expect("Failed to register meta API"),
|
||||
)
|
||||
})
|
||||
.0
|
||||
.as_ptr()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -181,15 +182,9 @@ fn example_main() {
|
|||
gst::init().unwrap();
|
||||
|
||||
// This creates a pipeline with appsrc and appsink.
|
||||
let pipeline = gst::Pipeline::new(None);
|
||||
let appsrc = gst::ElementFactory::make("appsrc", None)
|
||||
.unwrap()
|
||||
.downcast::<gst_app::AppSrc>()
|
||||
.unwrap();
|
||||
let appsink = gst::ElementFactory::make("appsink", None)
|
||||
.unwrap()
|
||||
.downcast::<gst_app::AppSink>()
|
||||
.unwrap();
|
||||
let pipeline = gst::Pipeline::default();
|
||||
let appsrc = gst_app::AppSrc::builder().build();
|
||||
let appsink = gst_app::AppSink::builder().build();
|
||||
|
||||
pipeline.add(&appsrc).unwrap();
|
||||
pipeline.add(&appsink).unwrap();
|
||||
|
@ -207,13 +202,13 @@ fn example_main() {
|
|||
return;
|
||||
}
|
||||
|
||||
println!("Producing buffer {}", i);
|
||||
println!("Producing buffer {i}");
|
||||
|
||||
// Add a custom meta with a label to this buffer.
|
||||
let mut buffer = gst::Buffer::new();
|
||||
{
|
||||
let buffer = buffer.get_mut().unwrap();
|
||||
custom_meta::CustomMeta::add(buffer, format!("This is buffer {}", i));
|
||||
custom_meta::CustomMeta::add(buffer, format!("This is buffer {i}"));
|
||||
}
|
||||
|
||||
i += 1;
|
||||
|
@ -232,8 +227,8 @@ fn example_main() {
|
|||
.new_sample(|appsink| {
|
||||
// Pull the sample in question out of the appsink's buffer.
|
||||
let sample = appsink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
|
||||
let buffer = sample.get_buffer().ok_or_else(|| {
|
||||
gst_element_error!(
|
||||
let buffer = sample.buffer().ok_or_else(|| {
|
||||
element_error!(
|
||||
appsink,
|
||||
gst::ResourceError::Failed,
|
||||
("Failed to get buffer from appsink")
|
||||
|
@ -244,9 +239,9 @@ fn example_main() {
|
|||
|
||||
// Retrieve the custom meta from the buffer and print it.
|
||||
let meta = buffer
|
||||
.get_meta::<custom_meta::CustomMeta>()
|
||||
.meta::<custom_meta::CustomMeta>()
|
||||
.expect("No custom meta found");
|
||||
println!("Got buffer with label: {}", meta.get_label());
|
||||
println!("Got buffer with label: {}", meta.label());
|
||||
|
||||
Ok(gst::FlowSuccess::Ok)
|
||||
})
|
||||
|
@ -260,11 +255,11 @@ fn example_main() {
|
|||
let pipeline = pipeline.dynamic_cast::<gst::Pipeline>().unwrap();
|
||||
|
||||
let bus = pipeline
|
||||
.get_bus()
|
||||
.bus()
|
||||
.expect("Pipeline without bus. Shouldn't happen!");
|
||||
|
||||
// And run until EOS or an error happened.
|
||||
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
|
@ -272,9 +267,9 @@ fn example_main() {
|
|||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.get_src().map(|s| s.get_path_string()),
|
||||
err.get_error(),
|
||||
err.get_debug()
|
||||
err.src().map(|s| s.path_string()),
|
||||
err.error(),
|
||||
err.debug()
|
||||
);
|
||||
break;
|
||||
}
|
||||
|
@ -289,7 +284,7 @@ fn example_main() {
|
|||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically).
|
||||
examples_common::run(example_main);
|
||||
}
|
||||
|
|
364
examples/src/bin/d3d11videosink.rs
Normal file
364
examples/src/bin/d3d11videosink.rs
Normal file
|
@ -0,0 +1,364 @@
|
|||
// This example demonstrates the use of the d3d11videosink's "present"
|
||||
// signal and the use of Direct2D/DirectWrite APIs in Rust.
|
||||
//
|
||||
// Application can perform various hardware-accelerated 2D graphics operation
|
||||
// (e.g., like cairo can support) and text rendering via the Windows APIs.
|
||||
// In this example, 2D graphics operation and text rendering will happen
|
||||
// directly to the on the DXGI swapchain's backbuffer via Windows API in
|
||||
// strictly zero-copy manner
|
||||
|
||||
use std::{
|
||||
collections::VecDeque,
|
||||
sync::{Arc, Mutex},
|
||||
time::SystemTime,
|
||||
};
|
||||
|
||||
use gst::{glib, prelude::*};
|
||||
use windows::{
|
||||
core::*,
|
||||
Win32::Graphics::{
|
||||
Direct2D::{Common::*, *},
|
||||
Direct3D11::*,
|
||||
DirectWrite::*,
|
||||
Dxgi::{Common::*, *},
|
||||
},
|
||||
};
|
||||
|
||||
struct OverlayContext {
|
||||
d2d_factory: ID2D1Factory,
|
||||
dwrite_factory: IDWriteFactory,
|
||||
text_format: IDWriteTextFormat,
|
||||
texture_desc: D3D11_TEXTURE2D_DESC,
|
||||
text_layout: Option<IDWriteTextLayout>,
|
||||
timestamp_queue: VecDeque<SystemTime>,
|
||||
avg_fps: f32,
|
||||
display_fps: f32,
|
||||
font_size: f32,
|
||||
}
|
||||
|
||||
fn create_overlay_context() -> Arc<Mutex<OverlayContext>> {
|
||||
// Lots of DirectX APIs are marked as unsafe but the below operations
|
||||
// are not expected to be failed unless GPU hang or device remove condition
|
||||
// happens
|
||||
let d2d_factory = unsafe {
|
||||
D2D1CreateFactory::<ID2D1Factory>(D2D1_FACTORY_TYPE_MULTI_THREADED, None).unwrap()
|
||||
};
|
||||
let dwrite_factory =
|
||||
unsafe { DWriteCreateFactory::<IDWriteFactory>(DWRITE_FACTORY_TYPE_SHARED).unwrap() };
|
||||
|
||||
// Font size can be updated later
|
||||
let text_format = unsafe {
|
||||
dwrite_factory
|
||||
.CreateTextFormat(
|
||||
w!("Consolas"),
|
||||
None,
|
||||
DWRITE_FONT_WEIGHT_REGULAR,
|
||||
DWRITE_FONT_STYLE_NORMAL,
|
||||
DWRITE_FONT_STRETCH_NORMAL,
|
||||
12f32,
|
||||
w!("en-us"),
|
||||
)
|
||||
.unwrap()
|
||||
};
|
||||
|
||||
Arc::new(Mutex::new(OverlayContext {
|
||||
d2d_factory,
|
||||
dwrite_factory,
|
||||
text_format,
|
||||
texture_desc: D3D11_TEXTURE2D_DESC::default(),
|
||||
text_layout: None,
|
||||
timestamp_queue: VecDeque::with_capacity(10),
|
||||
avg_fps: 0f32,
|
||||
display_fps: 0f32,
|
||||
font_size: 12f32,
|
||||
}))
|
||||
}
|
||||
|
||||
fn main() -> Result<()> {
|
||||
gst::init().unwrap();
|
||||
|
||||
let args: Vec<String> = std::env::args().collect();
|
||||
|
||||
if args.len() != 2 {
|
||||
println!("URI must be specified");
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let main_loop = glib::MainLoop::new(None, false);
|
||||
|
||||
let overlay_context = create_overlay_context();
|
||||
let overlay_context_weak = Arc::downgrade(&overlay_context);
|
||||
// Needs BGRA or RGBA swapchain for D2D interop,
|
||||
// and "present" signal must be explicitly enabled
|
||||
let videosink = gst::ElementFactory::make("d3d11videosink")
|
||||
.property("emit-present", true)
|
||||
.property_from_str("display-format", "DXGI_FORMAT_B8G8R8A8_UNORM")
|
||||
.build()
|
||||
.unwrap();
|
||||
|
||||
// Listen "present" signal and draw overlay from the callback
|
||||
// Required operations here:
|
||||
// 1) Gets IDXGISurface and ID3D11Texture2D interface from
|
||||
// given ID3D11RenderTargetView COM object
|
||||
// - ID3D11Texture2D: To get texture resolution
|
||||
// - IDXGISurface: To create Direct2D render target
|
||||
// 2) Creates or reuses IDWriteTextLayout interface
|
||||
// - This object represents text layout we want to draw on render target
|
||||
// 3) Draw rectangle (overlay background) and text on render target
|
||||
//
|
||||
// NOTE: ID2D1Factory, IDWriteFactory, IDWriteTextFormat, and
|
||||
// IDWriteTextLayout objects are device-independent. Which can be created
|
||||
// earlier instead of creating them in the callback.
|
||||
// But ID2D1RenderTarget is a device-dependent resource.
|
||||
// The client should not hold the d2d render target object outside of
|
||||
// this callback scope because the resource must be cleared before
|
||||
// releasing/resizing DXGI swapchain.
|
||||
videosink.connect_closure(
|
||||
"present",
|
||||
false,
|
||||
glib::closure!(move |_sink: &gst::Element,
|
||||
_device: &gst::Object,
|
||||
rtv_raw: glib::Pointer| {
|
||||
let overlay_context = overlay_context_weak.upgrade().unwrap();
|
||||
let mut context = overlay_context.lock().unwrap();
|
||||
let dwrite_factory = context.dwrite_factory.clone();
|
||||
let d2d_factory = context.d2d_factory.clone();
|
||||
|
||||
// SAFETY: transmute() below is clearly unsafe operation here.
|
||||
// Regarding the other part of the below block, all DirectX
|
||||
// APIs are marked as unsafe, except for cast.
|
||||
//
|
||||
// In theory, all the Direct3D/Direct2D APIs could fail for
|
||||
// some reasons (it's hardware!), but in practice, it's very unexpected
|
||||
// situation and any of failure below would mean we are doing
|
||||
// something in wrong way or driver bug or so.
|
||||
unsafe {
|
||||
let rtv = ID3D11RenderTargetView::from_raw_borrowed(&rtv_raw).unwrap();
|
||||
let resource = rtv.GetResource().unwrap();
|
||||
|
||||
let texture = resource.cast::<ID3D11Texture2D>().unwrap();
|
||||
let desc = {
|
||||
let mut desc = D3D11_TEXTURE2D_DESC::default();
|
||||
texture.GetDesc(&mut desc);
|
||||
desc
|
||||
};
|
||||
|
||||
// Window size was updated, creates new text layout
|
||||
let calculate_font_size = if desc != context.texture_desc {
|
||||
context.texture_desc = desc;
|
||||
context.text_layout = None;
|
||||
true
|
||||
} else {
|
||||
false
|
||||
};
|
||||
|
||||
// New fps, creates new layout
|
||||
if context.avg_fps != context.display_fps {
|
||||
context.display_fps = context.avg_fps;
|
||||
context.text_layout = None;
|
||||
}
|
||||
|
||||
if context.text_layout.is_none() {
|
||||
let overlay_string = format!("TextOverlay, Fps {:.1}", context.display_fps);
|
||||
let overlay_wstring = overlay_string.encode_utf16().collect::<Vec<_>>();
|
||||
let layout = dwrite_factory
|
||||
.CreateTextLayout(
|
||||
&overlay_wstring,
|
||||
&context.text_format,
|
||||
desc.Width as f32,
|
||||
desc.Height as f32 / 5f32,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
// Adjust alignment
|
||||
layout
|
||||
.SetTextAlignment(DWRITE_TEXT_ALIGNMENT_CENTER)
|
||||
.unwrap();
|
||||
layout
|
||||
.SetParagraphAlignment(DWRITE_PARAGRAPH_ALIGNMENT_CENTER)
|
||||
.unwrap();
|
||||
|
||||
// XXX: This is not an efficient approach.
|
||||
// The font size can be pre-calculated for a pre-defined
|
||||
// window size and string length
|
||||
let mut range = DWRITE_TEXT_RANGE {
|
||||
startPosition: 0u32,
|
||||
length: overlay_wstring.len() as u32,
|
||||
};
|
||||
|
||||
if calculate_font_size {
|
||||
let mut font_size = 12f32;
|
||||
let mut was_decreased = false;
|
||||
|
||||
loop {
|
||||
let mut metrics = DWRITE_TEXT_METRICS::default();
|
||||
layout.GetMetrics(&mut metrics).unwrap();
|
||||
layout
|
||||
.GetFontSize(0, &mut font_size, Some(&mut range))
|
||||
.unwrap();
|
||||
|
||||
if metrics.widthIncludingTrailingWhitespace >= desc.Width as f32 {
|
||||
if font_size > 1f32 {
|
||||
font_size -= 0.5f32;
|
||||
was_decreased = true;
|
||||
layout.SetFontSize(font_size, range).unwrap();
|
||||
continue;
|
||||
}
|
||||
|
||||
break;
|
||||
}
|
||||
|
||||
if was_decreased {
|
||||
break;
|
||||
}
|
||||
|
||||
if metrics.widthIncludingTrailingWhitespace < desc.Width as f32 {
|
||||
if metrics.widthIncludingTrailingWhitespace
|
||||
>= desc.Width as f32 * 0.7f32
|
||||
{
|
||||
break;
|
||||
}
|
||||
|
||||
font_size += 0.5f32;
|
||||
layout.SetFontSize(font_size, range).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
context.font_size = font_size;
|
||||
} else {
|
||||
layout.SetFontSize(context.font_size, range).unwrap();
|
||||
}
|
||||
|
||||
context.text_layout = Some(layout);
|
||||
};
|
||||
|
||||
let dxgi_surf = resource.cast::<IDXGISurface>().unwrap();
|
||||
let render_target = d2d_factory
|
||||
.CreateDxgiSurfaceRenderTarget(
|
||||
&dxgi_surf,
|
||||
&D2D1_RENDER_TARGET_PROPERTIES {
|
||||
r#type: D2D1_RENDER_TARGET_TYPE_DEFAULT,
|
||||
pixelFormat: D2D1_PIXEL_FORMAT {
|
||||
format: DXGI_FORMAT_B8G8R8A8_UNORM,
|
||||
alphaMode: D2D1_ALPHA_MODE_PREMULTIPLIED,
|
||||
},
|
||||
// zero means default DPI
|
||||
dpiX: 0f32,
|
||||
dpiY: 0f32,
|
||||
usage: D2D1_RENDER_TARGET_USAGE_NONE,
|
||||
minLevel: D2D1_FEATURE_LEVEL_DEFAULT,
|
||||
},
|
||||
)
|
||||
.unwrap();
|
||||
let text_brush = render_target
|
||||
.CreateSolidColorBrush(
|
||||
&D2D1_COLOR_F {
|
||||
r: 0f32,
|
||||
g: 0f32,
|
||||
b: 0f32,
|
||||
a: 1f32,
|
||||
},
|
||||
None,
|
||||
)
|
||||
.unwrap();
|
||||
let overlay_brush = render_target
|
||||
.CreateSolidColorBrush(
|
||||
&D2D1_COLOR_F {
|
||||
r: 0f32,
|
||||
g: 0.5f32,
|
||||
b: 0.5f32,
|
||||
a: 0.3f32,
|
||||
},
|
||||
None,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
render_target.BeginDraw();
|
||||
// Draws overlay background. It will blend overlay's background
|
||||
// color with already rendred video frame
|
||||
render_target.FillRectangle(
|
||||
&D2D_RECT_F {
|
||||
left: 0f32,
|
||||
top: 0f32,
|
||||
right: desc.Width as f32,
|
||||
bottom: desc.Height as f32 / 5f32,
|
||||
},
|
||||
&overlay_brush,
|
||||
);
|
||||
|
||||
// Then, renders text
|
||||
render_target.DrawTextLayout(
|
||||
D2D_POINT_2F { x: 0f32, y: 0f32 },
|
||||
context.text_layout.as_ref(),
|
||||
&text_brush,
|
||||
D2D1_DRAW_TEXT_OPTIONS_NONE,
|
||||
);
|
||||
|
||||
// EndDraw may not be successful for some reasons.
|
||||
// Ignores any error in this example
|
||||
let _ = render_target.EndDraw(None, None);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
// Add pad probe to calculate framerate
|
||||
let sinkpad = videosink.static_pad("sink").unwrap();
|
||||
let overlay_context_weak = Arc::downgrade(&overlay_context);
|
||||
sinkpad.add_probe(gst::PadProbeType::BUFFER, move |_, probe_info| {
|
||||
let overlay_context = overlay_context_weak.upgrade().unwrap();
|
||||
let mut context = overlay_context.lock().unwrap();
|
||||
context.timestamp_queue.push_back(SystemTime::now());
|
||||
// Updates framerate per 10 frames
|
||||
if context.timestamp_queue.len() >= 10 {
|
||||
let now = context.timestamp_queue.back().unwrap();
|
||||
let front = context.timestamp_queue.front().unwrap();
|
||||
let duration = now.duration_since(*front).unwrap().as_millis() as f32;
|
||||
context.avg_fps = 1000f32 * (context.timestamp_queue.len() - 1) as f32 / duration;
|
||||
context.timestamp_queue.clear();
|
||||
}
|
||||
|
||||
gst::PadProbeReturn::Ok
|
||||
});
|
||||
|
||||
let playbin = gst::ElementFactory::make("playbin")
|
||||
.property("uri", &args[1])
|
||||
.property("video-sink", &videosink)
|
||||
.build()
|
||||
.unwrap();
|
||||
|
||||
let main_loop_clone = main_loop.clone();
|
||||
let bus = playbin.bus().unwrap();
|
||||
let _bus_watch = bus
|
||||
.add_watch(move |_, msg| {
|
||||
use gst::MessageView;
|
||||
|
||||
let main_loop = &main_loop_clone;
|
||||
match msg.view() {
|
||||
MessageView::Eos(..) => {
|
||||
println!("received eos");
|
||||
main_loop.quit()
|
||||
}
|
||||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.src().map(|s| s.path_string()),
|
||||
err.error(),
|
||||
err.debug()
|
||||
);
|
||||
main_loop.quit();
|
||||
}
|
||||
_ => (),
|
||||
};
|
||||
|
||||
glib::ControlFlow::Continue
|
||||
})
|
||||
.unwrap();
|
||||
|
||||
playbin.set_state(gst::State::Playing).unwrap();
|
||||
|
||||
main_loop.run();
|
||||
|
||||
playbin.set_state(gst::State::Null).unwrap();
|
||||
|
||||
Ok(())
|
||||
}
|
85
examples/src/bin/debug_ringbuffer.rs
Normal file
85
examples/src/bin/debug_ringbuffer.rs
Normal file
|
@ -0,0 +1,85 @@
|
|||
// This example shows how to use the debug ringbuffer.
|
||||
//
|
||||
// It runs a simple GStreamer pipeline for a short time,
|
||||
// and on EOS it dumps the last few KB of debug logs.
|
||||
//
|
||||
// It's possible to dump the logs at any time in an application,
|
||||
// not just on exit like is done here.
|
||||
use std::process;
|
||||
|
||||
use gst::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
fn example_main() {
|
||||
let pipeline_str = "videotestsrc num-buffers=100 ! autovideosink";
|
||||
|
||||
gst::init().unwrap();
|
||||
|
||||
/* Disable stdout debug, then configure the debug ringbuffer and enable
|
||||
* all debug */
|
||||
gst::log::remove_default_log_function();
|
||||
/* Keep 1KB of logs per thread, removing old threads after 10 seconds */
|
||||
gst::log::add_ring_buffer_logger(1024, 10);
|
||||
/* Enable all debug categories */
|
||||
gst::log::set_default_threshold(gst::DebugLevel::Log);
|
||||
|
||||
let mut context = gst::ParseContext::new();
|
||||
let pipeline =
|
||||
match gst::parse::launch_full(pipeline_str, Some(&mut context), gst::ParseFlags::empty()) {
|
||||
Ok(pipeline) => pipeline,
|
||||
Err(err) => {
|
||||
if let Some(gst::ParseError::NoSuchElement) = err.kind::<gst::ParseError>() {
|
||||
println!("Missing element(s): {:?}", context.missing_elements());
|
||||
} else {
|
||||
println!("Failed to parse pipeline: {err}");
|
||||
}
|
||||
|
||||
process::exit(-1)
|
||||
}
|
||||
};
|
||||
let bus = pipeline.bus().unwrap();
|
||||
|
||||
pipeline
|
||||
.set_state(gst::State::Playing)
|
||||
.expect("Unable to set the pipeline to the `Playing` state");
|
||||
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
MessageView::Eos(..) => {
|
||||
break;
|
||||
}
|
||||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.src().map(|s| s.path_string()),
|
||||
err.error(),
|
||||
err.debug()
|
||||
);
|
||||
break;
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
|
||||
pipeline
|
||||
.set_state(gst::State::Null)
|
||||
.expect("Unable to set the pipeline to the `Null` state");
|
||||
|
||||
/* Insert a message into the debug log */
|
||||
gst::error!(gst::CAT_DEFAULT, "Hi from the debug log ringbuffer example");
|
||||
|
||||
println!("Dumping debug logs\n");
|
||||
for s in gst::log::ring_buffer_logger_get_logs().iter() {
|
||||
println!("{s}\n------------------");
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
|
@ -29,42 +29,28 @@
|
|||
// Especially Windows APIs tend to be quite picky about samplerate and sample-format.
|
||||
// The same applies to videostreams.
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::gst_element_error;
|
||||
use gst::gst_element_warning;
|
||||
use gst::prelude::*;
|
||||
|
||||
#[cfg(feature = "v1_10")]
|
||||
use glib::subclass::prelude::*;
|
||||
#[cfg(feature = "v1_10")]
|
||||
use glib::GBoxed;
|
||||
|
||||
use std::env;
|
||||
#[cfg(feature = "v1_10")]
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::{
|
||||
env,
|
||||
sync::{Arc, Mutex},
|
||||
};
|
||||
|
||||
use anyhow::Error;
|
||||
use derive_more::{Display, Error};
|
||||
use gst::{element_error, element_warning, prelude::*};
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Missing element {}", _0)]
|
||||
struct MissingElement(#[error(not(source))] &'static str);
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
|
||||
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
|
||||
struct ErrorMessage {
|
||||
src: String,
|
||||
error: String,
|
||||
debug: Option<String>,
|
||||
source: glib::Error,
|
||||
src: glib::GString,
|
||||
error: glib::Error,
|
||||
debug: Option<glib::GString>,
|
||||
}
|
||||
|
||||
#[cfg(feature = "v1_10")]
|
||||
#[derive(Clone, Debug, GBoxed)]
|
||||
#[gboxed(type_name = "ErrorValue")]
|
||||
#[derive(Clone, Debug, glib::Boxed)]
|
||||
#[boxed_type(name = "ErrorValue")]
|
||||
struct ErrorValue(Arc<Mutex<Option<Error>>>);
|
||||
|
||||
fn example_main() -> Result<(), Error> {
|
||||
|
@ -78,16 +64,14 @@ fn example_main() -> Result<(), Error> {
|
|||
std::process::exit(-1)
|
||||
};
|
||||
|
||||
let pipeline = gst::Pipeline::new(None);
|
||||
let src = gst::ElementFactory::make("filesrc", None).map_err(|_| MissingElement("filesrc"))?;
|
||||
let decodebin =
|
||||
gst::ElementFactory::make("decodebin", None).map_err(|_| MissingElement("decodebin"))?;
|
||||
let pipeline = gst::Pipeline::default();
|
||||
let src = gst::ElementFactory::make("filesrc")
|
||||
.property("location", uri)
|
||||
.build()?;
|
||||
let decodebin = gst::ElementFactory::make("decodebin").build()?;
|
||||
|
||||
// Tell the filesrc what file to load
|
||||
src.set_property("location", &uri)?;
|
||||
|
||||
pipeline.add_many(&[&src, &decodebin])?;
|
||||
gst::Element::link_many(&[&src, &decodebin])?;
|
||||
pipeline.add_many([&src, &decodebin])?;
|
||||
gst::Element::link_many([&src, &decodebin])?;
|
||||
|
||||
// Need to move a new reference into the closure.
|
||||
// !!ATTENTION!!:
|
||||
|
@ -106,27 +90,26 @@ fn example_main() -> Result<(), Error> {
|
|||
decodebin.connect_pad_added(move |dbin, src_pad| {
|
||||
// Here we temporarily retrieve a strong reference on the pipeline from the weak one
|
||||
// we moved into this callback.
|
||||
let pipeline = match pipeline_weak.upgrade() {
|
||||
Some(pipeline) => pipeline,
|
||||
None => return,
|
||||
let Some(pipeline) = pipeline_weak.upgrade() else {
|
||||
return;
|
||||
};
|
||||
|
||||
// Try to detect whether the raw stream decodebin provided us with
|
||||
// just now is either audio or video (or none of both, e.g. subtitles).
|
||||
let (is_audio, is_video) = {
|
||||
let media_type = src_pad.get_current_caps().and_then(|caps| {
|
||||
caps.get_structure(0).map(|s| {
|
||||
let name = s.get_name();
|
||||
let media_type = src_pad.current_caps().and_then(|caps| {
|
||||
caps.structure(0).map(|s| {
|
||||
let name = s.name();
|
||||
(name.starts_with("audio/"), name.starts_with("video/"))
|
||||
})
|
||||
});
|
||||
|
||||
match media_type {
|
||||
None => {
|
||||
gst_element_warning!(
|
||||
element_warning!(
|
||||
dbin,
|
||||
gst::CoreError::Negotiation,
|
||||
("Failed to get media type from pad {}", src_pad.get_name())
|
||||
("Failed to get media type from pad {}", src_pad.name())
|
||||
);
|
||||
|
||||
return;
|
||||
|
@ -143,14 +126,10 @@ fn example_main() -> Result<(), Error> {
|
|||
if is_audio {
|
||||
// decodebin found a raw audiostream, so we build the follow-up pipeline to
|
||||
// play it on the default audio playback device (using autoaudiosink).
|
||||
let queue = gst::ElementFactory::make("queue", None)
|
||||
.map_err(|_| MissingElement("queue"))?;
|
||||
let convert = gst::ElementFactory::make("audioconvert", None)
|
||||
.map_err(|_| MissingElement("audioconvert"))?;
|
||||
let resample = gst::ElementFactory::make("audioresample", None)
|
||||
.map_err(|_| MissingElement("audioresample"))?;
|
||||
let sink = gst::ElementFactory::make("autoaudiosink", None)
|
||||
.map_err(|_| MissingElement("autoaudiosink"))?;
|
||||
let queue = gst::ElementFactory::make("queue").build()?;
|
||||
let convert = gst::ElementFactory::make("audioconvert").build()?;
|
||||
let resample = gst::ElementFactory::make("audioresample").build()?;
|
||||
let sink = gst::ElementFactory::make("autoaudiosink").build()?;
|
||||
|
||||
let elements = &[&queue, &convert, &resample, &sink];
|
||||
pipeline.add_many(elements)?;
|
||||
|
@ -166,19 +145,15 @@ fn example_main() -> Result<(), Error> {
|
|||
|
||||
// Get the queue element's sink pad and link the decodebin's newly created
|
||||
// src pad for the audio stream to it.
|
||||
let sink_pad = queue.get_static_pad("sink").expect("queue has no sinkpad");
|
||||
let sink_pad = queue.static_pad("sink").expect("queue has no sinkpad");
|
||||
src_pad.link(&sink_pad)?;
|
||||
} else if is_video {
|
||||
// decodebin found a raw videostream, so we build the follow-up pipeline to
|
||||
// display it using the autovideosink.
|
||||
let queue = gst::ElementFactory::make("queue", None)
|
||||
.map_err(|_| MissingElement("queue"))?;
|
||||
let convert = gst::ElementFactory::make("videoconvert", None)
|
||||
.map_err(|_| MissingElement("videoconvert"))?;
|
||||
let scale = gst::ElementFactory::make("videoscale", None)
|
||||
.map_err(|_| MissingElement("videoscale"))?;
|
||||
let sink = gst::ElementFactory::make("autovideosink", None)
|
||||
.map_err(|_| MissingElement("autovideosink"))?;
|
||||
let queue = gst::ElementFactory::make("queue").build()?;
|
||||
let convert = gst::ElementFactory::make("videoconvert").build()?;
|
||||
let scale = gst::ElementFactory::make("videoscale").build()?;
|
||||
let sink = gst::ElementFactory::make("autovideosink").build()?;
|
||||
|
||||
let elements = &[&queue, &convert, &scale, &sink];
|
||||
pipeline.add_many(elements)?;
|
||||
|
@ -190,7 +165,7 @@ fn example_main() -> Result<(), Error> {
|
|||
|
||||
// Get the queue element's sink pad and link the decodebin's newly created
|
||||
// src pad for the video stream to it.
|
||||
let sink_pad = queue.get_static_pad("sink").expect("queue has no sinkpad");
|
||||
let sink_pad = queue.static_pad("sink").expect("queue has no sinkpad");
|
||||
src_pad.link(&sink_pad)?;
|
||||
}
|
||||
|
||||
|
@ -208,8 +183,7 @@ fn example_main() -> Result<(), Error> {
|
|||
if let Err(err) = insert_sink(is_audio, is_video) {
|
||||
// The following sends a message of type Error on the bus, containing our detailed
|
||||
// error information.
|
||||
#[cfg(feature = "v1_10")]
|
||||
gst_element_error!(
|
||||
element_error!(
|
||||
dbin,
|
||||
gst::LibraryError::Failed,
|
||||
("Failed to insert sink"),
|
||||
|
@ -218,28 +192,20 @@ fn example_main() -> Result<(), Error> {
|
|||
&ErrorValue(Arc::new(Mutex::new(Some(err)))))
|
||||
.build()
|
||||
);
|
||||
|
||||
#[cfg(not(feature = "v1_10"))]
|
||||
gst_element_error!(
|
||||
dbin,
|
||||
gst::LibraryError::Failed,
|
||||
("Failed to insert sink"),
|
||||
["{}", err]
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
pipeline.set_state(gst::State::Playing)?;
|
||||
|
||||
let bus = pipeline
|
||||
.get_bus()
|
||||
.bus()
|
||||
.expect("Pipeline without bus. Shouldn't happen!");
|
||||
|
||||
// This code iterates over all messages that are sent across our pipeline's bus.
|
||||
// In the callback ("pad-added" on the decodebin), we sent better error information
|
||||
// using a bus message. This is the position where we get those messages and log
|
||||
// the contained information.
|
||||
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
|
@ -247,53 +213,40 @@ fn example_main() -> Result<(), Error> {
|
|||
MessageView::Error(err) => {
|
||||
pipeline.set_state(gst::State::Null)?;
|
||||
|
||||
#[cfg(feature = "v1_10")]
|
||||
{
|
||||
match err.get_details() {
|
||||
// This bus-message of type error contained our custom error-details struct
|
||||
// that we sent in the pad-added callback above. So we unpack it and log
|
||||
// the detailed error information here. details contains a glib::SendValue.
|
||||
// The unpacked error is the converted to a Result::Err, stopping the
|
||||
// application's execution.
|
||||
Some(details) if details.get_name() == "error-details" => details
|
||||
.get::<&ErrorValue>("error")
|
||||
.unwrap()
|
||||
.and_then(|v| v.0.lock().unwrap().take())
|
||||
.map(Result::Err)
|
||||
.expect("error-details message without actual error"),
|
||||
_ => Err(ErrorMessage {
|
||||
src: msg
|
||||
.get_src()
|
||||
.map(|s| String::from(s.get_path_string()))
|
||||
.unwrap_or_else(|| String::from("None")),
|
||||
error: err.get_error().to_string(),
|
||||
debug: err.get_debug(),
|
||||
source: err.get_error(),
|
||||
}
|
||||
.into()),
|
||||
}?;
|
||||
}
|
||||
#[cfg(not(feature = "v1_10"))]
|
||||
{
|
||||
return Err(ErrorMessage {
|
||||
match err.details() {
|
||||
// This bus-message of type error contained our custom error-details struct
|
||||
// that we sent in the pad-added callback above. So we unpack it and log
|
||||
// the detailed error information here. details contains a glib::SendValue.
|
||||
// The unpacked error is the converted to a Result::Err, stopping the
|
||||
// application's execution.
|
||||
Some(details) if details.name() == "error-details" => details
|
||||
.get::<&ErrorValue>("error")
|
||||
.unwrap()
|
||||
.clone()
|
||||
.0
|
||||
.lock()
|
||||
.unwrap()
|
||||
.take()
|
||||
.map(Result::Err)
|
||||
.expect("error-details message without actual error"),
|
||||
_ => Err(ErrorMessage {
|
||||
src: msg
|
||||
.get_src()
|
||||
.map(|s| String::from(s.get_path_string()))
|
||||
.unwrap_or_else(|| String::from("None")),
|
||||
error: err.get_error().to_string(),
|
||||
debug: err.get_debug(),
|
||||
source: err.get_error(),
|
||||
.src()
|
||||
.map(|s| s.path_string())
|
||||
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
|
||||
error: err.error(),
|
||||
debug: err.debug(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
.into()),
|
||||
}?;
|
||||
}
|
||||
MessageView::StateChanged(s) => {
|
||||
println!(
|
||||
"State changed from {:?}: {:?} -> {:?} ({:?})",
|
||||
s.get_src().map(|s| s.get_path_string()),
|
||||
s.get_old(),
|
||||
s.get_current(),
|
||||
s.get_pending()
|
||||
s.src().map(|s| s.path_string()),
|
||||
s.old(),
|
||||
s.current(),
|
||||
s.pending()
|
||||
);
|
||||
}
|
||||
_ => (),
|
||||
|
@ -306,10 +259,10 @@ fn example_main() -> Result<(), Error> {
|
|||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
match examples_common::run(example_main) {
|
||||
Ok(r) => r,
|
||||
Err(e) => eprintln!("Error! {}", e),
|
||||
Err(e) => eprintln!("Error! {e}"),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -8,33 +8,26 @@
|
|||
// Discovered information could for example contain the stream's duration or whether it is
|
||||
// seekable (filesystem) or not (some http servers).
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
|
||||
extern crate gstreamer_pbutils as pbutils;
|
||||
use crate::pbutils::prelude::*;
|
||||
|
||||
use crate::pbutils::DiscovererInfo;
|
||||
use crate::pbutils::DiscovererStreamInfo;
|
||||
use std::env;
|
||||
|
||||
use anyhow::Error;
|
||||
use derive_more::{Display, Error};
|
||||
|
||||
use std::env;
|
||||
use gst_pbutils::{prelude::*, DiscovererInfo, DiscovererStreamInfo};
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Discoverer error {}", _0)]
|
||||
#[display(fmt = "Discoverer error {_0}")]
|
||||
struct DiscovererError(#[error(not(source))] &'static str);
|
||||
|
||||
fn print_tags(info: &DiscovererInfo) {
|
||||
println!("Tags:");
|
||||
|
||||
let tags = info.get_tags();
|
||||
let tags = info.tags();
|
||||
match tags {
|
||||
Some(taglist) => {
|
||||
println!(" {}", taglist.to_string()); // FIXME use an iterator
|
||||
println!(" {taglist}"); // FIXME use an iterator
|
||||
}
|
||||
None => {
|
||||
println!(" no tags");
|
||||
|
@ -44,30 +37,29 @@ fn print_tags(info: &DiscovererInfo) {
|
|||
|
||||
fn print_stream_info(stream: &DiscovererStreamInfo) {
|
||||
println!("Stream: ");
|
||||
if let Some(id) = stream.get_stream_id() {
|
||||
println!(" Stream id: {}", id);
|
||||
|
||||
if let Some(stream_id) = stream.stream_id() {
|
||||
println!(" Stream id: {}", stream_id);
|
||||
}
|
||||
let caps_str = match stream.get_caps() {
|
||||
|
||||
let caps_str = match stream.caps() {
|
||||
Some(caps) => caps.to_string(),
|
||||
None => String::from("--"),
|
||||
};
|
||||
println!(" Format: {}", caps_str);
|
||||
println!(" Format: {caps_str}");
|
||||
}
|
||||
|
||||
fn print_discoverer_info(info: &DiscovererInfo) -> Result<(), Error> {
|
||||
let uri = info
|
||||
.get_uri()
|
||||
.ok_or(DiscovererError("URI should not be null"))?;
|
||||
println!("URI: {}", uri);
|
||||
println!("Duration: {}", info.get_duration());
|
||||
println!("URI: {}", info.uri());
|
||||
println!("Duration: {}", info.duration().display());
|
||||
print_tags(info);
|
||||
print_stream_info(
|
||||
&info
|
||||
.get_stream_info()
|
||||
.stream_info()
|
||||
.ok_or(DiscovererError("Error while obtaining stream info"))?,
|
||||
);
|
||||
|
||||
let children = info.get_stream_list();
|
||||
let children = info.stream_list();
|
||||
println!("Children streams:");
|
||||
for child in children {
|
||||
print_stream_info(&child);
|
||||
|
@ -88,7 +80,7 @@ fn run_discoverer() -> Result<(), Error> {
|
|||
};
|
||||
|
||||
let timeout: gst::ClockTime = gst::ClockTime::from_seconds(15);
|
||||
let discoverer = pbutils::Discoverer::new(timeout)?;
|
||||
let discoverer = gst_pbutils::Discoverer::new(timeout)?;
|
||||
let info = discoverer.discover_uri(uri)?;
|
||||
print_discoverer_info(&info)?;
|
||||
Ok(())
|
||||
|
@ -97,12 +89,12 @@ fn run_discoverer() -> Result<(), Error> {
|
|||
fn example_main() {
|
||||
match run_discoverer() {
|
||||
Ok(_) => (),
|
||||
Err(e) => eprintln!("Error: {}", e),
|
||||
Err(e) => eprintln!("Error: {e}"),
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
||||
|
|
|
@ -12,79 +12,60 @@
|
|||
// {uridecodebin} -| {encodebin}-{filesink}
|
||||
// \-{queue}-{videoconvert}-{videoscale}----/
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::gst_element_error;
|
||||
use gst::gst_element_warning;
|
||||
use gst::prelude::*;
|
||||
|
||||
extern crate gstreamer_pbutils as gst_pbutils;
|
||||
use gst_pbutils::prelude::*;
|
||||
|
||||
#[cfg(feature = "v1_10")]
|
||||
use glib::subclass::prelude::*;
|
||||
#[cfg(feature = "v1_10")]
|
||||
use glib::GBoxed;
|
||||
|
||||
use std::env;
|
||||
#[cfg(feature = "v1_10")]
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::{
|
||||
env,
|
||||
sync::{Arc, Mutex},
|
||||
};
|
||||
|
||||
use anyhow::Error;
|
||||
use derive_more::{Display, Error};
|
||||
use gst::{element_error, element_warning};
|
||||
use gst_pbutils::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Missing element {}", _0)]
|
||||
struct MissingElement(#[error(not(source))] &'static str);
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
|
||||
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
|
||||
struct ErrorMessage {
|
||||
src: String,
|
||||
error: String,
|
||||
debug: Option<String>,
|
||||
source: glib::Error,
|
||||
src: glib::GString,
|
||||
error: glib::Error,
|
||||
debug: Option<glib::GString>,
|
||||
}
|
||||
|
||||
#[cfg(feature = "v1_10")]
|
||||
#[derive(Clone, Debug, GBoxed)]
|
||||
#[gboxed(type_name = "ErrorValue")]
|
||||
#[derive(Clone, Debug, glib::Boxed)]
|
||||
#[boxed_type(name = "ErrorValue")]
|
||||
struct ErrorValue(Arc<Mutex<Option<Error>>>);
|
||||
|
||||
fn configure_encodebin(encodebin: &gst::Element) -> Result<(), Error> {
|
||||
fn configure_encodebin(encodebin: &gst::Element) {
|
||||
// To tell the encodebin what we want it to produce, we create an EncodingProfile
|
||||
// https://gstreamer.freedesktop.org/data/doc/gstreamer/head/gst-plugins-base-libs/html/GstEncodingProfile.html
|
||||
// This profile consists of information about the contained audio and video formats
|
||||
// as well as the container format we want everything to be combined into.
|
||||
|
||||
// Every audiostream piped into the encodebin should be encoded using vorbis.
|
||||
let audio_profile = gst_pbutils::EncodingAudioProfileBuilder::new()
|
||||
.format(&gst::Caps::new_simple("audio/x-vorbis", &[]))
|
||||
.presence(0)
|
||||
.build()?;
|
||||
let audio_profile =
|
||||
gst_pbutils::EncodingAudioProfile::builder(&gst::Caps::builder("audio/x-vorbis").build())
|
||||
.presence(0)
|
||||
.build();
|
||||
|
||||
// Every videostream piped into the encodebin should be encoded using theora.
|
||||
let video_profile = gst_pbutils::EncodingVideoProfileBuilder::new()
|
||||
.format(&gst::Caps::new_simple("video/x-theora", &[]))
|
||||
.presence(0)
|
||||
.build()?;
|
||||
let video_profile =
|
||||
gst_pbutils::EncodingVideoProfile::builder(&gst::Caps::builder("video/x-theora").build())
|
||||
.presence(0)
|
||||
.build();
|
||||
|
||||
// All streams are then finally combined into a matroska container.
|
||||
let container_profile = gst_pbutils::EncodingContainerProfileBuilder::new()
|
||||
.name("container")
|
||||
.format(&gst::Caps::new_simple("video/x-matroska", &[]))
|
||||
.add_profile(&(video_profile))
|
||||
.add_profile(&(audio_profile))
|
||||
.build()?;
|
||||
let container_profile = gst_pbutils::EncodingContainerProfile::builder(
|
||||
&gst::Caps::builder("video/x-matroska").build(),
|
||||
)
|
||||
.name("container")
|
||||
.add_profile(video_profile)
|
||||
.add_profile(audio_profile)
|
||||
.build();
|
||||
|
||||
// Finally, apply the EncodingProfile onto our encodebin element.
|
||||
encodebin
|
||||
.set_property("profile", &container_profile)
|
||||
.expect("set profile property failed");
|
||||
|
||||
Ok(())
|
||||
encodebin.set_property("profile", &container_profile);
|
||||
}
|
||||
|
||||
fn example_main() -> Result<(), Error> {
|
||||
|
@ -102,31 +83,27 @@ fn example_main() -> Result<(), Error> {
|
|||
std::process::exit(-1)
|
||||
};
|
||||
|
||||
let pipeline = gst::Pipeline::new(None);
|
||||
let src = gst::ElementFactory::make("uridecodebin", None)
|
||||
.map_err(|_| MissingElement("uridecodebin"))?;
|
||||
let encodebin =
|
||||
gst::ElementFactory::make("encodebin", None).map_err(|_| MissingElement("encodebin"))?;
|
||||
let sink =
|
||||
gst::ElementFactory::make("filesink", None).map_err(|_| MissingElement("filesink"))?;
|
||||
|
||||
src.set_property("uri", &uri)
|
||||
.expect("setting URI Property failed");
|
||||
sink.set_property("location", &output_file)
|
||||
.expect("setting location property failed");
|
||||
let pipeline = gst::Pipeline::default();
|
||||
let src = gst::ElementFactory::make("uridecodebin")
|
||||
.property("uri", uri)
|
||||
.build()?;
|
||||
let encodebin = gst::ElementFactory::make("encodebin").build()?;
|
||||
let sink = gst::ElementFactory::make("filesink")
|
||||
.property("location", output_file)
|
||||
.build()?;
|
||||
|
||||
// Configure the encodebin.
|
||||
// Here we tell the bin what format we expect it to create at its output.
|
||||
configure_encodebin(&encodebin)?;
|
||||
configure_encodebin(&encodebin);
|
||||
|
||||
pipeline
|
||||
.add_many(&[&src, &encodebin, &sink])
|
||||
.add_many([&src, &encodebin, &sink])
|
||||
.expect("failed to add elements to pipeline");
|
||||
// It is clear from the start, that encodebin has only one src pad, so we can
|
||||
// directly link it to our filesink without problems.
|
||||
// The caps of encodebin's src-pad are set after we configured the encoding-profile.
|
||||
// (But filesink doesn't really care about the caps at its input anyway)
|
||||
gst::Element::link_many(&[&encodebin, &sink])?;
|
||||
gst::Element::link_many([&encodebin, &sink])?;
|
||||
|
||||
// Need to move a new reference into the closure.
|
||||
// !!ATTENTION!!:
|
||||
|
@ -143,28 +120,24 @@ fn example_main() -> Result<(), Error> {
|
|||
src.connect_pad_added(move |dbin, dbin_src_pad| {
|
||||
// Here we temporarily retrieve a strong reference on the pipeline from the weak one
|
||||
// we moved into this callback.
|
||||
let pipeline = match pipeline_weak.upgrade() {
|
||||
Some(pipeline) => pipeline,
|
||||
None => return,
|
||||
let Some(pipeline) = pipeline_weak.upgrade() else {
|
||||
return;
|
||||
};
|
||||
|
||||
let (is_audio, is_video) = {
|
||||
let media_type = dbin_src_pad.get_current_caps().and_then(|caps| {
|
||||
caps.get_structure(0).map(|s| {
|
||||
let name = s.get_name();
|
||||
let media_type = dbin_src_pad.current_caps().and_then(|caps| {
|
||||
caps.structure(0).map(|s| {
|
||||
let name = s.name();
|
||||
(name.starts_with("audio/"), name.starts_with("video/"))
|
||||
})
|
||||
});
|
||||
|
||||
match media_type {
|
||||
None => {
|
||||
gst_element_warning!(
|
||||
element_warning!(
|
||||
dbin,
|
||||
gst::CoreError::Negotiation,
|
||||
(
|
||||
"Failed to get media type from pad {}",
|
||||
dbin_src_pad.get_name()
|
||||
)
|
||||
("Failed to get media type from pad {}", dbin_src_pad.name())
|
||||
);
|
||||
|
||||
return;
|
||||
|
@ -175,12 +148,9 @@ fn example_main() -> Result<(), Error> {
|
|||
|
||||
let link_to_encodebin = |is_audio, is_video| -> Result<(), Error> {
|
||||
if is_audio {
|
||||
let queue = gst::ElementFactory::make("queue", None)
|
||||
.map_err(|_| MissingElement("queue"))?;
|
||||
let convert = gst::ElementFactory::make("audioconvert", None)
|
||||
.map_err(|_| MissingElement("audioconvert"))?;
|
||||
let resample = gst::ElementFactory::make("audioresample", None)
|
||||
.map_err(|_| MissingElement("audioresample"))?;
|
||||
let queue = gst::ElementFactory::make("queue").build()?;
|
||||
let convert = gst::ElementFactory::make("audioconvert").build()?;
|
||||
let resample = gst::ElementFactory::make("audioresample").build()?;
|
||||
|
||||
let elements = &[&queue, &convert, &resample];
|
||||
pipeline
|
||||
|
@ -192,11 +162,9 @@ fn example_main() -> Result<(), Error> {
|
|||
// The encodebin will then automatically create an internal pipeline, that encodes
|
||||
// the audio stream in the format we specified in the EncodingProfile.
|
||||
let enc_sink_pad = encodebin
|
||||
.get_request_pad("audio_%u")
|
||||
.request_pad_simple("audio_%u")
|
||||
.expect("Could not get audio pad from encodebin");
|
||||
let src_pad = resample
|
||||
.get_static_pad("src")
|
||||
.expect("resample has no srcpad");
|
||||
let src_pad = resample.static_pad("src").expect("resample has no srcpad");
|
||||
src_pad.link(&enc_sink_pad)?;
|
||||
|
||||
for e in elements {
|
||||
|
@ -205,15 +173,12 @@ fn example_main() -> Result<(), Error> {
|
|||
|
||||
// Get the queue element's sink pad and link the decodebin's newly created
|
||||
// src pad for the audio stream to it.
|
||||
let sink_pad = queue.get_static_pad("sink").expect("queue has no sinkpad");
|
||||
let sink_pad = queue.static_pad("sink").expect("queue has no sinkpad");
|
||||
dbin_src_pad.link(&sink_pad)?;
|
||||
} else if is_video {
|
||||
let queue = gst::ElementFactory::make("queue", None)
|
||||
.map_err(|_| MissingElement("queue"))?;
|
||||
let convert = gst::ElementFactory::make("videoconvert", None)
|
||||
.map_err(|_| MissingElement("videoconvert"))?;
|
||||
let scale = gst::ElementFactory::make("videoscale", None)
|
||||
.map_err(|_| MissingElement("videoscale"))?;
|
||||
let queue = gst::ElementFactory::make("queue").build()?;
|
||||
let convert = gst::ElementFactory::make("videoconvert").build()?;
|
||||
let scale = gst::ElementFactory::make("videoscale").build()?;
|
||||
|
||||
let elements = &[&queue, &convert, &scale];
|
||||
pipeline
|
||||
|
@ -223,13 +188,11 @@ fn example_main() -> Result<(), Error> {
|
|||
|
||||
// Request a sink pad from our encodebin, that can handle a raw videostream.
|
||||
// The encodebin will then automatically create an internal pipeline, that encodes
|
||||
// the audio stream in the format we specified in the EncodingProfile.
|
||||
// the video stream in the format we specified in the EncodingProfile.
|
||||
let enc_sink_pad = encodebin
|
||||
.get_request_pad("video_%u")
|
||||
.request_pad_simple("video_%u")
|
||||
.expect("Could not get video pad from encodebin");
|
||||
let src_pad = scale
|
||||
.get_static_pad("src")
|
||||
.expect("videoscale has no srcpad");
|
||||
let src_pad = scale.static_pad("src").expect("videoscale has no srcpad");
|
||||
src_pad.link(&enc_sink_pad)?;
|
||||
|
||||
for e in elements {
|
||||
|
@ -238,7 +201,7 @@ fn example_main() -> Result<(), Error> {
|
|||
|
||||
// Get the queue element's sink pad and link the decodebin's newly created
|
||||
// src pad for the video stream to it.
|
||||
let sink_pad = queue.get_static_pad("sink").expect("queue has no sinkpad");
|
||||
let sink_pad = queue.static_pad("sink").expect("queue has no sinkpad");
|
||||
dbin_src_pad.link(&sink_pad)?;
|
||||
}
|
||||
|
||||
|
@ -246,8 +209,7 @@ fn example_main() -> Result<(), Error> {
|
|||
};
|
||||
|
||||
if let Err(err) = link_to_encodebin(is_audio, is_video) {
|
||||
#[cfg(feature = "v1_10")]
|
||||
gst_element_error!(
|
||||
element_error!(
|
||||
dbin,
|
||||
gst::LibraryError::Failed,
|
||||
("Failed to insert sink"),
|
||||
|
@ -256,24 +218,16 @@ fn example_main() -> Result<(), Error> {
|
|||
&ErrorValue(Arc::new(Mutex::new(Some(err)))))
|
||||
.build()
|
||||
);
|
||||
|
||||
#[cfg(not(feature = "v1_10"))]
|
||||
gst_element_error!(
|
||||
dbin,
|
||||
gst::LibraryError::Failed,
|
||||
("Failed to insert sink"),
|
||||
["{}", err]
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
pipeline.set_state(gst::State::Playing)?;
|
||||
|
||||
let bus = pipeline
|
||||
.get_bus()
|
||||
.bus()
|
||||
.expect("Pipeline without bus. Shouldn't happen!");
|
||||
|
||||
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
|
@ -281,49 +235,35 @@ fn example_main() -> Result<(), Error> {
|
|||
MessageView::Error(err) => {
|
||||
pipeline.set_state(gst::State::Null)?;
|
||||
|
||||
#[cfg(feature = "v1_10")]
|
||||
{
|
||||
match err.get_details() {
|
||||
Some(details) if details.get_name() == "error-details" => details
|
||||
.get::<&ErrorValue>("error")
|
||||
.unwrap()
|
||||
.cloned()
|
||||
.and_then(|v| v.0.lock().unwrap().take())
|
||||
.map(Result::Err)
|
||||
.expect("error-details message without actual error"),
|
||||
_ => Err(ErrorMessage {
|
||||
src: msg
|
||||
.get_src()
|
||||
.map(|s| String::from(s.get_path_string()))
|
||||
.unwrap_or_else(|| String::from("None")),
|
||||
error: err.get_error().to_string(),
|
||||
debug: err.get_debug(),
|
||||
source: err.get_error(),
|
||||
}
|
||||
.into()),
|
||||
}?;
|
||||
}
|
||||
#[cfg(not(feature = "v1_10"))]
|
||||
{
|
||||
return Err(ErrorMessage {
|
||||
match err.details() {
|
||||
Some(details) if details.name() == "error-details" => details
|
||||
.get::<&ErrorValue>("error")
|
||||
.unwrap()
|
||||
.clone()
|
||||
.0
|
||||
.lock()
|
||||
.unwrap()
|
||||
.take()
|
||||
.map(Result::Err)
|
||||
.expect("error-details message without actual error"),
|
||||
_ => Err(ErrorMessage {
|
||||
src: msg
|
||||
.get_src()
|
||||
.map(|s| String::from(s.get_path_string()))
|
||||
.unwrap_or_else(|| String::from("None")),
|
||||
error: err.get_error().to_string(),
|
||||
debug: err.get_debug(),
|
||||
source: err.get_error(),
|
||||
.src()
|
||||
.map(|s| s.path_string())
|
||||
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
|
||||
error: err.error(),
|
||||
debug: err.debug(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
.into()),
|
||||
}?;
|
||||
}
|
||||
MessageView::StateChanged(s) => {
|
||||
println!(
|
||||
"State changed from {:?}: {:?} -> {:?} ({:?})",
|
||||
s.get_src().map(|s| s.get_path_string()),
|
||||
s.get_old(),
|
||||
s.get_current(),
|
||||
s.get_pending()
|
||||
s.src().map(|s| s.path_string()),
|
||||
s.old(),
|
||||
s.current(),
|
||||
s.pending()
|
||||
);
|
||||
}
|
||||
_ => (),
|
||||
|
@ -336,10 +276,10 @@ fn example_main() -> Result<(), Error> {
|
|||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
match examples_common::run(example_main) {
|
||||
Ok(r) => r,
|
||||
Err(e) => eprintln!("Error! {}", e),
|
||||
Err(e) => eprintln!("Error! {e}"),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -19,7 +19,6 @@
|
|||
// This makes it possible, e.g., to schedule an arbitrary piece of code
|
||||
// to run in the main loop thread - avoiding potential threading issues.
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
|
@ -31,8 +30,8 @@ fn example_main() {
|
|||
let main_loop = glib::MainLoop::new(None, false);
|
||||
|
||||
// This creates a pipeline by parsing the gst-launch pipeline syntax.
|
||||
let pipeline = gst::parse_launch("audiotestsrc ! fakesink").unwrap();
|
||||
let bus = pipeline.get_bus().unwrap();
|
||||
let pipeline = gst::parse::launch("audiotestsrc ! fakesink").unwrap();
|
||||
let bus = pipeline.bus().unwrap();
|
||||
|
||||
pipeline
|
||||
.set_state(gst::State::Playing)
|
||||
|
@ -51,14 +50,13 @@ fn example_main() {
|
|||
// Add a timeout to the main loop. This closure will be executed
|
||||
// in an interval of 5 seconds. The return value of the handler function
|
||||
// determines whether the handler still wants to be called:
|
||||
// - glib::Continue(false) - stop calling this handler, remove timeout
|
||||
// - glib::Continue(true) - continue calling this handler
|
||||
// - glib::ControlFlow::Break - stop calling this handler, remove timeout
|
||||
// - glib::ControlFlow::Continue- continue calling this handler
|
||||
glib::timeout_add_seconds(5, move || {
|
||||
// Here we temporarily retrieve a strong reference on the pipeline from the weak one
|
||||
// we moved into this callback.
|
||||
let pipeline = match pipeline_weak.upgrade() {
|
||||
Some(pipeline) => pipeline,
|
||||
None => return glib::Continue(false),
|
||||
let Some(pipeline) = pipeline_weak.upgrade() else {
|
||||
return glib::ControlFlow::Break;
|
||||
};
|
||||
|
||||
println!("sending eos");
|
||||
|
@ -78,43 +76,44 @@ fn example_main() {
|
|||
|
||||
// Remove this handler, the pipeline will shutdown anyway, now that we
|
||||
// sent the EOS event.
|
||||
glib::Continue(false)
|
||||
glib::ControlFlow::Break
|
||||
});
|
||||
|
||||
//bus.add_signal_watch();
|
||||
//bus.connect_message(move |_, msg| {
|
||||
//bus.connect_message(None, move |_, msg| {
|
||||
let main_loop_clone = main_loop.clone();
|
||||
// This sets the bus's signal handler (don't be mislead by the "add", there can only be one).
|
||||
// Every message from the bus is passed through this function. Its returnvalue determines
|
||||
// whether the handler wants to be called again. If glib::Continue(false) is returned, the
|
||||
// whether the handler wants to be called again. If glib::ControlFlow::Break is returned, the
|
||||
// handler is removed and will never be called again. The mainloop still runs though.
|
||||
bus.add_watch(move |_, msg| {
|
||||
use gst::MessageView;
|
||||
let _bus_watch = bus
|
||||
.add_watch(move |_, msg| {
|
||||
use gst::MessageView;
|
||||
|
||||
let main_loop = &main_loop_clone;
|
||||
match msg.view() {
|
||||
MessageView::Eos(..) => {
|
||||
println!("received eos");
|
||||
// An EndOfStream event was sent to the pipeline, so we tell our main loop
|
||||
// to stop execution here.
|
||||
main_loop.quit()
|
||||
}
|
||||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.get_src().map(|s| s.get_path_string()),
|
||||
err.get_error(),
|
||||
err.get_debug()
|
||||
);
|
||||
main_loop.quit();
|
||||
}
|
||||
_ => (),
|
||||
};
|
||||
let main_loop = &main_loop_clone;
|
||||
match msg.view() {
|
||||
MessageView::Eos(..) => {
|
||||
println!("received eos");
|
||||
// An EndOfStream event was sent to the pipeline, so we tell our main loop
|
||||
// to stop execution here.
|
||||
main_loop.quit()
|
||||
}
|
||||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.src().map(|s| s.path_string()),
|
||||
err.error(),
|
||||
err.debug()
|
||||
);
|
||||
main_loop.quit();
|
||||
}
|
||||
_ => (),
|
||||
};
|
||||
|
||||
// Tell the mainloop to continue executing this callback.
|
||||
glib::Continue(true)
|
||||
})
|
||||
.expect("Failed to add bus watch");
|
||||
// Tell the mainloop to continue executing this callback.
|
||||
glib::ControlFlow::Continue
|
||||
})
|
||||
.expect("Failed to add bus watch");
|
||||
|
||||
// Operate GStreamer's bus, facilliating GLib's mainloop here.
|
||||
// This function call will block until you tell the mainloop to quit
|
||||
|
@ -124,15 +123,10 @@ fn example_main() {
|
|||
pipeline
|
||||
.set_state(gst::State::Null)
|
||||
.expect("Unable to set the pipeline to the `Null` state");
|
||||
|
||||
// Remove the watch function from the bus.
|
||||
// Again: There can always only be one watch function.
|
||||
// Thus we don't have to tell him which function to remove.
|
||||
bus.remove_watch().unwrap();
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
||||
|
|
481
examples/src/bin/fd_allocator.rs
Normal file
481
examples/src/bin/fd_allocator.rs
Normal file
|
@ -0,0 +1,481 @@
|
|||
// This example demonstrates the use of the FdMemory allocator.
|
||||
// It operates the following two pipelines:
|
||||
|
||||
// sender: {videotestsrc} - {appsink}
|
||||
// receiver: {appsrc} - {FdMemoryVideoFilter} - {videoconvert} - {queue} - {autovideosink}
|
||||
|
||||
// The sender creates shared memory files from the appsink which are sent
|
||||
// to the receiver using a unix domain socket.
|
||||
// The receiver creates buffers in the appsrc using the FdMemoryAllocator from
|
||||
// the received file descriptors.
|
||||
|
||||
// Additional to demonstrating how the FdMemoryAllocator can be used to share
|
||||
// file descriptors the example implements a custom VideoFilter demonstrating
|
||||
// how the file descriptor of FdMemory can be accessed in a pipeline.
|
||||
// Note that instead of manual mapping the file descriptor it is also possible
|
||||
// to use map_writable, which will also map the file descriptor.
|
||||
use std::{
|
||||
os::unix::{net::UnixStream, prelude::AsRawFd},
|
||||
sync::{Arc, Mutex},
|
||||
};
|
||||
|
||||
use anyhow::Error;
|
||||
use futures::StreamExt;
|
||||
use gst::{element_error, prelude::*};
|
||||
use memmap2::MmapMut;
|
||||
use uds::UnixStreamExt;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
fn create_receiver_pipeline(
|
||||
video_info: &gst_video::VideoInfo,
|
||||
receiver: UnixStream,
|
||||
) -> Result<gst::Pipeline, Error> {
|
||||
let caps = video_info.to_caps()?;
|
||||
|
||||
let pipeline = gst::Pipeline::default();
|
||||
let src = gst_app::AppSrc::builder()
|
||||
.caps(&caps)
|
||||
.do_timestamp(true)
|
||||
.is_live(true)
|
||||
.build();
|
||||
let filter = video_filter::FdMemoryFadeInVideoFilter::default().upcast::<gst::Element>();
|
||||
let convert = gst::ElementFactory::make("videoconvert").build()?;
|
||||
let queue = gst::ElementFactory::make("queue").build()?;
|
||||
let sink = gst::ElementFactory::make("autovideosink").build()?;
|
||||
|
||||
pipeline.add_many([src.upcast_ref(), &filter, &convert, &queue, &sink])?;
|
||||
gst::Element::link_many([src.upcast_ref(), &filter, &convert, &queue, &sink])?;
|
||||
|
||||
let fd_allocator = gst_allocators::FdAllocator::new();
|
||||
let video_info = video_info.clone();
|
||||
let mut fd_buf = [-1; 253];
|
||||
|
||||
src.set_callbacks(
|
||||
gst_app::AppSrcCallbacks::builder()
|
||||
.need_data(move |appsrc, _| {
|
||||
// Read the next fds from the socket, if 0
|
||||
// is returned the sender has closed the stream
|
||||
// which is handled as EOS here.
|
||||
let fds = match receiver.recv_fds(&mut [0u8; 1], &mut fd_buf) {
|
||||
Ok((_, 0)) => {
|
||||
let _ = appsrc.end_of_stream();
|
||||
return;
|
||||
}
|
||||
Ok((_, fds)) => fds,
|
||||
Err(err) => {
|
||||
gst::error_msg!(
|
||||
gst::StreamError::Failed,
|
||||
("failed to receive fds: {}", err)
|
||||
);
|
||||
return;
|
||||
}
|
||||
};
|
||||
|
||||
for fd in &fd_buf[0..fds] {
|
||||
// Allocate a new FdMemory for the received file descriptor.
|
||||
// It is important that the size matches the size of the
|
||||
// actual backing storage. In this example we just use the
|
||||
// same video info in both sides, sending and receiving.
|
||||
// Pass FdMemoryFlags::NONE to make the FdMemory take
|
||||
// ownership of the passed file descriptor. The file descriptor
|
||||
// will be closed when the memory is released.
|
||||
let memory = unsafe {
|
||||
fd_allocator
|
||||
.alloc(*fd, video_info.size(), gst_allocators::FdMemoryFlags::NONE)
|
||||
.unwrap()
|
||||
};
|
||||
let mut buffer = gst::Buffer::new();
|
||||
let buffer_mut = buffer.make_mut();
|
||||
buffer_mut.append_memory(memory);
|
||||
let _ = appsrc.push_buffer(buffer);
|
||||
}
|
||||
})
|
||||
.build(),
|
||||
);
|
||||
|
||||
Ok(pipeline)
|
||||
}
|
||||
|
||||
fn create_sender_pipeline(
|
||||
video_info: &gst_video::VideoInfo,
|
||||
sender: UnixStream,
|
||||
) -> Result<gst::Pipeline, Error> {
|
||||
let sender = Arc::new(Mutex::new(sender));
|
||||
let caps = video_info.to_caps()?;
|
||||
|
||||
let pipeline = gst::Pipeline::default();
|
||||
let src = gst::ElementFactory::make("videotestsrc")
|
||||
.property("num-buffers", 250i32)
|
||||
.build()?;
|
||||
let sink = gst::ElementFactory::make("appsink").build()?;
|
||||
|
||||
sink.downcast_ref::<gst_app::AppSink>()
|
||||
.ok_or_else(|| anyhow::anyhow!("is not a appsink"))?
|
||||
.set_caps(Some(&caps));
|
||||
|
||||
pipeline.add_many([&src, &sink])?;
|
||||
gst::Element::link_many([&src, &sink])?;
|
||||
|
||||
let appsink = sink
|
||||
.downcast::<gst_app::AppSink>()
|
||||
.map_err(|_| anyhow::anyhow!("is not a appsink"))?;
|
||||
|
||||
appsink.set_callbacks(
|
||||
gst_app::AppSinkCallbacks::builder()
|
||||
// Add a handler to the "eos" signal
|
||||
.eos({
|
||||
let sender = sender.clone();
|
||||
move |_| {
|
||||
// Close the sender part of the UnixSocket pair, this will automatically
|
||||
// create a eos in the receiving part.
|
||||
let _ = sender.lock().unwrap().shutdown(std::net::Shutdown::Write);
|
||||
}
|
||||
})
|
||||
// Add a handler to the "new-sample" signal.
|
||||
.new_sample(move |appsink| {
|
||||
// Pull the sample in question out of the appsink's buffer.
|
||||
let sample = appsink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
|
||||
let buffer = sample.buffer().ok_or_else(|| {
|
||||
element_error!(
|
||||
appsink,
|
||||
gst::ResourceError::Failed,
|
||||
("Failed to get buffer from appsink")
|
||||
);
|
||||
|
||||
gst::FlowError::Error
|
||||
})?;
|
||||
|
||||
if buffer.n_memory() != 1 {
|
||||
element_error!(
|
||||
appsink,
|
||||
gst::ResourceError::Failed,
|
||||
("Expected buffer with single memory")
|
||||
);
|
||||
|
||||
return Err(gst::FlowError::Error);
|
||||
}
|
||||
|
||||
let mem = buffer.peek_memory(0);
|
||||
|
||||
// We can use downcast_memory_ref to check if the provided
|
||||
// memory is allocated by FdMemoryAllocator or a subtype of it.
|
||||
// Note: This is not used in the example, we will always copy
|
||||
// the memory to a new shared memory file.
|
||||
if let Some(fd_memory) = mem.downcast_memory_ref::<gst_allocators::FdMemory>() {
|
||||
// As we already got a fd we can just directly send it over the socket.
|
||||
// NOTE: Synchronization is left out of this example, in a real world
|
||||
// application access to the memory should be synchronized.
|
||||
// For example wayland provides a release callback to signal that
|
||||
// the memory is no longer in use.
|
||||
sender
|
||||
.lock()
|
||||
.unwrap()
|
||||
.send_fds(&[0u8; 1], &[fd_memory.fd()])
|
||||
.map_err(|_| {
|
||||
element_error!(
|
||||
appsink,
|
||||
gst::ResourceError::Failed,
|
||||
("Failed to send fd over unix stream")
|
||||
);
|
||||
|
||||
gst::FlowError::Error
|
||||
})?;
|
||||
} else {
|
||||
// At this point, buffer is only a reference to an existing memory region somewhere.
|
||||
// When we want to access its content, we have to map it while requesting the required
|
||||
// mode of access (read, read/write).
|
||||
// This type of abstraction is necessary, because the buffer in question might not be
|
||||
// on the machine's main memory itself, but rather in the GPU's memory.
|
||||
// So mapping the buffer makes the underlying memory region accessible to us.
|
||||
// See: https://gstreamer.freedesktop.org/documentation/plugin-development/advanced/allocation.html
|
||||
let map = buffer.map_readable().map_err(|_| {
|
||||
element_error!(
|
||||
appsink,
|
||||
gst::ResourceError::Failed,
|
||||
("Failed to map buffer readable")
|
||||
);
|
||||
|
||||
gst::FlowError::Error
|
||||
})?;
|
||||
|
||||
// Note: To simplify this example we always create a new shared memory file instead
|
||||
// of using a pool of buffers. When using a pool we need to make sure access to the
|
||||
// file is synchronized.
|
||||
let opts = memfd::MemfdOptions::default().allow_sealing(true);
|
||||
let mfd = opts.create("gst-examples").map_err(|err| {
|
||||
element_error!(
|
||||
appsink,
|
||||
gst::ResourceError::Failed,
|
||||
("Failed to allocated fd: {}", err)
|
||||
);
|
||||
|
||||
gst::FlowError::Error
|
||||
})?;
|
||||
|
||||
mfd.as_file().set_len(map.size() as u64).map_err(|err| {
|
||||
element_error!(
|
||||
appsink,
|
||||
gst::ResourceError::Failed,
|
||||
("Failed to resize fd memory: {}", err)
|
||||
);
|
||||
|
||||
gst::FlowError::Error
|
||||
})?;
|
||||
|
||||
let mut seals = memfd::SealsHashSet::new();
|
||||
seals.insert(memfd::FileSeal::SealShrink);
|
||||
seals.insert(memfd::FileSeal::SealGrow);
|
||||
mfd.add_seals(&seals).map_err(|err| {
|
||||
element_error!(
|
||||
appsink,
|
||||
gst::ResourceError::Failed,
|
||||
("Failed to add fd seals: {}", err)
|
||||
);
|
||||
|
||||
gst::FlowError::Error
|
||||
})?;
|
||||
|
||||
mfd.add_seal(memfd::FileSeal::SealSeal).map_err(|err| {
|
||||
element_error!(
|
||||
appsink,
|
||||
gst::ResourceError::Failed,
|
||||
("Failed to add fd seals: {}", err)
|
||||
);
|
||||
|
||||
gst::FlowError::Error
|
||||
})?;
|
||||
|
||||
unsafe {
|
||||
let mut mmap = MmapMut::map_mut(mfd.as_file()).map_err(|_| {
|
||||
element_error!(
|
||||
appsink,
|
||||
gst::ResourceError::Failed,
|
||||
("Failed to mmap fd")
|
||||
);
|
||||
|
||||
gst::FlowError::Error
|
||||
})?;
|
||||
|
||||
mmap.copy_from_slice(map.as_slice());
|
||||
};
|
||||
|
||||
sender
|
||||
.lock()
|
||||
.unwrap()
|
||||
.send_fds(&[0u8; 1], &[mfd.as_raw_fd()])
|
||||
.map_err(|_| {
|
||||
element_error!(
|
||||
appsink,
|
||||
gst::ResourceError::Failed,
|
||||
("Failed to send fd over unix stream")
|
||||
);
|
||||
|
||||
gst::FlowError::Error
|
||||
})?;
|
||||
};
|
||||
|
||||
Ok(gst::FlowSuccess::Ok)
|
||||
})
|
||||
.build(),
|
||||
);
|
||||
|
||||
Ok(pipeline)
|
||||
}
|
||||
|
||||
async fn message_loop(bus: gst::Bus) {
|
||||
let mut messages = bus.stream();
|
||||
|
||||
while let Some(msg) = messages.next().await {
|
||||
use gst::MessageView;
|
||||
|
||||
// Determine whether we want to quit: on EOS or error message
|
||||
// we quit, otherwise simply continue.
|
||||
match msg.view() {
|
||||
MessageView::Eos(..) => break,
|
||||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.src().map(|s| s.path_string()),
|
||||
err.error(),
|
||||
err.debug()
|
||||
);
|
||||
break;
|
||||
}
|
||||
_ => (),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
fn example_main() -> Result<(), Error> {
|
||||
gst::init()?;
|
||||
|
||||
let video_info = gst_video::VideoInfo::builder(gst_video::VideoFormat::Bgra, 1920, 1080)
|
||||
.fps(gst::Fraction::new(30, 1))
|
||||
.build()?;
|
||||
|
||||
let (sender, receiver) = std::os::unix::net::UnixStream::pair()?;
|
||||
let sender_pipeline = create_sender_pipeline(&video_info, sender)?;
|
||||
let receiver_pipeline = create_receiver_pipeline(&video_info, receiver)?;
|
||||
|
||||
let receiver_bus = receiver_pipeline.bus().expect("pipeline without bus");
|
||||
receiver_pipeline.set_state(gst::State::Playing)?;
|
||||
|
||||
let sender_bus = sender_pipeline.bus().expect("pipeline without bus");
|
||||
sender_pipeline.set_state(gst::State::Playing)?;
|
||||
|
||||
futures::executor::block_on(futures::future::join(
|
||||
message_loop(sender_bus),
|
||||
message_loop(receiver_bus),
|
||||
));
|
||||
|
||||
sender_pipeline.set_state(gst::State::Null)?;
|
||||
receiver_pipeline.set_state(gst::State::Null)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
match examples_common::run(example_main) {
|
||||
Ok(r) => r,
|
||||
Err(e) => eprintln!("Error! {e}"),
|
||||
}
|
||||
}
|
||||
|
||||
// The purpose of this custom video filter is to demonstrate how
|
||||
// the file descriptor of a FdMemory can be accessed.
|
||||
mod video_filter {
|
||||
glib::wrapper! {
|
||||
pub struct FdMemoryFadeInVideoFilter(ObjectSubclass<imp::FdMemoryFadeInVideoFilter>) @extends gst_video::VideoFilter, gst_base::BaseTransform, gst::Element, gst::Object;
|
||||
}
|
||||
|
||||
impl Default for FdMemoryFadeInVideoFilter {
|
||||
fn default() -> Self {
|
||||
glib::Object::builder().build()
|
||||
}
|
||||
}
|
||||
mod imp {
|
||||
use std::{mem::ManuallyDrop, os::unix::prelude::FromRawFd};
|
||||
|
||||
use anyhow::Error;
|
||||
use gst::{subclass::prelude::*, PadDirection, PadPresence, PadTemplate};
|
||||
use gst_app::gst_base::subclass::BaseTransformMode;
|
||||
use gst_video::{prelude::*, subclass::prelude::*, VideoFrameRef};
|
||||
use memmap2::MmapMut;
|
||||
use once_cell::sync::Lazy;
|
||||
|
||||
static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
|
||||
gst::DebugCategory::new(
|
||||
"fdmemoryfilter",
|
||||
gst::DebugColorFlags::empty(),
|
||||
Some("Example FdMemory filter"),
|
||||
)
|
||||
});
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct FdMemoryFadeInVideoFilter;
|
||||
|
||||
impl FdMemoryFadeInVideoFilter {
|
||||
fn transform_fd_mem_ip(
|
||||
&self,
|
||||
frame: &mut VideoFrameRef<&mut gst::BufferRef>,
|
||||
) -> Result<(), Error> {
|
||||
let buffer = frame.buffer();
|
||||
if buffer.n_memory() != 1 {
|
||||
return Err(anyhow::anyhow!(
|
||||
"only buffers with single memory are supported"
|
||||
));
|
||||
}
|
||||
let mem = buffer.peek_memory(0);
|
||||
if !mem.is_memory_type::<gst_allocators::FdMemory>() {
|
||||
return Err(anyhow::anyhow!("only fd memory is supported"));
|
||||
}
|
||||
|
||||
let timestamp = buffer.pts().unwrap();
|
||||
let factor = (timestamp.nseconds() as f64
|
||||
/ (5 * gst::ClockTime::SECOND).nseconds() as f64)
|
||||
.min(1.0f64);
|
||||
|
||||
// If the fade-in has finished return early
|
||||
if factor >= 1.0f64 {
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
let fd = mem
|
||||
.downcast_memory_ref::<gst_allocators::FdMemory>()
|
||||
.unwrap()
|
||||
.fd();
|
||||
|
||||
unsafe {
|
||||
// We wrap the Memmfd in ManuallyDrop here because from_raw_fd takes ownership of
|
||||
// the file descriptor which would close it on drop
|
||||
//
|
||||
// see: https://github.com/lucab/memfd-rs/issues/29
|
||||
let mfd = ManuallyDrop::new(memfd::Memfd::from_raw_fd(fd));
|
||||
let mut mmap = MmapMut::map_mut(mfd.as_file())?;
|
||||
|
||||
for pixel in mmap.chunks_exact_mut(4) {
|
||||
pixel[0] = (pixel[0] as f64 * factor).clamp(0.0, 255.0) as u8;
|
||||
pixel[1] = (pixel[1] as f64 * factor).clamp(0.0, 255.0) as u8;
|
||||
pixel[2] = (pixel[2] as f64 * factor).clamp(0.0, 255.0) as u8;
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl ElementImpl for FdMemoryFadeInVideoFilter {
|
||||
fn pad_templates() -> &'static [PadTemplate] {
|
||||
static PAD_TEMPLATES: std::sync::OnceLock<Vec<PadTemplate>> =
|
||||
std::sync::OnceLock::new();
|
||||
|
||||
PAD_TEMPLATES.get_or_init(|| {
|
||||
let caps = gst_video::VideoCapsBuilder::new()
|
||||
.format(gst_video::VideoFormat::Bgra)
|
||||
.build();
|
||||
vec![
|
||||
PadTemplate::new("sink", PadDirection::Sink, PadPresence::Always, &caps)
|
||||
.unwrap(),
|
||||
PadTemplate::new("src", PadDirection::Src, PadPresence::Always, &caps)
|
||||
.unwrap(),
|
||||
]
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl BaseTransformImpl for FdMemoryFadeInVideoFilter {
|
||||
const MODE: BaseTransformMode = BaseTransformMode::AlwaysInPlace;
|
||||
const PASSTHROUGH_ON_SAME_CAPS: bool = false;
|
||||
const TRANSFORM_IP_ON_PASSTHROUGH: bool = true;
|
||||
}
|
||||
|
||||
impl VideoFilterImpl for FdMemoryFadeInVideoFilter {
|
||||
fn transform_frame_ip(
|
||||
&self,
|
||||
frame: &mut VideoFrameRef<&mut gst::BufferRef>,
|
||||
) -> Result<gst::FlowSuccess, gst::FlowError> {
|
||||
self.transform_fd_mem_ip(frame).map_err(|err| {
|
||||
gst::error!(CAT, imp: self, "Failed to transform frame`: {}", err);
|
||||
gst::FlowError::Error
|
||||
})?;
|
||||
|
||||
Ok(gst::FlowSuccess::Ok)
|
||||
}
|
||||
}
|
||||
|
||||
impl ObjectImpl for FdMemoryFadeInVideoFilter {}
|
||||
|
||||
impl GstObjectImpl for FdMemoryFadeInVideoFilter {}
|
||||
|
||||
#[glib::object_subclass]
|
||||
impl ObjectSubclass for FdMemoryFadeInVideoFilter {
|
||||
const NAME: &'static str = "FdMemoryVideoFilter";
|
||||
type Type = super::FdMemoryFadeInVideoFilter;
|
||||
type ParentType = gst_video::VideoFilter;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -3,14 +3,11 @@
|
|||
// or for an EOS message. When a message notifying about either of both
|
||||
// is received, the future is resolved.
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::prelude::*;
|
||||
|
||||
use futures::executor::LocalPool;
|
||||
use futures::prelude::*;
|
||||
|
||||
use std::env;
|
||||
|
||||
use futures::{executor::LocalPool, prelude::*};
|
||||
use gst::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
|
@ -27,9 +24,9 @@ async fn message_loop(bus: gst::Bus) {
|
|||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.get_src().map(|s| s.get_path_string()),
|
||||
err.get_error(),
|
||||
err.get_debug()
|
||||
err.src().map(|s| s.path_string()),
|
||||
err.error(),
|
||||
err.debug()
|
||||
);
|
||||
break;
|
||||
}
|
||||
|
@ -45,8 +42,8 @@ fn example_main() {
|
|||
gst::init().unwrap();
|
||||
|
||||
// Create a pipeline from the launch-syntax given on the cli.
|
||||
let pipeline = gst::parse_launch(&pipeline_str).unwrap();
|
||||
let bus = pipeline.get_bus().unwrap();
|
||||
let pipeline = gst::parse::launch(&pipeline_str).unwrap();
|
||||
let bus = pipeline.bus().unwrap();
|
||||
|
||||
pipeline
|
||||
.set_state(gst::State::Playing)
|
||||
|
@ -64,7 +61,7 @@ fn example_main() {
|
|||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
||||
|
|
|
@ -35,19 +35,44 @@
|
|||
// those with lowers (higher number). Thus, Layers with higher priority are "in the front".
|
||||
// - The timeline is the enclosing element, grouping all layers and providing a timeframe.
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::prelude::*;
|
||||
|
||||
extern crate gstreamer_editing_services as ges;
|
||||
use ges::prelude::*;
|
||||
|
||||
use std::env;
|
||||
|
||||
use ges::prelude::*;
|
||||
|
||||
#[allow(unused_imports)]
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
fn main_loop(uri: &str) -> Result<(), glib::BoolError> {
|
||||
fn configure_pipeline(pipeline: &ges::Pipeline, output_name: &str) {
|
||||
// Every audiostream piped into the encodebin should be encoded using opus.
|
||||
let audio_profile =
|
||||
gst_pbutils::EncodingAudioProfile::builder(&gst::Caps::builder("audio/x-opus").build())
|
||||
.build();
|
||||
|
||||
// Every videostream piped into the encodebin should be encoded using vp8.
|
||||
let video_profile =
|
||||
gst_pbutils::EncodingVideoProfile::builder(&gst::Caps::builder("video/x-vp8").build())
|
||||
.build();
|
||||
|
||||
// All streams are then finally combined into a webm container.
|
||||
let container_profile =
|
||||
gst_pbutils::EncodingContainerProfile::builder(&gst::Caps::builder("video/webm").build())
|
||||
.name("container")
|
||||
.add_profile(video_profile)
|
||||
.add_profile(audio_profile)
|
||||
.build();
|
||||
|
||||
// Apply the EncodingProfile to the pipeline, and set it to render mode
|
||||
let output_uri = format!("{output_name}.webm");
|
||||
pipeline
|
||||
.set_render_settings(&output_uri, &container_profile)
|
||||
.expect("Failed to set render settings");
|
||||
pipeline
|
||||
.set_mode(ges::PipelineFlags::RENDER)
|
||||
.expect("Failed to set pipeline to render mode");
|
||||
}
|
||||
|
||||
fn main_loop(uri: &str, output: Option<&String>) -> Result<(), glib::BoolError> {
|
||||
ges::init()?;
|
||||
|
||||
// Begin by creating a timeline with audio and video tracks
|
||||
|
@ -57,6 +82,11 @@ fn main_loop(uri: &str) -> Result<(), glib::BoolError> {
|
|||
let pipeline = ges::Pipeline::new();
|
||||
pipeline.set_timeline(&timeline)?;
|
||||
|
||||
// If requested, configure the pipeline so it renders to a file.
|
||||
if let Some(output_name) = output {
|
||||
configure_pipeline(&pipeline, output_name);
|
||||
}
|
||||
|
||||
// Load a clip from the given uri and add it to the layer.
|
||||
let clip = ges::UriClip::new(uri).expect("Failed to create clip");
|
||||
layer.add_clip(&clip)?;
|
||||
|
@ -67,7 +97,7 @@ fn main_loop(uri: &str) -> Result<(), glib::BoolError> {
|
|||
|
||||
println!(
|
||||
"Agingtv scratch-lines: {}",
|
||||
clip.get_child_property("scratch-lines")
|
||||
clip.child_property("scratch-lines")
|
||||
.unwrap()
|
||||
.serialize()
|
||||
.unwrap()
|
||||
|
@ -75,16 +105,17 @@ fn main_loop(uri: &str) -> Result<(), glib::BoolError> {
|
|||
|
||||
// Retrieve the asset that was automatically used behind the scenes, to
|
||||
// extract the clip from.
|
||||
let asset = clip.get_asset().unwrap();
|
||||
let asset = clip.asset().unwrap();
|
||||
let duration = asset
|
||||
.downcast::<ges::UriClipAsset>()
|
||||
.unwrap()
|
||||
.get_duration();
|
||||
.duration()
|
||||
.expect("unknown duration");
|
||||
println!(
|
||||
"Clip duration: {} - playing file from {} for {}",
|
||||
duration,
|
||||
duration / 2,
|
||||
duration / 4
|
||||
duration / 4,
|
||||
);
|
||||
|
||||
// The inpoint specifies where in the clip we start, the duration specifies
|
||||
|
@ -98,8 +129,8 @@ fn main_loop(uri: &str) -> Result<(), glib::BoolError> {
|
|||
.set_state(gst::State::Playing)
|
||||
.expect("Unable to set the pipeline to the `Playing` state");
|
||||
|
||||
let bus = pipeline.get_bus().unwrap();
|
||||
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
|
||||
let bus = pipeline.bus().unwrap();
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
|
@ -107,9 +138,9 @@ fn main_loop(uri: &str) -> Result<(), glib::BoolError> {
|
|||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.get_src().map(|s| s.get_path_string()),
|
||||
err.get_error(),
|
||||
err.get_debug()
|
||||
err.src().map(|s| s.path_string()),
|
||||
err.error(),
|
||||
err.debug()
|
||||
);
|
||||
break;
|
||||
}
|
||||
|
@ -127,21 +158,22 @@ fn main_loop(uri: &str) -> Result<(), glib::BoolError> {
|
|||
#[allow(unused_variables)]
|
||||
fn example_main() {
|
||||
let args: Vec<_> = env::args().collect();
|
||||
let uri: &str = if args.len() == 2 {
|
||||
args[1].as_ref()
|
||||
} else {
|
||||
println!("Usage: ges launch");
|
||||
if args.len() < 2 || args.len() > 3 {
|
||||
println!("Usage: ges input [output]");
|
||||
std::process::exit(-1)
|
||||
};
|
||||
}
|
||||
|
||||
match main_loop(uri) {
|
||||
let input_uri: &str = args[1].as_ref();
|
||||
let output = args.get(2);
|
||||
|
||||
match main_loop(input_uri, output) {
|
||||
Ok(r) => r,
|
||||
Err(e) => eprintln!("Error! {}", e),
|
||||
Err(e) => eprintln!("Error! {e}"),
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
||||
|
|
174
examples/src/bin/glfilter.rs
Normal file
174
examples/src/bin/glfilter.rs
Normal file
|
@ -0,0 +1,174 @@
|
|||
#![allow(clippy::non_send_fields_in_send_ty)]
|
||||
|
||||
use anyhow::Result;
|
||||
|
||||
#[path = "../glupload.rs"]
|
||||
mod glupload;
|
||||
use glupload::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
pub mod examples_common;
|
||||
|
||||
/// The fragment shader used for transforming GL textures travelling through the
|
||||
/// pipeline. This fragment shader links against the default vertex shader
|
||||
/// provided by [`GLSLStage::new_default_vertex`].
|
||||
const FRAGMENT_SHADER: &str = r#"
|
||||
#ifdef GL_ES
|
||||
precision mediump float;
|
||||
#endif
|
||||
|
||||
// The filter draws a fullscreen quad and provides its coordinates here:
|
||||
varying vec2 v_texcoord;
|
||||
|
||||
// The input texture is bound on a uniform sampler named `tex`:
|
||||
uniform sampler2D tex;
|
||||
|
||||
void main () {
|
||||
// Flip texture read coordinate on the x axis to create a mirror effect:
|
||||
gl_FragColor = texture2D(tex, vec2(1.0 - v_texcoord.x, v_texcoord.y));
|
||||
}
|
||||
"#;
|
||||
|
||||
mod mirror {
|
||||
use std::sync::Mutex;
|
||||
|
||||
use gst_base::subclass::BaseTransformMode;
|
||||
use gst_gl::{
|
||||
prelude::*,
|
||||
subclass::{prelude::*, GLFilterMode},
|
||||
*,
|
||||
};
|
||||
use once_cell::sync::Lazy;
|
||||
|
||||
use super::{gl, FRAGMENT_SHADER};
|
||||
|
||||
pub static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
|
||||
gst::DebugCategory::new(
|
||||
"rsglmirrorfilter",
|
||||
gst::DebugColorFlags::empty(),
|
||||
Some("Rust GL Mirror Filter"),
|
||||
)
|
||||
});
|
||||
|
||||
glib::wrapper! {
|
||||
pub struct GLMirrorFilter(ObjectSubclass<imp::GLMirrorFilter>) @extends gst_gl::GLFilter, gst_gl::GLBaseFilter, gst_base::BaseTransform, gst::Element, gst::Object;
|
||||
}
|
||||
|
||||
impl GLMirrorFilter {
|
||||
pub fn new(name: Option<&str>) -> Self {
|
||||
glib::Object::builder().property("name", name).build()
|
||||
}
|
||||
}
|
||||
|
||||
mod imp {
|
||||
use super::*;
|
||||
|
||||
/// Private data consists of the transformation shader which is compiled
|
||||
/// in advance to running the actual filter.
|
||||
#[derive(Default)]
|
||||
pub struct GLMirrorFilter {
|
||||
shader: Mutex<Option<GLShader>>,
|
||||
}
|
||||
|
||||
impl GLMirrorFilter {
|
||||
fn create_shader(&self, context: &GLContext) -> Result<(), gst::LoggableError> {
|
||||
let shader = GLShader::new(context);
|
||||
|
||||
let vertex = GLSLStage::new_default_vertex(context);
|
||||
vertex.compile().unwrap();
|
||||
shader.attach_unlocked(&vertex)?;
|
||||
|
||||
gst::debug!(
|
||||
CAT,
|
||||
imp: self,
|
||||
"Compiling fragment shader {}",
|
||||
FRAGMENT_SHADER
|
||||
);
|
||||
|
||||
let fragment = GLSLStage::with_strings(
|
||||
context,
|
||||
gl::FRAGMENT_SHADER,
|
||||
// new_default_vertex is compiled with this version and profile:
|
||||
GLSLVersion::None,
|
||||
GLSLProfile::ES | GLSLProfile::COMPATIBILITY,
|
||||
&[FRAGMENT_SHADER],
|
||||
);
|
||||
fragment.compile().unwrap();
|
||||
shader.attach_unlocked(&fragment)?;
|
||||
shader.link().unwrap();
|
||||
|
||||
gst::debug!(
|
||||
CAT,
|
||||
imp: self,
|
||||
"Successfully compiled and linked {:?}",
|
||||
shader
|
||||
);
|
||||
|
||||
*self.shader.lock().unwrap() = Some(shader);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
// See `subclass.rs` for general documentation on creating a subclass. Extended
|
||||
// information like element metadata have been omitted for brevity.
|
||||
#[glib::object_subclass]
|
||||
impl ObjectSubclass for GLMirrorFilter {
|
||||
const NAME: &'static str = "RsGLMirrorFilter";
|
||||
type Type = super::GLMirrorFilter;
|
||||
type ParentType = gst_gl::GLFilter;
|
||||
}
|
||||
|
||||
impl ElementImpl for GLMirrorFilter {}
|
||||
impl GstObjectImpl for GLMirrorFilter {}
|
||||
impl ObjectImpl for GLMirrorFilter {}
|
||||
impl BaseTransformImpl for GLMirrorFilter {
|
||||
const MODE: BaseTransformMode = BaseTransformMode::NeverInPlace;
|
||||
const PASSTHROUGH_ON_SAME_CAPS: bool = false;
|
||||
const TRANSFORM_IP_ON_PASSTHROUGH: bool = false;
|
||||
}
|
||||
impl GLBaseFilterImpl for GLMirrorFilter {
|
||||
fn gl_start(&self) -> Result<(), gst::LoggableError> {
|
||||
let filter = self.obj();
|
||||
|
||||
// Create a shader when GL is started, knowing that the OpenGL context is
|
||||
// available.
|
||||
let context = GLBaseFilterExt::context(&*filter).unwrap();
|
||||
self.create_shader(&context)?;
|
||||
self.parent_gl_start()
|
||||
}
|
||||
}
|
||||
impl GLFilterImpl for GLMirrorFilter {
|
||||
const MODE: GLFilterMode = GLFilterMode::Texture;
|
||||
|
||||
fn filter_texture(
|
||||
&self,
|
||||
input: &gst_gl::GLMemory,
|
||||
output: &gst_gl::GLMemory,
|
||||
) -> Result<(), gst::LoggableError> {
|
||||
let filter = self.obj();
|
||||
|
||||
let shader = self.shader.lock().unwrap();
|
||||
// Use the underlying filter implementation to transform the input texture into
|
||||
// an output texture with the shader.
|
||||
filter.render_to_target_with_shader(
|
||||
input,
|
||||
output,
|
||||
shader
|
||||
.as_ref()
|
||||
.expect("No shader, call `create_shader` first!"),
|
||||
);
|
||||
self.parent_filter_texture(input, output)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn example_main() -> Result<()> {
|
||||
gst::init().unwrap();
|
||||
let glfilter = mirror::GLMirrorFilter::new(Some("Mirror filter"));
|
||||
App::new(Some(glfilter.as_ref())).and_then(main_loop)
|
||||
}
|
||||
|
||||
fn main() -> Result<()> {
|
||||
examples_common::run(example_main)
|
||||
}
|
|
@ -1,9 +1,7 @@
|
|||
extern crate gstreamer as gst;
|
||||
use gst::prelude::*;
|
||||
use std::env;
|
||||
|
||||
use futures::prelude::*;
|
||||
|
||||
use std::env;
|
||||
use gst::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
@ -21,9 +19,9 @@ async fn message_handler(loop_: glib::MainLoop, bus: gst::Bus) {
|
|||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.get_src().map(|s| s.get_path_string()),
|
||||
err.get_error(),
|
||||
err.get_debug()
|
||||
err.src().map(|s| s.path_string()),
|
||||
err.error(),
|
||||
err.debug()
|
||||
);
|
||||
loop_.quit();
|
||||
}
|
||||
|
@ -36,7 +34,6 @@ fn example_main() {
|
|||
// Get the default main context and make it also the thread default, then create
|
||||
// a main loop for it
|
||||
let ctx = glib::MainContext::default();
|
||||
ctx.push_thread_default();
|
||||
let loop_ = glib::MainLoop::new(Some(&ctx), false);
|
||||
|
||||
// Read the pipeline to launch from the commandline, using the launch syntax.
|
||||
|
@ -45,8 +42,8 @@ fn example_main() {
|
|||
gst::init().unwrap();
|
||||
|
||||
// Create a pipeline from the launch-syntax given on the cli.
|
||||
let pipeline = gst::parse_launch(&pipeline_str).unwrap();
|
||||
let bus = pipeline.get_bus().unwrap();
|
||||
let pipeline = gst::parse::launch(&pipeline_str).unwrap();
|
||||
let bus = pipeline.bus().unwrap();
|
||||
|
||||
pipeline
|
||||
.set_state(gst::State::Playing)
|
||||
|
@ -62,12 +59,10 @@ fn example_main() {
|
|||
pipeline
|
||||
.set_state(gst::State::Null)
|
||||
.expect("Unable to set the pipeline to the `Null` state");
|
||||
|
||||
ctx.pop_thread_default();
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
||||
|
|
|
@ -1,701 +0,0 @@
|
|||
// This example demostrates how to output GL textures, within an
|
||||
// EGL/X11 context provided by the application, and render those
|
||||
// textures in the GL application.
|
||||
|
||||
// {videotestsrc} - { glsinkbin }
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::gst_element_error;
|
||||
use gst::prelude::*;
|
||||
|
||||
extern crate gstreamer_app as gst_app;
|
||||
extern crate gstreamer_gl as gst_gl;
|
||||
use gst_gl::prelude::*;
|
||||
extern crate gstreamer_video as gst_video;
|
||||
|
||||
use std::ffi::CStr;
|
||||
use std::mem;
|
||||
use std::ptr;
|
||||
use std::sync::mpsc;
|
||||
|
||||
use anyhow::Error;
|
||||
use derive_more::{Display, Error};
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Missing element {}", _0)]
|
||||
struct MissingElement(#[error(not(source))] &'static str);
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
|
||||
struct ErrorMessage {
|
||||
src: String,
|
||||
error: String,
|
||||
debug: Option<String>,
|
||||
source: glib::Error,
|
||||
}
|
||||
|
||||
#[rustfmt::skip]
|
||||
static VERTICES: [f32; 20] = [
|
||||
1.0, 1.0, 0.0, 1.0, 0.0,
|
||||
-1.0, 1.0, 0.0, 0.0, 0.0,
|
||||
-1.0, -1.0, 0.0, 0.0, 1.0,
|
||||
1.0, -1.0, 0.0, 1.0, 1.0,
|
||||
];
|
||||
|
||||
static INDICES: [u16; 6] = [0, 1, 2, 0, 2, 3];
|
||||
|
||||
#[rustfmt::skip]
|
||||
static IDENTITY: [f32; 16] = [
|
||||
1.0, 0.0, 0.0, 0.0,
|
||||
0.0, 1.0, 0.0, 0.0,
|
||||
0.0, 0.0, 1.0, 0.0,
|
||||
0.0, 0.0, 0.0, 1.0,
|
||||
];
|
||||
|
||||
const VS_SRC: &[u8] = b"
|
||||
uniform mat4 u_transformation;
|
||||
attribute vec4 a_position;
|
||||
attribute vec2 a_texcoord;
|
||||
varying vec2 v_texcoord;
|
||||
|
||||
void main() {
|
||||
gl_Position = u_transformation * a_position;
|
||||
v_texcoord = a_texcoord;
|
||||
}
|
||||
\0";
|
||||
|
||||
const FS_SRC: &[u8] = b"
|
||||
#ifdef GL_ES
|
||||
precision mediump float;
|
||||
#endif
|
||||
varying vec2 v_texcoord;
|
||||
uniform sampler2D tex;
|
||||
|
||||
void main() {
|
||||
gl_FragColor = texture2D(tex, v_texcoord);
|
||||
}
|
||||
\0";
|
||||
|
||||
#[allow(clippy::unreadable_literal)]
|
||||
#[allow(clippy::unused_unit)]
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
mod gl {
|
||||
pub use self::Gles2 as Gl;
|
||||
include!(concat!(env!("OUT_DIR"), "/test_gl_bindings.rs"));
|
||||
}
|
||||
|
||||
struct Gl {
|
||||
gl: gl::Gl,
|
||||
program: gl::types::GLuint,
|
||||
attr_position: gl::types::GLint,
|
||||
attr_texture: gl::types::GLint,
|
||||
vao: Option<gl::types::GLuint>,
|
||||
vertex_buffer: gl::types::GLuint,
|
||||
vbo_indices: gl::types::GLuint,
|
||||
}
|
||||
|
||||
impl Gl {
|
||||
fn draw_frame(&self, texture_id: gl::types::GLuint) {
|
||||
unsafe {
|
||||
// render
|
||||
self.gl.ClearColor(0.0, 0.0, 0.0, 1.0);
|
||||
self.gl.Clear(gl::COLOR_BUFFER_BIT);
|
||||
|
||||
self.gl.BlendColor(0.0, 0.0, 0.0, 1.0);
|
||||
if self.gl.BlendFuncSeparate.is_loaded() {
|
||||
self.gl.BlendFuncSeparate(
|
||||
gl::SRC_ALPHA,
|
||||
gl::CONSTANT_COLOR,
|
||||
gl::ONE,
|
||||
gl::ONE_MINUS_SRC_ALPHA,
|
||||
);
|
||||
} else {
|
||||
self.gl.BlendFunc(gl::SRC_ALPHA, gl::CONSTANT_COLOR);
|
||||
}
|
||||
self.gl.BlendEquation(gl::FUNC_ADD);
|
||||
self.gl.Enable(gl::BLEND);
|
||||
|
||||
self.gl.UseProgram(self.program);
|
||||
|
||||
if self.gl.BindVertexArray.is_loaded() {
|
||||
self.gl.BindVertexArray(self.vao.unwrap());
|
||||
}
|
||||
|
||||
{
|
||||
self.gl
|
||||
.BindBuffer(gl::ELEMENT_ARRAY_BUFFER, self.vbo_indices);
|
||||
self.gl.BindBuffer(gl::ARRAY_BUFFER, self.vertex_buffer);
|
||||
|
||||
// Load the vertex position
|
||||
self.gl.VertexAttribPointer(
|
||||
self.attr_position as gl::types::GLuint,
|
||||
3,
|
||||
gl::FLOAT,
|
||||
gl::FALSE,
|
||||
(5 * mem::size_of::<f32>()) as gl::types::GLsizei,
|
||||
ptr::null(),
|
||||
);
|
||||
|
||||
// Load the texture coordinate
|
||||
self.gl.VertexAttribPointer(
|
||||
self.attr_texture as gl::types::GLuint,
|
||||
2,
|
||||
gl::FLOAT,
|
||||
gl::FALSE,
|
||||
(5 * mem::size_of::<f32>()) as gl::types::GLsizei,
|
||||
(3 * mem::size_of::<f32>()) as *const () as *const _,
|
||||
);
|
||||
|
||||
self.gl.EnableVertexAttribArray(self.attr_position as _);
|
||||
self.gl.EnableVertexAttribArray(self.attr_texture as _);
|
||||
}
|
||||
|
||||
self.gl.ActiveTexture(gl::TEXTURE0);
|
||||
self.gl.BindTexture(gl::TEXTURE_2D, texture_id);
|
||||
|
||||
let location = self
|
||||
.gl
|
||||
.GetUniformLocation(self.program, b"tex\0".as_ptr() as *const _);
|
||||
self.gl.Uniform1i(location, 0);
|
||||
|
||||
let location = self
|
||||
.gl
|
||||
.GetUniformLocation(self.program, b"u_transformation\0".as_ptr() as *const _);
|
||||
self.gl
|
||||
.UniformMatrix4fv(location, 1, gl::FALSE, IDENTITY.as_ptr() as *const _);
|
||||
|
||||
self.gl
|
||||
.DrawElements(gl::TRIANGLES, 6, gl::UNSIGNED_SHORT, ptr::null());
|
||||
|
||||
self.gl.BindTexture(gl::TEXTURE_2D, 0);
|
||||
self.gl.UseProgram(0);
|
||||
|
||||
if self.gl.BindVertexArray.is_loaded() {
|
||||
self.gl.BindVertexArray(0);
|
||||
}
|
||||
|
||||
{
|
||||
self.gl.BindBuffer(gl::ELEMENT_ARRAY_BUFFER, 0);
|
||||
self.gl.BindBuffer(gl::ARRAY_BUFFER, 0);
|
||||
|
||||
self.gl.DisableVertexAttribArray(self.attr_position as _);
|
||||
self.gl.DisableVertexAttribArray(self.attr_texture as _);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn resize(&self, size: glutin::dpi::PhysicalSize) {
|
||||
unsafe {
|
||||
self.gl
|
||||
.Viewport(0, 0, size.width as i32, size.height as i32);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn load(gl_context: &glutin::WindowedContext<glutin::PossiblyCurrent>) -> Gl {
|
||||
let gl = gl::Gl::load_with(|ptr| gl_context.get_proc_address(ptr) as *const _);
|
||||
|
||||
let version = unsafe {
|
||||
let data = CStr::from_ptr(gl.GetString(gl::VERSION) as *const _)
|
||||
.to_bytes()
|
||||
.to_vec();
|
||||
String::from_utf8(data).unwrap()
|
||||
};
|
||||
|
||||
println!("OpenGL version {}", version);
|
||||
|
||||
let (program, attr_position, attr_texture, vao, vertex_buffer, vbo_indices) = unsafe {
|
||||
let vs = gl.CreateShader(gl::VERTEX_SHADER);
|
||||
gl.ShaderSource(vs, 1, [VS_SRC.as_ptr() as *const _].as_ptr(), ptr::null());
|
||||
gl.CompileShader(vs);
|
||||
|
||||
let fs = gl.CreateShader(gl::FRAGMENT_SHADER);
|
||||
gl.ShaderSource(fs, 1, [FS_SRC.as_ptr() as *const _].as_ptr(), ptr::null());
|
||||
gl.CompileShader(fs);
|
||||
|
||||
let program = gl.CreateProgram();
|
||||
gl.AttachShader(program, vs);
|
||||
gl.AttachShader(program, fs);
|
||||
gl.LinkProgram(program);
|
||||
|
||||
{
|
||||
let mut success: gl::types::GLint = 1;
|
||||
gl.GetProgramiv(fs, gl::LINK_STATUS, &mut success);
|
||||
assert!(success != 0);
|
||||
}
|
||||
|
||||
let attr_position = gl.GetAttribLocation(program, b"a_position\0".as_ptr() as *const _);
|
||||
let attr_texture = gl.GetAttribLocation(program, b"a_texcoord\0".as_ptr() as *const _);
|
||||
|
||||
let vao = if gl.BindVertexArray.is_loaded() {
|
||||
let mut vao = mem::MaybeUninit::uninit();
|
||||
gl.GenVertexArrays(1, vao.as_mut_ptr());
|
||||
let vao = vao.assume_init();
|
||||
gl.BindVertexArray(vao);
|
||||
Some(vao)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let mut vertex_buffer = mem::MaybeUninit::uninit();
|
||||
gl.GenBuffers(1, vertex_buffer.as_mut_ptr());
|
||||
let vertex_buffer = vertex_buffer.assume_init();
|
||||
gl.BindBuffer(gl::ARRAY_BUFFER, vertex_buffer);
|
||||
gl.BufferData(
|
||||
gl::ARRAY_BUFFER,
|
||||
(VERTICES.len() * mem::size_of::<f32>()) as gl::types::GLsizeiptr,
|
||||
VERTICES.as_ptr() as *const _,
|
||||
gl::STATIC_DRAW,
|
||||
);
|
||||
|
||||
let mut vbo_indices = mem::MaybeUninit::uninit();
|
||||
gl.GenBuffers(1, vbo_indices.as_mut_ptr());
|
||||
let vbo_indices = vbo_indices.assume_init();
|
||||
gl.BindBuffer(gl::ELEMENT_ARRAY_BUFFER, vbo_indices);
|
||||
gl.BufferData(
|
||||
gl::ELEMENT_ARRAY_BUFFER,
|
||||
(INDICES.len() * mem::size_of::<u16>()) as gl::types::GLsizeiptr,
|
||||
INDICES.as_ptr() as *const _,
|
||||
gl::STATIC_DRAW,
|
||||
);
|
||||
|
||||
if gl.BindVertexArray.is_loaded() {
|
||||
gl.BindBuffer(gl::ELEMENT_ARRAY_BUFFER, vbo_indices);
|
||||
gl.BindBuffer(gl::ARRAY_BUFFER, vertex_buffer);
|
||||
|
||||
// Load the vertex position
|
||||
gl.VertexAttribPointer(
|
||||
attr_position as gl::types::GLuint,
|
||||
3,
|
||||
gl::FLOAT,
|
||||
gl::FALSE,
|
||||
(5 * mem::size_of::<f32>()) as gl::types::GLsizei,
|
||||
ptr::null(),
|
||||
);
|
||||
|
||||
// Load the texture coordinate
|
||||
gl.VertexAttribPointer(
|
||||
attr_texture as gl::types::GLuint,
|
||||
2,
|
||||
gl::FLOAT,
|
||||
gl::FALSE,
|
||||
(5 * mem::size_of::<f32>()) as gl::types::GLsizei,
|
||||
(3 * mem::size_of::<f32>()) as *const () as *const _,
|
||||
);
|
||||
|
||||
gl.EnableVertexAttribArray(attr_position as _);
|
||||
gl.EnableVertexAttribArray(attr_texture as _);
|
||||
|
||||
gl.BindVertexArray(0);
|
||||
}
|
||||
|
||||
gl.BindBuffer(gl::ELEMENT_ARRAY_BUFFER, 0);
|
||||
gl.BindBuffer(gl::ARRAY_BUFFER, 0);
|
||||
|
||||
(
|
||||
program,
|
||||
attr_position,
|
||||
attr_texture,
|
||||
vao,
|
||||
vertex_buffer,
|
||||
vbo_indices,
|
||||
)
|
||||
};
|
||||
|
||||
Gl {
|
||||
gl,
|
||||
program,
|
||||
attr_position,
|
||||
attr_texture,
|
||||
vao,
|
||||
vertex_buffer,
|
||||
vbo_indices,
|
||||
}
|
||||
}
|
||||
|
||||
struct App {
|
||||
pipeline: gst::Pipeline,
|
||||
appsink: gst_app::AppSink,
|
||||
glupload: gst::Element,
|
||||
bus: gst::Bus,
|
||||
events_loop: glutin::EventsLoop,
|
||||
windowed_context: glutin::WindowedContext<glutin::PossiblyCurrent>,
|
||||
shared_context: gst_gl::GLContext,
|
||||
}
|
||||
|
||||
impl App {
|
||||
fn new() -> Result<App, Error> {
|
||||
gst::init()?;
|
||||
|
||||
let (pipeline, appsink, glupload) = App::create_pipeline()?;
|
||||
let bus = pipeline
|
||||
.get_bus()
|
||||
.expect("Pipeline without bus. Shouldn't happen!");
|
||||
|
||||
let events_loop = glutin::EventsLoop::new();
|
||||
let window = glutin::WindowBuilder::new().with_title("GL rendering");
|
||||
let windowed_context = glutin::ContextBuilder::new()
|
||||
.with_vsync(true)
|
||||
.build_windowed(window, &events_loop)?;
|
||||
|
||||
let windowed_context = unsafe { windowed_context.make_current().map_err(|(_, err)| err)? };
|
||||
|
||||
#[cfg(any(feature = "gl-x11", feature = "gl-wayland"))]
|
||||
let inner_window = windowed_context.window();
|
||||
|
||||
let shared_context: gst_gl::GLContext;
|
||||
if cfg!(target_os = "linux") {
|
||||
use glutin::os::unix::RawHandle;
|
||||
#[cfg(any(feature = "gl-x11", feature = "gl-wayland"))]
|
||||
use glutin::os::unix::WindowExt;
|
||||
use glutin::os::ContextTraitExt;
|
||||
|
||||
let api = App::map_gl_api(windowed_context.get_api());
|
||||
|
||||
let (gl_context, gl_display, platform) = match unsafe { windowed_context.raw_handle() }
|
||||
{
|
||||
#[cfg(any(feature = "gl-egl", feature = "gl-wayland"))]
|
||||
RawHandle::Egl(egl_context) => {
|
||||
#[cfg(feature = "gl-egl")]
|
||||
let gl_display = if let Some(display) =
|
||||
unsafe { windowed_context.get_egl_display() }
|
||||
{
|
||||
unsafe { gst_gl::GLDisplayEGL::with_egl_display(display as usize) }.unwrap()
|
||||
} else {
|
||||
panic!("EGL context without EGL display");
|
||||
};
|
||||
|
||||
#[cfg(not(feature = "gl-egl"))]
|
||||
let gl_display = if let Some(display) = inner_window.get_wayland_display() {
|
||||
unsafe { gst_gl::GLDisplayWayland::with_display(display as usize) }.unwrap()
|
||||
} else {
|
||||
panic!("Wayland window without Wayland display");
|
||||
};
|
||||
|
||||
(
|
||||
egl_context as usize,
|
||||
gl_display.upcast::<gst_gl::GLDisplay>(),
|
||||
gst_gl::GLPlatform::EGL,
|
||||
)
|
||||
}
|
||||
#[cfg(feature = "gl-x11")]
|
||||
RawHandle::Glx(glx_context) => {
|
||||
let gl_display = if let Some(display) = inner_window.get_xlib_display() {
|
||||
unsafe { gst_gl::GLDisplayX11::with_display(display as usize) }.unwrap()
|
||||
} else {
|
||||
panic!("X11 window without X Display");
|
||||
};
|
||||
|
||||
(
|
||||
glx_context as usize,
|
||||
gl_display.upcast::<gst_gl::GLDisplay>(),
|
||||
gst_gl::GLPlatform::GLX,
|
||||
)
|
||||
}
|
||||
#[allow(unreachable_patterns)]
|
||||
handler => panic!("Unsupported platform: {:?}.", handler),
|
||||
};
|
||||
|
||||
shared_context =
|
||||
unsafe { gst_gl::GLContext::new_wrapped(&gl_display, gl_context, platform, api) }
|
||||
.unwrap();
|
||||
|
||||
shared_context
|
||||
.activate(true)
|
||||
.expect("Couldn't activate wrapped GL context");
|
||||
|
||||
shared_context.fill_info()?;
|
||||
|
||||
let gl_context = shared_context.clone();
|
||||
let events_proxy = events_loop.create_proxy();
|
||||
|
||||
#[allow(clippy::single_match)]
|
||||
bus.set_sync_handler(move |_, msg| {
|
||||
match msg.view() {
|
||||
gst::MessageView::NeedContext(ctxt) => {
|
||||
let context_type = ctxt.get_context_type();
|
||||
if context_type == *gst_gl::GL_DISPLAY_CONTEXT_TYPE {
|
||||
if let Some(el) =
|
||||
msg.get_src().map(|s| s.downcast::<gst::Element>().unwrap())
|
||||
{
|
||||
let context = gst::Context::new(context_type, true);
|
||||
context.set_gl_display(&gl_display);
|
||||
el.set_context(&context);
|
||||
}
|
||||
}
|
||||
if context_type == "gst.gl.app_context" {
|
||||
if let Some(el) =
|
||||
msg.get_src().map(|s| s.downcast::<gst::Element>().unwrap())
|
||||
{
|
||||
let mut context = gst::Context::new(context_type, true);
|
||||
{
|
||||
let context = context.get_mut().unwrap();
|
||||
let s = context.get_mut_structure();
|
||||
s.set_value("context", gl_context.to_send_value());
|
||||
}
|
||||
el.set_context(&context);
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
|
||||
let _ = events_proxy.wakeup();
|
||||
|
||||
gst::BusSyncReply::Pass
|
||||
});
|
||||
} else {
|
||||
panic!("This example only has Linux support");
|
||||
}
|
||||
|
||||
Ok(App {
|
||||
pipeline,
|
||||
appsink,
|
||||
glupload,
|
||||
bus,
|
||||
events_loop,
|
||||
windowed_context,
|
||||
shared_context,
|
||||
})
|
||||
}
|
||||
|
||||
fn setup(
|
||||
&self,
|
||||
events_loop: &glutin::EventsLoop,
|
||||
) -> Result<mpsc::Receiver<gst::Sample>, Error> {
|
||||
let events_proxy = events_loop.create_proxy();
|
||||
let (sender, receiver) = mpsc::channel();
|
||||
self.appsink.set_callbacks(
|
||||
gst_app::AppSinkCallbacks::builder()
|
||||
.new_sample(move |appsink| {
|
||||
let sample = appsink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
|
||||
|
||||
{
|
||||
let _buffer = sample.get_buffer().ok_or_else(|| {
|
||||
gst_element_error!(
|
||||
appsink,
|
||||
gst::ResourceError::Failed,
|
||||
("Failed to get buffer from appsink")
|
||||
);
|
||||
|
||||
gst::FlowError::Error
|
||||
})?;
|
||||
|
||||
let _info = sample
|
||||
.get_caps()
|
||||
.and_then(|caps| gst_video::VideoInfo::from_caps(caps).ok())
|
||||
.ok_or_else(|| {
|
||||
gst_element_error!(
|
||||
appsink,
|
||||
gst::ResourceError::Failed,
|
||||
("Failed to get video info from sample")
|
||||
);
|
||||
|
||||
gst::FlowError::Error
|
||||
})?;
|
||||
}
|
||||
|
||||
sender
|
||||
.send(sample)
|
||||
.map(|_| gst::FlowSuccess::Ok)
|
||||
.map_err(|_| gst::FlowError::Error)?;
|
||||
|
||||
let _ = events_proxy.wakeup();
|
||||
|
||||
Ok(gst::FlowSuccess::Ok)
|
||||
})
|
||||
.build(),
|
||||
);
|
||||
|
||||
self.pipeline.set_state(gst::State::Playing)?;
|
||||
|
||||
Ok(receiver)
|
||||
}
|
||||
|
||||
fn map_gl_api(api: glutin::Api) -> gst_gl::GLAPI {
|
||||
match api {
|
||||
glutin::Api::OpenGl => gst_gl::GLAPI::OPENGL3,
|
||||
glutin::Api::OpenGlEs => gst_gl::GLAPI::GLES2,
|
||||
_ => gst_gl::GLAPI::empty(),
|
||||
}
|
||||
}
|
||||
|
||||
fn create_pipeline() -> Result<(gst::Pipeline, gst_app::AppSink, gst::Element), Error> {
|
||||
let pipeline = gst::Pipeline::new(None);
|
||||
let src = gst::ElementFactory::make("videotestsrc", None)
|
||||
.map_err(|_| MissingElement("videotestsrc"))?;
|
||||
let sink = gst::ElementFactory::make("glsinkbin", None)
|
||||
.map_err(|_| MissingElement("glsinkbin"))?;
|
||||
|
||||
pipeline.add_many(&[&src, &sink])?;
|
||||
src.link(&sink)?;
|
||||
|
||||
let appsink = gst::ElementFactory::make("appsink", None)
|
||||
.map_err(|_| MissingElement("appsink"))?
|
||||
.dynamic_cast::<gst_app::AppSink>()
|
||||
.expect("Sink element is expected to be an appsink!");
|
||||
|
||||
sink.set_property("sink", &appsink)?;
|
||||
|
||||
appsink.set_property("enable-last-sample", &false.to_value())?;
|
||||
appsink.set_property("emit-signals", &false.to_value())?;
|
||||
appsink.set_property("max-buffers", &1u32.to_value())?;
|
||||
|
||||
let caps = gst::Caps::builder("video/x-raw")
|
||||
.features(&[&gst_gl::CAPS_FEATURE_MEMORY_GL_MEMORY])
|
||||
.field("format", &gst_video::VideoFormat::Rgba.to_str())
|
||||
.field("texture-target", &"2D")
|
||||
.build();
|
||||
appsink.set_caps(Some(&caps));
|
||||
|
||||
// get the glupload element to extract later the used context in it
|
||||
let mut iter = sink.dynamic_cast::<gst::Bin>().unwrap().iterate_elements();
|
||||
let glupload = loop {
|
||||
match iter.next() {
|
||||
Ok(Some(element)) => {
|
||||
if "glupload" == element.get_factory().unwrap().get_name() {
|
||||
break Some(element);
|
||||
}
|
||||
}
|
||||
Err(gst::IteratorError::Resync) => iter.resync(),
|
||||
_ => break None,
|
||||
}
|
||||
};
|
||||
|
||||
Ok((pipeline, appsink, glupload.unwrap()))
|
||||
}
|
||||
|
||||
fn handle_messages(bus: &gst::Bus) -> Result<(), Error> {
|
||||
use gst::MessageView;
|
||||
|
||||
for msg in bus.iter() {
|
||||
match msg.view() {
|
||||
MessageView::Eos(..) => break,
|
||||
MessageView::Error(err) => {
|
||||
return Err(ErrorMessage {
|
||||
src: msg
|
||||
.get_src()
|
||||
.map(|s| String::from(s.get_path_string()))
|
||||
.unwrap_or_else(|| String::from("None")),
|
||||
error: err.get_error().to_string(),
|
||||
debug: err.get_debug(),
|
||||
source: err.get_error(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn into_context(self: App) -> glutin::WindowedContext<glutin::PossiblyCurrent> {
|
||||
self.windowed_context
|
||||
}
|
||||
}
|
||||
|
||||
fn main_loop(mut app: App) -> Result<glutin::WindowedContext<glutin::PossiblyCurrent>, Error> {
|
||||
println!(
|
||||
"Pixel format of the window's GL context {:?}",
|
||||
app.windowed_context.get_pixel_format()
|
||||
);
|
||||
|
||||
let gl = load(&app.windowed_context);
|
||||
|
||||
let receiver = app.setup(&app.events_loop)?;
|
||||
|
||||
let mut curr_frame: Option<gst_video::VideoFrame<gst_video::video_frame::Readable>> = None;
|
||||
let mut running = true;
|
||||
let mut gst_gl_context: Option<gst_gl::GLContext> = None;
|
||||
let events_loop = &mut app.events_loop;
|
||||
let windowed_context = &mut app.windowed_context;
|
||||
let bus = &app.bus;
|
||||
|
||||
while running {
|
||||
#[allow(clippy::single_match)]
|
||||
events_loop.poll_events(|event| match event {
|
||||
glutin::Event::WindowEvent { event, .. } => match event {
|
||||
glutin::WindowEvent::CloseRequested => running = false,
|
||||
glutin::WindowEvent::Resized(logical_size) => {
|
||||
let dpi_factor = windowed_context.window().get_hidpi_factor();
|
||||
windowed_context.resize(logical_size.to_physical(dpi_factor));
|
||||
gl.resize(logical_size.to_physical(dpi_factor));
|
||||
}
|
||||
_ => (),
|
||||
},
|
||||
_ => (),
|
||||
});
|
||||
|
||||
// Handle all pending messages. Whenever there is a message we will
|
||||
// wake up the events loop above
|
||||
App::handle_messages(&bus)?;
|
||||
|
||||
// get the last frame in channel
|
||||
if let Some(sample) = receiver.try_iter().last() {
|
||||
let buffer = sample.get_buffer_owned().unwrap();
|
||||
let info = sample
|
||||
.get_caps()
|
||||
.and_then(|caps| gst_video::VideoInfo::from_caps(caps).ok())
|
||||
.unwrap();
|
||||
|
||||
{
|
||||
if gst_gl_context.is_none() {
|
||||
gst_gl_context = app
|
||||
.glupload
|
||||
.get_property("context")
|
||||
.unwrap()
|
||||
.get::<gst_gl::GLContext>()
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
let sync_meta = buffer.get_meta::<gst_gl::GLSyncMeta>().unwrap();
|
||||
sync_meta.set_sync_point(gst_gl_context.as_ref().unwrap());
|
||||
}
|
||||
|
||||
if let Ok(frame) = gst_video::VideoFrame::from_buffer_readable_gl(buffer, &info) {
|
||||
curr_frame = Some(frame);
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(frame) = curr_frame.as_ref() {
|
||||
let sync_meta = frame.buffer().get_meta::<gst_gl::GLSyncMeta>().unwrap();
|
||||
sync_meta.wait(&app.shared_context);
|
||||
if let Some(texture) = frame.get_texture_id(0) {
|
||||
gl.draw_frame(texture as gl::types::GLuint);
|
||||
}
|
||||
}
|
||||
windowed_context.swap_buffers()?;
|
||||
}
|
||||
|
||||
app.pipeline.send_event(gst::event::Eos::new());
|
||||
app.pipeline.set_state(gst::State::Null)?;
|
||||
|
||||
Ok(app.into_context())
|
||||
}
|
||||
|
||||
fn cleanup(
|
||||
_windowed_context: glutin::WindowedContext<glutin::PossiblyCurrent>,
|
||||
) -> Result<(), Error> {
|
||||
// To ensure that the context stays alive longer than the pipeline or any reference
|
||||
// inside GStreamer to the GL context, its display or anything else. See
|
||||
// https://gitlab.freedesktop.org/gstreamer/gstreamer-rs/issues/196
|
||||
//
|
||||
// We might do any window/GL specific cleanup here as needed.
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn example_main() {
|
||||
match App::new().and_then(main_loop).and_then(cleanup) {
|
||||
Ok(r) => r,
|
||||
Err(e) => eprintln!("Error! {}", e),
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
examples_common::run(example_main);
|
||||
}
|
18
examples/src/bin/glwindow.rs
Normal file
18
examples/src/bin/glwindow.rs
Normal file
|
@ -0,0 +1,18 @@
|
|||
#![allow(clippy::non_send_fields_in_send_ty)]
|
||||
|
||||
use anyhow::Result;
|
||||
|
||||
#[path = "../glupload.rs"]
|
||||
mod glupload;
|
||||
use glupload::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
pub mod examples_common;
|
||||
|
||||
fn example_main() -> Result<()> {
|
||||
App::new(None).and_then(main_loop)
|
||||
}
|
||||
|
||||
fn main() -> Result<()> {
|
||||
examples_common::run(example_main)
|
||||
}
|
|
@ -1,163 +0,0 @@
|
|||
// This example demonstrates how to use gstreamer in conjunction with the gtk widget toolkit.
|
||||
// This example shows the video produced by a videotestsrc within a small gtk gui.
|
||||
// For this, the gtkglsink is used, which creates a gtk widget one can embed the gtk gui.
|
||||
// For this, there multiple types of widgets. gtkglsink uses OpenGL to render frames, and
|
||||
// gtksink uses the CPU to render the frames (which is way slower).
|
||||
// So the example application first tries to use OpenGL, and when that fails, fall back.
|
||||
// The pipeline looks like the following:
|
||||
|
||||
// gtk-gui: {gtkglsink}-widget
|
||||
// (|)
|
||||
// {videotestsrc} - {glsinkbin}
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::prelude::*;
|
||||
|
||||
use gio::prelude::*;
|
||||
|
||||
use gtk::prelude::*;
|
||||
|
||||
use std::cell::RefCell;
|
||||
use std::env;
|
||||
|
||||
fn create_ui(app: >k::Application) {
|
||||
let pipeline = gst::Pipeline::new(None);
|
||||
let src = gst::ElementFactory::make("videotestsrc", None).unwrap();
|
||||
// Create the gtk sink and retrieve the widget from it. The sink element will be used
|
||||
// in the pipeline, and the widget will be embedded in our gui.
|
||||
// Gstreamer then displays frames in the gtk widget.
|
||||
// First, we try to use the OpenGL version - and if that fails, we fall back to non-OpenGL.
|
||||
let (sink, widget) = if let Ok(gtkglsink) = gst::ElementFactory::make("gtkglsink", None) {
|
||||
// Using the OpenGL widget succeeded, so we are in for a nice playback experience with
|
||||
// low cpu usage. :)
|
||||
// The gtkglsink essentially allocates an OpenGL texture on the GPU, that it will display.
|
||||
// Now we create the glsinkbin element, which is responsible for conversions and for uploading
|
||||
// video frames to our texture (if they are not already in the GPU). Now we tell the OpenGL-sink
|
||||
// about our gtkglsink element, form where it will retrieve the OpenGL texture to fill.
|
||||
let glsinkbin = gst::ElementFactory::make("glsinkbin", None).unwrap();
|
||||
glsinkbin
|
||||
.set_property("sink", >kglsink.to_value())
|
||||
.unwrap();
|
||||
// The gtkglsink creates the gtk widget for us. This is accessible through a property.
|
||||
// So we get it and use it later to add it to our gui.
|
||||
let widget = gtkglsink.get_property("widget").unwrap();
|
||||
(glsinkbin, widget.get::<gtk::Widget>().unwrap().unwrap())
|
||||
} else {
|
||||
// Unfortunately, using the OpenGL widget didn't work out, so we will have to render
|
||||
// our frames manually, using the CPU. An example why this may fail is, when
|
||||
// the PC doesn't have proper graphics drivers installed.
|
||||
let sink = gst::ElementFactory::make("gtksink", None).unwrap();
|
||||
// The gtksink creates the gtk widget for us. This is accessible through a property.
|
||||
// So we get it and use it later to add it to our gui.
|
||||
let widget = sink.get_property("widget").unwrap();
|
||||
(sink, widget.get::<gtk::Widget>().unwrap().unwrap())
|
||||
};
|
||||
|
||||
pipeline.add_many(&[&src, &sink]).unwrap();
|
||||
src.link(&sink).unwrap();
|
||||
|
||||
// Create a simple gtk gui window to place our widget into.
|
||||
let window = gtk::Window::new(gtk::WindowType::Toplevel);
|
||||
window.set_default_size(320, 240);
|
||||
let vbox = gtk::Box::new(gtk::Orientation::Vertical, 0);
|
||||
// Add our widget to the gui
|
||||
vbox.pack_start(&widget, true, true, 0);
|
||||
let label = gtk::Label::new(Some("Position: 00:00:00"));
|
||||
vbox.pack_start(&label, true, true, 5);
|
||||
window.add(&vbox);
|
||||
window.show_all();
|
||||
|
||||
app.add_window(&window);
|
||||
|
||||
// Need to move a new reference into the closure.
|
||||
// !!ATTENTION!!:
|
||||
// It might seem appealing to use pipeline.clone() here, because that greatly
|
||||
// simplifies the code within the callback. What this actually does, however, is creating
|
||||
// a memory leak. The clone of a pipeline is a new strong reference on the pipeline.
|
||||
// Storing this strong reference of the pipeline within the callback (we are moving it in!),
|
||||
// which is in turn stored in another strong reference on the pipeline is creating a
|
||||
// reference cycle.
|
||||
// DO NOT USE pipeline.clone() TO USE THE PIPELINE WITHIN A CALLBACK
|
||||
let pipeline_weak = pipeline.downgrade();
|
||||
// Add a timeout to the main loop that will periodically (every 500ms) be
|
||||
// executed. This will query the current position within the stream from
|
||||
// the underlying pipeline, and display it in our gui.
|
||||
// Since this closure is called by the mainloop thread, we are allowed
|
||||
// to modify the gui widgets here.
|
||||
let timeout_id = gtk::timeout_add(500, move || {
|
||||
// Here we temporarily retrieve a strong reference on the pipeline from the weak one
|
||||
// we moved into this callback.
|
||||
let pipeline = match pipeline_weak.upgrade() {
|
||||
Some(pipeline) => pipeline,
|
||||
None => return glib::Continue(true),
|
||||
};
|
||||
|
||||
// Query the current playing position from the underlying pipeline.
|
||||
let position = pipeline
|
||||
.query_position::<gst::ClockTime>()
|
||||
.unwrap_or_else(|| 0.into());
|
||||
// Display the playing position in the gui.
|
||||
label.set_text(&format!("Position: {:.0}", position));
|
||||
// Tell the callback to continue calling this closure.
|
||||
glib::Continue(true)
|
||||
});
|
||||
|
||||
let bus = pipeline.get_bus().unwrap();
|
||||
|
||||
pipeline
|
||||
.set_state(gst::State::Playing)
|
||||
.expect("Unable to set the pipeline to the `Playing` state");
|
||||
|
||||
let app_weak = app.downgrade();
|
||||
bus.add_watch_local(move |_, msg| {
|
||||
use gst::MessageView;
|
||||
|
||||
let app = match app_weak.upgrade() {
|
||||
Some(app) => app,
|
||||
None => return glib::Continue(false),
|
||||
};
|
||||
|
||||
match msg.view() {
|
||||
MessageView::Eos(..) => gtk::main_quit(),
|
||||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.get_src().map(|s| s.get_path_string()),
|
||||
err.get_error(),
|
||||
err.get_debug()
|
||||
);
|
||||
app.quit();
|
||||
}
|
||||
_ => (),
|
||||
};
|
||||
|
||||
glib::Continue(true)
|
||||
})
|
||||
.expect("Failed to add bus watch");
|
||||
|
||||
// Pipeline reference is owned by the closure below, so will be
|
||||
// destroyed once the app is destroyed
|
||||
let timeout_id = RefCell::new(Some(timeout_id));
|
||||
app.connect_shutdown(move |_| {
|
||||
pipeline
|
||||
.set_state(gst::State::Null)
|
||||
.expect("Unable to set the pipeline to the `Null` state");
|
||||
|
||||
bus.remove_watch().unwrap();
|
||||
if let Some(timeout_id) = timeout_id.borrow_mut().take() {
|
||||
glib::source_remove(timeout_id);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// Initialize gstreamer and the gtk widget toolkit libraries.
|
||||
gst::init().unwrap();
|
||||
gtk::init().unwrap();
|
||||
|
||||
let app = gtk::Application::new(None, gio::ApplicationFlags::FLAGS_NONE).unwrap();
|
||||
|
||||
app.connect_activate(create_ui);
|
||||
let args = env::args().collect::<Vec<_>>();
|
||||
app.run(&args);
|
||||
}
|
|
@ -1,280 +0,0 @@
|
|||
// This example demonstrates another type of combination of gtk and gstreamer,
|
||||
// in comparision to the gtksink example.
|
||||
// This example uses regions that are managed by the window system, and uses
|
||||
// the window system's api to insert a videostream into these regions.
|
||||
// So essentially, the window system of the system overlays our gui with
|
||||
// the video frames - within the region that we tell it to use.
|
||||
// Disadvantage of this method is, that it's highly platform specific, since
|
||||
// the big platforms all have their own window system. Thus, this example
|
||||
// has special code to handle differences between platforms.
|
||||
// Windows could theoretically be supported by this example, but is not yet implemented.
|
||||
// One of the very few (if not the single one) platform, that can not provide the API
|
||||
// needed for this are Linux desktops using Wayland.
|
||||
// TODO: Add Windows support
|
||||
// In this case, a testvideo is displayed within our gui, using the
|
||||
// following pipeline:
|
||||
|
||||
// {videotestsrc} - {xvimagesink(on linux)}
|
||||
// {videotestsrc} - {glimagesink(on mac)}
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::prelude::*;
|
||||
|
||||
extern crate gstreamer_video as gst_video;
|
||||
use gst_video::prelude::*;
|
||||
|
||||
use glib::object::ObjectType;
|
||||
|
||||
use gio::prelude::*;
|
||||
|
||||
use gtk::prelude::*;
|
||||
|
||||
use gdk::prelude::*;
|
||||
|
||||
use std::env;
|
||||
|
||||
use std::os::raw::c_void;
|
||||
|
||||
use std::cell::RefCell;
|
||||
|
||||
use std::process;
|
||||
|
||||
#[cfg(all(target_os = "linux", feature = "gtkvideooverlay-x11"))]
|
||||
fn create_video_sink() -> gst::Element {
|
||||
// When we are on linux with the Xorg display server, we use the
|
||||
// X11 protocol's XV extension, which allows to overlay regions
|
||||
// with video streams. For this, we use the xvimagesink element.
|
||||
gst::ElementFactory::make("xvimagesink", None).unwrap()
|
||||
}
|
||||
#[cfg(all(target_os = "linux", feature = "gtkvideooverlay-x11"))]
|
||||
fn set_window_handle(video_overlay: &gst_video::VideoOverlay, gdk_window: &gdk::Window) {
|
||||
let display_type_name = gdk_window.get_display().get_type().name();
|
||||
|
||||
// Check if we're using X11 or ...
|
||||
if display_type_name == "GdkX11Display" {
|
||||
extern "C" {
|
||||
pub fn gdk_x11_window_get_xid(window: *mut glib::object::GObject) -> *mut c_void;
|
||||
}
|
||||
|
||||
// This is unsafe because the "window handle" we pass here is basically like a raw pointer.
|
||||
// If a wrong value were to be passed here (and you can pass any integer), then the window
|
||||
// system will most likely cause the application to crash.
|
||||
#[allow(clippy::cast_ptr_alignment)]
|
||||
unsafe {
|
||||
// Here we ask gdk what native window handle we got assigned for
|
||||
// our video region from the window system, and then we will
|
||||
// pass this unique identifier to the overlay provided by our
|
||||
// sink - so the sink can then arrange the overlay.
|
||||
let xid = gdk_x11_window_get_xid(gdk_window.as_ptr() as *mut _);
|
||||
video_overlay.set_window_handle(xid as usize);
|
||||
}
|
||||
} else {
|
||||
println!("Add support for display type '{}'", display_type_name);
|
||||
process::exit(-1);
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(all(target_os = "macos", feature = "gtkvideooverlay-quartz"))]
|
||||
fn create_video_sink() -> gst::Element {
|
||||
// On Mac, this is done by overlaying a window region with an
|
||||
// OpenGL-texture, using the glimagesink element.
|
||||
gst::ElementFactory::make("glimagesink", None).unwrap()
|
||||
}
|
||||
|
||||
#[cfg(all(target_os = "macos", feature = "gtkvideooverlay-quartz"))]
|
||||
fn set_window_handle(video_overlay: &gst_video::VideoOverlay, gdk_window: &gdk::Window) {
|
||||
let display_type_name = gdk_window.get_display().get_type().name();
|
||||
|
||||
if display_type_name == "GdkQuartzDisplay" {
|
||||
extern "C" {
|
||||
pub fn gdk_quartz_window_get_nsview(window: *mut glib::object::GObject) -> *mut c_void;
|
||||
}
|
||||
|
||||
// This is unsafe because the "window handle" we pass here is basically like a raw pointer.
|
||||
// If a wrong value were to be passed here (and you can pass any integer), then the window
|
||||
// system will most likely cause the application to crash.
|
||||
#[allow(clippy::cast_ptr_alignment)]
|
||||
unsafe {
|
||||
// Here we ask gdk what native window handle we got assigned for
|
||||
// our video region from the windowing system, and then we will
|
||||
// pass this unique identifier to the overlay provided by our
|
||||
// sink - so the sink can then arrange the overlay.
|
||||
let window = gdk_quartz_window_get_nsview(gdk_window.as_ptr() as *mut _);
|
||||
video_overlay.set_window_handle(window as usize);
|
||||
}
|
||||
} else {
|
||||
println!("Unsupported display type '{}", display_type_name);
|
||||
process::exit(-1);
|
||||
}
|
||||
}
|
||||
|
||||
fn create_ui(app: >k::Application) {
|
||||
let pipeline = gst::Pipeline::new(None);
|
||||
let src = gst::ElementFactory::make("videotestsrc", None).unwrap();
|
||||
|
||||
// Since using the window system to overlay our gui window is making
|
||||
// direct contact with the windowing system, this is highly platform-
|
||||
// specific. This example supports Linux and Mac (using X11 and Quartz).
|
||||
let sink = create_video_sink();
|
||||
|
||||
pipeline.add_many(&[&src, &sink]).unwrap();
|
||||
src.link(&sink).unwrap();
|
||||
|
||||
// First, we create our gtk window - which will contain a region where
|
||||
// our overlayed video will be displayed in.
|
||||
let window = gtk::Window::new(gtk::WindowType::Toplevel);
|
||||
window.set_default_size(320, 240);
|
||||
|
||||
let vbox = gtk::Box::new(gtk::Orientation::Vertical, 0);
|
||||
|
||||
// This creates the widget we will display our overlay in.
|
||||
// Later, we will try to tell our window system about this region, so
|
||||
// it can overlay it with our video stream.
|
||||
let video_window = gtk::DrawingArea::new();
|
||||
video_window.set_size_request(320, 240);
|
||||
|
||||
// Use the platform-specific sink to create our overlay.
|
||||
// Since we only use the video_overlay in the closure below, we need a weak reference.
|
||||
// !!ATTENTION!!:
|
||||
// It might seem appealing to use .clone() here, because that greatly
|
||||
// simplifies the code within the callback. What this actually does, however, is creating
|
||||
// a memory leak.
|
||||
let video_overlay = sink
|
||||
.dynamic_cast::<gst_video::VideoOverlay>()
|
||||
.unwrap()
|
||||
.downgrade();
|
||||
// Connect to this widget's realize signal, which will be emitted
|
||||
// after its display has been initialized. This is neccessary, because
|
||||
// the window system doesn't know about our region until it was initialized.
|
||||
video_window.connect_realize(move |video_window| {
|
||||
// Here we temporarily retrieve a strong reference on the video-overlay from the
|
||||
// weak reference that we moved into the closure.
|
||||
let video_overlay = match video_overlay.upgrade() {
|
||||
Some(video_overlay) => video_overlay,
|
||||
None => return,
|
||||
};
|
||||
|
||||
// Gtk uses gdk under the hood, to handle its drawing. Drawing regions are
|
||||
// called gdk windows. We request this underlying drawing region from the
|
||||
// widget we will overlay with our video.
|
||||
let gdk_window = video_window.get_window().unwrap();
|
||||
|
||||
// This is where we tell our window system about the drawing-region we
|
||||
// want it to overlay. Most often, the window system would only know
|
||||
// about our most outer region (or: our window).
|
||||
if !gdk_window.ensure_native() {
|
||||
println!("Can't create native window for widget");
|
||||
process::exit(-1);
|
||||
}
|
||||
|
||||
set_window_handle(&video_overlay, &gdk_window);
|
||||
});
|
||||
|
||||
vbox.pack_start(&video_window, true, true, 0);
|
||||
|
||||
let label = gtk::Label::new(Some("Position: 00:00:00"));
|
||||
vbox.pack_start(&label, true, true, 5);
|
||||
window.add(&vbox);
|
||||
|
||||
window.show_all();
|
||||
|
||||
app.add_window(&window);
|
||||
|
||||
// Need to move a new reference into the closure.
|
||||
// !!ATTENTION!!:
|
||||
// It might seem appealing to use pipeline.clone() here, because that greatly
|
||||
// simplifies the code within the callback. What this actually does, however, is creating
|
||||
// a memory leak. The clone of a pipeline is a new strong reference on the pipeline.
|
||||
// Storing this strong reference of the pipeline within the callback (we are moving it in!),
|
||||
// which is in turn stored in another strong reference on the pipeline is creating a
|
||||
// reference cycle.
|
||||
// DO NOT USE pipeline.clone() TO USE THE PIPELINE WITHIN A CALLBACK
|
||||
let pipeline_weak = pipeline.downgrade();
|
||||
// Add a timeout to the main loop that will periodically (every 500ms) be
|
||||
// executed. This will query the current position within the stream from
|
||||
// the underlying pipeline, and display it in our gui.
|
||||
// Since this closure is called by the mainloop thread, we are allowed
|
||||
// to modify the gui widgets here.
|
||||
let timeout_id = gtk::timeout_add(500, move || {
|
||||
// Here we temporarily retrieve a strong reference on the pipeline from the weak one
|
||||
// we moved into this callback.
|
||||
let pipeline = match pipeline_weak.upgrade() {
|
||||
Some(pipeline) => pipeline,
|
||||
None => return glib::Continue(false),
|
||||
};
|
||||
|
||||
// Query the current playing position from the underlying pipeline.
|
||||
let position = pipeline
|
||||
.query_position::<gst::ClockTime>()
|
||||
.unwrap_or_else(|| 0.into());
|
||||
// Display the playing position in the gui.
|
||||
label.set_text(&format!("Position: {:.0}", position));
|
||||
// Tell the timeout to continue calling this callback.
|
||||
glib::Continue(true)
|
||||
});
|
||||
|
||||
let bus = pipeline.get_bus().unwrap();
|
||||
|
||||
pipeline
|
||||
.set_state(gst::State::Playing)
|
||||
.expect("Unable to set the pipeline to the `Playing` state");
|
||||
|
||||
let app_weak = app.downgrade();
|
||||
bus.add_watch_local(move |_, msg| {
|
||||
use gst::MessageView;
|
||||
|
||||
let app = match app_weak.upgrade() {
|
||||
Some(app) => app,
|
||||
None => return glib::Continue(false),
|
||||
};
|
||||
|
||||
match msg.view() {
|
||||
MessageView::Eos(..) => gtk::main_quit(),
|
||||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.get_src().map(|s| s.get_path_string()),
|
||||
err.get_error(),
|
||||
err.get_debug()
|
||||
);
|
||||
app.quit();
|
||||
}
|
||||
_ => (),
|
||||
};
|
||||
|
||||
glib::Continue(true)
|
||||
})
|
||||
.expect("Failed to add bus watch");
|
||||
|
||||
// Pipeline reference is owned by the closure below, so will be
|
||||
// destroyed once the app is destroyed
|
||||
let timeout_id = RefCell::new(Some(timeout_id));
|
||||
app.connect_shutdown(move |_| {
|
||||
pipeline
|
||||
.set_state(gst::State::Null)
|
||||
.expect("Unable to set the pipeline to the `Null` state");
|
||||
|
||||
bus.remove_watch().unwrap();
|
||||
if let Some(timeout_id) = timeout_id.borrow_mut().take() {
|
||||
glib::source_remove(timeout_id);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
fn main() {
|
||||
#[cfg(not(unix))]
|
||||
{
|
||||
println!("Add support for target platform");
|
||||
process::exit(-1);
|
||||
}
|
||||
|
||||
// Initialize gstreamer and the gtk widget toolkit libraries.
|
||||
gst::init().unwrap();
|
||||
gtk::init().unwrap();
|
||||
|
||||
let app = gtk::Application::new(None, gio::ApplicationFlags::FLAGS_NONE).unwrap();
|
||||
|
||||
app.connect_activate(create_ui);
|
||||
let args = env::args().collect::<Vec<_>>();
|
||||
app.run(&args);
|
||||
}
|
|
@ -1,8 +1,7 @@
|
|||
// This example demonstrates how to use GStreamer's iteration APIs.
|
||||
// This is used at multiple occassions - for example to iterate an
|
||||
// This is used at multiple occasions - for example to iterate an
|
||||
// element's pads.
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
|
@ -14,14 +13,14 @@ fn example_main() {
|
|||
// Create and use an identity element here.
|
||||
// This element does nothing, really. We also never add it to a pipeline.
|
||||
// We just want to iterate the identity element's pads.
|
||||
let identity = gst::ElementFactory::make("identity", None).unwrap();
|
||||
let identity = gst::ElementFactory::make("identity").build().unwrap();
|
||||
// Get an iterator over all pads of the identity-element.
|
||||
let mut iter = identity.iterate_pads();
|
||||
loop {
|
||||
// In an endless-loop, we use the iterator until we either reach the end
|
||||
// or we hit an error.
|
||||
match iter.next() {
|
||||
Ok(Some(pad)) => println!("Pad: {}", pad.get_name()),
|
||||
Ok(Some(pad)) => println!("Pad: {}", pad.name()),
|
||||
Ok(None) => {
|
||||
// We reached the end of the iterator, there are no more pads
|
||||
println!("Done");
|
||||
|
@ -46,7 +45,7 @@ fn example_main() {
|
|||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
||||
|
|
|
@ -3,11 +3,9 @@
|
|||
// as launch syntax.
|
||||
// When the parsing succeeded, the pipeline is run until the stream ends or an error happens.
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::prelude::*;
|
||||
use std::{env, process};
|
||||
|
||||
use std::env;
|
||||
use std::process;
|
||||
use gst::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
@ -19,7 +17,7 @@ fn example_main() {
|
|||
gst::init().unwrap();
|
||||
|
||||
// Let GStreamer create a pipeline from the parsed launch syntax on the cli.
|
||||
// In comparision to the launch_glib_main example, this is using the advanced launch syntax
|
||||
// In comparison to the launch_glib_main example, this is using the advanced launch syntax
|
||||
// parsing API of GStreamer. The function returns a Result, handing us the pipeline if
|
||||
// parsing and creating succeeded, and hands us detailed error information if something
|
||||
// went wrong. The error is passed as gst::ParseError. In this example, we separately
|
||||
|
@ -28,26 +26,29 @@ fn example_main() {
|
|||
// Especially GUIs should probably handle this case, to tell users that they need to
|
||||
// install the corresponding gstreamer plugins.
|
||||
let mut context = gst::ParseContext::new();
|
||||
let pipeline =
|
||||
match gst::parse_launch_full(&pipeline_str, Some(&mut context), gst::ParseFlags::empty()) {
|
||||
Ok(pipeline) => pipeline,
|
||||
Err(err) => {
|
||||
if let Some(gst::ParseError::NoSuchElement) = err.kind::<gst::ParseError>() {
|
||||
println!("Missing element(s): {:?}", context.get_missing_elements());
|
||||
} else {
|
||||
println!("Failed to parse pipeline: {}", err);
|
||||
}
|
||||
|
||||
process::exit(-1)
|
||||
let pipeline = match gst::parse::launch_full(
|
||||
&pipeline_str,
|
||||
Some(&mut context),
|
||||
gst::ParseFlags::empty(),
|
||||
) {
|
||||
Ok(pipeline) => pipeline,
|
||||
Err(err) => {
|
||||
if let Some(gst::ParseError::NoSuchElement) = err.kind::<gst::ParseError>() {
|
||||
println!("Missing element(s): {:?}", context.missing_elements());
|
||||
} else {
|
||||
println!("Failed to parse pipeline: {err}");
|
||||
}
|
||||
};
|
||||
let bus = pipeline.get_bus().unwrap();
|
||||
|
||||
process::exit(-1)
|
||||
}
|
||||
};
|
||||
let bus = pipeline.bus().unwrap();
|
||||
|
||||
pipeline
|
||||
.set_state(gst::State::Playing)
|
||||
.expect("Unable to set the pipeline to the `Playing` state");
|
||||
|
||||
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
|
@ -55,9 +56,9 @@ fn example_main() {
|
|||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.get_src().map(|s| s.get_path_string()),
|
||||
err.get_error(),
|
||||
err.get_debug()
|
||||
err.src().map(|s| s.path_string()),
|
||||
err.error(),
|
||||
err.debug()
|
||||
);
|
||||
break;
|
||||
}
|
||||
|
@ -71,7 +72,7 @@ fn example_main() {
|
|||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
||||
|
|
|
@ -7,11 +7,10 @@
|
|||
// things from the main loop (timeouts, UI events, socket events, ...) instead
|
||||
// of just handling messages from GStreamer's bus.
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::prelude::*;
|
||||
|
||||
use std::env;
|
||||
|
||||
use gst::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
|
@ -25,8 +24,8 @@ fn example_main() {
|
|||
let main_loop = glib::MainLoop::new(None, false);
|
||||
|
||||
// Let GStreamer create a pipeline from the parsed launch syntax on the cli.
|
||||
let pipeline = gst::parse_launch(&pipeline_str).unwrap();
|
||||
let bus = pipeline.get_bus().unwrap();
|
||||
let pipeline = gst::parse::launch(&pipeline_str).unwrap();
|
||||
let bus = pipeline.bus().unwrap();
|
||||
|
||||
pipeline
|
||||
.set_state(gst::State::Playing)
|
||||
|
@ -35,43 +34,39 @@ fn example_main() {
|
|||
let main_loop_clone = main_loop.clone();
|
||||
|
||||
//bus.add_signal_watch();
|
||||
//bus.connect_message(move |_, msg| {
|
||||
bus.add_watch(move |_, msg| {
|
||||
use gst::MessageView;
|
||||
//bus.connect_message(None, move |_, msg| {
|
||||
let _bus_watch = bus
|
||||
.add_watch(move |_, msg| {
|
||||
use gst::MessageView;
|
||||
|
||||
let main_loop = &main_loop_clone;
|
||||
match msg.view() {
|
||||
MessageView::Eos(..) => main_loop.quit(),
|
||||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.get_src().map(|s| s.get_path_string()),
|
||||
err.get_error(),
|
||||
err.get_debug()
|
||||
);
|
||||
main_loop.quit();
|
||||
}
|
||||
_ => (),
|
||||
};
|
||||
let main_loop = &main_loop_clone;
|
||||
match msg.view() {
|
||||
MessageView::Eos(..) => main_loop.quit(),
|
||||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.src().map(|s| s.path_string()),
|
||||
err.error(),
|
||||
err.debug()
|
||||
);
|
||||
main_loop.quit();
|
||||
}
|
||||
_ => (),
|
||||
};
|
||||
|
||||
glib::Continue(true)
|
||||
})
|
||||
.expect("Failed to add bus watch");
|
||||
glib::ControlFlow::Continue
|
||||
})
|
||||
.expect("Failed to add bus watch");
|
||||
|
||||
main_loop.run();
|
||||
|
||||
pipeline
|
||||
.set_state(gst::State::Null)
|
||||
.expect("Unable to set the pipeline to the `Null` state");
|
||||
|
||||
// Here we remove the bus watch we added above. This avoids a memory leak, that might
|
||||
// otherwise happen because we moved a strong reference (clone of main_loop) into the
|
||||
// callback closure above.
|
||||
bus.remove_watch().unwrap();
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
||||
|
|
359
examples/src/bin/overlay-composition-d2d.rs
Normal file
359
examples/src/bin/overlay-composition-d2d.rs
Normal file
|
@ -0,0 +1,359 @@
|
|||
// This example demonstrates how to draw an overlay on a video stream using
|
||||
// Direct2D/DirectWrite/WIC and the overlay composition element.
|
||||
|
||||
// {videotestsrc} - {overlaycomposition} - {capsfilter} - {videoconvert} - {autovideosink}
|
||||
// The capsfilter element allows us to dictate the video resolution we want for the
|
||||
// videotestsrc and the overlaycomposition element.
|
||||
|
||||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use byte_slice_cast::*;
|
||||
|
||||
use anyhow::Error;
|
||||
use derive_more::{Display, Error};
|
||||
use gst::prelude::*;
|
||||
use windows::{
|
||||
Foundation::Numerics::*,
|
||||
Win32::{
|
||||
Graphics::{
|
||||
Direct2D::{Common::*, *},
|
||||
DirectWrite::*,
|
||||
Dxgi::Common::*,
|
||||
Imaging::*,
|
||||
},
|
||||
System::Com::*,
|
||||
},
|
||||
};
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
|
||||
struct ErrorMessage {
|
||||
src: glib::GString,
|
||||
error: glib::Error,
|
||||
debug: Option<glib::GString>,
|
||||
}
|
||||
|
||||
struct DrawingContext {
|
||||
// Factory for creating render target
|
||||
d2d_factory: ID2D1Factory,
|
||||
|
||||
// Used to create WIC bitmap surface
|
||||
wic_factory: IWICImagingFactory,
|
||||
|
||||
// text layout holding text information (string, font, size, etc)
|
||||
text_layout: IDWriteTextLayout,
|
||||
|
||||
// Holding rendred image
|
||||
bitmap: Option<IWICBitmap>,
|
||||
|
||||
// Bound to bitmap and used to actual Direct2D rendering
|
||||
render_target: Option<ID2D1RenderTarget>,
|
||||
|
||||
info: Option<gst_video::VideoInfo>,
|
||||
}
|
||||
|
||||
// Required for IWICBitmap
|
||||
unsafe impl Send for DrawingContext {}
|
||||
|
||||
/// Builds the example pipeline:
/// {videotestsrc} - {overlaycomposition} - {capsfilter} - {videoconvert} - {autovideosink}
/// and connects Direct2D/DirectWrite-based "draw" and "caps-changed" handlers
/// to the overlaycomposition element.
///
/// Returns the ready (but not yet started) pipeline, or an error if GStreamer
/// initialization or element creation/linking fails.
fn create_pipeline() -> Result<gst::Pipeline, Error> {
    gst::init()?;

    let pipeline = gst::Pipeline::default();

    // The videotestsrc supports multiple test patterns. In this example, we will use the
    // pattern with a white ball moving around the video's center point.
    let src = gst::ElementFactory::make("videotestsrc")
        .property_from_str("pattern", "ball")
        .build()?;

    let overlay = gst::ElementFactory::make("overlaycomposition").build()?;

    // Force a fixed 800x800 @ 30fps format so the overlay canvas size is known.
    let caps = gst_video::VideoCapsBuilder::new()
        .width(800)
        .height(800)
        .framerate((30, 1).into())
        .build();
    let capsfilter = gst::ElementFactory::make("capsfilter")
        .property("caps", &caps)
        .build()?;

    let videoconvert = gst::ElementFactory::make("videoconvert").build()?;
    let sink = gst::ElementFactory::make("autovideosink").build()?;

    pipeline.add_many(&[&src, &overlay, &capsfilter, &videoconvert, &sink])?;
    gst::Element::link_many(&[&src, &overlay, &capsfilter, &videoconvert, &sink])?;

    // Most Direct2D/DirectWrite APIs (including factory methods) are marked as
    // "unsafe", but they shouldn't fail in practice
    let drawer = unsafe {
        let d2d_factory =
            D2D1CreateFactory::<ID2D1Factory>(D2D1_FACTORY_TYPE_MULTI_THREADED, None).unwrap();
        let dwrite_factory =
            DWriteCreateFactory::<IDWriteFactory>(DWRITE_FACTORY_TYPE_SHARED).unwrap();
        // Bold 32pt Arial, used for every "GStreamer" string we render.
        let text_format = dwrite_factory
            .CreateTextFormat(
                windows::core::w!("Arial"),
                None,
                DWRITE_FONT_WEIGHT_BOLD,
                DWRITE_FONT_STYLE_NORMAL,
                DWRITE_FONT_STRETCH_NORMAL,
                32f32,
                windows::core::w!("en-us"),
            )
            .unwrap();
        let text_layout = dwrite_factory
            .CreateTextLayout(
                windows::core::w!("GStreamer").as_wide(),
                &text_format,
                // Size will be updated later on "caps-changed" signal
                800f32,
                800f32,
            )
            .unwrap();

        // Top (default) and center alignment
        text_layout
            .SetTextAlignment(DWRITE_TEXT_ALIGNMENT_CENTER)
            .unwrap();

        let wic_factory: IWICImagingFactory =
            CoCreateInstance(&CLSID_WICImagingFactory, None, CLSCTX_ALL).unwrap();

        // bitmap/render_target/info stay None until the first "caps-changed".
        Arc::new(Mutex::new(DrawingContext {
            d2d_factory,
            wic_factory,
            text_layout,
            bitmap: None,
            render_target: None,
            info: None,
        }))
    };

    // The "draw" signal is emitted for every frame; the handler must return a
    // gst_video::VideoOverlayComposition to be blended onto that frame.
    overlay.connect_closure(
        "draw",
        false,
        glib::closure!(@strong drawer => move |_overlay: &gst::Element,
                                               sample: &gst::Sample| {
            use std::f64::consts::PI;

            let drawer = drawer.lock().unwrap();

            let buffer = sample.buffer().unwrap();
            let timestamp = buffer.pts().unwrap();

            // These were all populated by the "caps-changed" handler, which is
            // guaranteed to run before the first "draw".
            let info = drawer.info.as_ref().unwrap();
            let text_layout = &drawer.text_layout;
            let bitmap = drawer.bitmap.as_ref().unwrap();
            let render_target = drawer.render_target.as_ref().unwrap();

            // Rotation angle in degrees, completing a full turn every 10 seconds
            // of stream time.
            let global_angle = 360. * (timestamp % (10 * gst::ClockTime::SECOND)).nseconds() as f64
                / (10.0 * gst::ClockTime::SECOND.nseconds() as f64);
            let center_x = (info.width() / 2) as f32;
            let center_y = (info.height() / 2) as f32;
            let top_margin = (info.height() / 20) as f32;

            unsafe {
                // Begin drawing
                render_target.BeginDraw();

                // Clear background to fully transparent black
                render_target.Clear(Some(&D2D1_COLOR_F {
                    r: 0f32,
                    g: 0f32,
                    b: 0f32,
                    a: 0f32,
                }));

                // This loop will render 10 times the string "GStreamer" in a circle
                for i in 0..10 {
                    let angle = (360. * f64::from(i)) / 10.0;
                    // Color cycles from red to blue depending on the position angle.
                    let red = ((1.0 + f64::cos((angle - 60.0) * PI / 180.0)) / 2.0) as f32;
                    let text_brush = render_target
                        .CreateSolidColorBrush(
                            &D2D1_COLOR_F {
                                r: red,
                                g: 0f32,
                                b: 1f32 - red,
                                a: 1f32,
                            },
                            None,
                        )
                        .unwrap();

                    // Rotate this instance around the canvas center before drawing.
                    let angle = (angle + global_angle) as f32;
                    let matrix = Matrix3x2::rotation(angle, center_x, center_y);
                    render_target.SetTransform(&matrix);
                    render_target.DrawTextLayout(
                        D2D_POINT_2F { x: 0f32, y: top_margin },
                        text_layout,
                        &text_brush,
                        D2D1_DRAW_TEXT_OPTIONS_NONE,
                    );
                }

                // EndDraw may not be successful for some reasons.
                // Ignores any error in this example
                let _ = render_target.EndDraw(None, None);

                // Make sure all operations are completed before copying
                // bitmap to buffer
                let _ = render_target.Flush(None::<*mut u64>, None::<*mut u64>);
            }

            // BGRA frame: 4 bytes per pixel.
            let mut buffer = gst::Buffer::with_size((info.width() * info.height() * 4) as usize).unwrap();
            {
                let buffer_mut = buffer.get_mut().unwrap();
                let mut map = buffer_mut.map_writable().unwrap();
                let dst = map.as_mut_slice_of::<u8>().unwrap();

                unsafe {
                    // Bitmap size is equal to the background image size.
                    // Copy entire memory (null rect = whole bitmap).
                    bitmap.CopyPixels(std::ptr::null(), info.width() * 4, dst).unwrap();
                }
            }

            // Attach a VideoMeta describing the raw BGRA layout (single plane,
            // offset 0, stride = width * 4) so downstream knows how to read it.
            gst_video::VideoMeta::add_full(
                buffer.get_mut().unwrap(),
                gst_video::VideoFrameFlags::empty(),
                gst_video::VideoFormat::Bgra,
                info.width(),
                info.height(),
                &[0],
                &[(info.width() * 4) as i32],
            )
            .unwrap();

            // Turn the buffer into a VideoOverlayRectangle, then place
            // that into a VideoOverlayComposition and return it.
            //
            // A VideoOverlayComposition can take a Vec of such rectangles
            // spaced around the video frame, but we're just outputting 1
            // here
            let rect = gst_video::VideoOverlayRectangle::new_raw(
                &buffer,
                0,
                0,
                info.width(),
                info.height(),
                gst_video::VideoOverlayFormatFlags::PREMULTIPLIED_ALPHA,
            );

            gst_video::VideoOverlayComposition::new(Some(&rect))
                .unwrap()
        }),
    );

    // Add a signal handler to the overlay's "caps-changed" signal. This could e.g.
    // be called when the sink that we render to does not support resizing the image
    // itself - but the user just changed the window-size. The element after the overlay
    // will then change its caps and we use the notification about this change to
    // resize our canvas's size.
    // Another possibility for when this might happen is, when our video is a network
    // stream that dynamically changes resolution when enough bandwidth is available.
    overlay.connect_closure(
        "caps-changed",
        false,
        glib::closure!(move |_overlay: &gst::Element,
                             caps: &gst::Caps,
                             _width: u32,
                             _height: u32| {
            let mut drawer = drawer.lock().unwrap();
            let info = gst_video::VideoInfo::from_caps(caps).unwrap();

            unsafe {
                // Update text layout to be identical to new video resolution
                drawer.text_layout.SetMaxWidth(info.width() as f32).unwrap();
                drawer
                    .text_layout
                    .SetMaxHeight(info.height() as f32)
                    .unwrap();

                // Create new WIC bitmap with PBGRA format (pre-multiplied BGRA)
                let bitmap = drawer
                    .wic_factory
                    .CreateBitmap(
                        info.width(),
                        info.height(),
                        &GUID_WICPixelFormat32bppPBGRA,
                        WICBitmapCacheOnDemand,
                    )
                    .unwrap();

                // Re-bind a Direct2D render target to the freshly-sized bitmap.
                let render_target = drawer
                    .d2d_factory
                    .CreateWicBitmapRenderTarget(
                        &bitmap,
                        &D2D1_RENDER_TARGET_PROPERTIES {
                            r#type: D2D1_RENDER_TARGET_TYPE_DEFAULT,
                            pixelFormat: D2D1_PIXEL_FORMAT {
                                format: DXGI_FORMAT_B8G8R8A8_UNORM,
                                alphaMode: D2D1_ALPHA_MODE_PREMULTIPLIED,
                            },
                            // zero means default DPI
                            dpiX: 0f32,
                            dpiY: 0f32,
                            usage: D2D1_RENDER_TARGET_USAGE_NONE,
                            minLevel: D2D1_FEATURE_LEVEL_DEFAULT,
                        },
                    )
                    .unwrap();

                drawer.render_target = Some(render_target);
                drawer.bitmap = Some(bitmap);
            }
            drawer.info = Some(info);
        }),
    );

    Ok(pipeline)
}
|
||||
|
||||
fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
|
||||
pipeline.set_state(gst::State::Playing)?;
|
||||
|
||||
let bus = pipeline
|
||||
.bus()
|
||||
.expect("Pipeline without bus. Shouldn't happen!");
|
||||
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
MessageView::Eos(..) => break,
|
||||
MessageView::Error(err) => {
|
||||
pipeline.set_state(gst::State::Null)?;
|
||||
return Err(ErrorMessage {
|
||||
src: msg
|
||||
.src()
|
||||
.map(|s| s.path_string())
|
||||
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
|
||||
error: err.error(),
|
||||
debug: err.debug(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
|
||||
pipeline.set_state(gst::State::Null)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// WIC requires COM initialization
|
||||
unsafe {
|
||||
CoInitializeEx(None, COINIT_MULTITHREADED).unwrap();
|
||||
}
|
||||
|
||||
match create_pipeline().and_then(main_loop) {
|
||||
Ok(r) => r,
|
||||
Err(e) => eprintln!("Error! {}", e),
|
||||
}
|
||||
|
||||
unsafe {
|
||||
CoUninitialize();
|
||||
}
|
||||
}
|
|
@ -7,40 +7,30 @@
|
|||
// {videotestsrc} - {overlaycomposition} - {capsfilter} - {videoconvert} - {autovideosink}
|
||||
// The capsfilter element allows us to dictate the video resolution we want for the
|
||||
// videotestsrc and the overlaycomposition element.
|
||||
//
|
||||
// There is a small amount of unsafe code that demonstrates how to work around
|
||||
// Cairo's internal refcounting of the target buffer surface
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::prelude::*;
|
||||
|
||||
extern crate gstreamer_video as gst_video;
|
||||
use pango::prelude::*;
|
||||
|
||||
use std::ops;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::{
|
||||
ops,
|
||||
sync::{Arc, Mutex},
|
||||
};
|
||||
|
||||
use anyhow::Error;
|
||||
use derive_more::{Display, Error};
|
||||
use gst::prelude::*;
|
||||
use pango::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Missing element {}", _0)]
|
||||
struct MissingElement(#[error(not(source))] &'static str);
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
|
||||
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
|
||||
struct ErrorMessage {
|
||||
src: String,
|
||||
error: String,
|
||||
debug: Option<String>,
|
||||
source: glib::Error,
|
||||
src: glib::GString,
|
||||
error: glib::Error,
|
||||
debug: Option<glib::GString>,
|
||||
}
|
||||
|
||||
struct DrawingContext {
|
||||
layout: glib::SendUniqueCell<LayoutWrapper>,
|
||||
layout: LayoutWrapper,
|
||||
info: Option<gst_video::VideoInfo>,
|
||||
}
|
||||
|
||||
|
@ -51,52 +41,49 @@ impl ops::Deref for LayoutWrapper {
|
|||
type Target = pango::Layout;
|
||||
|
||||
fn deref(&self) -> &pango::Layout {
|
||||
assert_eq!(self.0.ref_count(), 1);
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
unsafe impl glib::SendUnique for LayoutWrapper {
|
||||
fn is_unique(&self) -> bool {
|
||||
self.0.ref_count() == 1
|
||||
}
|
||||
}
|
||||
// SAFETY: We ensure that there are never multiple references to the layout.
|
||||
unsafe impl Send for LayoutWrapper {}
|
||||
|
||||
fn create_pipeline() -> Result<gst::Pipeline, Error> {
|
||||
gst::init()?;
|
||||
|
||||
let pipeline = gst::Pipeline::new(None);
|
||||
let src = gst::ElementFactory::make("videotestsrc", None)
|
||||
.map_err(|_| MissingElement("videotestsrc"))?;
|
||||
let overlay = gst::ElementFactory::make("overlaycomposition", None)
|
||||
.map_err(|_| MissingElement("overlaycomposition"))?;
|
||||
let capsfilter =
|
||||
gst::ElementFactory::make("capsfilter", None).map_err(|_| MissingElement("capsfilter"))?;
|
||||
let videoconvert = gst::ElementFactory::make("videoconvert", None)
|
||||
.map_err(|_| MissingElement("videoconvert"))?;
|
||||
let sink = gst::ElementFactory::make("autovideosink", None)
|
||||
.map_err(|_| MissingElement("autovideosink"))?;
|
||||
let pipeline = gst::Pipeline::default();
|
||||
|
||||
pipeline.add_many(&[&src, &overlay, &capsfilter, &videoconvert, &sink])?;
|
||||
gst::Element::link_many(&[&src, &overlay, &capsfilter, &videoconvert, &sink])?;
|
||||
// The videotestsrc supports multiple test patterns. In this example, we will use the
|
||||
// pattern with a white ball moving around the video's center point.
|
||||
let src = gst::ElementFactory::make("videotestsrc")
|
||||
.property_from_str("pattern", "ball")
|
||||
.build()?;
|
||||
|
||||
let overlay = gst::ElementFactory::make("overlaycomposition").build()?;
|
||||
|
||||
// Plug in a capsfilter element that will force the videotestsrc and the overlay to work
|
||||
// with images of the size 800x800, and framerate of 15 fps, since my laptop struggles
|
||||
// rendering it at the default 30 fps
|
||||
let caps = gst::Caps::builder("video/x-raw")
|
||||
.field("width", &800i32)
|
||||
.field("height", &800i32)
|
||||
.field("framerate", &gst::Fraction::new(15, 1))
|
||||
let caps = gst_video::VideoCapsBuilder::new()
|
||||
.width(800)
|
||||
.height(800)
|
||||
.framerate((15, 1).into())
|
||||
.build();
|
||||
capsfilter.set_property("caps", &caps).unwrap();
|
||||
let capsfilter = gst::ElementFactory::make("capsfilter")
|
||||
.property("caps", &caps)
|
||||
.build()?;
|
||||
|
||||
// The videotestsrc supports multiple test patterns. In this example, we will use the
|
||||
// pattern with a white ball moving around the video's center point.
|
||||
src.set_property_from_str("pattern", "ball");
|
||||
let videoconvert = gst::ElementFactory::make("videoconvert").build()?;
|
||||
let sink = gst::ElementFactory::make("autovideosink").build()?;
|
||||
|
||||
pipeline.add_many([&src, &overlay, &capsfilter, &videoconvert, &sink])?;
|
||||
gst::Element::link_many([&src, &overlay, &capsfilter, &videoconvert, &sink])?;
|
||||
|
||||
// The PangoFontMap represents the set of fonts available for a particular rendering system.
|
||||
let fontmap = pangocairo::FontMap::new().unwrap();
|
||||
let fontmap = pangocairo::FontMap::new();
|
||||
// Create a new pango layouting context for the fontmap.
|
||||
let context = fontmap.create_context().unwrap();
|
||||
let context = fontmap.create_context();
|
||||
// Create a pango layout object. This object is a string of text we want to layout.
|
||||
// It is wrapped in a LayoutWrapper (defined above) to be able to send it across threads.
|
||||
let layout = LayoutWrapper(pango::Layout::new(&context));
|
||||
|
@ -116,12 +103,8 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
|
|||
// interior mutability (see Rust docs). Via this we can get a mutable reference to the contained
|
||||
// data which is checked at runtime for uniqueness (blocking in case of mutex, panic in case
|
||||
// of refcell) instead of compile-time (like with normal references).
|
||||
let drawer = Arc::new(Mutex::new(DrawingContext {
|
||||
layout: glib::SendUniqueCell::new(layout).unwrap(),
|
||||
info: None,
|
||||
}));
|
||||
let drawer = Arc::new(Mutex::new(DrawingContext { layout, info: None }));
|
||||
|
||||
let drawer_clone = drawer.clone();
|
||||
// Connect to the overlaycomposition element's "draw" signal, which is emitted for
|
||||
// each videoframe piped through the element. The signal handler needs to
|
||||
// return a gst_video::VideoOverlayComposition to be drawn on the frame
|
||||
|
@ -133,125 +116,104 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
|
|||
//
|
||||
// In this case, the signal passes the gst::Element and a gst::Sample with
|
||||
// the current buffer
|
||||
overlay
|
||||
.connect("draw", false, move |args| {
|
||||
overlay.connect_closure(
|
||||
"draw",
|
||||
false,
|
||||
glib::closure!(@strong drawer => move |_overlay: &gst::Element,
|
||||
sample: &gst::Sample| {
|
||||
use std::f64::consts::PI;
|
||||
|
||||
let drawer = &drawer_clone;
|
||||
let drawer = drawer.lock().unwrap();
|
||||
|
||||
// Get the signal's arguments
|
||||
let _overlay = args[0].get::<gst::Element>().unwrap().unwrap();
|
||||
let sample = args[1].get::<gst::Sample>().unwrap().unwrap();
|
||||
let buffer = sample.get_buffer().unwrap();
|
||||
let timestamp = buffer.get_pts();
|
||||
let buffer = sample.buffer().unwrap();
|
||||
let timestamp = buffer.pts().unwrap();
|
||||
|
||||
let info = drawer.info.as_ref().unwrap();
|
||||
let layout = drawer.layout.borrow();
|
||||
let layout = &drawer.layout;
|
||||
|
||||
let angle = 2.0
|
||||
* PI
|
||||
* ((timestamp % (10 * gst::SECOND)).unwrap() as f64
|
||||
/ (10.0 * gst::SECOND_VAL as f64));
|
||||
let angle = 2.0 * PI * (timestamp % (10 * gst::ClockTime::SECOND)).nseconds() as f64
|
||||
/ (10.0 * gst::ClockTime::SECOND.nseconds() as f64);
|
||||
|
||||
/* Create a gst::Buffer for Cairo to draw into */
|
||||
let frame_width = info.width() as usize;
|
||||
let frame_height = info.height() as usize;
|
||||
let stride = 4 * frame_width;
|
||||
let frame_size = stride * frame_height;
|
||||
/* Create a Cairo image surface to draw into and the context around it. */
|
||||
let surface = cairo::ImageSurface::create(
|
||||
cairo::Format::ARgb32,
|
||||
info.width() as i32,
|
||||
info.height() as i32,
|
||||
)
|
||||
.unwrap();
|
||||
let cr = cairo::Context::new(&surface).expect("Failed to create cairo context");
|
||||
|
||||
cr.save().expect("Failed to save state");
|
||||
cr.set_operator(cairo::Operator::Clear);
|
||||
cr.paint().expect("Failed to clear background");
|
||||
cr.restore().expect("Failed to restore state");
|
||||
|
||||
// The image we draw (the text) will be static, but we will change the
|
||||
// transformation on the drawing context, which rotates and shifts everything
|
||||
// that we draw afterwards. Like this, we have no complicated calculations
|
||||
// in the actual drawing below.
|
||||
// Calling multiple transformation methods after each other will apply the
|
||||
// new transformation on top. If you repeat the cr.rotate(angle) line below
|
||||
// this a second time, everything in the canvas will rotate twice as fast.
|
||||
cr.translate(
|
||||
f64::from(info.width()) / 2.0,
|
||||
f64::from(info.height()) / 2.0,
|
||||
);
|
||||
cr.rotate(angle);
|
||||
|
||||
// This loop will render 10 times the string "GStreamer" in a circle
|
||||
for i in 0..10 {
|
||||
// Cairo, like most rendering frameworks, is using a stack for transformations
|
||||
// with this, we push our current transformation onto this stack - allowing us
|
||||
// to make temporary changes / render something / and then returning to the
|
||||
// previous transformations.
|
||||
cr.save().expect("Failed to save state");
|
||||
|
||||
let angle = (360. * f64::from(i)) / 10.0;
|
||||
let red = (1.0 + f64::cos((angle - 60.0) * PI / 180.0)) / 2.0;
|
||||
cr.set_source_rgb(red, 0.0, 1.0 - red);
|
||||
cr.rotate(angle * PI / 180.0);
|
||||
|
||||
// Update the text layout. This function is only updating pango's internal state.
|
||||
// So e.g. that after a 90 degree rotation it knows that what was previously going
|
||||
// to end up as a 200x100 rectangle would now be 100x200.
|
||||
pangocairo::functions::update_layout(&cr, layout);
|
||||
let (width, _height) = layout.size();
|
||||
// Using width and height of the text, we can properly position it within
|
||||
// our canvas.
|
||||
cr.move_to(
|
||||
-(f64::from(width) / f64::from(pango::SCALE)) / 2.0,
|
||||
-(f64::from(info.height())) / 2.0,
|
||||
);
|
||||
// After telling the layout object where to draw itself, we actually tell
|
||||
// it to draw itself into our cairo context.
|
||||
pangocairo::functions::show_layout(&cr, layout);
|
||||
|
||||
// Here we go one step up in our stack of transformations, removing any
|
||||
// changes we did to them since the last call to cr.save();
|
||||
cr.restore().expect("Failed to restore state");
|
||||
}
|
||||
|
||||
/* Drop the Cairo context to release the additional reference to the data and
|
||||
* then take ownership of the data. This only works if we have the one and only
|
||||
* reference to the image surface */
|
||||
drop(cr);
|
||||
let stride = surface.stride();
|
||||
let data = surface.take_data().unwrap();
|
||||
|
||||
/* Create an RGBA buffer, and add a video meta that the videooverlaycomposition expects */
|
||||
let mut buffer = gst::Buffer::with_size(frame_size).unwrap();
|
||||
let mut buffer = gst::Buffer::from_mut_slice(data);
|
||||
|
||||
gst_video::VideoMeta::add(
|
||||
gst_video::VideoMeta::add_full(
|
||||
buffer.get_mut().unwrap(),
|
||||
gst_video::VideoFrameFlags::empty(),
|
||||
gst_video::VideoFormat::Bgra,
|
||||
frame_width as u32,
|
||||
frame_height as u32,
|
||||
).unwrap();
|
||||
|
||||
let buffer = buffer.into_mapped_buffer_writable().unwrap();
|
||||
let buffer = {
|
||||
let buffer_ptr = unsafe { buffer.get_buffer().as_ptr() };
|
||||
let surface = cairo::ImageSurface::create_for_data(
|
||||
buffer,
|
||||
cairo::Format::ARgb32,
|
||||
frame_width as i32,
|
||||
frame_height as i32,
|
||||
stride as i32,
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let cr = cairo::Context::new(&surface);
|
||||
|
||||
cr.save();
|
||||
cr.set_operator(cairo::Operator::Clear);
|
||||
cr.paint();
|
||||
cr.restore();
|
||||
|
||||
// The image we draw (the text) will be static, but we will change the
|
||||
// transformation on the drawing context, which rotates and shifts everything
|
||||
// that we draw afterwards. Like this, we have no complicated calculations
|
||||
// in the actual drawing below.
|
||||
// Calling multiple transformation methods after each other will apply the
|
||||
// new transformation on top. If you repeat the cr.rotate(angle) line below
|
||||
// this a second time, everything in the canvas will rotate twice as fast.
|
||||
cr.translate(
|
||||
f64::from(info.width()) / 2.0,
|
||||
f64::from(info.height()) / 2.0,
|
||||
);
|
||||
cr.rotate(angle);
|
||||
|
||||
// This loop will render 10 times the string "GStreamer" in a circle
|
||||
for i in 0..10 {
|
||||
// Cairo, like most rendering frameworks, is using a stack for transformations
|
||||
// with this, we push our current transformation onto this stack - allowing us
|
||||
// to make temporary changes / render something / and then returning to the
|
||||
// previous transformations.
|
||||
cr.save();
|
||||
|
||||
let angle = (360. * f64::from(i)) / 10.0;
|
||||
let red = (1.0 + f64::cos((angle - 60.0) * PI / 180.0)) / 2.0;
|
||||
cr.set_source_rgb(red, 0.0, 1.0 - red);
|
||||
cr.rotate(angle * PI / 180.0);
|
||||
|
||||
// Update the text layout. This function is only updating pango's internal state.
|
||||
// So e.g. that after a 90 degree rotation it knows that what was previously going
|
||||
// to end up as a 200x100 rectangle would now be 100x200.
|
||||
pangocairo::functions::update_layout(&cr, &**layout);
|
||||
let (width, _height) = layout.get_size();
|
||||
// Using width and height of the text, we can properly position it within
|
||||
// our canvas.
|
||||
cr.move_to(
|
||||
-(f64::from(width) / f64::from(pango::SCALE)) / 2.0,
|
||||
-(f64::from(info.height())) / 2.0,
|
||||
);
|
||||
// After telling the layout object where to draw itself, we actually tell
|
||||
// it to draw itself into our cairo context.
|
||||
pangocairo::functions::show_layout(&cr, &**layout);
|
||||
|
||||
// Here we go one step up in our stack of transformations, removing any
|
||||
// changes we did to them since the last call to cr.save();
|
||||
cr.restore();
|
||||
}
|
||||
|
||||
// Safety: The surface still owns a mutable reference to the buffer but our reference
|
||||
// to the surface here is the last one. After dropping the surface the buffer would be
|
||||
// freed, so we keep an additional strong reference here before dropping the surface,
|
||||
// which is then returned. As such it's guaranteed that nothing is using the buffer
|
||||
// anymore mutably.
|
||||
drop(cr);
|
||||
unsafe {
|
||||
assert_eq!(
|
||||
cairo_sys::cairo_surface_get_reference_count(surface.to_raw_none()),
|
||||
1
|
||||
);
|
||||
let buffer = glib::translate::from_glib_none(buffer_ptr);
|
||||
drop(surface);
|
||||
buffer
|
||||
}
|
||||
};
|
||||
info.width(),
|
||||
info.height(),
|
||||
&[0],
|
||||
&[stride],
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
/* Turn the buffer into a VideoOverlayRectangle, then place
|
||||
* that into a VideoOverlayComposition and return it.
|
||||
|
@ -260,14 +222,18 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
|
|||
* spaced around the video frame, but we're just outputting 1
|
||||
* here */
|
||||
let rect = gst_video::VideoOverlayRectangle::new_raw(
|
||||
&buffer,
|
||||
0, 0, frame_width as u32, frame_height as u32,
|
||||
gst_video::VideoOverlayFormatFlags::PREMULTIPLIED_ALPHA,
|
||||
&buffer,
|
||||
0,
|
||||
0,
|
||||
info.width(),
|
||||
info.height(),
|
||||
gst_video::VideoOverlayFormatFlags::PREMULTIPLIED_ALPHA,
|
||||
);
|
||||
|
||||
Some(gst_video::VideoOverlayComposition::new(Some(&rect)).unwrap().to_value())
|
||||
})
|
||||
.unwrap();
|
||||
gst_video::VideoOverlayComposition::new(Some(&rect))
|
||||
.unwrap()
|
||||
}),
|
||||
);
|
||||
|
||||
// Add a signal handler to the overlay's "caps-changed" signal. This could e.g.
|
||||
// be called when the sink that we render to does not support resizing the image
|
||||
|
@ -275,18 +241,18 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
|
|||
// will then change its caps and we use the notification about this change to
|
||||
// resize our canvas's size.
|
||||
// Another possibility for when this might happen is, when our video is a network
|
||||
// stream that dynamically changes resolution when enough bandwidth is available.
|
||||
overlay
|
||||
.connect("caps-changed", false, move |args| {
|
||||
let _overlay = args[0].get::<gst::Element>().unwrap().unwrap();
|
||||
let caps = args[1].get::<gst::Caps>().unwrap().unwrap();
|
||||
|
||||
// stream that dynamically changes resolution when enough bandwidth is available.
|
||||
overlay.connect_closure(
|
||||
"caps-changed",
|
||||
false,
|
||||
glib::closure!(move |_overlay: &gst::Element,
|
||||
caps: &gst::Caps,
|
||||
_width: u32,
|
||||
_height: u32| {
|
||||
let mut drawer = drawer.lock().unwrap();
|
||||
drawer.info = Some(gst_video::VideoInfo::from_caps(&caps).unwrap());
|
||||
|
||||
None
|
||||
})
|
||||
.unwrap();
|
||||
drawer.info = Some(gst_video::VideoInfo::from_caps(caps).unwrap());
|
||||
}),
|
||||
);
|
||||
|
||||
Ok(pipeline)
|
||||
}
|
||||
|
@ -295,10 +261,10 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
|
|||
pipeline.set_state(gst::State::Playing)?;
|
||||
|
||||
let bus = pipeline
|
||||
.get_bus()
|
||||
.bus()
|
||||
.expect("Pipeline without bus. Shouldn't happen!");
|
||||
|
||||
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
|
@ -307,12 +273,11 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
|
|||
pipeline.set_state(gst::State::Null)?;
|
||||
return Err(ErrorMessage {
|
||||
src: msg
|
||||
.get_src()
|
||||
.map(|s| String::from(s.get_path_string()))
|
||||
.unwrap_or_else(|| String::from("None")),
|
||||
error: err.get_error().to_string(),
|
||||
debug: err.get_debug(),
|
||||
source: err.get_error(),
|
||||
.src()
|
||||
.map(|s| s.path_string())
|
||||
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
|
||||
error: err.error(),
|
||||
debug: err.debug(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
|
@ -328,12 +293,12 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
|
|||
fn example_main() {
|
||||
match create_pipeline().and_then(main_loop) {
|
||||
Ok(r) => r,
|
||||
Err(e) => eprintln!("Error! {}", e),
|
||||
Err(e) => eprintln!("Error! {e}"),
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
||||
|
|
|
@ -6,15 +6,13 @@
|
|||
// |-[probe]
|
||||
// /
|
||||
// {audiotestsrc} - {fakesink}
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::prelude::*;
|
||||
extern crate gstreamer_audio as gst_audio;
|
||||
|
||||
use byte_slice_cast::*;
|
||||
#![allow(clippy::question_mark)]
|
||||
|
||||
use std::i16;
|
||||
|
||||
use byte_slice_cast::*;
|
||||
use gst::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
|
@ -24,52 +22,54 @@ fn example_main() {
|
|||
// Parse the pipeline we want to probe from a static in-line string.
|
||||
// Here we give our audiotestsrc a name, so we can retrieve that element
|
||||
// from the resulting pipeline.
|
||||
let pipeline = gst::parse_launch(&format!(
|
||||
let pipeline = gst::parse::launch(&format!(
|
||||
"audiotestsrc name=src ! audio/x-raw,format={},channels=1 ! fakesink",
|
||||
gst_audio::AUDIO_FORMAT_S16.to_str()
|
||||
gst_audio::AUDIO_FORMAT_S16
|
||||
))
|
||||
.unwrap();
|
||||
let pipeline = pipeline.dynamic_cast::<gst::Pipeline>().unwrap();
|
||||
|
||||
// Get the audiotestsrc element from the pipeline that GStreamer
|
||||
// created for us while parsing the launch syntax above.
|
||||
let src = pipeline.get_by_name("src").unwrap();
|
||||
let src = pipeline.by_name("src").unwrap();
|
||||
// Get the audiotestsrc's src-pad.
|
||||
let src_pad = src.get_static_pad("src").unwrap();
|
||||
let src_pad = src.static_pad("src").unwrap();
|
||||
// Add a probe handler on the audiotestsrc's src-pad.
|
||||
// This handler gets called for every buffer that passes the pad we probe.
|
||||
src_pad.add_probe(gst::PadProbeType::BUFFER, |_, probe_info| {
|
||||
// Interpret the data sent over the pad as one buffer
|
||||
if let Some(gst::PadProbeData::Buffer(ref buffer)) = probe_info.data {
|
||||
// At this point, buffer is only a reference to an existing memory region somewhere.
|
||||
// When we want to access its content, we have to map it while requesting the required
|
||||
// mode of access (read, read/write).
|
||||
// This type of abstraction is necessary, because the buffer in question might not be
|
||||
// on the machine's main memory itself, but rather in the GPU's memory.
|
||||
// So mapping the buffer makes the underlying memory region accessible to us.
|
||||
// See: https://gstreamer.freedesktop.org/documentation/plugin-development/advanced/allocation.html
|
||||
let map = buffer.map_readable().unwrap();
|
||||
let Some(buffer) = probe_info.buffer() else {
|
||||
return gst::PadProbeReturn::Ok;
|
||||
};
|
||||
|
||||
// We know what format the data in the memory region has, since we requested
|
||||
// it by setting the appsink's caps. So what we do here is interpret the
|
||||
// memory region we mapped as an array of signed 16 bit integers.
|
||||
let samples = if let Ok(samples) = map.as_slice_of::<i16>() {
|
||||
samples
|
||||
} else {
|
||||
return gst::PadProbeReturn::Ok;
|
||||
};
|
||||
// At this point, buffer is only a reference to an existing memory region somewhere.
|
||||
// When we want to access its content, we have to map it while requesting the required
|
||||
// mode of access (read, read/write).
|
||||
// This type of abstraction is necessary, because the buffer in question might not be
|
||||
// on the machine's main memory itself, but rather in the GPU's memory.
|
||||
// So mapping the buffer makes the underlying memory region accessible to us.
|
||||
// See: https://gstreamer.freedesktop.org/documentation/plugin-development/advanced/allocation.html
|
||||
let map = buffer.map_readable().unwrap();
|
||||
|
||||
// For buffer (= chunk of samples), we calculate the root mean square:
|
||||
let sum: f64 = samples
|
||||
.iter()
|
||||
.map(|sample| {
|
||||
let f = f64::from(*sample) / f64::from(i16::MAX);
|
||||
f * f
|
||||
})
|
||||
.sum();
|
||||
let rms = (sum / (samples.len() as f64)).sqrt();
|
||||
println!("rms: {}", rms);
|
||||
}
|
||||
// We know what format the data in the memory region has, since we requested
|
||||
// it by setting the appsink's caps. So what we do here is interpret the
|
||||
// memory region we mapped as an array of signed 16 bit integers.
|
||||
let samples = if let Ok(samples) = map.as_slice_of::<i16>() {
|
||||
samples
|
||||
} else {
|
||||
return gst::PadProbeReturn::Ok;
|
||||
};
|
||||
|
||||
// For buffer (= chunk of samples), we calculate the root mean square:
|
||||
let sum: f64 = samples
|
||||
.iter()
|
||||
.map(|sample| {
|
||||
let f = f64::from(*sample) / f64::from(i16::MAX);
|
||||
f * f
|
||||
})
|
||||
.sum();
|
||||
let rms = (sum / (samples.len() as f64)).sqrt();
|
||||
println!("rms: {rms}");
|
||||
|
||||
gst::PadProbeReturn::Ok
|
||||
});
|
||||
|
@ -78,8 +78,8 @@ fn example_main() {
|
|||
.set_state(gst::State::Playing)
|
||||
.expect("Unable to set the pipeline to the `Playing` state");
|
||||
|
||||
let bus = pipeline.get_bus().unwrap();
|
||||
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
|
||||
let bus = pipeline.bus().unwrap();
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
|
@ -87,9 +87,9 @@ fn example_main() {
|
|||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.get_src().map(|s| s.get_path_string()),
|
||||
err.get_error(),
|
||||
err.get_debug()
|
||||
err.src().map(|s| s.path_string()),
|
||||
err.error(),
|
||||
err.debug()
|
||||
);
|
||||
break;
|
||||
}
|
||||
|
@ -103,7 +103,7 @@ fn example_main() {
|
|||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
||||
|
|
|
@ -11,36 +11,29 @@
|
|||
// The capsfilter element allows us to dictate the video resolution we want for the
|
||||
// videotestsrc and the cairooverlay element.
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::prelude::*;
|
||||
|
||||
extern crate gstreamer_video as gst_video;
|
||||
use pango::prelude::*;
|
||||
|
||||
use std::ops;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::{
|
||||
ops,
|
||||
sync::{Arc, Mutex},
|
||||
};
|
||||
|
||||
use anyhow::Error;
|
||||
use derive_more::{Display, Error};
|
||||
use gst::prelude::*;
|
||||
use pango::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Missing element {}", _0)]
|
||||
struct MissingElement(#[error(not(source))] &'static str);
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
|
||||
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
|
||||
struct ErrorMessage {
|
||||
src: String,
|
||||
error: String,
|
||||
debug: Option<String>,
|
||||
source: glib::Error,
|
||||
src: glib::GString,
|
||||
error: glib::Error,
|
||||
debug: Option<glib::GString>,
|
||||
}
|
||||
|
||||
struct DrawingContext {
|
||||
layout: glib::SendUniqueCell<LayoutWrapper>,
|
||||
layout: LayoutWrapper,
|
||||
info: Option<gst_video::VideoInfo>,
|
||||
}
|
||||
|
||||
|
@ -51,50 +44,45 @@ impl ops::Deref for LayoutWrapper {
|
|||
type Target = pango::Layout;
|
||||
|
||||
fn deref(&self) -> &pango::Layout {
|
||||
assert_eq!(self.0.ref_count(), 1);
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
unsafe impl glib::SendUnique for LayoutWrapper {
|
||||
fn is_unique(&self) -> bool {
|
||||
self.0.ref_count() == 1
|
||||
}
|
||||
}
|
||||
// SAFETY: We ensure that there are never multiple references to the layout.
|
||||
unsafe impl Send for LayoutWrapper {}
|
||||
|
||||
fn create_pipeline() -> Result<gst::Pipeline, Error> {
|
||||
gst::init()?;
|
||||
|
||||
let pipeline = gst::Pipeline::new(None);
|
||||
let src = gst::ElementFactory::make("videotestsrc", None)
|
||||
.map_err(|_| MissingElement("videotestsrc"))?;
|
||||
let overlay = gst::ElementFactory::make("cairooverlay", None)
|
||||
.map_err(|_| MissingElement("cairooverlay"))?;
|
||||
let capsfilter =
|
||||
gst::ElementFactory::make("capsfilter", None).map_err(|_| MissingElement("capsfilter"))?;
|
||||
let videoconvert = gst::ElementFactory::make("videoconvert", None)
|
||||
.map_err(|_| MissingElement("videoconvert"))?;
|
||||
let sink = gst::ElementFactory::make("autovideosink", None)
|
||||
.map_err(|_| MissingElement("autovideosink"))?;
|
||||
|
||||
pipeline.add_many(&[&src, &overlay, &capsfilter, &videoconvert, &sink])?;
|
||||
gst::Element::link_many(&[&src, &overlay, &capsfilter, &videoconvert, &sink])?;
|
||||
let pipeline = gst::Pipeline::default();
|
||||
let src = gst::ElementFactory::make("videotestsrc")
|
||||
// The videotestsrc supports multiple test patterns. In this example, we will use the
|
||||
// pattern with a white ball moving around the video's center point.
|
||||
.property_from_str("pattern", "ball")
|
||||
.build()?;
|
||||
let overlay = gst::ElementFactory::make("cairooverlay").build()?;
|
||||
|
||||
// Plug in a capsfilter element that will force the videotestsrc and the cairooverlay to work
|
||||
// with images of the size 800x800.
|
||||
let caps = gst::Caps::builder("video/x-raw")
|
||||
.field("width", &800i32)
|
||||
.field("height", &800i32)
|
||||
let caps = gst_video::VideoCapsBuilder::new()
|
||||
.width(800)
|
||||
.height(800)
|
||||
.build();
|
||||
capsfilter.set_property("caps", &caps).unwrap();
|
||||
let capsfilter = gst::ElementFactory::make("capsfilter")
|
||||
.property("caps", &caps)
|
||||
.build()?;
|
||||
|
||||
// The videotestsrc supports multiple test patterns. In this example, we will use the
|
||||
// pattern with a white ball moving around the video's center point.
|
||||
src.set_property_from_str("pattern", "ball");
|
||||
let videoconvert = gst::ElementFactory::make("videoconvert").build()?;
|
||||
let sink = gst::ElementFactory::make("autovideosink").build()?;
|
||||
|
||||
pipeline.add_many([&src, &overlay, &capsfilter, &videoconvert, &sink])?;
|
||||
gst::Element::link_many([&src, &overlay, &capsfilter, &videoconvert, &sink])?;
|
||||
|
||||
// The PangoFontMap represents the set of fonts available for a particular rendering system.
|
||||
let fontmap = pangocairo::FontMap::new().unwrap();
|
||||
let fontmap = pangocairo::FontMap::new();
|
||||
// Create a new pango layouting context for the fontmap.
|
||||
let context = fontmap.create_context().unwrap();
|
||||
let context = fontmap.create_context();
|
||||
// Create a pango layout object. This object is a string of text we want to layout.
|
||||
// It is wrapped in a LayoutWrapper (defined above) to be able to send it across threads.
|
||||
let layout = LayoutWrapper(pango::Layout::new(&context));
|
||||
|
@ -114,10 +102,7 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
|
|||
// interior mutability (see Rust docs). Via this we can get a mutable reference to the contained
|
||||
// data which is checked at runtime for uniqueness (blocking in case of mutex, panic in case
|
||||
// of refcell) instead of compile-time (like with normal references).
|
||||
let drawer = Arc::new(Mutex::new(DrawingContext {
|
||||
layout: glib::SendUniqueCell::new(layout).unwrap(),
|
||||
info: None,
|
||||
}));
|
||||
let drawer = Arc::new(Mutex::new(DrawingContext { layout, info: None }));
|
||||
|
||||
let drawer_clone = drawer.clone();
|
||||
// Connect to the cairooverlay element's "draw" signal, which is emitted for
|
||||
|
@ -127,78 +112,74 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
|
|||
// passed as array of glib::Value. For a documentation about the actual arguments
|
||||
// it is always a good idea to either check the element's signals using either
|
||||
// gst-inspect, or the online documentation.
|
||||
overlay
|
||||
.connect("draw", false, move |args| {
|
||||
use std::f64::consts::PI;
|
||||
overlay.connect("draw", false, move |args| {
|
||||
use std::f64::consts::PI;
|
||||
|
||||
let drawer = &drawer_clone;
|
||||
let drawer = drawer.lock().unwrap();
|
||||
let drawer = &drawer_clone;
|
||||
let drawer = drawer.lock().unwrap();
|
||||
|
||||
// Get the signal's arguments
|
||||
let _overlay = args[0].get::<gst::Element>().unwrap().unwrap();
|
||||
// This is the cairo context. This is the root of all of cairo's
|
||||
// drawing functionality.
|
||||
let cr = args[1].get::<cairo::Context>().unwrap().unwrap();
|
||||
let timestamp = args[2].get_some::<gst::ClockTime>().unwrap();
|
||||
let _duration = args[3].get_some::<gst::ClockTime>().unwrap();
|
||||
// Get the signal's arguments
|
||||
let _overlay = args[0].get::<gst::Element>().unwrap();
|
||||
// This is the cairo context. This is the root of all of cairo's
|
||||
// drawing functionality.
|
||||
let cr = args[1].get::<cairo::Context>().unwrap();
|
||||
let timestamp = args[2].get::<gst::ClockTime>().unwrap();
|
||||
let _duration = args[3].get::<gst::ClockTime>().unwrap();
|
||||
|
||||
let info = drawer.info.as_ref().unwrap();
|
||||
let layout = drawer.layout.borrow();
|
||||
let info = drawer.info.as_ref().unwrap();
|
||||
let layout = &drawer.layout;
|
||||
|
||||
let angle = 2.0
|
||||
* PI
|
||||
* ((timestamp % (10 * gst::SECOND)).unwrap() as f64
|
||||
/ (10.0 * gst::SECOND_VAL as f64));
|
||||
let angle = 2.0 * PI * (timestamp % (10 * gst::ClockTime::SECOND)).nseconds() as f64
|
||||
/ (10.0 * gst::ClockTime::SECOND.nseconds() as f64);
|
||||
|
||||
// The image we draw (the text) will be static, but we will change the
|
||||
// transformation on the drawing context, which rotates and shifts everything
|
||||
// that we draw afterwards. Like this, we have no complicated calulations
|
||||
// in the actual drawing below.
|
||||
// Calling multiple transformation methods after each other will apply the
|
||||
// new transformation on top. If you repeat the cr.rotate(angle) line below
|
||||
// this a second time, everything in the canvas will rotate twice as fast.
|
||||
cr.translate(
|
||||
f64::from(info.width()) / 2.0,
|
||||
f64::from(info.height()) / 2.0,
|
||||
// The image we draw (the text) will be static, but we will change the
|
||||
// transformation on the drawing context, which rotates and shifts everything
|
||||
// that we draw afterwards. Like this, we have no complicated calculations
|
||||
// in the actual drawing below.
|
||||
// Calling multiple transformation methods after each other will apply the
|
||||
// new transformation on top. If you repeat the cr.rotate(angle) line below
|
||||
// this a second time, everything in the canvas will rotate twice as fast.
|
||||
cr.translate(
|
||||
f64::from(info.width()) / 2.0,
|
||||
f64::from(info.height()) / 2.0,
|
||||
);
|
||||
cr.rotate(angle);
|
||||
|
||||
// This loop will render 10 times the string "GStreamer" in a circle
|
||||
for i in 0..10 {
|
||||
// Cairo, like most rendering frameworks, is using a stack for transformations
|
||||
// with this, we push our current transformation onto this stack - allowing us
|
||||
// to make temporary changes / render something / and then returning to the
|
||||
// previous transformations.
|
||||
cr.save().expect("Failed to save state");
|
||||
|
||||
let angle = (360. * f64::from(i)) / 10.0;
|
||||
let red = (1.0 + f64::cos((angle - 60.0) * PI / 180.0)) / 2.0;
|
||||
cr.set_source_rgb(red, 0.0, 1.0 - red);
|
||||
cr.rotate(angle * PI / 180.0);
|
||||
|
||||
// Update the text layout. This function is only updating pango's internal state.
|
||||
// So e.g. that after a 90 degree rotation it knows that what was previously going
|
||||
// to end up as a 200x100 rectangle would now be 100x200.
|
||||
pangocairo::functions::update_layout(&cr, layout);
|
||||
let (width, _height) = layout.size();
|
||||
// Using width and height of the text, we can properly position it within
|
||||
// our canvas.
|
||||
cr.move_to(
|
||||
-(f64::from(width) / f64::from(pango::SCALE)) / 2.0,
|
||||
-(f64::from(info.height())) / 2.0,
|
||||
);
|
||||
cr.rotate(angle);
|
||||
// After telling the layout object where to draw itself, we actually tell
|
||||
// it to draw itself into our cairo context.
|
||||
pangocairo::functions::show_layout(&cr, layout);
|
||||
|
||||
// This loop will render 10 times the string "GStreamer" in a circle
|
||||
for i in 0..10 {
|
||||
// Cairo, like most rendering frameworks, is using a stack for transformations
|
||||
// with this, we push our current transformation onto this stack - allowing us
|
||||
// to make temporary changes / render something / and then returning to the
|
||||
// previous transformations.
|
||||
cr.save();
|
||||
// Here we go one step up in our stack of transformations, removing any
|
||||
// changes we did to them since the last call to cr.save();
|
||||
cr.restore().expect("Failed to restore state");
|
||||
}
|
||||
|
||||
let angle = (360. * f64::from(i)) / 10.0;
|
||||
let red = (1.0 + f64::cos((angle - 60.0) * PI / 180.0)) / 2.0;
|
||||
cr.set_source_rgb(red, 0.0, 1.0 - red);
|
||||
cr.rotate(angle * PI / 180.0);
|
||||
|
||||
// Update the text layout. This function is only updating pango's internal state.
|
||||
// So e.g. that after a 90 degree rotation it knows that what was previously going
|
||||
// to end up as a 200x100 rectangle would now be 100x200.
|
||||
pangocairo::functions::update_layout(&cr, &**layout);
|
||||
let (width, _height) = layout.get_size();
|
||||
// Using width and height of the text, we can properly possition it within
|
||||
// our canvas.
|
||||
cr.move_to(
|
||||
-(f64::from(width) / f64::from(pango::SCALE)) / 2.0,
|
||||
-(f64::from(info.height())) / 2.0,
|
||||
);
|
||||
// After telling the layout object where to draw itself, we actually tell
|
||||
// it to draw itself into our cairo context.
|
||||
pangocairo::functions::show_layout(&cr, &**layout);
|
||||
|
||||
// Here we go one step up in our stack of transformations, removing any
|
||||
// changes we did to them since the last call to cr.save();
|
||||
cr.restore();
|
||||
}
|
||||
|
||||
None
|
||||
})
|
||||
.unwrap();
|
||||
None
|
||||
});
|
||||
|
||||
// Add a signal handler to the overlay's "caps-changed" signal. This could e.g.
|
||||
// be called when the sink that we render to does not support resizing the image
|
||||
|
@ -206,18 +187,16 @@ fn create_pipeline() -> Result<gst::Pipeline, Error> {
|
|||
// will then change its caps and we use the notification about this change to
|
||||
// resize our canvas's size.
|
||||
// Another possibility for when this might happen is, when our video is a network
|
||||
// stream that dynamically changes resolution when enough bandwith is available.
|
||||
overlay
|
||||
.connect("caps-changed", false, move |args| {
|
||||
let _overlay = args[0].get::<gst::Element>().unwrap().unwrap();
|
||||
let caps = args[1].get::<gst::Caps>().unwrap().unwrap();
|
||||
// stream that dynamically changes resolution when enough bandwidth is available.
|
||||
overlay.connect("caps-changed", false, move |args| {
|
||||
let _overlay = args[0].get::<gst::Element>().unwrap();
|
||||
let caps = args[1].get::<gst::Caps>().unwrap();
|
||||
|
||||
let mut drawer = drawer.lock().unwrap();
|
||||
drawer.info = Some(gst_video::VideoInfo::from_caps(&caps).unwrap());
|
||||
let mut drawer = drawer.lock().unwrap();
|
||||
drawer.info = Some(gst_video::VideoInfo::from_caps(&caps).unwrap());
|
||||
|
||||
None
|
||||
})
|
||||
.unwrap();
|
||||
None
|
||||
});
|
||||
|
||||
Ok(pipeline)
|
||||
}
|
||||
|
@ -226,10 +205,10 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
|
|||
pipeline.set_state(gst::State::Playing)?;
|
||||
|
||||
let bus = pipeline
|
||||
.get_bus()
|
||||
.bus()
|
||||
.expect("Pipeline without bus. Shouldn't happen!");
|
||||
|
||||
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
|
@ -238,12 +217,11 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
|
|||
pipeline.set_state(gst::State::Null)?;
|
||||
return Err(ErrorMessage {
|
||||
src: msg
|
||||
.get_src()
|
||||
.map(|s| String::from(s.get_path_string()))
|
||||
.unwrap_or_else(|| String::from("None")),
|
||||
error: err.get_error().to_string(),
|
||||
debug: err.get_debug(),
|
||||
source: err.get_error(),
|
||||
.src()
|
||||
.map(|s| s.path_string())
|
||||
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
|
||||
error: err.error(),
|
||||
debug: err.debug(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
|
@ -259,12 +237,12 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
|
|||
fn example_main() {
|
||||
match create_pipeline().and_then(main_loop) {
|
||||
Ok(r) => r,
|
||||
Err(e) => eprintln!("Error! {}", e),
|
||||
Err(e) => eprintln!("Error! {e}"),
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
||||
|
|
67
examples/src/bin/play.rs
Normal file
67
examples/src/bin/play.rs
Normal file
|
@ -0,0 +1,67 @@
|
|||
// This example shows how to use the GstPlay API.
|
||||
// The GstPlay API is a convenience API to allow implement playback applications
|
||||
// without having to write too much code.
|
||||
// Most of the tasks a play needs to support (such as seeking and switching
|
||||
// audio / subtitle streams or changing the volume) are all supported by simple
|
||||
// one-line function calls on the GstPlay.
|
||||
|
||||
use std::env;
|
||||
|
||||
use anyhow::Error;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
use gst_play::{Play, PlayMessage, PlayVideoRenderer};
|
||||
|
||||
fn main_loop(uri: &str) -> Result<(), Error> {
|
||||
gst::init()?;
|
||||
|
||||
let play = Play::new(None::<PlayVideoRenderer>);
|
||||
play.set_uri(Some(uri));
|
||||
play.play();
|
||||
|
||||
let mut result = Ok(());
|
||||
for msg in play.message_bus().iter_timed(gst::ClockTime::NONE) {
|
||||
match PlayMessage::parse(&msg) {
|
||||
Ok(PlayMessage::EndOfStream) => {
|
||||
play.stop();
|
||||
break;
|
||||
}
|
||||
Ok(PlayMessage::Error { error, details: _ }) => {
|
||||
result = Err(error);
|
||||
play.stop();
|
||||
break;
|
||||
}
|
||||
Ok(_) => (),
|
||||
Err(_) => unreachable!(),
|
||||
}
|
||||
}
|
||||
|
||||
// Set the message bus to flushing to ensure that all pending messages are dropped and there
|
||||
// are no further references to the play instance.
|
||||
play.message_bus().set_flushing(true);
|
||||
|
||||
result.map_err(|e| e.into())
|
||||
}
|
||||
|
||||
fn example_main() {
|
||||
let args: Vec<_> = env::args().collect();
|
||||
let uri: &str = if args.len() == 2 {
|
||||
args[1].as_ref()
|
||||
} else {
|
||||
println!("Usage: play uri");
|
||||
std::process::exit(-1)
|
||||
};
|
||||
|
||||
match main_loop(uri) {
|
||||
Ok(r) => r,
|
||||
Err(e) => eprintln!("Error! {e}"),
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
|
@ -9,11 +9,10 @@
|
|||
// Much of the playbin's behavior can be controlled by so-called flags, as well
|
||||
// as the playbin's properties and signals.
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::prelude::*;
|
||||
|
||||
use std::env;
|
||||
|
||||
use gst::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
|
@ -29,22 +28,22 @@ fn example_main() {
|
|||
};
|
||||
|
||||
// Create a new playbin element, and tell it what uri to play back.
|
||||
let playbin = gst::ElementFactory::make("playbin", None).unwrap();
|
||||
playbin
|
||||
.set_property("uri", &glib::Value::from(uri))
|
||||
let playbin = gst::ElementFactory::make("playbin")
|
||||
.property("uri", uri)
|
||||
.build()
|
||||
.unwrap();
|
||||
|
||||
// For flags handling
|
||||
// With flags, one can configure playbin's behavior such as whether it
|
||||
// should play back contained video streams, or if it should render subtitles.
|
||||
// let flags = playbin.get_property("flags").unwrap();
|
||||
// let flags_class = FlagsClass::new(flags.type_()).unwrap();
|
||||
// let flags = playbin.property_value("flags");
|
||||
// let flags_class = FlagsClass::with_type(flags.type_()).unwrap();
|
||||
// let flags = flags_class.builder_with_value(flags).unwrap()
|
||||
// .unset_by_nick("text")
|
||||
// .unset_by_nick("video")
|
||||
// .build()
|
||||
// .unwrap();
|
||||
// playbin.set_property("flags", &flags).unwrap();
|
||||
// playbin.set_property_from_value("flags", &flags);
|
||||
|
||||
// The playbin also provides any kind of metadata that it found in the played stream.
|
||||
// For this, the playbin provides signals notifying about changes in the metadata.
|
||||
|
@ -53,64 +52,59 @@ fn example_main() {
|
|||
// - Live streams (such as internet radios) update this metadata during the stream
|
||||
// Note that this signal will be emitted from the streaming threads usually,
|
||||
// not the application's threads!
|
||||
playbin
|
||||
.connect("audio-tags-changed", false, |values| {
|
||||
// The metadata of any of the contained audio streams changed
|
||||
// In the case of a live-stream from an internet radio, this could for example
|
||||
// mark the beginning of a new track, or a new DJ.
|
||||
let playbin = values[0]
|
||||
.get::<glib::Object>()
|
||||
.expect("playbin \"audio-tags-changed\" signal values[1]")
|
||||
.unwrap();
|
||||
// This gets the index of the stream that changed. This is neccessary, since
|
||||
// there could e.g. be multiple audio streams (english, spanish, ...).
|
||||
let idx = values[1]
|
||||
.get_some::<i32>()
|
||||
.expect("playbin \"audio-tags-changed\" signal values[1]");
|
||||
playbin.connect("audio-tags-changed", false, |values| {
|
||||
// The metadata of any of the contained audio streams changed
|
||||
// In the case of a live-stream from an internet radio, this could for example
|
||||
// mark the beginning of a new track, or a new DJ.
|
||||
let playbin = values[0]
|
||||
.get::<glib::Object>()
|
||||
.expect("playbin \"audio-tags-changed\" signal values[1]");
|
||||
// This gets the index of the stream that changed. This is necessary, since
|
||||
// there could e.g. be multiple audio streams (english, spanish, ...).
|
||||
let idx = values[1]
|
||||
.get::<i32>()
|
||||
.expect("playbin \"audio-tags-changed\" signal values[1]");
|
||||
|
||||
println!("audio tags of audio stream {} changed:", idx);
|
||||
println!("audio tags of audio stream {idx} changed:");
|
||||
|
||||
// HELP: is this correct?
|
||||
// We were only notified about the change of metadata. If we want to do
|
||||
// something with it, we first need to actually query the metadata from the playbin.
|
||||
// We do this by facilliating the get-audio-tags action-signal on playbin.
|
||||
// Sending an action-signal to an element essentially is a function call on the element.
|
||||
// It is done that way, because elements do not have their own function API, they are
|
||||
// relying on GStreamer and GLib's API. The only way an element can communicate with an
|
||||
// application is via properties, signals or action signals (or custom messages, events, queries).
|
||||
// So what the following code does, is essentially asking playbin to tell us its already
|
||||
// internally stored tag list for this stream index.
|
||||
let tags = playbin
|
||||
.emit("get-audio-tags", &[&idx.to_value()])
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
let tags = tags.get::<gst::TagList>().expect("tags").unwrap();
|
||||
// HELP: is this correct?
|
||||
// We were only notified about the change of metadata. If we want to do
|
||||
// something with it, we first need to actually query the metadata from the playbin.
|
||||
// We do this by facilliating the get-audio-tags action-signal on playbin.
|
||||
// Sending an action-signal to an element essentially is a function call on the element.
|
||||
// It is done that way, because elements do not have their own function API, they are
|
||||
// relying on GStreamer and GLib's API. The only way an element can communicate with an
|
||||
// application is via properties, signals or action signals (or custom messages, events, queries).
|
||||
// So what the following code does, is essentially asking playbin to tell us its already
|
||||
// internally stored tag list for this stream index.
|
||||
let tags = playbin.emit_by_name::<Option<gst::TagList>>("get-audio-tags", &[&idx]);
|
||||
|
||||
if let Some(tags) = tags {
|
||||
if let Some(artist) = tags.get::<gst::tags::Artist>() {
|
||||
println!(" Artist: {}", artist.get().unwrap());
|
||||
println!(" Artist: {}", artist.get());
|
||||
}
|
||||
|
||||
if let Some(title) = tags.get::<gst::tags::Title>() {
|
||||
println!(" Title: {}", title.get().unwrap());
|
||||
println!(" Title: {}", title.get());
|
||||
}
|
||||
|
||||
if let Some(album) = tags.get::<gst::tags::Album>() {
|
||||
println!(" Album: {}", album.get().unwrap());
|
||||
println!(" Album: {}", album.get());
|
||||
}
|
||||
}
|
||||
|
||||
None
|
||||
})
|
||||
.unwrap();
|
||||
None
|
||||
});
|
||||
|
||||
// The playbin element itself is a playbin, so it can be used as one, despite being
|
||||
// created from an element factory.
|
||||
let bus = playbin.get_bus().unwrap();
|
||||
let bus = playbin.bus().unwrap();
|
||||
|
||||
playbin
|
||||
.set_state(gst::State::Playing)
|
||||
.expect("Unable to set the pipeline to the `Playing` state");
|
||||
|
||||
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
|
@ -118,20 +112,17 @@ fn example_main() {
|
|||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.get_src().map(|s| s.get_path_string()),
|
||||
err.get_error(),
|
||||
err.get_debug()
|
||||
err.src().map(|s| s.path_string()),
|
||||
err.error(),
|
||||
err.debug()
|
||||
);
|
||||
break;
|
||||
}
|
||||
MessageView::StateChanged(state_changed) =>
|
||||
// We are only interested in state-changed messages from playbin
|
||||
{
|
||||
if state_changed
|
||||
.get_src()
|
||||
.map(|s| s == playbin)
|
||||
.unwrap_or(false)
|
||||
&& state_changed.get_current() == gst::State::Playing
|
||||
if state_changed.src().map(|s| s == &playbin).unwrap_or(false)
|
||||
&& state_changed.current() == gst::State::Playing
|
||||
{
|
||||
// Generate a dot graph of the pipeline to GST_DEBUG_DUMP_DOT_DIR if defined
|
||||
let bin_ref = playbin.downcast_ref::<gst::Bin>().unwrap();
|
||||
|
@ -149,7 +140,7 @@ fn example_main() {
|
|||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
||||
|
|
|
@ -5,15 +5,13 @@
|
|||
// audio / subtitle streams or changing the volume) are all supported by simple
|
||||
// one-line function calls on the GstPlayer.
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::prelude::*;
|
||||
|
||||
extern crate gstreamer_player as gst_player;
|
||||
|
||||
use std::env;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::{
|
||||
env,
|
||||
sync::{Arc, Mutex},
|
||||
};
|
||||
|
||||
use anyhow::Error;
|
||||
use gst::prelude::*;
|
||||
|
||||
#[allow(unused_imports)]
|
||||
#[path = "../examples-common.rs"]
|
||||
|
@ -26,12 +24,12 @@ fn main_loop(uri: &str) -> Result<(), Error> {
|
|||
|
||||
let dispatcher = gst_player::PlayerGMainContextSignalDispatcher::new(None);
|
||||
let player = gst_player::Player::new(
|
||||
None,
|
||||
Some(&dispatcher.upcast::<gst_player::PlayerSignalDispatcher>()),
|
||||
None::<gst_player::PlayerVideoRenderer>,
|
||||
Some(dispatcher.upcast::<gst_player::PlayerSignalDispatcher>()),
|
||||
);
|
||||
|
||||
// Tell the player what uri to play.
|
||||
player.set_uri(uri);
|
||||
player.set_uri(Some(uri));
|
||||
|
||||
let error = Arc::new(Mutex::new(Ok(())));
|
||||
|
||||
|
@ -78,12 +76,12 @@ fn example_main() {
|
|||
|
||||
match main_loop(uri) {
|
||||
Ok(r) => r,
|
||||
Err(e) => eprintln!("Error! {}", e),
|
||||
Err(e) => eprintln!("Error! {e}"),
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
||||
|
|
|
@ -12,12 +12,10 @@
|
|||
// For convenience, the API has a set of pre-defined queries, but also
|
||||
// allows custom queries (which can be defined and used by your own elements).
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::prelude::*;
|
||||
|
||||
use std::convert::TryInto;
|
||||
use std::env;
|
||||
|
||||
use gst::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
|
@ -30,8 +28,8 @@ fn example_main() {
|
|||
let main_loop = glib::MainLoop::new(None, false);
|
||||
|
||||
// Let GStreamer create a pipeline from the parsed launch syntax on the cli.
|
||||
let pipeline = gst::parse_launch(&pipeline_str).unwrap();
|
||||
let bus = pipeline.get_bus().unwrap();
|
||||
let pipeline = gst::parse::launch(&pipeline_str).unwrap();
|
||||
let bus = pipeline.bus().unwrap();
|
||||
|
||||
pipeline
|
||||
.set_state(gst::State::Playing)
|
||||
|
@ -52,20 +50,19 @@ fn example_main() {
|
|||
let timeout_id = glib::timeout_add_seconds(1, move || {
|
||||
// Here we temporarily retrieve a strong reference on the pipeline from the weak one
|
||||
// we moved into this callback.
|
||||
let pipeline = match pipeline_weak.upgrade() {
|
||||
Some(pipeline) => pipeline,
|
||||
None => return glib::Continue(true),
|
||||
let Some(pipeline) = pipeline_weak.upgrade() else {
|
||||
return glib::ControlFlow::Break;
|
||||
};
|
||||
|
||||
//let pos = pipeline.query_position(gst::Format::Time).unwrap_or(-1);
|
||||
//let dur = pipeline.query_duration(gst::Format::Time).unwrap_or(-1);
|
||||
let pos: gst::ClockTime = {
|
||||
let pos: Option<gst::ClockTime> = {
|
||||
// Create a new position query and send it to the pipeline.
|
||||
// This will traverse all elements in the pipeline, until one feels
|
||||
// capable of answering the query.
|
||||
let mut q = gst::query::Position::new(gst::Format::Time);
|
||||
if pipeline.query(&mut q) {
|
||||
Some(q.get_result())
|
||||
Some(q.result())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -73,13 +70,13 @@ fn example_main() {
|
|||
.and_then(|pos| pos.try_into().ok())
|
||||
.unwrap();
|
||||
|
||||
let dur: gst::ClockTime = {
|
||||
let dur: Option<gst::ClockTime> = {
|
||||
// Create a new duration query and send it to the pipeline.
|
||||
// This will traverse all elements in the pipeline, until one feels
|
||||
// capable of answering the query.
|
||||
let mut q = gst::query::Duration::new(gst::Format::Time);
|
||||
if pipeline.query(&mut q) {
|
||||
Some(q.get_result())
|
||||
Some(q.result())
|
||||
} else {
|
||||
None
|
||||
}
|
||||
|
@ -87,36 +84,37 @@ fn example_main() {
|
|||
.and_then(|dur| dur.try_into().ok())
|
||||
.unwrap();
|
||||
|
||||
println!("{} / {}", pos, dur);
|
||||
println!("{} / {}", pos.display(), dur.display());
|
||||
|
||||
glib::Continue(true)
|
||||
glib::ControlFlow::Continue
|
||||
});
|
||||
|
||||
// Need to move a new reference into the closure.
|
||||
let main_loop_clone = main_loop.clone();
|
||||
//bus.add_signal_watch();
|
||||
//bus.connect_message(move |_, msg| {
|
||||
bus.add_watch(move |_, msg| {
|
||||
use gst::MessageView;
|
||||
//bus.connect_message(None, move |_, msg| {
|
||||
let _bus_watch = bus
|
||||
.add_watch(move |_, msg| {
|
||||
use gst::MessageView;
|
||||
|
||||
let main_loop = &main_loop_clone;
|
||||
match msg.view() {
|
||||
MessageView::Eos(..) => main_loop.quit(),
|
||||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.get_src().map(|s| s.get_path_string()),
|
||||
err.get_error(),
|
||||
err.get_debug()
|
||||
);
|
||||
main_loop.quit();
|
||||
}
|
||||
_ => (),
|
||||
};
|
||||
let main_loop = &main_loop_clone;
|
||||
match msg.view() {
|
||||
MessageView::Eos(..) => main_loop.quit(),
|
||||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.src().map(|s| s.path_string()),
|
||||
err.error(),
|
||||
err.debug()
|
||||
);
|
||||
main_loop.quit();
|
||||
}
|
||||
_ => (),
|
||||
};
|
||||
|
||||
glib::Continue(true)
|
||||
})
|
||||
.expect("Failed to add bus watch");
|
||||
glib::ControlFlow::Continue
|
||||
})
|
||||
.expect("Failed to add bus watch");
|
||||
|
||||
main_loop.run();
|
||||
|
||||
|
@ -124,12 +122,11 @@ fn example_main() {
|
|||
.set_state(gst::State::Null)
|
||||
.expect("Unable to set the pipeline to the `Null` state");
|
||||
|
||||
bus.remove_watch().unwrap();
|
||||
glib::source_remove(timeout_id);
|
||||
timeout_id.remove();
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
||||
|
|
|
@ -1,9 +1,7 @@
|
|||
extern crate gstreamer as gst;
|
||||
use gst::gst_element_error;
|
||||
use gst::prelude::*;
|
||||
|
||||
use std::env;
|
||||
|
||||
use gst::{element_error, prelude::*};
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
|
@ -11,69 +9,54 @@ use anyhow::Error;
|
|||
use derive_more::{Display, Error};
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Missing element {}", _0)]
|
||||
struct MissingElement(#[error(not(source))] &'static str);
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "No such pad {} in {}", _0, _1)]
|
||||
#[display(fmt = "No such pad {_0} in {_1}")]
|
||||
struct NoSuchPad(#[error(not(source))] &'static str, String);
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Unknown payload type {}", _0)]
|
||||
#[display(fmt = "Unknown payload type {_0}")]
|
||||
struct UnknownPT(#[error(not(source))] u32);
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Usage: {} (play | record) DROP_PROBABILITY", _0)]
|
||||
#[display(fmt = "Usage: {_0} (play | record) DROP_PROBABILITY")]
|
||||
struct UsageError(#[error(not(source))] String);
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
|
||||
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
|
||||
struct ErrorMessage {
|
||||
src: String,
|
||||
error: String,
|
||||
debug: Option<String>,
|
||||
source: glib::Error,
|
||||
src: glib::GString,
|
||||
error: glib::Error,
|
||||
debug: Option<glib::GString>,
|
||||
}
|
||||
|
||||
fn make_element(
|
||||
factory_name: &'static str,
|
||||
element_name: Option<&str>,
|
||||
) -> Result<gst::Element, Error> {
|
||||
match gst::ElementFactory::make(factory_name, element_name) {
|
||||
Ok(elem) => Ok(elem),
|
||||
Err(_) => Err(Error::from(MissingElement(factory_name))),
|
||||
}
|
||||
}
|
||||
|
||||
fn get_static_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
|
||||
match element.get_static_pad(pad_name) {
|
||||
fn static_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
|
||||
match element.static_pad(pad_name) {
|
||||
Some(pad) => Ok(pad),
|
||||
None => {
|
||||
let element_name = element.get_name();
|
||||
let element_name = element.name();
|
||||
Err(Error::from(NoSuchPad(pad_name, element_name.to_string())))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn get_request_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
|
||||
match element.get_request_pad(pad_name) {
|
||||
fn request_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
|
||||
match element.request_pad_simple(pad_name) {
|
||||
Some(pad) => Ok(pad),
|
||||
None => {
|
||||
let element_name = element.get_name();
|
||||
let element_name = element.name();
|
||||
Err(Error::from(NoSuchPad(pad_name, element_name.to_string())))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn connect_rtpbin_srcpad(src_pad: &gst::Pad, sink: &gst::Element) -> Result<(), Error> {
|
||||
let name = src_pad.get_name();
|
||||
let name = src_pad.name();
|
||||
let split_name = name.split('_');
|
||||
let split_name = split_name.collect::<Vec<&str>>();
|
||||
let pt = split_name[5].parse::<u32>()?;
|
||||
|
||||
match pt {
|
||||
96 => {
|
||||
let sinkpad = get_static_pad(sink, "sink")?;
|
||||
let sinkpad = static_pad(sink, "sink")?;
|
||||
src_pad.link(&sinkpad)?;
|
||||
Ok(())
|
||||
}
|
||||
|
@ -82,14 +65,11 @@ fn connect_rtpbin_srcpad(src_pad: &gst::Pad, sink: &gst::Element) -> Result<(),
|
|||
}
|
||||
|
||||
fn make_fec_decoder(rtpbin: &gst::Element, sess_id: u32) -> Result<gst::Element, Error> {
|
||||
let fecdec = make_element("rtpulpfecdec", None)?;
|
||||
let internal_storage = rtpbin
|
||||
.emit("get-internal-storage", &[&sess_id.to_value()])
|
||||
.unwrap()
|
||||
.unwrap();
|
||||
|
||||
fecdec.set_property("storage", &internal_storage.to_value())?;
|
||||
fecdec.set_property("pt", &100u32.to_value())?;
|
||||
let internal_storage = rtpbin.emit_by_name::<glib::Object>("get-internal-storage", &[&sess_id]);
|
||||
let fecdec = gst::ElementFactory::make("rtpulpfecdec")
|
||||
.property("storage", &internal_storage)
|
||||
.property("pt", 100u32)
|
||||
.build()?;
|
||||
|
||||
Ok(fecdec)
|
||||
}
|
||||
|
@ -105,34 +85,55 @@ fn example_main() -> Result<(), Error> {
|
|||
|
||||
let drop_probability = args[2].parse::<f32>()?;
|
||||
|
||||
let pipeline = gst::Pipeline::new(None);
|
||||
let src = make_element("udpsrc", None)?;
|
||||
let netsim = make_element("netsim", None)?;
|
||||
let rtpbin = make_element("rtpbin", None)?;
|
||||
let depay = make_element("rtpvp8depay", None)?;
|
||||
let dec = make_element("vp8dec", None)?;
|
||||
let conv = make_element("videoconvert", None)?;
|
||||
let scale = make_element("videoscale", None)?;
|
||||
let filter = make_element("capsfilter", None)?;
|
||||
let pipeline = gst::Pipeline::default();
|
||||
|
||||
pipeline.add_many(&[&src, &netsim, &rtpbin, &depay, &dec, &conv, &scale, &filter])?;
|
||||
gst::Element::link_many(&[&depay, &dec, &conv, &scale, &filter])?;
|
||||
let rtp_caps = gst::Caps::builder("application/x-rtp")
|
||||
.field("clock-rate", 90000i32)
|
||||
.build();
|
||||
|
||||
let video_caps = gst_video::VideoCapsBuilder::new()
|
||||
.width(1920)
|
||||
.height(1080)
|
||||
.build();
|
||||
|
||||
let src = gst::ElementFactory::make("udpsrc")
|
||||
.property("address", "127.0.0.1")
|
||||
.property("caps", &rtp_caps)
|
||||
.build()?;
|
||||
let netsim = gst::ElementFactory::make("netsim")
|
||||
.property("drop-probability", drop_probability)
|
||||
.build()?;
|
||||
let rtpbin = gst::ElementFactory::make("rtpbin")
|
||||
.property("do-lost", true)
|
||||
.build()?;
|
||||
let depay = gst::ElementFactory::make("rtpvp8depay").build()?;
|
||||
let dec = gst::ElementFactory::make("vp8dec").build()?;
|
||||
let conv = gst::ElementFactory::make("videoconvert").build()?;
|
||||
let scale = gst::ElementFactory::make("videoscale").build()?;
|
||||
let filter = gst::ElementFactory::make("capsfilter")
|
||||
.property("caps", &video_caps)
|
||||
.build()?;
|
||||
|
||||
pipeline.add_many([&src, &netsim, &rtpbin, &depay, &dec, &conv, &scale, &filter])?;
|
||||
gst::Element::link_many([&depay, &dec, &conv, &scale, &filter])?;
|
||||
|
||||
match args[1].as_str() {
|
||||
"play" => {
|
||||
let sink = make_element("autovideosink", None)?;
|
||||
let sink = gst::ElementFactory::make("autovideosink").build()?;
|
||||
pipeline.add(&sink)?;
|
||||
filter.link(&sink)?;
|
||||
}
|
||||
"record" => {
|
||||
let enc = make_element("x264enc", None)?;
|
||||
let mux = make_element("matroskamux", None)?;
|
||||
let sink = make_element("filesink", None)?;
|
||||
let enc = gst::ElementFactory::make("x264enc")
|
||||
.property_from_str("tune", "zerolatency")
|
||||
.build()?;
|
||||
let mux = gst::ElementFactory::make("matroskamux").build()?;
|
||||
let sink = gst::ElementFactory::make("filesink")
|
||||
.property("location", "out.mkv")
|
||||
.build()?;
|
||||
|
||||
pipeline.add_many(&[&enc, &mux, &sink])?;
|
||||
gst::Element::link_many(&[&filter, &enc, &mux, &sink])?;
|
||||
sink.set_property("location", &"out.mkv".to_value())?;
|
||||
enc.set_property_from_str("tune", "zerolatency");
|
||||
pipeline.add_many([&enc, &mux, &sink])?;
|
||||
gst::Element::link_many([&filter, &enc, &mux, &sink])?;
|
||||
eprintln!("Recording to out.mkv");
|
||||
}
|
||||
_ => return Err(Error::from(UsageError(args[0].clone()))),
|
||||
|
@ -143,59 +144,49 @@ fn example_main() -> Result<(), Error> {
|
|||
rtpbin.connect("new-storage", false, |values| {
|
||||
let storage = values[1]
|
||||
.get::<gst::Element>()
|
||||
.expect("rtpbin \"new-storage\" signal values[1]")
|
||||
.expect("rtpbin \"new-storage\" signal values[1]: no `Element`");
|
||||
storage
|
||||
.set_property("size-time", &250_000_000u64.to_value())
|
||||
.unwrap();
|
||||
.expect("rtpbin \"new-storage\" signal values[1]");
|
||||
storage.set_property("size-time", 250_000_000u64);
|
||||
|
||||
None
|
||||
})?;
|
||||
});
|
||||
|
||||
rtpbin.connect("request-pt-map", false, |values| {
|
||||
let pt = values[2]
|
||||
.get_some::<u32>()
|
||||
.get::<u32>()
|
||||
.expect("rtpbin \"new-storage\" signal values[2]");
|
||||
match pt {
|
||||
100 => Some(
|
||||
gst::Caps::new_simple(
|
||||
"application/x-rtp",
|
||||
&[
|
||||
("media", &"video"),
|
||||
("clock-rate", &90000i32),
|
||||
("is-fec", &true),
|
||||
],
|
||||
)
|
||||
.to_value(),
|
||||
gst::Caps::builder("application/x-rtp")
|
||||
.field("media", "video")
|
||||
.field("clock-rate", 90000i32)
|
||||
.field("is-fec", true)
|
||||
.build()
|
||||
.to_value(),
|
||||
),
|
||||
96 => Some(
|
||||
gst::Caps::new_simple(
|
||||
"application/x-rtp",
|
||||
&[
|
||||
("media", &"video"),
|
||||
("clock-rate", &90000i32),
|
||||
("encoding-name", &"VP8"),
|
||||
],
|
||||
)
|
||||
.to_value(),
|
||||
gst::Caps::builder("application/x-rtp")
|
||||
.field("media", "video")
|
||||
.field("clock-rate", 90000i32)
|
||||
.field("encoding-name", "VP8")
|
||||
.build()
|
||||
.to_value(),
|
||||
),
|
||||
_ => None,
|
||||
}
|
||||
})?;
|
||||
});
|
||||
|
||||
rtpbin.connect("request-fec-decoder", false, |values| {
|
||||
let rtpbin = values[0]
|
||||
.get::<gst::Element>()
|
||||
.expect("rtpbin \"request-fec-decoder\" signal values[0]")
|
||||
.expect("rtpbin \"request-fec-decoder\" signal values[0]: no `Element`");
|
||||
.expect("rtpbin \"request-fec-decoder\" signal values[0]");
|
||||
let sess_id = values[1]
|
||||
.get_some::<u32>()
|
||||
.get::<u32>()
|
||||
.expect("rtpbin \"request-fec-decoder\" signal values[1]");
|
||||
|
||||
match make_fec_decoder(&rtpbin, sess_id) {
|
||||
Ok(elem) => Some(elem.to_value()),
|
||||
Err(err) => {
|
||||
gst_element_error!(
|
||||
element_error!(
|
||||
rtpbin,
|
||||
gst::LibraryError::Failed,
|
||||
("Failed to make FEC decoder"),
|
||||
|
@ -204,23 +195,22 @@ fn example_main() -> Result<(), Error> {
|
|||
None
|
||||
}
|
||||
}
|
||||
})?;
|
||||
});
|
||||
|
||||
let srcpad = get_static_pad(&netsim, "src")?;
|
||||
let sinkpad = get_request_pad(&rtpbin, "recv_rtp_sink_0")?;
|
||||
let srcpad = static_pad(&netsim, "src")?;
|
||||
let sinkpad = request_pad(&rtpbin, "recv_rtp_sink_0")?;
|
||||
srcpad.link(&sinkpad)?;
|
||||
|
||||
let depay_weak = depay.downgrade();
|
||||
rtpbin.connect_pad_added(move |rtpbin, src_pad| {
|
||||
let depay = match depay_weak.upgrade() {
|
||||
Some(depay) => depay,
|
||||
None => return,
|
||||
let Some(depay) = depay_weak.upgrade() else {
|
||||
return;
|
||||
};
|
||||
|
||||
match connect_rtpbin_srcpad(&src_pad, &depay) {
|
||||
match connect_rtpbin_srcpad(src_pad, &depay) {
|
||||
Ok(_) => (),
|
||||
Err(err) => {
|
||||
gst_element_error!(
|
||||
element_error!(
|
||||
rtpbin,
|
||||
gst::LibraryError::Failed,
|
||||
("Failed to link srcpad"),
|
||||
|
@ -230,26 +220,15 @@ fn example_main() -> Result<(), Error> {
|
|||
}
|
||||
});
|
||||
|
||||
let rtp_caps = gst::Caps::new_simple("application/x-rtp", &[("clock-rate", &90000i32)]);
|
||||
|
||||
let video_caps =
|
||||
gst::Caps::new_simple("video/x-raw", &[("width", &1920i32), ("height", &1080i32)]);
|
||||
|
||||
src.set_property("address", &"127.0.0.1".to_value())?;
|
||||
src.set_property("caps", &rtp_caps.to_value())?;
|
||||
netsim.set_property("drop-probability", &drop_probability.to_value())?;
|
||||
rtpbin.set_property("do-lost", &true.to_value())?;
|
||||
filter.set_property("caps", &video_caps.to_value())?;
|
||||
|
||||
let bus = pipeline
|
||||
.get_bus()
|
||||
.bus()
|
||||
.expect("Pipeline without bus. Shouldn't happen!");
|
||||
|
||||
pipeline
|
||||
.set_state(gst::State::Playing)
|
||||
.expect("Unable to set the pipeline to the `Playing` state");
|
||||
|
||||
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
|
@ -261,24 +240,19 @@ fn example_main() -> Result<(), Error> {
|
|||
|
||||
return Err(ErrorMessage {
|
||||
src: msg
|
||||
.get_src()
|
||||
.map(|s| String::from(s.get_path_string()))
|
||||
.unwrap_or_else(|| String::from("None")),
|
||||
error: err.get_error().to_string(),
|
||||
debug: err.get_debug(),
|
||||
source: err.get_error(),
|
||||
.src()
|
||||
.map(|s| s.path_string())
|
||||
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
|
||||
error: err.error(),
|
||||
debug: err.debug(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
MessageView::StateChanged(s) => {
|
||||
if let Some(element) = msg.get_src() {
|
||||
if element == pipeline && s.get_current() == gst::State::Playing {
|
||||
if let Some(element) = msg.src() {
|
||||
if element == &pipeline && s.current() == gst::State::Playing {
|
||||
eprintln!("PLAYING");
|
||||
gst::debug_bin_to_dot_file(
|
||||
&pipeline,
|
||||
gst::DebugGraphDetails::all(),
|
||||
"client-playing",
|
||||
);
|
||||
pipeline.debug_to_dot_file(gst::DebugGraphDetails::all(), "client-playing");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -296,6 +270,6 @@ fn example_main() -> Result<(), Error> {
|
|||
fn main() {
|
||||
match examples_common::run(example_main) {
|
||||
Ok(r) => r,
|
||||
Err(e) => eprintln!("Error! {}", e),
|
||||
Err(e) => eprintln!("Error! {e}"),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1,6 +1,4 @@
|
|||
extern crate gstreamer as gst;
|
||||
use gst::gst_element_error;
|
||||
use gst::prelude::*;
|
||||
use gst::{element_error, prelude::*};
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
@ -11,69 +9,54 @@ use anyhow::Error;
|
|||
use derive_more::{Display, Error};
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Missing element {}", _0)]
|
||||
struct MissingElement(#[error(not(source))] &'static str);
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "No such pad {} in {}", _0, _1)]
|
||||
#[display(fmt = "No such pad {_0} in {_1}")]
|
||||
struct NoSuchPad(&'static str, String);
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Usage: {} URI FEC_PERCENTAGE", _0)]
|
||||
#[display(fmt = "Usage: {_0} URI FEC_PERCENTAGE")]
|
||||
struct UsageError(#[error(not(source))] String);
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
|
||||
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
|
||||
struct ErrorMessage {
|
||||
src: String,
|
||||
error: String,
|
||||
debug: Option<String>,
|
||||
source: glib::Error,
|
||||
src: glib::GString,
|
||||
error: glib::Error,
|
||||
debug: Option<glib::GString>,
|
||||
}
|
||||
|
||||
fn make_element(
|
||||
factory_name: &'static str,
|
||||
element_name: Option<&str>,
|
||||
) -> Result<gst::Element, Error> {
|
||||
match gst::ElementFactory::make(factory_name, element_name) {
|
||||
Ok(elem) => Ok(elem),
|
||||
Err(_) => Err(Error::from(MissingElement(factory_name))),
|
||||
}
|
||||
}
|
||||
|
||||
fn get_static_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
|
||||
match element.get_static_pad(pad_name) {
|
||||
fn static_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
|
||||
match element.static_pad(pad_name) {
|
||||
Some(pad) => Ok(pad),
|
||||
None => {
|
||||
let element_name = element.get_name();
|
||||
let element_name = element.name();
|
||||
Err(Error::from(NoSuchPad(pad_name, element_name.to_string())))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn get_request_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
|
||||
match element.get_request_pad(pad_name) {
|
||||
fn request_pad(element: &gst::Element, pad_name: &'static str) -> Result<gst::Pad, Error> {
|
||||
match element.request_pad_simple(pad_name) {
|
||||
Some(pad) => Ok(pad),
|
||||
None => {
|
||||
let element_name = element.get_name();
|
||||
let element_name = element.name();
|
||||
Err(Error::from(NoSuchPad(pad_name, element_name.to_string())))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn connect_decodebin_pad(src_pad: &gst::Pad, sink: &gst::Element) -> Result<(), Error> {
|
||||
let sinkpad = get_static_pad(&sink, "sink")?;
|
||||
let sinkpad = static_pad(sink, "sink")?;
|
||||
src_pad.link(&sinkpad)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn make_fec_encoder(fec_percentage: u32) -> Result<gst::Element, Error> {
|
||||
let fecenc = make_element("rtpulpfecenc", None)?;
|
||||
|
||||
fecenc.set_property("pt", &100u32.to_value())?;
|
||||
fecenc.set_property("multipacket", &true.to_value())?;
|
||||
fecenc.set_property("percentage", &fec_percentage.to_value())?;
|
||||
let fecenc = gst::ElementFactory::make("rtpulpfecenc")
|
||||
.property("pt", 100u32)
|
||||
.property("multipacket", true)
|
||||
.property("percentage", fec_percentage)
|
||||
.build()?;
|
||||
|
||||
Ok(fecenc)
|
||||
}
|
||||
|
@ -90,17 +73,33 @@ fn example_main() -> Result<(), Error> {
|
|||
let uri = &args[1];
|
||||
let fec_percentage = args[2].parse::<u32>()?;
|
||||
|
||||
let pipeline = gst::Pipeline::new(None);
|
||||
let src = make_element("uridecodebin", None)?;
|
||||
let conv = make_element("videoconvert", None)?;
|
||||
let q1 = make_element("queue", None)?;
|
||||
let enc = make_element("vp8enc", None)?;
|
||||
let q2 = make_element("queue", None)?;
|
||||
let pay = make_element("rtpvp8pay", None)?;
|
||||
let rtpbin = make_element("rtpbin", None)?;
|
||||
let sink = make_element("udpsink", None)?;
|
||||
let video_caps = gst::Caps::builder("video/x-raw").build();
|
||||
|
||||
pipeline.add_many(&[&src, &conv, &q1, &enc, &q2, &pay, &rtpbin, &sink])?;
|
||||
let pipeline = gst::Pipeline::default();
|
||||
let src = gst::ElementFactory::make("uridecodebin")
|
||||
.property_from_str("pattern", "ball")
|
||||
.property("expose-all-streams", false)
|
||||
.property("caps", video_caps)
|
||||
.property("uri", uri)
|
||||
.build()?;
|
||||
let conv = gst::ElementFactory::make("videoconvert").build()?;
|
||||
let q1 = gst::ElementFactory::make("queue").build()?;
|
||||
let enc = gst::ElementFactory::make("vp8enc")
|
||||
.property("keyframe-max-dist", 30i32)
|
||||
.property("threads", 12i32)
|
||||
.property("cpu-used", -16i32)
|
||||
.property("deadline", 1i64)
|
||||
.property_from_str("error-resilient", "default")
|
||||
.build()?;
|
||||
let q2 = gst::ElementFactory::make("queue").build()?;
|
||||
let pay = gst::ElementFactory::make("rtpvp8pay").build()?;
|
||||
let rtpbin = gst::ElementFactory::make("rtpbin").build()?;
|
||||
let sink = gst::ElementFactory::make("udpsink")
|
||||
.property("host", "127.0.0.1")
|
||||
.property("sync", true)
|
||||
.build()?;
|
||||
|
||||
pipeline.add_many([&src, &conv, &q1, &enc, &q2, &pay, &rtpbin, &sink])?;
|
||||
|
||||
conv.link(&q1)?;
|
||||
q1.link(&enc)?;
|
||||
|
@ -110,13 +109,12 @@ fn example_main() -> Result<(), Error> {
|
|||
rtpbin.connect("request-fec-encoder", false, move |values| {
|
||||
let rtpbin = values[0]
|
||||
.get::<gst::Element>()
|
||||
.expect("rtpbin \"request-fec-encoder\" signal values[0]")
|
||||
.expect("rtpbin \"request-fec-encoder\" signal values[0]: no `Element`");
|
||||
.expect("rtpbin \"request-fec-encoder\" signal values[0]");
|
||||
|
||||
match make_fec_encoder(fec_percentage) {
|
||||
Ok(elem) => Some(elem.to_value()),
|
||||
Err(err) => {
|
||||
gst_element_error!(
|
||||
element_error!(
|
||||
rtpbin,
|
||||
gst::LibraryError::Failed,
|
||||
("Failed to make FEC encoder"),
|
||||
|
@ -125,21 +123,21 @@ fn example_main() -> Result<(), Error> {
|
|||
None
|
||||
}
|
||||
}
|
||||
})?;
|
||||
});
|
||||
|
||||
let srcpad = get_static_pad(&q2, "src")?;
|
||||
let sinkpad = get_request_pad(&rtpbin, "send_rtp_sink_0")?;
|
||||
let srcpad = static_pad(&q2, "src")?;
|
||||
let sinkpad = request_pad(&rtpbin, "send_rtp_sink_0")?;
|
||||
srcpad.link(&sinkpad)?;
|
||||
|
||||
let srcpad = get_static_pad(&rtpbin, "send_rtp_src_0")?;
|
||||
let sinkpad = get_static_pad(&sink, "sink")?;
|
||||
let srcpad = static_pad(&rtpbin, "send_rtp_src_0")?;
|
||||
let sinkpad = static_pad(&sink, "sink")?;
|
||||
srcpad.link(&sinkpad)?;
|
||||
|
||||
src.connect_pad_added(
|
||||
move |decodebin, src_pad| match connect_decodebin_pad(&src_pad, &conv) {
|
||||
move |decodebin, src_pad| match connect_decodebin_pad(src_pad, &conv) {
|
||||
Ok(_) => (),
|
||||
Err(err) => {
|
||||
gst_element_error!(
|
||||
element_error!(
|
||||
decodebin,
|
||||
gst::LibraryError::Failed,
|
||||
("Failed to link decodebin srcpad"),
|
||||
|
@ -149,29 +147,15 @@ fn example_main() -> Result<(), Error> {
|
|||
},
|
||||
);
|
||||
|
||||
let video_caps = gst::Caps::new_simple("video/x-raw", &[]);
|
||||
|
||||
src.set_property_from_str("pattern", "ball");
|
||||
sink.set_property("host", &"127.0.0.1".to_value())?;
|
||||
sink.set_property("sync", &true.to_value())?;
|
||||
enc.set_property("keyframe-max-dist", &30i32.to_value())?;
|
||||
enc.set_property("threads", &12i32.to_value())?;
|
||||
enc.set_property("cpu-used", &(-16i32).to_value())?;
|
||||
enc.set_property("deadline", &1i64.to_value())?;
|
||||
enc.set_property_from_str("error-resilient", "default");
|
||||
src.set_property("expose-all-streams", &false.to_value())?;
|
||||
src.set_property("caps", &video_caps.to_value())?;
|
||||
src.set_property("uri", &uri.to_value())?;
|
||||
|
||||
let bus = pipeline
|
||||
.get_bus()
|
||||
.bus()
|
||||
.expect("Pipeline without bus. Shouldn't happen!");
|
||||
|
||||
pipeline
|
||||
.set_state(gst::State::Playing)
|
||||
.expect("Unable to set the pipeline to the `Playing` state");
|
||||
|
||||
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
|
@ -183,24 +167,19 @@ fn example_main() -> Result<(), Error> {
|
|||
|
||||
return Err(ErrorMessage {
|
||||
src: msg
|
||||
.get_src()
|
||||
.map(|s| String::from(s.get_path_string()))
|
||||
.unwrap_or_else(|| String::from("None")),
|
||||
error: err.get_error().to_string(),
|
||||
debug: err.get_debug(),
|
||||
source: err.get_error(),
|
||||
.src()
|
||||
.map(|s| s.path_string())
|
||||
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
|
||||
error: err.error(),
|
||||
debug: err.debug(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
MessageView::StateChanged(s) => {
|
||||
if let Some(element) = msg.get_src() {
|
||||
if element == pipeline && s.get_current() == gst::State::Playing {
|
||||
if let Some(element) = msg.src() {
|
||||
if element == &pipeline && s.current() == gst::State::Playing {
|
||||
eprintln!("PLAYING");
|
||||
gst::debug_bin_to_dot_file(
|
||||
&pipeline,
|
||||
gst::DebugGraphDetails::all(),
|
||||
"server-playing",
|
||||
);
|
||||
pipeline.debug_to_dot_file(gst::DebugGraphDetails::all(), "server-playing");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -218,6 +197,6 @@ fn example_main() -> Result<(), Error> {
|
|||
fn main() {
|
||||
match examples_common::run(example_main) {
|
||||
Ok(r) => r,
|
||||
Err(e) => eprintln!("Error! {}", e),
|
||||
Err(e) => eprintln!("Error! {e}"),
|
||||
}
|
||||
}
|
||||
|
|
223
examples/src/bin/rtsp-server-custom-auth.rs
Normal file
223
examples/src/bin/rtsp-server-custom-auth.rs
Normal file
|
@ -0,0 +1,223 @@
|
|||
// This example demonstrates how to set up a rtsp server using GStreamer
|
||||
// and extending the default auth module behaviour by subclassing RTSPAuth
|
||||
// For this, the example creates a videotestsrc pipeline manually to be used
|
||||
// by the RTSP server for providing data
|
||||
#![allow(clippy::non_send_fields_in_send_ty)]
|
||||
|
||||
use anyhow::Error;
|
||||
use derive_more::{Display, Error};
|
||||
use gst_rtsp_server::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Could not get mount points")]
|
||||
struct NoMountPoints;
|
||||
|
||||
fn main_loop() -> Result<(), Error> {
|
||||
let main_loop = glib::MainLoop::new(None, false);
|
||||
let server = gst_rtsp_server::RTSPServer::new();
|
||||
|
||||
// We create our custom auth module.
|
||||
// The job of the auth module is to authenticate users and authorize
|
||||
// factories access/construction.
|
||||
let auth = auth::Auth::default();
|
||||
server.set_auth(Some(&auth));
|
||||
|
||||
// Much like HTTP servers, RTSP servers have multiple endpoints that
|
||||
// provide different streams. Here, we ask our server to give
|
||||
// us a reference to his list of endpoints, so we can add our
|
||||
// test endpoint, providing the pipeline from the cli.
|
||||
let mounts = server.mount_points().ok_or(NoMountPoints)?;
|
||||
|
||||
// Next, we create a factory for the endpoint we want to create.
|
||||
// The job of the factory is to create a new pipeline for each client that
|
||||
// connects, or (if configured to do so) to reuse an existing pipeline.
|
||||
let factory = gst_rtsp_server::RTSPMediaFactory::new();
|
||||
// Here we tell the media factory the media we want to serve.
|
||||
// This is done in the launch syntax. When the first client connects,
|
||||
// the factory will use this syntax to create a new pipeline instance.
|
||||
factory.set_launch("( videotestsrc ! vp8enc ! rtpvp8pay name=pay0 )");
|
||||
// This setting specifies whether each connecting client gets the output
|
||||
// of a new instance of the pipeline, or whether all connected clients share
|
||||
// the output of the same pipeline.
|
||||
// If you want to stream a fixed video you have stored on the server to any
|
||||
// client, you would not set this to shared here (since every client wants
|
||||
// to start at the beginning of the video). But if you want to distribute
|
||||
// a live source, you will probably want to set this to shared, to save
|
||||
// computing and memory capacity on the server.
|
||||
factory.set_shared(true);
|
||||
|
||||
// Now we add a new mount-point and tell the RTSP server to serve the content
|
||||
// provided by the factory we configured above, when a client connects to
|
||||
// this specific path.
|
||||
mounts.add_factory("/test", factory);
|
||||
|
||||
// Attach the server to our main context.
|
||||
// A main context is the thing where other stuff is registering itself for its
|
||||
// events (e.g. sockets, GStreamer bus, ...) and the main loop is something that
|
||||
// polls the main context for its events and dispatches them to whoever is
|
||||
// interested in them. In this example, we only do have one, so we can
|
||||
// leave the context parameter empty, it will automatically select
|
||||
// the default one.
|
||||
let id = server.attach(None)?;
|
||||
|
||||
println!(
|
||||
"Stream ready at rtsp://127.0.0.1:{}/test",
|
||||
server.bound_port()
|
||||
);
|
||||
println!("user admin/password can access stream");
|
||||
println!("user demo/demo passes authentication but receives 404");
|
||||
println!("other users do not pass pass authentication and receive 401");
|
||||
|
||||
// Start the mainloop. From this point on, the server will start to serve
|
||||
// our quality content to connecting clients.
|
||||
main_loop.run();
|
||||
|
||||
id.remove();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Our custom auth module
|
||||
mod auth {
|
||||
// In the imp submodule we include the actual implementation
|
||||
mod imp {
|
||||
use gst_rtsp::{RTSPHeaderField, RTSPStatusCode};
|
||||
use gst_rtsp_server::{prelude::*, subclass::prelude::*, RTSPContext};
|
||||
|
||||
// This is the private data of our auth
|
||||
#[derive(Default)]
|
||||
pub struct Auth;
|
||||
|
||||
impl Auth {
|
||||
// Simulate external auth validation and user extraction
|
||||
// authorized users are admin/password and demo/demo
|
||||
fn external_auth(&self, auth: &str) -> Option<String> {
|
||||
if let Ok(decoded) = data_encoding::BASE64.decode(auth.as_bytes()) {
|
||||
if let Ok(decoded) = std::str::from_utf8(&decoded) {
|
||||
let tokens = decoded.split(':').collect::<Vec<_>>();
|
||||
if tokens == vec!["admin", "password"] || tokens == vec!["demo", "demo"] {
|
||||
return Some(tokens[0].into());
|
||||
}
|
||||
}
|
||||
}
|
||||
None
|
||||
}
|
||||
|
||||
// Simulate external role check
|
||||
// admin user can construct and access media factory
|
||||
fn external_access_check(&self, user: &str) -> bool {
|
||||
user == "admin"
|
||||
}
|
||||
}
|
||||
|
||||
// This trait registers our type with the GObject object system and
|
||||
// provides the entry points for creating a new instance and setting
|
||||
// up the class data
|
||||
#[glib::object_subclass]
|
||||
impl ObjectSubclass for Auth {
|
||||
const NAME: &'static str = "RsRTSPAuth";
|
||||
type Type = super::Auth;
|
||||
type ParentType = gst_rtsp_server::RTSPAuth;
|
||||
}
|
||||
|
||||
// Implementation of glib::Object virtual methods
|
||||
impl ObjectImpl for Auth {}
|
||||
|
||||
// Implementation of gst_rtsp_server::RTSPAuth virtual methods
|
||||
impl RTSPAuthImpl for Auth {
|
||||
fn authenticate(&self, ctx: &RTSPContext) -> bool {
|
||||
// authenticate should always be called with a valid context request
|
||||
let req = ctx
|
||||
.request()
|
||||
.expect("Context without request. Should not happen !");
|
||||
|
||||
if let Some(auth_credentials) = req.parse_auth_credentials().first() {
|
||||
if let Some(authorization) = auth_credentials.authorization() {
|
||||
if let Some(user) = self.external_auth(authorization) {
|
||||
// Update context token with authenticated username
|
||||
ctx.set_token(
|
||||
gst_rtsp_server::RTSPToken::builder()
|
||||
.field("user", user)
|
||||
.build(),
|
||||
);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
|
||||
fn check(&self, ctx: &RTSPContext, role: &glib::GString) -> bool {
|
||||
// We only check media factory access
|
||||
if !role.starts_with("auth.check.media.factory") {
|
||||
return true;
|
||||
}
|
||||
|
||||
if ctx.token().is_none() {
|
||||
// If we do not have a context token yet, check if there are any auth credentials in request
|
||||
if !self.authenticate(ctx) {
|
||||
// If there were no credentials, send a "401 Unauthorized" response
|
||||
if let Some(resp) = ctx.response() {
|
||||
resp.init_response(RTSPStatusCode::Unauthorized, ctx.request());
|
||||
resp.add_header(
|
||||
RTSPHeaderField::WwwAuthenticate,
|
||||
"Basic realm=\"CustomRealm\"",
|
||||
);
|
||||
if let Some(client) = ctx.client() {
|
||||
client.send_message(resp, ctx.session());
|
||||
}
|
||||
}
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
if let Some(token) = ctx.token() {
|
||||
// If we already have a user token...
|
||||
if self.external_access_check(&token.string("user").unwrap_or_default()) {
|
||||
// grant access if user may access factory
|
||||
return true;
|
||||
} else {
|
||||
// send a "404 Not Found" response if user may not access factory
|
||||
if let Some(resp) = ctx.response() {
|
||||
resp.init_response(RTSPStatusCode::NotFound, ctx.request());
|
||||
if let Some(client) = ctx.client() {
|
||||
client.send_message(resp, ctx.session());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
false
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// This here defines the public interface of our auth and implements
|
||||
// the corresponding traits so that it behaves like any other RTSPAuth
|
||||
glib::wrapper! {
|
||||
pub struct Auth(ObjectSubclass<imp::Auth>) @extends gst_rtsp_server::RTSPAuth;
|
||||
}
|
||||
|
||||
impl Default for Auth {
|
||||
// Creates a new instance of our auth
|
||||
fn default() -> Self {
|
||||
glib::Object::new()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn example_main() -> Result<(), Error> {
|
||||
gst::init()?;
|
||||
main_loop()
|
||||
}
|
||||
|
||||
fn main() {
|
||||
match examples_common::run(example_main) {
|
||||
Ok(r) => r,
|
||||
Err(e) => eprintln!("Error! {e}"),
|
||||
}
|
||||
}
|
|
@ -4,21 +4,11 @@
|
|||
// send to the server. For this, the launch syntax pipeline, that is passed
|
||||
// to this example's cli is spawned and the client's media is streamed into it.
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
extern crate gstreamer_rtsp as gst_rtsp;
|
||||
extern crate gstreamer_rtsp_server as gst_rtsp_server;
|
||||
extern crate gstreamer_rtsp_server_sys as gst_rtsp_server_sys;
|
||||
|
||||
use std::env;
|
||||
use std::ptr;
|
||||
|
||||
use glib::translate::*;
|
||||
use gst_rtsp::*;
|
||||
use gst_rtsp_server::prelude::*;
|
||||
use gst_rtsp_server::*;
|
||||
|
||||
use anyhow::Error;
|
||||
use derive_more::{Display, Error};
|
||||
use gst_rtsp_server::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
@ -28,7 +18,7 @@ mod examples_common;
|
|||
struct NoMountPoints;
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Usage: {} LAUNCH_LINE", _0)]
|
||||
#[display(fmt = "Usage: {_0} LAUNCH_LINE")]
|
||||
struct UsageError(#[error(not(source))] String);
|
||||
|
||||
fn main_loop() -> Result<(), Error> {
|
||||
|
@ -41,22 +31,24 @@ fn main_loop() -> Result<(), Error> {
|
|||
// Mostly analog to the rtsp-server example, the server is created
|
||||
// and the factory for our test mount is configured.
|
||||
let main_loop = glib::MainLoop::new(None, false);
|
||||
let server = RTSPServer::new();
|
||||
let server = gst_rtsp_server::RTSPServer::new();
|
||||
// Much like HTTP servers, RTSP servers have multiple endpoints that
|
||||
// provide or take different streams. Here, we ask our server to give
|
||||
// us a reference to its list of endpoints, so we can add our
|
||||
// test endpoint.
|
||||
let mounts = server.get_mount_points().ok_or(NoMountPoints)?;
|
||||
let mounts = server.mount_points().ok_or(NoMountPoints)?;
|
||||
// Next, we create a factory for the endpoint we want to create.
|
||||
// The job of the factory is to create a new pipeline for each client that
|
||||
// connects, or (if configured to do so) to reuse an existing pipeline.
|
||||
let factory = RTSPMediaFactory::new();
|
||||
let factory = gst_rtsp_server::RTSPMediaFactory::new();
|
||||
// Here we configure a method of authentication that we want the
|
||||
// server to require from clients.
|
||||
let auth = RTSPAuth::new();
|
||||
let token = RTSPToken::new(&[(*RTSP_TOKEN_MEDIA_FACTORY_ROLE, &"user")]);
|
||||
let basic = RTSPAuth::make_basic("user", "password");
|
||||
// For propery authentication, we want to use encryption. And there's no
|
||||
let auth = gst_rtsp_server::RTSPAuth::new();
|
||||
let token = gst_rtsp_server::RTSPToken::builder()
|
||||
.field(gst_rtsp_server::RTSP_TOKEN_MEDIA_FACTORY_ROLE, "user")
|
||||
.build();
|
||||
let basic = gst_rtsp_server::RTSPAuth::make_basic("user", "password");
|
||||
// For proper authentication, we want to use encryption. And there's no
|
||||
// encryption without a certificate!
|
||||
let cert = gio::TlsCertificate::from_pem(
|
||||
"-----BEGIN CERTIFICATE-----\
|
||||
|
@ -84,22 +76,14 @@ fn main_loop() -> Result<(), Error> {
|
|||
W535W8UBbEg=-----END PRIVATE KEY-----",
|
||||
)?;
|
||||
|
||||
// Bindable versions were added in b1f515178a363df0322d7adbd5754e1f6e2083c9
|
||||
// This declares that the user "user" (once authenticated) has a role that
|
||||
// allows them to access and construct media factories.
|
||||
unsafe {
|
||||
gst_rtsp_server_sys::gst_rtsp_media_factory_add_role(
|
||||
factory.to_glib_none().0,
|
||||
"user".to_glib_none().0,
|
||||
RTSP_PERM_MEDIA_FACTORY_ACCESS.to_glib_none().0,
|
||||
<bool as StaticType>::static_type().to_glib() as *const u8,
|
||||
true.to_glib() as *const u8,
|
||||
RTSP_PERM_MEDIA_FACTORY_CONSTRUCT.as_ptr() as *const u8,
|
||||
<bool as StaticType>::static_type().to_glib() as *const u8,
|
||||
true.to_glib() as *const u8,
|
||||
ptr::null_mut::<u8>(),
|
||||
);
|
||||
}
|
||||
factory.add_role_from_structure(
|
||||
&gst::Structure::builder("user")
|
||||
.field(gst_rtsp_server::RTSP_PERM_MEDIA_FACTORY_ACCESS, true)
|
||||
.field(gst_rtsp_server::RTSP_PERM_MEDIA_FACTORY_CONSTRUCT, true)
|
||||
.build(),
|
||||
);
|
||||
|
||||
auth.set_tls_certificate(Some(&cert));
|
||||
auth.add_basic(basic.as_str(), &token);
|
||||
|
@ -110,7 +94,7 @@ fn main_loop() -> Result<(), Error> {
|
|||
factory.set_launch(args[1].as_str());
|
||||
// Tell the RTSP server that we want to work in RECORD mode (clients send)
|
||||
// data to us.
|
||||
factory.set_transport_mode(RTSPTransportMode::RECORD);
|
||||
factory.set_transport_mode(gst_rtsp_server::RTSPTransportMode::RECORD);
|
||||
// The RTSP protocol allows a couple of different profiles for the actually
|
||||
// used protocol of data-transmission. With this, we can limit the selection
|
||||
// from which connecting clients have to choose.
|
||||
|
@ -118,12 +102,12 @@ fn main_loop() -> Result<(), Error> {
|
|||
// The F in the end is for feedback (an extension that allows more bidirectional
|
||||
// feedback between sender and receiver). AV is just Audio/Video, P is Profile :)
|
||||
// The default, old RTP profile is AVP
|
||||
factory.set_profiles(RTSPProfile::SAVP | RTSPProfile::SAVPF);
|
||||
factory.set_profiles(gst_rtsp::RTSPProfile::SAVP | gst_rtsp::RTSPProfile::SAVPF);
|
||||
|
||||
// Now we add a new mount-point and tell the RTSP server to use the factory
|
||||
// we configured beforehand. This factory will take on the job of creating
|
||||
// a pipeline, which will take on the incoming data of connected clients.
|
||||
mounts.add_factory("/test", &factory);
|
||||
mounts.add_factory("/test", factory);
|
||||
|
||||
// Attach the server to our main context.
|
||||
// A main context is the thing where other stuff is registering itself for its
|
||||
|
@ -132,18 +116,18 @@ fn main_loop() -> Result<(), Error> {
|
|||
// interested in them. In this example, we only do have one, so we can
|
||||
// leave the context parameter empty, it will automatically select
|
||||
// the default one.
|
||||
let id = server.attach(None);
|
||||
let id = server.attach(None)?;
|
||||
|
||||
println!(
|
||||
"Stream ready at rtsps://127.0.0.1:{}/test",
|
||||
server.get_bound_port()
|
||||
server.bound_port()
|
||||
);
|
||||
|
||||
// Start the mainloop. From this point on, the server will start to take
|
||||
// incoming connections from clients.
|
||||
main_loop.run();
|
||||
|
||||
glib::source_remove(id);
|
||||
id.remove();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -156,6 +140,6 @@ fn example_main() -> Result<(), Error> {
|
|||
fn main() {
|
||||
match examples_common::run(example_main) {
|
||||
Ok(r) => r,
|
||||
Err(e) => eprintln!("Error! {}", e),
|
||||
Err(e) => eprintln!("Error! {e}"),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -6,21 +6,11 @@
|
|||
//
|
||||
// It also comes with a custom RTSP server/client subclass for hooking into
|
||||
// the client machinery and printing some status.
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
extern crate gstreamer_rtsp as gst_rtsp;
|
||||
extern crate gstreamer_rtsp_server as gst_rtsp_server;
|
||||
extern crate gstreamer_sdp as gst_sdp;
|
||||
|
||||
use gst_rtsp_server::prelude::*;
|
||||
|
||||
use glib::glib_object_impl;
|
||||
use glib::glib_object_subclass;
|
||||
use glib::glib_object_wrapper;
|
||||
use glib::glib_wrapper;
|
||||
#![allow(clippy::non_send_fields_in_send_ty)]
|
||||
|
||||
use anyhow::Error;
|
||||
use derive_more::{Display, Error};
|
||||
use gst_rtsp_server::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
@ -29,23 +19,23 @@ mod examples_common;
|
|||
#[display(fmt = "Could not get mount points")]
|
||||
struct NoMountPoints;
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Usage: {} LAUNCH_LINE", _0)]
|
||||
struct UsageError(#[error(not(source))] String);
|
||||
|
||||
fn main_loop() -> Result<(), Error> {
|
||||
let main_loop = glib::MainLoop::new(None, false);
|
||||
let server = server::Server::new();
|
||||
let server = server::Server::default();
|
||||
|
||||
let mounts = mount_points::MountPoints::default();
|
||||
server.set_mount_points(Some(&mounts));
|
||||
|
||||
// Much like HTTP servers, RTSP servers have multiple endpoints that
|
||||
// provide different streams. Here, we ask our server to give
|
||||
// us a reference to his list of endpoints, so we can add our
|
||||
// test endpoint, providing the pipeline from the cli.
|
||||
let mounts = server.get_mount_points().ok_or(NoMountPoints)?;
|
||||
let mounts = server.mount_points().ok_or(NoMountPoints)?;
|
||||
|
||||
// Next, we create our custom factory for the endpoint we want to create.
|
||||
// The job of the factory is to create a new pipeline for each client that
|
||||
// connects, or (if configured to do so) to reuse an existing pipeline.
|
||||
let factory = media_factory::Factory::new();
|
||||
let factory = media_factory::Factory::default();
|
||||
// This setting specifies whether each connecting client gets the output
|
||||
// of a new instance of the pipeline, or whether all connected clients share
|
||||
// the output of the same pipeline.
|
||||
|
@ -59,7 +49,7 @@ fn main_loop() -> Result<(), Error> {
|
|||
// Now we add a new mount-point and tell the RTSP server to serve the content
|
||||
// provided by the factory we configured above, when a client connects to
|
||||
// this specific path.
|
||||
mounts.add_factory("/test", &factory);
|
||||
mounts.add_factory("/test", factory);
|
||||
|
||||
// Attach the server to our main context.
|
||||
// A main context is the thing where other stuff is registering itself for its
|
||||
|
@ -68,71 +58,52 @@ fn main_loop() -> Result<(), Error> {
|
|||
// interested in them. In this example, we only do have one, so we can
|
||||
// leave the context parameter empty, it will automatically select
|
||||
// the default one.
|
||||
let id = server.attach(None);
|
||||
let id = server.attach(None)?;
|
||||
|
||||
println!(
|
||||
"Stream ready at rtsp://127.0.0.1:{}/test",
|
||||
server.get_bound_port()
|
||||
server.bound_port()
|
||||
);
|
||||
|
||||
// Start the mainloop. From this point on, the server will start to serve
|
||||
// our quality content to connecting clients.
|
||||
main_loop.run();
|
||||
|
||||
glib::source_remove(id);
|
||||
id.remove();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// Our custom media factory that creates a media input manually
|
||||
mod media_factory {
|
||||
use super::*;
|
||||
|
||||
use glib::subclass;
|
||||
use glib::subclass::prelude::*;
|
||||
use glib::translate::*;
|
||||
|
||||
extern crate gstreamer_rtsp_server as gst_rtsp_server;
|
||||
use gst_rtsp_server::subclass::prelude::*;
|
||||
|
||||
use super::*;
|
||||
|
||||
// In the imp submodule we include the actual implementation
|
||||
mod imp {
|
||||
use super::*;
|
||||
|
||||
// This is the private data of our factory
|
||||
#[derive(Default)]
|
||||
pub struct Factory {}
|
||||
|
||||
// This trait registers our type with the GObject object system and
|
||||
// provides the entry points for creating a new instance and setting
|
||||
// up the class data
|
||||
#[glib::object_subclass]
|
||||
impl ObjectSubclass for Factory {
|
||||
const NAME: &'static str = "RsRTSPMediaFactory";
|
||||
type Type = super::Factory;
|
||||
type ParentType = gst_rtsp_server::RTSPMediaFactory;
|
||||
type Instance = gst::subclass::ElementInstanceStruct<Self>;
|
||||
type Class = subclass::simple::ClassStruct<Self>;
|
||||
|
||||
// This macro provides some boilerplate
|
||||
glib_object_subclass!();
|
||||
|
||||
// Called when a new instance is to be created. We need to return an instance
|
||||
// of our struct here.
|
||||
fn new() -> Self {
|
||||
Self {}
|
||||
}
|
||||
}
|
||||
|
||||
// Implementation of glib::Object virtual methods
|
||||
impl ObjectImpl for Factory {
|
||||
// This macro provides some boilerplate.
|
||||
glib_object_impl!();
|
||||
|
||||
fn constructed(&self, obj: &glib::Object) {
|
||||
self.parent_constructed(obj);
|
||||
|
||||
let factory = obj
|
||||
.downcast_ref::<gst_rtsp_server::RTSPMediaFactory>()
|
||||
.unwrap();
|
||||
fn constructed(&self) {
|
||||
self.parent_constructed();
|
||||
|
||||
let factory = self.obj();
|
||||
// All media created by this factory are our custom media type. This would
|
||||
// not require a media factory subclass and can also be called on the normal
|
||||
// RTSPMediaFactory.
|
||||
|
@ -142,27 +113,28 @@ mod media_factory {
|
|||
|
||||
// Implementation of gst_rtsp_server::RTSPMediaFactory virtual methods
|
||||
impl RTSPMediaFactoryImpl for Factory {
|
||||
fn create_element(
|
||||
&self,
|
||||
_factory: &gst_rtsp_server::RTSPMediaFactory,
|
||||
_url: &gst_rtsp::RTSPUrl,
|
||||
) -> Option<gst::Element> {
|
||||
fn create_element(&self, _url: &gst_rtsp::RTSPUrl) -> Option<gst::Element> {
|
||||
// Create a simple VP8 videotestsrc input
|
||||
let bin = gst::Bin::new(None);
|
||||
let src = gst::ElementFactory::make("videotestsrc", None).unwrap();
|
||||
let enc = gst::ElementFactory::make("vp8enc", None).unwrap();
|
||||
let bin = gst::Bin::default();
|
||||
let src = gst::ElementFactory::make("videotestsrc")
|
||||
// Configure the videotestsrc live
|
||||
.property("is-live", true)
|
||||
.build()
|
||||
.unwrap();
|
||||
let enc = gst::ElementFactory::make("vp8enc")
|
||||
// Produce encoded data as fast as possible
|
||||
.property("deadline", 1i64)
|
||||
.build()
|
||||
.unwrap();
|
||||
|
||||
// The names of the payloaders must be payX
|
||||
let pay = gst::ElementFactory::make("rtpvp8pay", Some("pay0")).unwrap();
|
||||
let pay = gst::ElementFactory::make("rtpvp8pay")
|
||||
.name("pay0")
|
||||
.build()
|
||||
.unwrap();
|
||||
|
||||
// Configure the videotestsrc live
|
||||
src.set_property("is-live", &true).unwrap();
|
||||
|
||||
// Produce encoded data as fast as possible
|
||||
enc.set_property("deadline", &1i64).unwrap();
|
||||
|
||||
bin.add_many(&[&src, &enc, &pay]).unwrap();
|
||||
gst::Element::link_many(&[&src, &enc, &pay]).unwrap();
|
||||
bin.add_many([&src, &enc, &pay]).unwrap();
|
||||
gst::Element::link_many([&src, &enc, &pay]).unwrap();
|
||||
|
||||
Some(bin.upcast())
|
||||
}
|
||||
|
@ -171,44 +143,20 @@ mod media_factory {
|
|||
|
||||
// This here defines the public interface of our factory and implements
|
||||
// the corresponding traits so that it behaves like any other RTSPMediaFactory
|
||||
glib_wrapper! {
|
||||
pub struct Factory(
|
||||
Object<
|
||||
gst::subclass::ElementInstanceStruct<imp::Factory>,
|
||||
subclass::simple::ClassStruct<imp::Factory>,
|
||||
FactoryClass
|
||||
>
|
||||
) @extends gst_rtsp_server::RTSPMediaFactory;
|
||||
|
||||
match fn {
|
||||
get_type => || imp::Factory::get_type().to_glib(),
|
||||
}
|
||||
glib::wrapper! {
|
||||
pub struct Factory(ObjectSubclass<imp::Factory>) @extends gst_rtsp_server::RTSPMediaFactory;
|
||||
}
|
||||
|
||||
// Factories must be Send+Sync, and ours is
|
||||
unsafe impl Send for Factory {}
|
||||
unsafe impl Sync for Factory {}
|
||||
|
||||
impl Factory {
|
||||
impl Default for Factory {
|
||||
// Creates a new instance of our factory
|
||||
pub fn new() -> Factory {
|
||||
glib::Object::new(Self::static_type(), &[])
|
||||
.expect("Failed to create factory")
|
||||
.downcast()
|
||||
.expect("Created factory is of wrong type")
|
||||
fn default() -> Factory {
|
||||
glib::Object::new()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Our custom media subclass that adds a custom attribute to the SDP returned by DESCRIBE
|
||||
mod media {
|
||||
use super::*;
|
||||
|
||||
use glib::subclass;
|
||||
use glib::subclass::prelude::*;
|
||||
use glib::translate::*;
|
||||
|
||||
extern crate gstreamer_rtsp_server as gst_rtsp_server;
|
||||
use gst_rtsp_server::subclass::prelude::*;
|
||||
|
||||
// In the imp submodule we include the actual implementation
|
||||
|
@ -216,42 +164,30 @@ mod media {
|
|||
use super::*;
|
||||
|
||||
// This is the private data of our media
|
||||
#[derive(Default)]
|
||||
pub struct Media {}
|
||||
|
||||
// This trait registers our type with the GObject object system and
|
||||
// provides the entry points for creating a new instance and setting
|
||||
// up the class data
|
||||
#[glib::object_subclass]
|
||||
impl ObjectSubclass for Media {
|
||||
const NAME: &'static str = "RsRTSPMedia";
|
||||
type Type = super::Media;
|
||||
type ParentType = gst_rtsp_server::RTSPMedia;
|
||||
type Instance = gst::subclass::ElementInstanceStruct<Self>;
|
||||
type Class = subclass::simple::ClassStruct<Self>;
|
||||
|
||||
// This macro provides some boilerplate
|
||||
glib_object_subclass!();
|
||||
|
||||
// Called when a new instance is to be created. We need to return an instance
|
||||
// of our struct here.
|
||||
fn new() -> Self {
|
||||
Self {}
|
||||
}
|
||||
}
|
||||
|
||||
// Implementation of glib::Object virtual methods
|
||||
impl ObjectImpl for Media {
|
||||
// This macro provides some boilerplate.
|
||||
glib_object_impl!();
|
||||
}
|
||||
impl ObjectImpl for Media {}
|
||||
|
||||
// Implementation of gst_rtsp_server::RTSPMedia virtual methods
|
||||
impl RTSPMediaImpl for Media {
|
||||
fn setup_sdp(
|
||||
&self,
|
||||
media: &gst_rtsp_server::RTSPMedia,
|
||||
sdp: &mut gst_sdp::SDPMessageRef,
|
||||
info: &gst_rtsp_server::subclass::SDPInfo,
|
||||
) -> Result<(), gst::LoggableError> {
|
||||
self.parent_setup_sdp(media, sdp, info)?;
|
||||
self.parent_setup_sdp(sdp, info)?;
|
||||
|
||||
sdp.add_attribute("my-custom-attribute", Some("has-a-value"));
|
||||
|
||||
|
@ -262,137 +198,77 @@ mod media {
|
|||
|
||||
// This here defines the public interface of our factory and implements
|
||||
// the corresponding traits so that it behaves like any other RTSPMedia
|
||||
glib_wrapper! {
|
||||
pub struct Media(
|
||||
Object<
|
||||
gst::subclass::ElementInstanceStruct<imp::Media>,
|
||||
subclass::simple::ClassStruct<imp::Media>,
|
||||
MediaClass
|
||||
>
|
||||
) @extends gst_rtsp_server::RTSPMedia;
|
||||
|
||||
match fn {
|
||||
get_type => || imp::Media::get_type().to_glib(),
|
||||
}
|
||||
glib::wrapper! {
|
||||
pub struct Media(ObjectSubclass<imp::Media>) @extends gst_rtsp_server::RTSPMedia;
|
||||
}
|
||||
|
||||
// Medias must be Send+Sync, and ours is
|
||||
unsafe impl Send for Media {}
|
||||
unsafe impl Sync for Media {}
|
||||
}
|
||||
|
||||
// Our custom RTSP server subclass that reports when clients are connecting and uses
|
||||
// our custom RTSP client subclass for each client
|
||||
mod server {
|
||||
use super::*;
|
||||
|
||||
use glib::subclass;
|
||||
use glib::subclass::prelude::*;
|
||||
use glib::translate::*;
|
||||
|
||||
extern crate gstreamer_rtsp_server as gst_rtsp_server;
|
||||
use gst_rtsp_server::subclass::prelude::*;
|
||||
|
||||
use super::*;
|
||||
|
||||
// In the imp submodule we include the actual implementation
|
||||
mod imp {
|
||||
use super::*;
|
||||
|
||||
// This is the private data of our server
|
||||
#[derive(Default)]
|
||||
pub struct Server {}
|
||||
|
||||
// This trait registers our type with the GObject object system and
|
||||
// provides the entry points for creating a new instance and setting
|
||||
// up the class data
|
||||
#[glib::object_subclass]
|
||||
impl ObjectSubclass for Server {
|
||||
const NAME: &'static str = "RsRTSPServer";
|
||||
type Type = super::Server;
|
||||
type ParentType = gst_rtsp_server::RTSPServer;
|
||||
type Instance = gst::subclass::ElementInstanceStruct<Self>;
|
||||
type Class = subclass::simple::ClassStruct<Self>;
|
||||
|
||||
// This macro provides some boilerplate
|
||||
glib_object_subclass!();
|
||||
|
||||
// Called when a new instance is to be created. We need to return an instance
|
||||
// of our struct here.
|
||||
fn new() -> Self {
|
||||
Self {}
|
||||
}
|
||||
}
|
||||
|
||||
// Implementation of glib::Object virtual methods
|
||||
impl ObjectImpl for Server {
|
||||
// This macro provides some boilerplate.
|
||||
glib_object_impl!();
|
||||
}
|
||||
impl ObjectImpl for Server {}
|
||||
|
||||
// Implementation of gst_rtsp_server::RTSPServer virtual methods
|
||||
impl RTSPServerImpl for Server {
|
||||
fn create_client(
|
||||
&self,
|
||||
server: &gst_rtsp_server::RTSPServer,
|
||||
) -> Option<gst_rtsp_server::RTSPClient> {
|
||||
let client = super::client::Client::new();
|
||||
fn create_client(&self) -> Option<gst_rtsp_server::RTSPClient> {
|
||||
let server = self.obj();
|
||||
let client = super::client::Client::default();
|
||||
|
||||
// Duplicated from the default implementation
|
||||
client.set_session_pool(server.get_session_pool().as_ref());
|
||||
client.set_mount_points(server.get_mount_points().as_ref());
|
||||
client.set_auth(server.get_auth().as_ref());
|
||||
client.set_thread_pool(server.get_thread_pool().as_ref());
|
||||
client.set_session_pool(server.session_pool().as_ref());
|
||||
client.set_mount_points(server.mount_points().as_ref());
|
||||
client.set_auth(server.auth().as_ref());
|
||||
client.set_thread_pool(server.thread_pool().as_ref());
|
||||
|
||||
Some(client.upcast())
|
||||
}
|
||||
|
||||
fn client_connected(
|
||||
&self,
|
||||
server: &gst_rtsp_server::RTSPServer,
|
||||
client: &gst_rtsp_server::RTSPClient,
|
||||
) {
|
||||
self.parent_client_connected(server, client);
|
||||
println!("Client {:?} connected", client);
|
||||
fn client_connected(&self, client: &gst_rtsp_server::RTSPClient) {
|
||||
self.parent_client_connected(client);
|
||||
println!("Client {client:?} connected");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// This here defines the public interface of our factory and implements
|
||||
// the corresponding traits so that it behaves like any other RTSPServer
|
||||
glib_wrapper! {
|
||||
pub struct Server(
|
||||
Object<
|
||||
gst::subclass::ElementInstanceStruct<imp::Server>,
|
||||
subclass::simple::ClassStruct<imp::Server>,
|
||||
ServerClass
|
||||
>
|
||||
) @extends gst_rtsp_server::RTSPServer;
|
||||
|
||||
match fn {
|
||||
get_type => || imp::Server::get_type().to_glib(),
|
||||
}
|
||||
glib::wrapper! {
|
||||
pub struct Server(ObjectSubclass<imp::Server>) @extends gst_rtsp_server::RTSPServer;
|
||||
}
|
||||
|
||||
// Servers must be Send+Sync, and ours is
|
||||
unsafe impl Send for Server {}
|
||||
unsafe impl Sync for Server {}
|
||||
|
||||
impl Server {
|
||||
impl Default for Server {
|
||||
// Creates a new instance of our factory
|
||||
pub fn new() -> Server {
|
||||
glib::Object::new(Self::static_type(), &[])
|
||||
.expect("Failed to create server")
|
||||
.downcast()
|
||||
.expect("Created server is of wrong type")
|
||||
fn default() -> Server {
|
||||
glib::Object::new()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Our custom RTSP client subclass.
|
||||
mod client {
|
||||
use super::*;
|
||||
|
||||
use glib::subclass;
|
||||
use glib::subclass::prelude::*;
|
||||
use glib::translate::*;
|
||||
|
||||
extern crate gstreamer_rtsp_server as gst_rtsp_server;
|
||||
use gst_rtsp_server::subclass::prelude::*;
|
||||
|
||||
// In the imp submodule we include the actual implementation
|
||||
|
@ -400,69 +276,92 @@ mod client {
|
|||
use super::*;
|
||||
|
||||
// This is the private data of our server
|
||||
#[derive(Default)]
|
||||
pub struct Client {}
|
||||
|
||||
// This trait registers our type with the GObject object system and
|
||||
// provides the entry points for creating a new instance and setting
|
||||
// up the class data
|
||||
#[glib::object_subclass]
|
||||
impl ObjectSubclass for Client {
|
||||
const NAME: &'static str = "RsRTSPClient";
|
||||
type Type = super::Client;
|
||||
type ParentType = gst_rtsp_server::RTSPClient;
|
||||
type Instance = gst::subclass::ElementInstanceStruct<Self>;
|
||||
type Class = subclass::simple::ClassStruct<Self>;
|
||||
|
||||
// This macro provides some boilerplate
|
||||
glib_object_subclass!();
|
||||
|
||||
// Called when a new instance is to be created. We need to return an instance
|
||||
// of our struct here.
|
||||
fn new() -> Self {
|
||||
Self {}
|
||||
}
|
||||
}
|
||||
|
||||
// Implementation of glib::Object virtual methods
|
||||
impl ObjectImpl for Client {
|
||||
// This macro provides some boilerplate.
|
||||
glib_object_impl!();
|
||||
}
|
||||
impl ObjectImpl for Client {}
|
||||
|
||||
// Implementation of gst_rtsp_server::RTSPClient virtual methods
|
||||
impl RTSPClientImpl for Client {
|
||||
fn closed(&self, client: &gst_rtsp_server::RTSPClient) {
|
||||
self.parent_closed(client);
|
||||
println!("Client {:?} closed", client);
|
||||
fn closed(&self) {
|
||||
let client = self.obj();
|
||||
self.parent_closed();
|
||||
println!("Client {client:?} closed");
|
||||
}
|
||||
|
||||
fn describe_request(&self, ctx: &gst_rtsp_server::RTSPContext) {
|
||||
self.parent_describe_request(ctx);
|
||||
let request_uri = ctx.uri().unwrap().request_uri();
|
||||
println!("Describe request for uri: {request_uri:?}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// This here defines the public interface of our factory and implements
|
||||
// the corresponding traits so that it behaves like any other RTSPClient
|
||||
glib_wrapper! {
|
||||
pub struct Client(
|
||||
Object<
|
||||
gst::subclass::ElementInstanceStruct<imp::Client>,
|
||||
subclass::simple::ClassStruct<imp::Client>,
|
||||
ClientClass
|
||||
>
|
||||
) @extends gst_rtsp_server::RTSPClient;
|
||||
glib::wrapper! {
|
||||
pub struct Client(ObjectSubclass<imp::Client>) @extends gst_rtsp_server::RTSPClient;
|
||||
}
|
||||
|
||||
match fn {
|
||||
get_type => || imp::Client::get_type().to_glib(),
|
||||
impl Default for Client {
|
||||
// Creates a new instance of our factory
|
||||
fn default() -> Client {
|
||||
glib::Object::new()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
mod mount_points {
|
||||
use gst_rtsp_server::subclass::prelude::*;
|
||||
|
||||
mod imp {
|
||||
use super::*;
|
||||
|
||||
// This is the private data of our mount points
|
||||
#[derive(Default)]
|
||||
pub struct MountPoints {}
|
||||
|
||||
// This trait registers our type with the GObject object system and
|
||||
// provides the entry points for creating a new instance and setting
|
||||
// up the class data
|
||||
#[glib::object_subclass]
|
||||
impl ObjectSubclass for MountPoints {
|
||||
const NAME: &'static str = "RsRTSPMountPoints";
|
||||
type Type = super::MountPoints;
|
||||
type ParentType = gst_rtsp_server::RTSPMountPoints;
|
||||
}
|
||||
|
||||
// Implementation of glib::Object virtual methods
|
||||
impl ObjectImpl for MountPoints {}
|
||||
|
||||
// Implementation of gst_rtsp_server::RTSPClient virtual methods
|
||||
impl RTSPMountPointsImpl for MountPoints {
|
||||
fn make_path(&self, url: &gst_rtsp::RTSPUrl) -> Option<glib::GString> {
|
||||
println!("Make path called for {url:?} ");
|
||||
self.parent_make_path(url)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Clients must be Send+Sync, and ours is
|
||||
unsafe impl Send for Client {}
|
||||
unsafe impl Sync for Client {}
|
||||
glib::wrapper! {
|
||||
pub struct MountPoints(ObjectSubclass<imp::MountPoints>) @extends gst_rtsp_server::RTSPMountPoints;
|
||||
}
|
||||
|
||||
impl Client {
|
||||
impl Default for MountPoints {
|
||||
// Creates a new instance of our factory
|
||||
pub fn new() -> Client {
|
||||
glib::Object::new(Self::static_type(), &[])
|
||||
.expect("Failed to create client")
|
||||
.downcast()
|
||||
.expect("Created client is of wrong type")
|
||||
fn default() -> Self {
|
||||
glib::Object::new()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -475,6 +374,6 @@ fn example_main() -> Result<(), Error> {
|
|||
fn main() {
|
||||
match examples_common::run(example_main) {
|
||||
Ok(r) => r,
|
||||
Err(e) => eprintln!("Error! {}", e),
|
||||
Err(e) => eprintln!("Error! {e}"),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -5,13 +5,9 @@
|
|||
|
||||
use std::env;
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
|
||||
extern crate gstreamer_rtsp_server as gst_rtsp_server;
|
||||
use gst_rtsp_server::prelude::*;
|
||||
|
||||
use anyhow::Error;
|
||||
use derive_more::{Display, Error};
|
||||
use gst_rtsp_server::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
@ -21,7 +17,7 @@ mod examples_common;
|
|||
struct NoMountPoints;
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Usage: {} LAUNCH_LINE", _0)]
|
||||
#[display(fmt = "Usage: {_0} LAUNCH_LINE")]
|
||||
struct UsageError(#[error(not(source))] String);
|
||||
|
||||
fn main_loop() -> Result<(), Error> {
|
||||
|
@ -37,7 +33,7 @@ fn main_loop() -> Result<(), Error> {
|
|||
// provide different streams. Here, we ask our server to give
|
||||
// us a reference to his list of endpoints, so we can add our
|
||||
// test endpoint, providing the pipeline from the cli.
|
||||
let mounts = server.get_mount_points().ok_or(NoMountPoints)?;
|
||||
let mounts = server.mount_points().ok_or(NoMountPoints)?;
|
||||
|
||||
// Next, we create a factory for the endpoint we want to create.
|
||||
// The job of the factory is to create a new pipeline for each client that
|
||||
|
@ -60,7 +56,7 @@ fn main_loop() -> Result<(), Error> {
|
|||
// Now we add a new mount-point and tell the RTSP server to serve the content
|
||||
// provided by the factory we configured above, when a client connects to
|
||||
// this specific path.
|
||||
mounts.add_factory("/test", &factory);
|
||||
mounts.add_factory("/test", factory);
|
||||
|
||||
// Attach the server to our main context.
|
||||
// A main context is the thing where other stuff is registering itself for its
|
||||
|
@ -69,18 +65,18 @@ fn main_loop() -> Result<(), Error> {
|
|||
// interested in them. In this example, we only do have one, so we can
|
||||
// leave the context parameter empty, it will automatically select
|
||||
// the default one.
|
||||
let id = server.attach(None);
|
||||
let id = server.attach(None)?;
|
||||
|
||||
println!(
|
||||
"Stream ready at rtsp://127.0.0.1:{}/test",
|
||||
server.get_bound_port()
|
||||
server.bound_port()
|
||||
);
|
||||
|
||||
// Start the mainloop. From this point on, the server will start to serve
|
||||
// our quality content to connecting clients.
|
||||
main_loop.run();
|
||||
|
||||
glib::source_remove(id);
|
||||
id.remove();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -93,6 +89,6 @@ fn example_main() -> Result<(), Error> {
|
|||
fn main() {
|
||||
match examples_common::run(example_main) {
|
||||
Ok(r) => r,
|
||||
Err(e) => eprintln!("Error! {}", e),
|
||||
Err(e) => eprintln!("Error! {e}"),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -5,41 +5,19 @@
|
|||
//
|
||||
// Our filter can only handle F32 mono and acts as a FIR filter. The filter impulse response /
|
||||
// coefficients are provided via Rust API on the filter as a Vec<f32>.
|
||||
|
||||
use glib::glib_object_impl;
|
||||
use glib::glib_object_subclass;
|
||||
use glib::glib_object_wrapper;
|
||||
use glib::glib_wrapper;
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::gst_element_error;
|
||||
use gst::gst_info;
|
||||
use gst::gst_trace;
|
||||
use gst::prelude::*;
|
||||
#![allow(clippy::non_send_fields_in_send_ty)]
|
||||
|
||||
use anyhow::Error;
|
||||
use derive_more::{Display, Error};
|
||||
use gst::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
// Our custom FIR filter element is defined in this module
|
||||
mod fir_filter {
|
||||
use super::*;
|
||||
|
||||
use glib::subclass;
|
||||
use glib::subclass::prelude::*;
|
||||
use glib::translate::*;
|
||||
|
||||
use gst::subclass::prelude::*;
|
||||
|
||||
extern crate gstreamer_base as gst_base;
|
||||
use gst_base::subclass::prelude::*;
|
||||
|
||||
extern crate gstreamer_audio as gst_audio;
|
||||
|
||||
use byte_slice_cast::*;
|
||||
|
||||
use gst_base::subclass::prelude::*;
|
||||
use once_cell::sync::Lazy;
|
||||
|
||||
// The debug category we use below for our filter
|
||||
|
@ -53,12 +31,12 @@ mod fir_filter {
|
|||
|
||||
// In the imp submodule we include the actual implementation
|
||||
mod imp {
|
||||
use std::{collections::VecDeque, sync::Mutex};
|
||||
|
||||
use super::*;
|
||||
use std::collections::VecDeque;
|
||||
use std::i32;
|
||||
use std::sync::Mutex;
|
||||
|
||||
// This is the private data of our filter
|
||||
#[derive(Default)]
|
||||
pub struct FirFilter {
|
||||
pub(super) coeffs: Mutex<Vec<f32>>,
|
||||
history: Mutex<VecDeque<f32>>,
|
||||
|
@ -67,128 +45,104 @@ mod fir_filter {
|
|||
// This trait registers our type with the GObject object system and
|
||||
// provides the entry points for creating a new instance and setting
|
||||
// up the class data
|
||||
#[glib::object_subclass]
|
||||
impl ObjectSubclass for FirFilter {
|
||||
const NAME: &'static str = "RsFirFilter";
|
||||
type Type = super::FirFilter;
|
||||
type ParentType = gst_base::BaseTransform;
|
||||
type Instance = gst::subclass::ElementInstanceStruct<Self>;
|
||||
type Class = subclass::simple::ClassStruct<Self>;
|
||||
|
||||
// This macro provides some boilerplate
|
||||
glib_object_subclass!();
|
||||
|
||||
// Called when a new instance is to be created. We need to return an instance
|
||||
// of our struct here.
|
||||
fn new() -> Self {
|
||||
Self {
|
||||
coeffs: Mutex::new(Vec::new()),
|
||||
history: Mutex::new(VecDeque::new()),
|
||||
}
|
||||
}
|
||||
|
||||
// Called exactly once when registering the type. Used for
|
||||
// setting up metadata for all instances, e.g. the name and
|
||||
// classification and the pad templates with their caps.
|
||||
//
|
||||
// Actual instances can create pads based on those pad templates
|
||||
// with a subset of the caps given here. In case of basetransform,
|
||||
// a "src" and "sink" pad template are required here and the base class
|
||||
// will automatically instantiate pads for them.
|
||||
//
|
||||
// Our element here can only handle F32 mono audio.
|
||||
fn class_init(klass: &mut subclass::simple::ClassStruct<Self>) {
|
||||
// Set the element specific metadata. This information is what
|
||||
// is visible from gst-inspect-1.0 and can also be programatically
|
||||
// retrieved from the gst::Registry after initial registration
|
||||
// without having to load the plugin in memory.
|
||||
klass.set_metadata(
|
||||
"FIR Filter",
|
||||
"Filter/Effect/Audio",
|
||||
"A FIR audio filter",
|
||||
"Sebastian Dröge <sebastian@centricular.com>",
|
||||
);
|
||||
|
||||
// Create and add pad templates for our sink and source pad. These
|
||||
// are later used for actually creating the pads and beforehand
|
||||
// already provide information to GStreamer about all possible
|
||||
// pads that could exist for this type.
|
||||
|
||||
// On both of pads we can only handle F32 mono at any sample rate.
|
||||
let caps = gst::Caps::new_simple(
|
||||
"audio/x-raw",
|
||||
&[
|
||||
("format", &gst_audio::AUDIO_FORMAT_F32.to_str()),
|
||||
("rate", &gst::IntRange::<i32>::new(1, i32::MAX)),
|
||||
("channels", &1i32),
|
||||
("layout", &"interleaved"),
|
||||
],
|
||||
);
|
||||
|
||||
// The src pad template must be named "src" for basetransform
|
||||
// and specific a pad that is always there
|
||||
let src_pad_template = gst::PadTemplate::new(
|
||||
"src",
|
||||
gst::PadDirection::Src,
|
||||
gst::PadPresence::Always,
|
||||
&caps,
|
||||
)
|
||||
.unwrap();
|
||||
klass.add_pad_template(src_pad_template);
|
||||
|
||||
// The sink pad template must be named "sink" for basetransform
|
||||
// and specific a pad that is always there
|
||||
let sink_pad_template = gst::PadTemplate::new(
|
||||
"sink",
|
||||
gst::PadDirection::Sink,
|
||||
gst::PadPresence::Always,
|
||||
&caps,
|
||||
)
|
||||
.unwrap();
|
||||
klass.add_pad_template(sink_pad_template);
|
||||
|
||||
// Configure basetransform so that we are always running in-place,
|
||||
// don't passthrough on same caps and also never call transform_ip
|
||||
// in passthrough mode (which does not matter for us here).
|
||||
//
|
||||
// The way how our processing is implemented, in-place transformation
|
||||
// is simpler.
|
||||
klass.configure(
|
||||
gst_base::subclass::BaseTransformMode::AlwaysInPlace,
|
||||
false,
|
||||
false,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Implementation of glib::Object virtual methods
|
||||
impl ObjectImpl for FirFilter {
|
||||
// This macro provides some boilerplate.
|
||||
glib_object_impl!();
|
||||
}
|
||||
impl ObjectImpl for FirFilter {}
|
||||
|
||||
impl GstObjectImpl for FirFilter {}
|
||||
|
||||
// Implementation of gst::Element virtual methods
|
||||
impl ElementImpl for FirFilter {}
|
||||
impl ElementImpl for FirFilter {
|
||||
// The element specific metadata. This information is what is visible from
|
||||
// gst-inspect-1.0 and can also be programmatically retrieved from the gst::Registry
|
||||
// after initial registration without having to load the plugin in memory.
|
||||
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
|
||||
static ELEMENT_METADATA: std::sync::OnceLock<gst::subclass::ElementMetadata> =
|
||||
std::sync::OnceLock::new();
|
||||
|
||||
Some(ELEMENT_METADATA.get_or_init(|| {
|
||||
gst::subclass::ElementMetadata::new(
|
||||
"FIR Filter",
|
||||
"Filter/Effect/Audio",
|
||||
"A FIR audio filter",
|
||||
"Sebastian Dröge <sebastian@centricular.com>",
|
||||
)
|
||||
}))
|
||||
}
|
||||
|
||||
fn pad_templates() -> &'static [gst::PadTemplate] {
|
||||
static PAD_TEMPLATES: std::sync::OnceLock<Vec<gst::PadTemplate>> =
|
||||
std::sync::OnceLock::new();
|
||||
|
||||
PAD_TEMPLATES.get_or_init(|| {
|
||||
// Create pad templates for our sink and source pad. These are later used for
|
||||
// actually creating the pads and beforehand already provide information to
|
||||
// GStreamer about all possible pads that could exist for this type.
|
||||
|
||||
// On both of pads we can only handle F32 mono at any sample rate.
|
||||
let caps = gst_audio::AudioCapsBuilder::new_interleaved()
|
||||
.format(gst_audio::AUDIO_FORMAT_F32)
|
||||
.channels(1)
|
||||
.build();
|
||||
|
||||
vec![
|
||||
// The src pad template must be named "src" for basetransform
|
||||
// and specific a pad that is always there
|
||||
gst::PadTemplate::new(
|
||||
"src",
|
||||
gst::PadDirection::Src,
|
||||
gst::PadPresence::Always,
|
||||
&caps,
|
||||
)
|
||||
.unwrap(),
|
||||
// The sink pad template must be named "sink" for basetransform
|
||||
// and specific a pad that is always there
|
||||
gst::PadTemplate::new(
|
||||
"sink",
|
||||
gst::PadDirection::Sink,
|
||||
gst::PadPresence::Always,
|
||||
&caps,
|
||||
)
|
||||
.unwrap(),
|
||||
]
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Implementation of gst_base::BaseTransform virtual methods
|
||||
impl BaseTransformImpl for FirFilter {
|
||||
// Configure basetransform so that we are always running in-place,
|
||||
// don't passthrough on same caps and also never call transform_ip
|
||||
// in passthrough mode (which does not matter for us here).
|
||||
//
|
||||
// The way how our processing is implemented, in-place transformation
|
||||
// is simpler.
|
||||
const MODE: gst_base::subclass::BaseTransformMode =
|
||||
gst_base::subclass::BaseTransformMode::AlwaysInPlace;
|
||||
const PASSTHROUGH_ON_SAME_CAPS: bool = false;
|
||||
const TRANSFORM_IP_ON_PASSTHROUGH: bool = false;
|
||||
|
||||
// Returns the size of one processing unit (i.e. a frame in our case) corresponding
|
||||
// to the given caps. This is used for allocating a big enough output buffer and
|
||||
// sanity checking the input buffer size, among other things.
|
||||
fn get_unit_size(
|
||||
&self,
|
||||
_element: &gst_base::BaseTransform,
|
||||
caps: &gst::Caps,
|
||||
) -> Option<usize> {
|
||||
fn unit_size(&self, caps: &gst::Caps) -> Option<usize> {
|
||||
let audio_info = gst_audio::AudioInfo::from_caps(caps).ok();
|
||||
audio_info.map(|info| info.bpf() as usize)
|
||||
}
|
||||
|
||||
// Called when shutting down the element so we can release all stream-related state
|
||||
// There's also start(), which is called whenever starting the element again
|
||||
fn stop(&self, element: &gst_base::BaseTransform) -> Result<(), gst::ErrorMessage> {
|
||||
fn stop(&self) -> Result<(), gst::ErrorMessage> {
|
||||
// Drop state
|
||||
self.history.lock().unwrap().clear();
|
||||
|
||||
gst_info!(CAT, obj: element, "Stopped");
|
||||
gst::info!(CAT, imp: self, "Stopped");
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
@ -196,20 +150,19 @@ mod fir_filter {
|
|||
// Does the actual transformation of the input buffer to the output buffer
|
||||
fn transform_ip(
|
||||
&self,
|
||||
element: &gst_base::BaseTransform,
|
||||
buf: &mut gst::BufferRef,
|
||||
) -> Result<gst::FlowSuccess, gst::FlowError> {
|
||||
// Get coefficients and return directly if we have none
|
||||
let coeffs = self.coeffs.lock().unwrap();
|
||||
if coeffs.is_empty() {
|
||||
gst_trace!(CAT, obj: element, "No coefficients set -- passthrough");
|
||||
gst::trace!(CAT, imp: self, "No coefficients set -- passthrough");
|
||||
return Ok(gst::FlowSuccess::Ok);
|
||||
}
|
||||
|
||||
// Try mapping the input buffer as writable
|
||||
let mut data = buf.map_writable().map_err(|_| {
|
||||
gst_element_error!(
|
||||
element,
|
||||
gst::element_imp_error!(
|
||||
self,
|
||||
gst::CoreError::Failed,
|
||||
["Failed to map input buffer readable"]
|
||||
);
|
||||
|
@ -218,8 +171,8 @@ mod fir_filter {
|
|||
|
||||
// And reinterprete it as a slice of f32
|
||||
let samples = data.as_mut_slice_of::<f32>().map_err(|err| {
|
||||
gst_element_error!(
|
||||
element,
|
||||
gst::element_imp_error!(
|
||||
self,
|
||||
gst::CoreError::Failed,
|
||||
["Failed to cast input buffer as f32 slice: {}", err]
|
||||
);
|
||||
|
@ -228,9 +181,9 @@ mod fir_filter {
|
|||
|
||||
let mut history = self.history.lock().unwrap();
|
||||
|
||||
gst_trace!(
|
||||
gst::trace!(
|
||||
CAT,
|
||||
obj: element,
|
||||
imp: self,
|
||||
"Transforming {} samples with filter of length {}",
|
||||
samples.len(),
|
||||
coeffs.len()
|
||||
|
@ -258,75 +211,50 @@ mod fir_filter {
|
|||
|
||||
// This here defines the public interface of our element and implements
|
||||
// the corresponding traits so that it behaves like any other gst::Element
|
||||
glib_wrapper! {
|
||||
pub struct FirFilter(
|
||||
Object<
|
||||
gst::subclass::ElementInstanceStruct<imp::FirFilter>,
|
||||
subclass::simple::ClassStruct<imp::FirFilter>,
|
||||
FirFilterClass
|
||||
>
|
||||
) @extends gst_base::BaseTransform, gst::Element, gst::Object;
|
||||
|
||||
match fn {
|
||||
get_type => || imp::FirFilter::get_type().to_glib(),
|
||||
}
|
||||
glib::wrapper! {
|
||||
pub struct FirFilter(ObjectSubclass<imp::FirFilter>) @extends gst_base::BaseTransform, gst::Element, gst::Object;
|
||||
}
|
||||
|
||||
// GStreamer elements must be Send+Sync, and ours is
|
||||
unsafe impl Send for FirFilter {}
|
||||
unsafe impl Sync for FirFilter {}
|
||||
|
||||
impl FirFilter {
|
||||
// Creates a new instance of our filter with the given name
|
||||
pub fn new(name: Option<&str>) -> FirFilter {
|
||||
glib::Object::new(Self::static_type(), &[("name", &name)])
|
||||
.expect("Failed to create fir filter")
|
||||
.downcast()
|
||||
.expect("Created fir filter is of wrong type")
|
||||
glib::Object::builder().property("name", name).build()
|
||||
}
|
||||
|
||||
// Sets the coefficients by getting access to the private
|
||||
// struct and simply setting them
|
||||
pub fn set_coeffs(&self, coeffs: Vec<f32>) {
|
||||
let imp = imp::FirFilter::from_instance(self);
|
||||
let imp = self.imp();
|
||||
*imp.coeffs.lock().unwrap() = coeffs;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Missing element {}", _0)]
|
||||
struct MissingElement(#[error(not(source))] &'static str);
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
|
||||
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
|
||||
struct ErrorMessage {
|
||||
src: String,
|
||||
error: String,
|
||||
debug: Option<String>,
|
||||
source: glib::Error,
|
||||
src: glib::GString,
|
||||
error: glib::Error,
|
||||
debug: Option<glib::GString>,
|
||||
}
|
||||
|
||||
fn create_pipeline() -> Result<gst::Pipeline, Error> {
|
||||
gst::init()?;
|
||||
|
||||
// Create our pipeline with the custom element
|
||||
let pipeline = gst::Pipeline::new(None);
|
||||
let src = gst::ElementFactory::make("audiotestsrc", None)
|
||||
.map_err(|_| MissingElement("audiotestsrc"))?;
|
||||
let pipeline = gst::Pipeline::default();
|
||||
let src = gst::ElementFactory::make("audiotestsrc")
|
||||
.property_from_str("wave", "white-noise")
|
||||
.build()?;
|
||||
let filter = fir_filter::FirFilter::new(None);
|
||||
let conv = gst::ElementFactory::make("audioconvert", None)
|
||||
.map_err(|_| MissingElement("audioconvert"))?;
|
||||
let sink = gst::ElementFactory::make("autoaudiosink", None)
|
||||
.map_err(|_| MissingElement("autoaudiosink"))?;
|
||||
let conv = gst::ElementFactory::make("audioconvert").build()?;
|
||||
let sink = gst::ElementFactory::make("autoaudiosink").build()?;
|
||||
|
||||
pipeline.add_many(&[&src, filter.upcast_ref(), &conv, &sink])?;
|
||||
pipeline.add_many([&src, filter.upcast_ref(), &conv, &sink])?;
|
||||
src.link(&filter)?;
|
||||
filter.link(&conv)?;
|
||||
conv.link(&sink)?;
|
||||
|
||||
src.set_property_from_str("wave", "white-noise");
|
||||
|
||||
// Create a windowed sinc lowpass filter at 1/64 sample rate,
|
||||
// i.e. 689Hz for 44.1kHz sample rate
|
||||
let w = 2.0 * std::f32::consts::PI / 64.0;
|
||||
|
@ -363,10 +291,10 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
|
|||
pipeline.set_state(gst::State::Playing)?;
|
||||
|
||||
let bus = pipeline
|
||||
.get_bus()
|
||||
.bus()
|
||||
.expect("Pipeline without bus. Shouldn't happen!");
|
||||
|
||||
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
|
@ -375,12 +303,11 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
|
|||
pipeline.set_state(gst::State::Null)?;
|
||||
return Err(ErrorMessage {
|
||||
src: msg
|
||||
.get_src()
|
||||
.map(|s| String::from(s.get_path_string()))
|
||||
.unwrap_or_else(|| String::from("None")),
|
||||
error: err.get_error().to_string(),
|
||||
debug: err.get_debug(),
|
||||
source: err.get_error(),
|
||||
.src()
|
||||
.map(|s| s.path_string())
|
||||
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
|
||||
error: err.error(),
|
||||
debug: err.debug(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
|
@ -396,12 +323,12 @@ fn main_loop(pipeline: gst::Pipeline) -> Result<(), Error> {
|
|||
fn example_main() {
|
||||
match create_pipeline().and_then(main_loop) {
|
||||
Ok(r) => r,
|
||||
Err(e) => eprintln!("Error! {}", e),
|
||||
Err(e) => eprintln!("Error! {e}"),
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
||||
|
|
188
examples/src/bin/subclass_vfuncs/iirfilter/imp.rs
Normal file
188
examples/src/bin/subclass_vfuncs/iirfilter/imp.rs
Normal file
|
@ -0,0 +1,188 @@
|
|||
// In the imp submodule we include the actual implementation
|
||||
|
||||
use std::{collections::VecDeque, sync::Mutex};
|
||||
|
||||
use glib::prelude::*;
|
||||
use gst_audio::subclass::prelude::*;
|
||||
use once_cell::sync::Lazy;
|
||||
|
||||
use byte_slice_cast::*;
|
||||
|
||||
use atomic_refcell::AtomicRefCell;
|
||||
|
||||
// The debug category we use below for our filter
|
||||
pub static CAT: Lazy<gst::DebugCategory> = Lazy::new(|| {
|
||||
gst::DebugCategory::new(
|
||||
"rsiirfilter",
|
||||
gst::DebugColorFlags::empty(),
|
||||
Some("Rust IIR Filter"),
|
||||
)
|
||||
});
|
||||
|
||||
#[derive(Default)]
|
||||
// This is the state of our filter
|
||||
struct State {
|
||||
a: Vec<f64>,
|
||||
b: Vec<f64>,
|
||||
x: VecDeque<f64>,
|
||||
y: VecDeque<f64>,
|
||||
}
|
||||
|
||||
// This is the private data of our filter
|
||||
#[derive(Default)]
|
||||
pub struct IirFilter {
|
||||
coeffs: Mutex<Option<(Vec<f64>, Vec<f64>)>>,
|
||||
state: AtomicRefCell<State>,
|
||||
}
|
||||
|
||||
// This trait registers our type with the GObject object system and
|
||||
// provides the entry points for creating a new instance and setting
|
||||
// up the class data
|
||||
#[glib::object_subclass]
|
||||
impl ObjectSubclass for IirFilter {
|
||||
const NAME: &'static str = "RsIirFilter";
|
||||
const ABSTRACT: bool = true;
|
||||
type Type = super::IirFilter;
|
||||
type ParentType = gst_audio::AudioFilter;
|
||||
type Class = super::Class;
|
||||
|
||||
// Here we set default implementations for all the virtual methods.
|
||||
// This is mandatory for all virtual methods that are not `Option`s.
|
||||
fn class_init(class: &mut Self::Class) {
|
||||
class.set_rate = |obj, rate| obj.imp().set_rate_default(rate);
|
||||
}
|
||||
}
|
||||
|
||||
// Implementation of glib::Object virtual methods
|
||||
impl ObjectImpl for IirFilter {}
|
||||
|
||||
impl GstObjectImpl for IirFilter {}
|
||||
|
||||
// Implementation of gst::Element virtual methods
|
||||
impl ElementImpl for IirFilter {}
|
||||
|
||||
// Implementation of gst_base::BaseTransform virtual methods
|
||||
impl BaseTransformImpl for IirFilter {
|
||||
// Configure basetransform so that we are always running in-place,
|
||||
// don't passthrough on same caps and also never call transform_ip
|
||||
// in passthrough mode (which does not matter for us here).
|
||||
//
|
||||
// The way how our processing is implemented, in-place transformation
|
||||
// is simpler.
|
||||
const MODE: gst_base::subclass::BaseTransformMode =
|
||||
gst_base::subclass::BaseTransformMode::AlwaysInPlace;
|
||||
const PASSTHROUGH_ON_SAME_CAPS: bool = false;
|
||||
const TRANSFORM_IP_ON_PASSTHROUGH: bool = false;
|
||||
|
||||
fn start(&self) -> Result<(), gst::ErrorMessage> {
|
||||
self.parent_start()?;
|
||||
|
||||
*self.state.borrow_mut() = State::default();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn stop(&self) -> Result<(), gst::ErrorMessage> {
|
||||
self.parent_stop()?;
|
||||
|
||||
*self.state.borrow_mut() = State::default();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn transform_ip(&self, buf: &mut gst::BufferRef) -> Result<gst::FlowSuccess, gst::FlowError> {
|
||||
let mut state = self.state.borrow_mut();
|
||||
|
||||
// Update coefficients if new coefficients were set
|
||||
{
|
||||
let mut coeffs = self.coeffs.lock().unwrap();
|
||||
|
||||
if let Some((a, b)) = coeffs.take() {
|
||||
state.x.clear();
|
||||
state.y.clear();
|
||||
if !a.is_empty() {
|
||||
state.y.resize(a.len() - 1, 0.0);
|
||||
}
|
||||
if !b.is_empty() {
|
||||
state.x.resize(b.len() - 1, 0.0);
|
||||
}
|
||||
state.a = a;
|
||||
state.b = b;
|
||||
}
|
||||
}
|
||||
|
||||
if state.a.is_empty() | state.b.is_empty() {
|
||||
return Ok(gst::FlowSuccess::Ok);
|
||||
}
|
||||
|
||||
let mut map = buf.map_writable().map_err(|_| {
|
||||
gst::error!(CAT, imp: self, "Failed to map buffer writable");
|
||||
gst::FlowError::Error
|
||||
})?;
|
||||
|
||||
let samples = map.as_mut_slice_of::<f32>().unwrap();
|
||||
|
||||
assert!(state.b.len() - 1 == state.x.len());
|
||||
assert!(state.a.len() - 1 == state.y.len());
|
||||
|
||||
for sample in samples.iter_mut() {
|
||||
let mut val = state.b[0] * *sample as f64;
|
||||
|
||||
for (b, x) in Iterator::zip(state.b.iter().skip(1), state.x.iter()) {
|
||||
val += b * x;
|
||||
}
|
||||
|
||||
for (a, y) in Iterator::zip(state.a.iter().skip(1), state.y.iter()) {
|
||||
val -= a * y;
|
||||
}
|
||||
|
||||
val /= state.a[0];
|
||||
|
||||
let _ = state.x.pop_back().unwrap();
|
||||
state.x.push_front(*sample as f64);
|
||||
|
||||
let _ = state.y.pop_back().unwrap();
|
||||
state.y.push_front(val);
|
||||
|
||||
*sample = val as f32;
|
||||
}
|
||||
|
||||
Ok(gst::FlowSuccess::Ok)
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioFilterImpl for IirFilter {
|
||||
fn allowed_caps() -> &'static gst::Caps {
|
||||
static CAPS: std::sync::OnceLock<gst::Caps> = std::sync::OnceLock::new();
|
||||
CAPS.get_or_init(|| {
|
||||
// On both of pads we can only handle F32 mono at any sample rate.
|
||||
gst_audio::AudioCapsBuilder::new_interleaved()
|
||||
.format(gst_audio::AUDIO_FORMAT_F32)
|
||||
.channels(1)
|
||||
.build()
|
||||
})
|
||||
}
|
||||
|
||||
fn setup(&self, info: &gst_audio::AudioInfo) -> Result<(), gst::LoggableError> {
|
||||
self.parent_setup(info)?;
|
||||
|
||||
gst::debug!(CAT, imp: self, "Rate changed to {}", info.rate());
|
||||
let obj = self.obj();
|
||||
(obj.class().as_ref().set_rate)(&obj, info.rate());
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Wrappers for public methods and associated helper functions.
|
||||
impl IirFilter {
|
||||
pub(super) fn set_coeffs(&self, a: Vec<f64>, b: Vec<f64>) {
|
||||
gst::debug!(CAT, imp: self, "Setting coefficients a: {a:?}, b: {b:?}");
|
||||
*self.coeffs.lock().unwrap() = Some((a, b));
|
||||
}
|
||||
}
|
||||
|
||||
/// Default virtual method implementations.
|
||||
impl IirFilter {
|
||||
fn set_rate_default(&self, _rate: u32) {}
|
||||
}
|
86
examples/src/bin/subclass_vfuncs/iirfilter/mod.rs
Normal file
86
examples/src/bin/subclass_vfuncs/iirfilter/mod.rs
Normal file
|
@ -0,0 +1,86 @@
|
|||
use gst::{prelude::*, subclass::prelude::*};
|
||||
use gst_audio::subclass::prelude::*;
|
||||
|
||||
mod imp;
|
||||
|
||||
// This here defines the public interface of our element and implements
|
||||
// the corresponding traits so that it behaves like any other gst::Element
|
||||
//
|
||||
// GObject
|
||||
// ╰──GstObject
|
||||
// ╰──GstElement
|
||||
// ╰──GstBaseTransform
|
||||
// ╰──GstAudioFilter
|
||||
// ╰──IirFilter
|
||||
glib::wrapper! {
|
||||
pub struct IirFilter(ObjectSubclass<imp::IirFilter>) @extends gst_audio::AudioFilter, gst_base::BaseTransform, gst::Element, gst::Object;
|
||||
}
|
||||
|
||||
/// Trait containing extension methods for `IirFilter`.
|
||||
pub trait IirFilterExt: IsA<IirFilter> {
|
||||
// Sets the coefficients by getting access to the private struct and simply setting them
|
||||
fn set_coeffs(&self, a: Vec<f64>, b: Vec<f64>) {
|
||||
self.upcast_ref::<IirFilter>().imp().set_coeffs(a, b)
|
||||
}
|
||||
}
|
||||
|
||||
impl<O: IsA<IirFilter>> IirFilterExt for O {}
|
||||
|
||||
/// Trait to implement in `IirFilter` subclasses.
|
||||
pub trait IirFilterImpl: AudioFilterImpl {
|
||||
/// Called whenever the sample rate is changing.
|
||||
fn set_rate(&self, rate: u32) {
|
||||
self.parent_set_rate(rate);
|
||||
}
|
||||
}
|
||||
|
||||
/// Trait containing extension methods for `IirFilterImpl`, specifically methods for chaining
|
||||
/// up to the parent implementation of virtual methods.
|
||||
pub trait IirFilterImplExt: IirFilterImpl {
|
||||
fn parent_set_rate(&self, rate: u32) {
|
||||
unsafe {
|
||||
let data = Self::type_data();
|
||||
let parent_class = &*(data.as_ref().parent_class() as *mut Class);
|
||||
(parent_class.set_rate)(self.obj().unsafe_cast_ref(), rate)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: IirFilterImpl> IirFilterImplExt for T {}
|
||||
|
||||
/// Class struct for `IirFilter`.
|
||||
#[repr(C)]
|
||||
pub struct Class {
|
||||
parent: <<imp::IirFilter as ObjectSubclass>::ParentType as ObjectType>::GlibClassType,
|
||||
|
||||
set_rate: fn(&IirFilter, rate: u32),
|
||||
}
|
||||
|
||||
unsafe impl ClassStruct for Class {
|
||||
type Type = imp::IirFilter;
|
||||
}
|
||||
|
||||
/// This allows directly using `Class` as e.g. `gst_audio::AudioFilterClass` or
|
||||
/// `gst_base::BaseTransformClass` without having to cast.
|
||||
impl std::ops::Deref for Class {
|
||||
type Target = glib::Class<<<Self as ClassStruct>::Type as ObjectSubclass>::ParentType>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
unsafe { &*(&self.parent as *const _ as *const _) }
|
||||
}
|
||||
}
|
||||
|
||||
/// Overrides the virtual methods with the actual implementation of the subclass as is provided by
|
||||
/// the subclass' implementation of the `Impl` trait.
|
||||
unsafe impl<T: IirFilterImpl> IsSubclassable<T> for IirFilter {
|
||||
fn class_init(class: &mut glib::Class<Self>) {
|
||||
Self::parent_class_init::<T>(class);
|
||||
|
||||
let class = class.as_mut();
|
||||
|
||||
class.set_rate = |obj, rate| unsafe {
|
||||
let imp = obj.unsafe_cast_ref::<T::Type>().imp();
|
||||
imp.set_rate(rate);
|
||||
};
|
||||
}
|
||||
}
|
170
examples/src/bin/subclass_vfuncs/lowpass/imp.rs
Normal file
170
examples/src/bin/subclass_vfuncs/lowpass/imp.rs
Normal file
|
@ -0,0 +1,170 @@
|
|||
// In the imp submodule we include the actual implementation
|
||||
|
||||
use std::sync::Mutex;
|
||||
|
||||
use glib::prelude::*;
|
||||
use gst::prelude::*;
|
||||
use gst_audio::subclass::prelude::*;
|
||||
|
||||
use crate::iirfilter::{IirFilterExt, IirFilterImpl};
|
||||
|
||||
// These are the property values of our filter
|
||||
pub struct Settings {
|
||||
cutoff: f32,
|
||||
}
|
||||
|
||||
impl Default for Settings {
|
||||
fn default() -> Self {
|
||||
Settings { cutoff: 0.0 }
|
||||
}
|
||||
}
|
||||
|
||||
// This is the state of our filter
|
||||
#[derive(Default)]
|
||||
pub struct State {
|
||||
rate: Option<u32>,
|
||||
}
|
||||
|
||||
// This is the private data of our filter
|
||||
#[derive(Default)]
|
||||
pub struct Lowpass {
|
||||
settings: Mutex<Settings>,
|
||||
state: Mutex<State>,
|
||||
}
|
||||
|
||||
// This trait registers our type with the GObject object system and
|
||||
// provides the entry points for creating a new instance and setting
|
||||
// up the class data
|
||||
#[glib::object_subclass]
|
||||
impl ObjectSubclass for Lowpass {
|
||||
const NAME: &'static str = "RsLowpass";
|
||||
type Type = super::Lowpass;
|
||||
type ParentType = crate::iirfilter::IirFilter;
|
||||
}
|
||||
|
||||
// Implementation of glib::Object virtual methods
|
||||
impl ObjectImpl for Lowpass {
|
||||
fn properties() -> &'static [glib::ParamSpec] {
|
||||
static PROPERTIES: std::sync::OnceLock<Vec<glib::ParamSpec>> = std::sync::OnceLock::new();
|
||||
|
||||
PROPERTIES.get_or_init(|| {
|
||||
vec![glib::ParamSpecFloat::builder("cutoff")
|
||||
.nick("Cutoff")
|
||||
.blurb("Cutoff frequency in Hz")
|
||||
.default_value(Settings::default().cutoff)
|
||||
.minimum(0.0)
|
||||
.mutable_playing()
|
||||
.build()]
|
||||
})
|
||||
}
|
||||
|
||||
fn set_property(&self, _id: usize, value: &glib::Value, pspec: &glib::ParamSpec) {
|
||||
match pspec.name() {
|
||||
"cutoff" => {
|
||||
self.settings.lock().unwrap().cutoff = value.get().unwrap();
|
||||
self.calculate_coeffs();
|
||||
}
|
||||
_ => unimplemented!(),
|
||||
};
|
||||
}
|
||||
|
||||
fn property(&self, _id: usize, pspec: &glib::ParamSpec) -> glib::Value {
|
||||
match pspec.name() {
|
||||
"cutoff" => self.settings.lock().unwrap().cutoff.to_value(),
|
||||
_ => unimplemented!(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl GstObjectImpl for Lowpass {}
|
||||
|
||||
// Implementation of gst::Element virtual methods
|
||||
impl ElementImpl for Lowpass {
|
||||
// The element specific metadata. This information is what is visible from
|
||||
// gst-inspect-1.0 and can also be programmatically retrieved from the gst::Registry
|
||||
// after initial registration without having to load the plugin in memory.
|
||||
fn metadata() -> Option<&'static gst::subclass::ElementMetadata> {
|
||||
static ELEMENT_METADATA: std::sync::OnceLock<gst::subclass::ElementMetadata> =
|
||||
std::sync::OnceLock::new();
|
||||
Some(ELEMENT_METADATA.get_or_init(|| {
|
||||
gst::subclass::ElementMetadata::new(
|
||||
"Lowpass Filter",
|
||||
"Filter/Effect/Audio",
|
||||
"A Lowpass audio filter",
|
||||
"Sebastian Dröge <sebastian@centricular.com>",
|
||||
)
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
// Implementation of gst_base::BaseTransform virtual methods
|
||||
impl BaseTransformImpl for Lowpass {
|
||||
const MODE: gst_base::subclass::BaseTransformMode =
|
||||
<<crate::iirfilter::IirFilter as glib::object::ObjectSubclassIs>::Subclass>::MODE;
|
||||
const PASSTHROUGH_ON_SAME_CAPS: bool =
|
||||
<<crate::iirfilter::IirFilter as glib::object::ObjectSubclassIs>::Subclass>::PASSTHROUGH_ON_SAME_CAPS;
|
||||
const TRANSFORM_IP_ON_PASSTHROUGH: bool =
|
||||
<<crate::iirfilter::IirFilter as glib::object::ObjectSubclassIs>::Subclass>::TRANSFORM_IP_ON_PASSTHROUGH;
|
||||
|
||||
fn start(&self) -> Result<(), gst::ErrorMessage> {
|
||||
self.parent_start()?;
|
||||
|
||||
*self.state.lock().unwrap() = State::default();
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
// Implement of gst_audio::AudioFilter virtual methods
|
||||
impl AudioFilterImpl for Lowpass {}
|
||||
|
||||
// Implement of IirFilter virtual methods
|
||||
impl IirFilterImpl for Lowpass {
|
||||
fn set_rate(&self, rate: u32) {
|
||||
// Could call
|
||||
// self.parent_set_rate(rate);
|
||||
// here but chaining up is not necessary if the base class doesn't require that
|
||||
// or if the behaviour of the parent class should be completely overridden.
|
||||
|
||||
self.state.lock().unwrap().rate = Some(rate);
|
||||
self.calculate_coeffs();
|
||||
}
|
||||
}
|
||||
|
||||
impl Lowpass {
|
||||
fn calculate_coeffs(&self) {
|
||||
use std::f64;
|
||||
|
||||
let Some(rate) = self.state.lock().unwrap().rate else {
|
||||
return;
|
||||
};
|
||||
let cutoff = self.settings.lock().unwrap().cutoff;
|
||||
|
||||
// See Audio EQ Cookbook
|
||||
// https://www.w3.org/TR/audio-eq-cookbook
|
||||
let cutoff = cutoff as f64 / rate as f64;
|
||||
|
||||
let omega = 2.0 * f64::consts::PI * cutoff;
|
||||
let q = 1.0;
|
||||
|
||||
let alpha = f64::sin(omega) / (2.0 * q);
|
||||
|
||||
let mut b = vec![
|
||||
(1.0 - f64::cos(omega)) / 2.0,
|
||||
1.0 - f64::cos(omega),
|
||||
(1.0 - f64::cos(omega) / 2.0),
|
||||
];
|
||||
|
||||
let mut a = vec![1.0 + alpha, -2.0 * f64::cos(omega), 1.0 - alpha];
|
||||
|
||||
let a0 = a[0];
|
||||
for a in &mut a {
|
||||
*a /= a0;
|
||||
}
|
||||
for b in &mut b {
|
||||
*b /= a0;
|
||||
}
|
||||
|
||||
self.obj().set_coeffs(a, b);
|
||||
}
|
||||
}
|
15
examples/src/bin/subclass_vfuncs/lowpass/mod.rs
Normal file
15
examples/src/bin/subclass_vfuncs/lowpass/mod.rs
Normal file
|
@ -0,0 +1,15 @@
|
|||
mod imp;
|
||||
|
||||
// This here defines the public interface of our element and implements
|
||||
// the corresponding traits so that it behaves like any other gst::Element
|
||||
//
|
||||
// GObject
|
||||
// ╰──GstObject
|
||||
// ╰──GstElement
|
||||
// ╰──GstBaseTransform
|
||||
// ╰──GstAudioFilter
|
||||
// ╰──IirFilter
|
||||
// ╰──Lowpass
|
||||
glib::wrapper! {
|
||||
pub struct Lowpass(ObjectSubclass<imp::Lowpass>) @extends crate::iirfilter::IirFilter, gst_audio::AudioFilter, gst_base::BaseTransform, gst::Element, gst::Object;
|
||||
}
|
66
examples/src/bin/subclass_vfuncs/main.rs
Normal file
66
examples/src/bin/subclass_vfuncs/main.rs
Normal file
|
@ -0,0 +1,66 @@
|
|||
// This example implements a baseclass IirFilter, and a subclass Lowpass of that.
|
||||
//
|
||||
// The example shows how to provide and implement virtual methods, and how to provide non-virtual
|
||||
// methods on the base class.
|
||||
|
||||
use gst::prelude::*;
|
||||
|
||||
mod iirfilter;
|
||||
mod lowpass;
|
||||
|
||||
#[path = "../../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
fn example_main() {
|
||||
gst::init().unwrap();
|
||||
|
||||
let pipeline = gst::Pipeline::new();
|
||||
let src = gst::ElementFactory::make("audiotestsrc")
|
||||
.property_from_str("wave", "white-noise")
|
||||
.build()
|
||||
.unwrap();
|
||||
let filter = glib::Object::builder::<lowpass::Lowpass>()
|
||||
.property("cutoff", 4000.0f32)
|
||||
.build();
|
||||
let conv = gst::ElementFactory::make("audioconvert").build().unwrap();
|
||||
let sink = gst::ElementFactory::make("autoaudiosink").build().unwrap();
|
||||
|
||||
pipeline
|
||||
.add_many([&src, filter.as_ref(), &conv, &sink])
|
||||
.unwrap();
|
||||
gst::Element::link_many([&src, filter.as_ref(), &conv, &sink]).unwrap();
|
||||
|
||||
let bus = pipeline.bus().unwrap();
|
||||
|
||||
pipeline
|
||||
.set_state(gst::State::Playing)
|
||||
.expect("Unable to set the pipeline to the `Playing` state");
|
||||
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
MessageView::Eos(..) => break,
|
||||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.src().map(|s| s.path_string()),
|
||||
err.error(),
|
||||
err.debug()
|
||||
);
|
||||
break;
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
|
||||
pipeline
|
||||
.set_state(gst::State::Null)
|
||||
.expect("Unable to set the pipeline to the `Null` state");
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
|
@ -18,27 +18,23 @@
|
|||
// (More modes of operation are possible, see: gst::TagMergeMode)
|
||||
// This merge-mode can also be supplied to any method that adds new tags.
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::prelude::*;
|
||||
|
||||
use anyhow::anyhow;
|
||||
use anyhow::Error;
|
||||
use anyhow::{anyhow, Error};
|
||||
use derive_more::{Display, Error};
|
||||
use gst::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Missing element {}", _0)]
|
||||
#[display(fmt = "Missing element {_0}")]
|
||||
struct MissingElement(#[error(not(source))] String);
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
|
||||
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
|
||||
struct ErrorMessage {
|
||||
src: String,
|
||||
error: String,
|
||||
debug: Option<String>,
|
||||
source: glib::Error,
|
||||
src: glib::GString,
|
||||
error: glib::Error,
|
||||
debug: Option<glib::GString>,
|
||||
}
|
||||
|
||||
fn example_main() -> Result<(), Error> {
|
||||
|
@ -46,7 +42,7 @@ fn example_main() -> Result<(), Error> {
|
|||
|
||||
// Parse the pipeline we want to probe from a static in-line string.
|
||||
let mut context = gst::ParseContext::new();
|
||||
let pipeline = match gst::parse_launch_full(
|
||||
let pipeline = match gst::parse::launch_full(
|
||||
"audiotestsrc wave=white-noise num-buffers=100 ! flacenc ! filesink location=test.flac",
|
||||
Some(&mut context),
|
||||
gst::ParseFlags::empty(),
|
||||
|
@ -54,7 +50,7 @@ fn example_main() -> Result<(), Error> {
|
|||
Ok(pipeline) => pipeline,
|
||||
Err(err) => {
|
||||
if let Some(gst::ParseError::NoSuchElement) = err.kind::<gst::ParseError>() {
|
||||
return Err(MissingElement(context.get_missing_elements().join(",")).into());
|
||||
return Err(MissingElement(context.missing_elements().join(",")).into());
|
||||
} else {
|
||||
return Err(err.into());
|
||||
}
|
||||
|
@ -68,7 +64,7 @@ fn example_main() -> Result<(), Error> {
|
|||
// Query the pipeline for elements implementing the GstTagsetter interface.
|
||||
// In our case, this will return the flacenc element.
|
||||
let tagsetter = pipeline
|
||||
.get_by_interface(gst::TagSetter::static_type())
|
||||
.by_interface(gst::TagSetter::static_type())
|
||||
.ok_or_else(|| anyhow!("No TagSetter found"))?;
|
||||
let tagsetter = tagsetter
|
||||
.dynamic_cast::<gst::TagSetter>()
|
||||
|
@ -80,27 +76,26 @@ fn example_main() -> Result<(), Error> {
|
|||
// Set the "title" tag to "Special randomized white-noise".
|
||||
// The second parameter gst::TagMergeMode::Append tells the tagsetter to append this title
|
||||
// if there already is one.
|
||||
tagsetter.add::<gst::tags::Title>(&"Special randomized white-noise", gst::TagMergeMode::Append);
|
||||
tagsetter
|
||||
.add_tag::<gst::tags::Title>(&"Special randomized white-noise", gst::TagMergeMode::Append);
|
||||
|
||||
let bus = pipeline.get_bus().unwrap();
|
||||
let bus = pipeline.bus().unwrap();
|
||||
|
||||
pipeline.set_state(gst::State::Playing)?;
|
||||
|
||||
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
MessageView::Eos(..) => break,
|
||||
MessageView::Error(err) => {
|
||||
return Err(ErrorMessage {
|
||||
src: err
|
||||
.get_src()
|
||||
.map(|s| s.get_path_string())
|
||||
.unwrap_or_else(|| "None".into())
|
||||
.to_string(),
|
||||
error: err.get_error().to_string(),
|
||||
debug: err.get_debug(),
|
||||
source: err.get_error(),
|
||||
src: msg
|
||||
.src()
|
||||
.map(|s| s.path_string())
|
||||
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
|
||||
error: err.error(),
|
||||
debug: err.debug(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
|
@ -114,10 +109,10 @@ fn example_main() -> Result<(), Error> {
|
|||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
match examples_common::run(example_main) {
|
||||
Ok(r) => r,
|
||||
Err(e) => eprintln!("Error! {}", e),
|
||||
Err(e) => eprintln!("Error! {e}"),
|
||||
}
|
||||
}
|
||||
|
|
251
examples/src/bin/thumbnail.rs
Normal file
251
examples/src/bin/thumbnail.rs
Normal file
|
@ -0,0 +1,251 @@
|
|||
// This example demonstrates how to get a raw video frame at a given position
|
||||
// and then rescale and store it with the image crate:
|
||||
|
||||
// {uridecodebin} - {videoconvert} - {appsink}
|
||||
|
||||
// The appsink enforces RGBx so that the image crate can use it. The sample layout is passed
|
||||
// with the correct stride from GStreamer to the image crate as GStreamer does not necessarily
|
||||
// produce tightly packed pixels, and in case of RGBx never.
|
||||
|
||||
use anyhow::Error;
|
||||
use derive_more::{Display, Error};
|
||||
use gst::{element_error, prelude::*};
|
||||
use gst_video::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
|
||||
struct ErrorMessage {
|
||||
src: glib::GString,
|
||||
error: glib::Error,
|
||||
debug: Option<glib::GString>,
|
||||
}
|
||||
|
||||
fn create_pipeline(uri: String, out_path: std::path::PathBuf) -> Result<gst::Pipeline, Error> {
|
||||
gst::init()?;
|
||||
|
||||
// Create our pipeline from a pipeline description string.
|
||||
let pipeline = gst::parse::launch(&format!(
|
||||
"uridecodebin uri={uri} ! videoconvert ! appsink name=sink"
|
||||
))?
|
||||
.downcast::<gst::Pipeline>()
|
||||
.expect("Expected a gst::Pipeline");
|
||||
|
||||
// Get access to the appsink element.
|
||||
let appsink = pipeline
|
||||
.by_name("sink")
|
||||
.expect("Sink element not found")
|
||||
.downcast::<gst_app::AppSink>()
|
||||
.expect("Sink element is expected to be an appsink!");
|
||||
|
||||
// Don't synchronize on the clock, we only want a snapshot asap.
|
||||
appsink.set_property("sync", false);
|
||||
|
||||
// Tell the appsink what format we want.
|
||||
// This can be set after linking the two objects, because format negotiation between
|
||||
// both elements will happen during pre-rolling of the pipeline.
|
||||
appsink.set_caps(Some(
|
||||
&gst_video::VideoCapsBuilder::new()
|
||||
.format(gst_video::VideoFormat::Rgbx)
|
||||
.build(),
|
||||
));
|
||||
|
||||
let mut got_snapshot = false;
|
||||
|
||||
// Getting data out of the appsink is done by setting callbacks on it.
|
||||
// The appsink will then call those handlers, as soon as data is available.
|
||||
appsink.set_callbacks(
|
||||
gst_app::AppSinkCallbacks::builder()
|
||||
// Add a handler to the "new-sample" signal.
|
||||
.new_sample(move |appsink| {
|
||||
// Pull the sample in question out of the appsink's buffer.
|
||||
let sample = appsink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
|
||||
let buffer = sample.buffer().ok_or_else(|| {
|
||||
element_error!(
|
||||
appsink,
|
||||
gst::ResourceError::Failed,
|
||||
("Failed to get buffer from appsink")
|
||||
);
|
||||
|
||||
gst::FlowError::Error
|
||||
})?;
|
||||
|
||||
// Make sure that we only get a single buffer
|
||||
if got_snapshot {
|
||||
return Err(gst::FlowError::Eos);
|
||||
}
|
||||
got_snapshot = true;
|
||||
|
||||
let caps = sample.caps().expect("Sample without caps");
|
||||
let info = gst_video::VideoInfo::from_caps(caps).expect("Failed to parse caps");
|
||||
|
||||
// At this point, buffer is only a reference to an existing memory region somewhere.
|
||||
// When we want to access its content, we have to map it while requesting the required
|
||||
// mode of access (read, read/write).
|
||||
// This type of abstraction is necessary, because the buffer in question might not be
|
||||
// on the machine's main memory itself, but rather in the GPU's memory.
|
||||
// So mapping the buffer makes the underlying memory region accessible to us.
|
||||
// See: https://gstreamer.freedesktop.org/documentation/plugin-development/advanced/allocation.html
|
||||
let frame = gst_video::VideoFrameRef::from_buffer_ref_readable(buffer, &info)
|
||||
.map_err(|_| {
|
||||
element_error!(
|
||||
appsink,
|
||||
gst::ResourceError::Failed,
|
||||
("Failed to map buffer readable")
|
||||
);
|
||||
|
||||
gst::FlowError::Error
|
||||
})?;
|
||||
|
||||
// We only want to have a single buffer and then have the pipeline terminate
|
||||
println!("Have video frame");
|
||||
|
||||
// Calculate a target width/height that keeps the display aspect ratio while having
|
||||
// a height of 240 pixels
|
||||
let display_aspect_ratio = (frame.width() as f64 * info.par().numer() as f64)
|
||||
/ (frame.height() as f64 * info.par().denom() as f64);
|
||||
let target_height = 240;
|
||||
let target_width = target_height as f64 * display_aspect_ratio;
|
||||
|
||||
// Create a FlatSamples around the borrowed video frame data from GStreamer with
|
||||
// the correct stride as provided by GStreamer.
|
||||
let img = image::FlatSamples::<&[u8]> {
|
||||
samples: frame.plane_data(0).unwrap(),
|
||||
layout: image::flat::SampleLayout {
|
||||
channels: 3, // RGB
|
||||
channel_stride: 1, // 1 byte from component to component
|
||||
width: frame.width(),
|
||||
width_stride: 4, // 4 byte from pixel to pixel
|
||||
height: frame.height(),
|
||||
height_stride: frame.plane_stride()[0] as usize, // stride from line to line
|
||||
},
|
||||
color_hint: Some(image::ColorType::Rgb8),
|
||||
};
|
||||
|
||||
// Scale image to our target dimensions
|
||||
let scaled_img = image::imageops::thumbnail(
|
||||
&img.as_view::<image::Rgb<u8>>()
|
||||
.expect("couldn't create image view"),
|
||||
target_width as u32,
|
||||
target_height as u32,
|
||||
);
|
||||
|
||||
// Save it at the specific location. This automatically detects the file type
|
||||
// based on the filename.
|
||||
scaled_img.save(&out_path).map_err(|err| {
|
||||
element_error!(
|
||||
appsink,
|
||||
gst::ResourceError::Write,
|
||||
(
|
||||
"Failed to write thumbnail file {}: {}",
|
||||
out_path.display(),
|
||||
err
|
||||
)
|
||||
);
|
||||
|
||||
gst::FlowError::Error
|
||||
})?;
|
||||
|
||||
println!("Wrote thumbnail to {}", out_path.display());
|
||||
|
||||
Err(gst::FlowError::Eos)
|
||||
})
|
||||
.build(),
|
||||
);
|
||||
|
||||
Ok(pipeline)
|
||||
}
|
||||
|
||||
fn main_loop(pipeline: gst::Pipeline, position: u64) -> Result<(), Error> {
|
||||
pipeline.set_state(gst::State::Paused)?;
|
||||
|
||||
let bus = pipeline
|
||||
.bus()
|
||||
.expect("Pipeline without bus. Shouldn't happen!");
|
||||
|
||||
let mut seeked = false;
|
||||
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
MessageView::AsyncDone(..) => {
|
||||
if !seeked {
|
||||
// AsyncDone means that the pipeline has started now and that we can seek
|
||||
println!("Got AsyncDone message, seeking to {position}s");
|
||||
|
||||
if pipeline
|
||||
.seek_simple(gst::SeekFlags::FLUSH, position * gst::ClockTime::SECOND)
|
||||
.is_err()
|
||||
{
|
||||
println!("Failed to seek, taking first frame");
|
||||
}
|
||||
|
||||
pipeline.set_state(gst::State::Playing)?;
|
||||
seeked = true;
|
||||
} else {
|
||||
println!("Got second AsyncDone message, seek finished");
|
||||
}
|
||||
}
|
||||
MessageView::Eos(..) => {
|
||||
// The End-of-stream message is posted when the stream is done, which in our case
|
||||
// happens immediately after creating the thumbnail because we return
|
||||
// gst::FlowError::Eos then.
|
||||
println!("Got Eos message, done");
|
||||
break;
|
||||
}
|
||||
MessageView::Error(err) => {
|
||||
pipeline.set_state(gst::State::Null)?;
|
||||
return Err(ErrorMessage {
|
||||
src: msg
|
||||
.src()
|
||||
.map(|s| s.path_string())
|
||||
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
|
||||
error: err.error(),
|
||||
debug: err.debug(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
|
||||
pipeline.set_state(gst::State::Null)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn example_main() {
|
||||
use std::env;
|
||||
|
||||
let mut args = env::args();
|
||||
|
||||
// Parse commandline arguments: input URI, position in seconds, output path
|
||||
let _arg0 = args.next().unwrap();
|
||||
let uri = args
|
||||
.next()
|
||||
.expect("No input URI provided on the commandline");
|
||||
let position = args
|
||||
.next()
|
||||
.expect("No position in second on the commandline");
|
||||
let position = position
|
||||
.parse::<u64>()
|
||||
.expect("Failed to parse position as integer");
|
||||
let out_path = args
|
||||
.next()
|
||||
.expect("No output path provided on the commandline");
|
||||
let out_path = std::path::PathBuf::from(out_path);
|
||||
|
||||
match create_pipeline(uri, out_path).and_then(|pipeline| main_loop(pipeline, position)) {
|
||||
Ok(r) => r,
|
||||
Err(e) => eprintln!("Error! {e}"),
|
||||
}
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
|
@ -9,11 +9,10 @@
|
|||
// {filesrc} - {decodebin} - {queue} - {fakesink}
|
||||
// \- ...
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::prelude::*;
|
||||
|
||||
use std::env;
|
||||
|
||||
use gst::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
|
@ -28,15 +27,15 @@ fn example_main() {
|
|||
std::process::exit(-1)
|
||||
};
|
||||
|
||||
let pipeline = gst::Pipeline::new(None);
|
||||
let src = gst::ElementFactory::make("filesrc", None).unwrap();
|
||||
let decodebin = gst::ElementFactory::make("decodebin", None).unwrap();
|
||||
|
||||
src.set_property("location", &glib::Value::from(uri))
|
||||
let pipeline = gst::Pipeline::default();
|
||||
let src = gst::ElementFactory::make("filesrc")
|
||||
.property("location", uri)
|
||||
.build()
|
||||
.unwrap();
|
||||
let decodebin = gst::ElementFactory::make("decodebin").build().unwrap();
|
||||
|
||||
pipeline.add_many(&[&src, &decodebin]).unwrap();
|
||||
gst::Element::link_many(&[&src, &decodebin]).unwrap();
|
||||
pipeline.add_many([&src, &decodebin]).unwrap();
|
||||
gst::Element::link_many([&src, &decodebin]).unwrap();
|
||||
|
||||
// Need to move a new reference into the closure.
|
||||
// !!ATTENTION!!:
|
||||
|
@ -53,16 +52,15 @@ fn example_main() {
|
|||
decodebin.connect_pad_added(move |_, src_pad| {
|
||||
// Here we temporarily retrieve a strong reference on the pipeline from the weak one
|
||||
// we moved into this callback.
|
||||
let pipeline = match pipeline_weak.upgrade() {
|
||||
Some(pipeline) => pipeline,
|
||||
None => return,
|
||||
let Some(pipeline) = pipeline_weak.upgrade() else {
|
||||
return;
|
||||
};
|
||||
|
||||
// In this example, we are only interested about parsing the ToC, so
|
||||
// we simply pipe every encountered stream into a fakesink, essentially
|
||||
// throwing away the data.
|
||||
let queue = gst::ElementFactory::make("queue", None).unwrap();
|
||||
let sink = gst::ElementFactory::make("fakesink", None).unwrap();
|
||||
let queue = gst::ElementFactory::make("queue").build().unwrap();
|
||||
let sink = gst::ElementFactory::make("fakesink").build().unwrap();
|
||||
|
||||
let elements = &[&queue, &sink];
|
||||
pipeline.add_many(elements).unwrap();
|
||||
|
@ -72,7 +70,7 @@ fn example_main() {
|
|||
e.sync_state_with_parent().unwrap();
|
||||
}
|
||||
|
||||
let sink_pad = queue.get_static_pad("sink").unwrap();
|
||||
let sink_pad = queue.static_pad("sink").unwrap();
|
||||
src_pad
|
||||
.link(&sink_pad)
|
||||
.expect("Unable to link src pad to sink pad");
|
||||
|
@ -82,14 +80,14 @@ fn example_main() {
|
|||
.set_state(gst::State::Paused)
|
||||
.expect("Unable to set the pipeline to the `Paused` state");
|
||||
|
||||
let bus = pipeline.get_bus().unwrap();
|
||||
let bus = pipeline.bus().unwrap();
|
||||
|
||||
// Instead of using a main loop (like GLib's), we manually iterate over
|
||||
// GStreamer's bus messages in this example. We don't need any special
|
||||
// functionality like timeouts or GLib socket notifications, so this is sufficient.
|
||||
// The bus is manually operated by repeatedly calling timed_pop on the bus with
|
||||
// the desired timeout for when to stop waiting for new messages. (None = Wait forever)
|
||||
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
|
@ -97,24 +95,20 @@ fn example_main() {
|
|||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.get_src().map(|s| s.get_path_string()),
|
||||
err.get_error(),
|
||||
err.get_debug()
|
||||
err.src().map(|s| s.path_string()),
|
||||
err.error(),
|
||||
err.debug()
|
||||
);
|
||||
break;
|
||||
}
|
||||
MessageView::Toc(msg_toc) => {
|
||||
// Some element found a ToC in the current media stream and told
|
||||
// us by posting a message to GStreamer's bus.
|
||||
let (toc, updated) = msg_toc.get_toc();
|
||||
println!(
|
||||
"\nReceived toc: {:?} - updated: {}",
|
||||
toc.get_scope(),
|
||||
updated
|
||||
);
|
||||
let (toc, updated) = msg_toc.toc();
|
||||
println!("\nReceived toc: {:?} - updated: {}", toc.scope(), updated);
|
||||
// Get a list of tags that are ToC specific.
|
||||
if let Some(tags) = toc.get_tags() {
|
||||
println!("- tags: {}", tags.to_string());
|
||||
if let Some(tags) = toc.tags() {
|
||||
println!("- tags: {tags}");
|
||||
}
|
||||
// ToCs do not have a fixed structure. Depending on the format that
|
||||
// they were parsed from, they might have different tree-like structures,
|
||||
|
@ -123,35 +117,31 @@ fn example_main() {
|
|||
// interpreting the ToC manually.
|
||||
// In this example, we simply want to print the ToC structure, so
|
||||
// we iterate everything and don't try to interpret anything.
|
||||
for toc_entry in toc.get_entries() {
|
||||
for toc_entry in toc.entries() {
|
||||
// Every entry in a ToC has its own type. One type could for
|
||||
// example be Chapter.
|
||||
println!(
|
||||
"\t{:?} - {}",
|
||||
toc_entry.get_entry_type(),
|
||||
toc_entry.get_uid()
|
||||
);
|
||||
println!("\t{:?} - {}", toc_entry.entry_type(), toc_entry.uid());
|
||||
// Every ToC entry can have a set of timestamps (start, stop).
|
||||
if let Some((start, stop)) = toc_entry.get_start_stop_times() {
|
||||
println!("\t- start: {}, stop: {}", start, stop);
|
||||
if let Some((start, stop)) = toc_entry.start_stop_times() {
|
||||
println!("\t- start: {start}, stop: {stop}");
|
||||
}
|
||||
// Every ToC entry can have tags to it.
|
||||
if let Some(tags) = toc_entry.get_tags() {
|
||||
println!("\t- tags: {}", tags.to_string());
|
||||
if let Some(tags) = toc_entry.tags() {
|
||||
println!("\t- tags: {tags}");
|
||||
}
|
||||
// Every ToC entry can have a set of child entries.
|
||||
// With this structure, you can create trees of arbitrary depth.
|
||||
for toc_sub_entry in toc_entry.get_sub_entries() {
|
||||
for toc_sub_entry in toc_entry.sub_entries() {
|
||||
println!(
|
||||
"\n\t\t{:?} - {}",
|
||||
toc_sub_entry.get_entry_type(),
|
||||
toc_sub_entry.get_uid()
|
||||
toc_sub_entry.entry_type(),
|
||||
toc_sub_entry.uid()
|
||||
);
|
||||
if let Some((start, stop)) = toc_sub_entry.get_start_stop_times() {
|
||||
println!("\t\t- start: {}, stop: {}", start, stop);
|
||||
if let Some((start, stop)) = toc_sub_entry.start_stop_times() {
|
||||
println!("\t\t- start: {start}, stop: {stop}");
|
||||
}
|
||||
if let Some(tags) = toc_sub_entry.get_tags() {
|
||||
println!("\t\t- tags: {:?}", tags.to_string());
|
||||
if let Some(tags) = toc_sub_entry.tags() {
|
||||
println!("\t\t- tags: {tags}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -166,7 +156,7 @@ fn example_main() {
|
|||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
||||
|
|
|
@ -17,29 +17,21 @@
|
|||
// {src} - {typefind} - {demuxer} -| {multiqueue} - {matroskamux} - {filesink}
|
||||
// \-[video]-/
|
||||
|
||||
extern crate gstreamer as gst;
|
||||
use gst::gst_element_error;
|
||||
use gst::prelude::*;
|
||||
|
||||
use std::env;
|
||||
|
||||
use anyhow::Error;
|
||||
use derive_more::{Display, Error};
|
||||
use gst::{element_error, prelude::*};
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Missing element {}", _0)]
|
||||
struct MissingElement(#[error(not(source))] &'static str);
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Received error from {}: {} (debug: {:?})", src, error, debug)]
|
||||
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
|
||||
struct ErrorMessage {
|
||||
src: String,
|
||||
error: String,
|
||||
debug: Option<String>,
|
||||
source: glib::Error,
|
||||
src: glib::GString,
|
||||
error: glib::Error,
|
||||
debug: Option<glib::GString>,
|
||||
}
|
||||
|
||||
fn example_main() -> Result<(), Error> {
|
||||
|
@ -57,33 +49,24 @@ fn example_main() -> Result<(), Error> {
|
|||
std::process::exit(-1)
|
||||
};
|
||||
|
||||
let pipeline = gst::Pipeline::new(None);
|
||||
let pipeline = gst::Pipeline::default();
|
||||
let src = gst::Element::make_from_uri(gst::URIType::Src, uri, None)
|
||||
.expect("We do not seem to support this uri");
|
||||
let typefinder =
|
||||
gst::ElementFactory::make("typefind", None).map_err(|_| MissingElement("typefind"))?;
|
||||
let queue =
|
||||
gst::ElementFactory::make("multiqueue", None).map_err(|_| MissingElement("multiqueue"))?;
|
||||
let muxer = gst::ElementFactory::make("matroskamux", None)
|
||||
.map_err(|_| MissingElement("matroskamux"))?;
|
||||
let sink =
|
||||
gst::ElementFactory::make("filesink", None).map_err(|_| MissingElement("filesink"))?;
|
||||
let typefinder = gst::ElementFactory::make("typefind").build()?;
|
||||
let queue = gst::ElementFactory::make("multiqueue")
|
||||
.property("max-size-buffers", 0u32)
|
||||
.property("max-size-time", 0u64)
|
||||
.property("max-size-bytes", 1024u32 * 1024 * 100)
|
||||
.build()?;
|
||||
let muxer = gst::ElementFactory::make("matroskamux").build()?;
|
||||
let sink = gst::ElementFactory::make("filesink")
|
||||
.property("location", output_file)
|
||||
.build()?;
|
||||
|
||||
sink.set_property("location", &output_file)
|
||||
.expect("setting location property failed");
|
||||
// Increase the queue capacity to 100MB to avoid a stalling pipeline
|
||||
queue
|
||||
.set_property("max-size-buffers", &0u32.to_value())
|
||||
.expect("changing capacity of multiqueue failed");
|
||||
queue
|
||||
.set_property("max-size-time", &0u64.to_value())
|
||||
.expect("changing capacity of multiqueue failed");
|
||||
queue
|
||||
.set_property("max-size-bytes", &(1024u32 * 1024 * 100).to_value())
|
||||
.expect("changing capacity of multiqueue failed");
|
||||
|
||||
pipeline
|
||||
.add_many(&[&src, &typefinder, &queue, &muxer, &sink])
|
||||
.add_many([&src, &typefinder, &queue, &muxer, &sink])
|
||||
.expect("failed to add elements to pipeline");
|
||||
|
||||
src.link(&typefinder)?;
|
||||
|
@ -91,67 +74,61 @@ fn example_main() -> Result<(), Error> {
|
|||
|
||||
let pipeline_clone = pipeline.clone();
|
||||
let typefinder_clone = typefinder.clone();
|
||||
typefinder
|
||||
.connect("have-type", false, move |values| {
|
||||
let (pipeline, typefinder) = (&pipeline_clone, &typefinder_clone);
|
||||
typefinder.connect("have-type", false, move |values| {
|
||||
let (pipeline, typefinder) = (&pipeline_clone, &typefinder_clone);
|
||||
|
||||
// Use the detected format to select between a small set of supported demuxers
|
||||
// Hint: This should probably never be done manually, for stuff like this,
|
||||
// the decodebin should be used, that does this stuff automatically and handles
|
||||
// much more corner-cases. This is just for the sake of being an example.
|
||||
let caps = values[2]
|
||||
.get::<gst::Caps>()
|
||||
.expect("typefinder \"have-type\" signal values[2]")
|
||||
.expect("typefinder \"have-type\" signal values[2]: no `caps`");
|
||||
let format_name = caps
|
||||
.get_structure(0)
|
||||
.expect("Failed to get format name")
|
||||
.get_name();
|
||||
// Use the detected format to select between a small set of supported demuxers
|
||||
// Hint: This should probably never be done manually, for stuff like this,
|
||||
// the decodebin should be used, that does this stuff automatically and handles
|
||||
// much more corner-cases. This is just for the sake of being an example.
|
||||
let caps = values[2]
|
||||
.get::<gst::Caps>()
|
||||
.expect("typefinder \"have-type\" signal values[2]");
|
||||
let format_name = caps.structure(0).expect("Failed to get format name").name();
|
||||
|
||||
let demuxer = match format_name {
|
||||
"video/x-matroska" | "video/webm" => {
|
||||
gst::ElementFactory::make("matroskademux", None).expect("matroskademux missing")
|
||||
}
|
||||
"video/quicktime" => {
|
||||
gst::ElementFactory::make("qtdemux", None).expect("qtdemux missing")
|
||||
}
|
||||
_ => {
|
||||
eprintln!("Sorry, this format is not supported by this example.");
|
||||
std::process::exit(-1);
|
||||
}
|
||||
};
|
||||
let demuxer = match format_name.as_str() {
|
||||
"video/x-matroska" | "video/webm" => gst::ElementFactory::make("matroskademux")
|
||||
.build()
|
||||
.expect("matroskademux missing"),
|
||||
"video/quicktime" => gst::ElementFactory::make("qtdemux")
|
||||
.build()
|
||||
.expect("qtdemux missing"),
|
||||
_ => {
|
||||
eprintln!("Sorry, this format is not supported by this example.");
|
||||
std::process::exit(-1);
|
||||
}
|
||||
};
|
||||
|
||||
// We found a supported format and created the appropriate demuxer -> link it
|
||||
pipeline
|
||||
.add(&demuxer)
|
||||
.expect("Failed to build remux pipeline");
|
||||
// We simply keep the typefinder element and pipe the data through it.
|
||||
// Removing is non-trivial since it started reading data from the pipeline
|
||||
// that the next element (the format specific demuxer) would need.
|
||||
typefinder
|
||||
.link(&demuxer)
|
||||
.expect("Failed to build remux pipeline");
|
||||
// We found a supported format and created the appropriate demuxer -> link it
|
||||
pipeline
|
||||
.add(&demuxer)
|
||||
.expect("Failed to build remux pipeline");
|
||||
// We simply keep the typefinder element and pipe the data through it.
|
||||
// Removing is non-trivial since it started reading data from the pipeline
|
||||
// that the next element (the format specific demuxer) would need.
|
||||
typefinder
|
||||
.link(&demuxer)
|
||||
.expect("Failed to build remux pipeline");
|
||||
|
||||
let queue_clone = queue.clone();
|
||||
let muxer_clone = muxer.clone();
|
||||
demuxer.connect_pad_added(move |demux, src_pad| {
|
||||
handle_demux_pad_added(demux, src_pad, &queue_clone, &muxer_clone)
|
||||
});
|
||||
demuxer
|
||||
.sync_state_with_parent()
|
||||
.expect("Failed to build remux pipeline");
|
||||
let queue_clone = queue.clone();
|
||||
let muxer_clone = muxer.clone();
|
||||
demuxer.connect_pad_added(move |demux, src_pad| {
|
||||
handle_demux_pad_added(demux, src_pad, &queue_clone, &muxer_clone)
|
||||
});
|
||||
demuxer
|
||||
.sync_state_with_parent()
|
||||
.expect("Failed to build remux pipeline");
|
||||
|
||||
None
|
||||
})
|
||||
.expect("Failed to register have-type signal of typefind");
|
||||
None
|
||||
});
|
||||
|
||||
pipeline.set_state(gst::State::Playing)?;
|
||||
|
||||
let bus = pipeline
|
||||
.get_bus()
|
||||
.bus()
|
||||
.expect("Pipeline without bus. Shouldn't happen!");
|
||||
|
||||
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
|
@ -161,22 +138,21 @@ fn example_main() -> Result<(), Error> {
|
|||
|
||||
return Err(ErrorMessage {
|
||||
src: msg
|
||||
.get_src()
|
||||
.map(|s| String::from(s.get_path_string()))
|
||||
.unwrap_or_else(|| String::from("None")),
|
||||
error: err.get_error().to_string(),
|
||||
debug: err.get_debug(),
|
||||
source: err.get_error(),
|
||||
.src()
|
||||
.map(|s| s.path_string())
|
||||
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
|
||||
error: err.error(),
|
||||
debug: err.debug(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
MessageView::StateChanged(s) => {
|
||||
println!(
|
||||
"State changed from {:?}: {:?} -> {:?} ({:?})",
|
||||
s.get_src().map(|s| s.get_path_string()),
|
||||
s.get_old(),
|
||||
s.get_current(),
|
||||
s.get_pending()
|
||||
s.src().map(|s| s.path_string()),
|
||||
s.old(),
|
||||
s.current(),
|
||||
s.pending()
|
||||
);
|
||||
}
|
||||
_ => (),
|
||||
|
@ -199,7 +175,7 @@ fn handle_demux_pad_added(
|
|||
// For that, we need to request a sink pad that fits our needs.
|
||||
let link_to_muxer = || -> Result<(), Error> {
|
||||
let queue_sink_pad = queue
|
||||
.get_request_pad("sink_%u")
|
||||
.request_pad_simple("sink_%u")
|
||||
.expect("If this happened, something is terribly wrong");
|
||||
demux_src_pad.link(&queue_sink_pad)?;
|
||||
// Now that we requested a sink pad fitting our needs from the multiqueue,
|
||||
|
@ -214,7 +190,7 @@ fn handle_demux_pad_added(
|
|||
// Link the multiqueue's output for this stream to the matroskamuxer.
|
||||
// For that, we request an appropriate pad at the muxer, that fits our needs.
|
||||
let muxer_sink_pad = muxer
|
||||
.get_compatible_pad(&queue_src_pad, None)
|
||||
.compatible_pad(&queue_src_pad, None)
|
||||
.expect("Aww, you found a format that matroska doesn't support!");
|
||||
queue_src_pad.link(&muxer_sink_pad)?;
|
||||
|
||||
|
@ -222,7 +198,7 @@ fn handle_demux_pad_added(
|
|||
};
|
||||
|
||||
if let Err(err) = link_to_muxer() {
|
||||
gst_element_error!(
|
||||
element_error!(
|
||||
demuxer,
|
||||
gst::LibraryError::Failed,
|
||||
("Failed to insert sink"),
|
||||
|
@ -232,10 +208,10 @@ fn handle_demux_pad_added(
|
|||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
match examples_common::run(example_main) {
|
||||
Ok(r) => r,
|
||||
Err(e) => eprintln!("Error! {}", e),
|
||||
Err(e) => eprintln!("Error! {e}"),
|
||||
}
|
||||
}
|
||||
|
|
|
@ -2,8 +2,6 @@
|
|||
// API to configure the compositor element to do specific
|
||||
// formatting of an input video.
|
||||
//
|
||||
extern crate gstreamer as gst;
|
||||
extern crate gstreamer_video as gst_video;
|
||||
use gst::prelude::*;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
|
@ -13,23 +11,23 @@ fn example_main() {
|
|||
gst::init().unwrap();
|
||||
|
||||
// This creates a pipeline by parsing the gst-launch pipeline syntax.
|
||||
let pipeline = gst::parse_launch(
|
||||
let pipeline = gst::parse::launch(
|
||||
"videotestsrc name=src ! video/x-raw,width=640,height=480 ! compositor0.sink_0 \
|
||||
compositor ! video/x-raw,width=1280,height=720 ! videoconvert ! autovideosink",
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
let pipeline = pipeline.dynamic_cast::<gst::Pipeline>().unwrap();
|
||||
let compositor = pipeline.get_by_name("compositor0").unwrap();
|
||||
let sinkpad = compositor.get_static_pad("sink_0").unwrap();
|
||||
let compositor = pipeline.by_name("compositor0").unwrap();
|
||||
let sinkpad = compositor.static_pad("sink_0").unwrap();
|
||||
|
||||
/* Completely contrived example that takes the 4:3 input video, cuts out a 5:4 frame
|
||||
* and then adds pillarbox borders to place it in a 16:9 target area */
|
||||
/* The output will be the full frame: */
|
||||
sinkpad.set_property("xpos", &0i32).unwrap();
|
||||
sinkpad.set_property("ypos", &0i32).unwrap();
|
||||
sinkpad.set_property("width", &1280i32).unwrap();
|
||||
sinkpad.set_property("height", &720i32).unwrap();
|
||||
sinkpad.set_property("xpos", 0i32);
|
||||
sinkpad.set_property("ypos", 0i32);
|
||||
sinkpad.set_property("width", 1280i32);
|
||||
sinkpad.set_property("height", 720i32);
|
||||
|
||||
let mut converter_config = gst_video::VideoConverterConfig::new();
|
||||
/* Crop the input frame to 5:4: */
|
||||
|
@ -43,9 +41,7 @@ fn example_main() {
|
|||
converter_config.set_dest_y(0);
|
||||
converter_config.set_dest_height(Some(720));
|
||||
|
||||
sinkpad
|
||||
.set_property("converter-config", &*converter_config)
|
||||
.unwrap();
|
||||
sinkpad.set_property("converter-config", &*converter_config);
|
||||
pipeline
|
||||
.set_state(gst::State::Playing)
|
||||
.expect("Unable to set the pipeline to the `Playing` state");
|
||||
|
@ -53,8 +49,8 @@ fn example_main() {
|
|||
/* Iterate messages on the bus until an error or EOS occurs,
|
||||
* although in this example the only error we'll hopefully
|
||||
* get is if the user closes the output window */
|
||||
let bus = pipeline.get_bus().unwrap();
|
||||
for msg in bus.iter_timed(gst::CLOCK_TIME_NONE) {
|
||||
let bus = pipeline.bus().unwrap();
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
|
@ -66,9 +62,9 @@ fn example_main() {
|
|||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.get_src().map(|s| s.get_path_string()),
|
||||
err.get_error(),
|
||||
err.get_debug()
|
||||
err.src().map(|s| s.path_string()),
|
||||
err.error(),
|
||||
err.debug()
|
||||
);
|
||||
break;
|
||||
}
|
||||
|
@ -82,7 +78,7 @@ fn example_main() {
|
|||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environent on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up autmatically)
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
||||
|
|
229
examples/src/bin/zoom.rs
Normal file
229
examples/src/bin/zoom.rs
Normal file
|
@ -0,0 +1,229 @@
|
|||
// Zoom example using navigation events and a compositor
|
||||
|
||||
// Use can change the video player zoom using the next keys:
|
||||
// * +: Zoom in
|
||||
// * -: Zoom out
|
||||
// * Up/Down/Right/Left: Move the frame
|
||||
// * r: reset the zoom
|
||||
// Also mouse navigation events can be used for a better UX.
|
||||
|
||||
use gst::prelude::*;
|
||||
use gst_video::video_event::NavigationEvent;
|
||||
use std::sync::Mutex;
|
||||
|
||||
#[path = "../examples-common.rs"]
|
||||
mod examples_common;
|
||||
|
||||
const WIDTH: i32 = 1280;
|
||||
const HEIGHT: i32 = 720;
|
||||
|
||||
#[derive(Default)]
|
||||
struct MouseState {
|
||||
clicked: bool,
|
||||
clicked_x: f64,
|
||||
clicked_y: f64,
|
||||
clicked_xpos: i32,
|
||||
clicked_ypos: i32,
|
||||
}
|
||||
|
||||
fn zoom(mixer_sink_pad: gst::Pad, x: i32, y: i32, zoom_in: bool) {
|
||||
let xpos = mixer_sink_pad.property::<i32>("xpos");
|
||||
let ypos = mixer_sink_pad.property::<i32>("ypos");
|
||||
let width = mixer_sink_pad.property::<i32>("width");
|
||||
let height = mixer_sink_pad.property::<i32>("height");
|
||||
|
||||
let (width_offset, height_offset) = if zoom_in {
|
||||
(WIDTH / 10, HEIGHT / 10)
|
||||
} else {
|
||||
(-WIDTH / 10, -HEIGHT / 10)
|
||||
};
|
||||
|
||||
if width_offset + width <= 0 {
|
||||
return;
|
||||
}
|
||||
|
||||
mixer_sink_pad.set_property("width", width + width_offset);
|
||||
mixer_sink_pad.set_property("height", height + height_offset);
|
||||
|
||||
let xpos_offset = ((x as f32 / WIDTH as f32) * width_offset as f32) as i32;
|
||||
let new_xpos = xpos - xpos_offset;
|
||||
let ypos_offset = ((y as f32 / HEIGHT as f32) * height_offset as f32) as i32;
|
||||
let new_ypos = ypos - ypos_offset;
|
||||
|
||||
if new_xpos != xpos {
|
||||
mixer_sink_pad.set_property("xpos", new_xpos);
|
||||
}
|
||||
if new_ypos != ypos {
|
||||
mixer_sink_pad.set_property("ypos", new_ypos);
|
||||
}
|
||||
}
|
||||
|
||||
fn reset_zoom(mixer_sink_pad: gst::Pad) {
|
||||
let xpos = mixer_sink_pad.property::<i32>("xpos");
|
||||
let ypos = mixer_sink_pad.property::<i32>("ypos");
|
||||
let width = mixer_sink_pad.property::<i32>("width");
|
||||
let height = mixer_sink_pad.property::<i32>("height");
|
||||
|
||||
if 0 != xpos {
|
||||
mixer_sink_pad.set_property("xpos", 0);
|
||||
}
|
||||
if 0 != ypos {
|
||||
mixer_sink_pad.set_property("ypos", 0);
|
||||
}
|
||||
if WIDTH != width {
|
||||
mixer_sink_pad.set_property("width", WIDTH);
|
||||
}
|
||||
if HEIGHT != height {
|
||||
mixer_sink_pad.set_property("height", HEIGHT);
|
||||
}
|
||||
}
|
||||
|
||||
fn example_main() {
|
||||
let clicked = Mutex::new(MouseState::default());
|
||||
|
||||
gst::init().unwrap();
|
||||
|
||||
let pipeline = gst::parse::launch(&format!(
|
||||
"compositor name=mix background=1 sink_0::xpos=0 sink_0::ypos=0 sink_0::zorder=0 sink_0::width={WIDTH} sink_0::height={HEIGHT} ! xvimagesink \
|
||||
videotestsrc name=src ! video/x-raw,framerate=30/1,width={WIDTH},height={HEIGHT},pixel-aspect-ratio=1/1 ! queue ! mix.sink_0"
|
||||
)).unwrap().downcast::<gst::Pipeline>().unwrap();
|
||||
|
||||
let mixer = pipeline.by_name("mix").unwrap();
|
||||
let mixer_src_pad = mixer.static_pad("src").unwrap();
|
||||
let mixer_sink_pad_weak = mixer.static_pad("sink_0").unwrap().downgrade();
|
||||
|
||||
// Probe added in the sink pad to get direct navigation events w/o transformation done by the mixer
|
||||
mixer_src_pad.add_probe(gst::PadProbeType::EVENT_UPSTREAM, move |_, probe_info| {
|
||||
let mixer_sink_pad = mixer_sink_pad_weak.upgrade().unwrap();
|
||||
|
||||
let Some(ev) = probe_info.event() else {
|
||||
return gst::PadProbeReturn::Ok;
|
||||
};
|
||||
|
||||
if ev.type_() != gst::EventType::Navigation {
|
||||
return gst::PadProbeReturn::Ok;
|
||||
};
|
||||
|
||||
let Ok(nav_event) = NavigationEvent::parse(ev) else {
|
||||
return gst::PadProbeReturn::Ok;
|
||||
};
|
||||
|
||||
match nav_event {
|
||||
NavigationEvent::KeyPress { key, .. } => match key.as_str() {
|
||||
"Left" => {
|
||||
let xpos = mixer_sink_pad.property::<i32>("xpos");
|
||||
mixer_sink_pad.set_property("xpos", xpos - 10);
|
||||
}
|
||||
"Right" => {
|
||||
let xpos = mixer_sink_pad.property::<i32>("xpos");
|
||||
mixer_sink_pad.set_property("xpos", xpos + 10);
|
||||
}
|
||||
"Up" => {
|
||||
let ypos = mixer_sink_pad.property::<i32>("ypos");
|
||||
mixer_sink_pad.set_property("ypos", ypos - 10);
|
||||
}
|
||||
"Down" => {
|
||||
let ypos = mixer_sink_pad.property::<i32>("ypos");
|
||||
mixer_sink_pad.set_property("ypos", ypos + 10);
|
||||
}
|
||||
"plus" => {
|
||||
zoom(mixer_sink_pad, WIDTH / 2, HEIGHT / 2, true);
|
||||
}
|
||||
"minus" => {
|
||||
zoom(mixer_sink_pad, WIDTH / 2, HEIGHT / 2, false);
|
||||
}
|
||||
"r" => {
|
||||
reset_zoom(mixer_sink_pad);
|
||||
}
|
||||
_ => (),
|
||||
},
|
||||
NavigationEvent::MouseMove { x, y, .. } => {
|
||||
let state = clicked.lock().unwrap();
|
||||
if state.clicked {
|
||||
let xpos = mixer_sink_pad.property::<i32>("xpos");
|
||||
let ypos = mixer_sink_pad.property::<i32>("ypos");
|
||||
|
||||
let new_xpos = state.clicked_xpos + (x - state.clicked_x) as i32;
|
||||
let new_ypos = state.clicked_ypos + (y - state.clicked_y) as i32;
|
||||
|
||||
if new_xpos != xpos {
|
||||
mixer_sink_pad.set_property("xpos", new_xpos);
|
||||
}
|
||||
|
||||
if new_ypos != ypos {
|
||||
mixer_sink_pad.set_property("ypos", new_ypos);
|
||||
}
|
||||
}
|
||||
}
|
||||
NavigationEvent::MouseButtonPress { button, x, y, .. } => {
|
||||
if button == 1 || button == 272 {
|
||||
let mut state = clicked.lock().unwrap();
|
||||
state.clicked = true;
|
||||
state.clicked_x = x;
|
||||
state.clicked_y = y;
|
||||
state.clicked_xpos = mixer_sink_pad.property("xpos");
|
||||
state.clicked_ypos = mixer_sink_pad.property("ypos");
|
||||
} else if button == 2 || button == 3 || button == 274 || button == 273 {
|
||||
reset_zoom(mixer_sink_pad);
|
||||
} else if button == 4 {
|
||||
zoom(mixer_sink_pad, x as i32, y as i32, true);
|
||||
} else if button == 5 {
|
||||
zoom(mixer_sink_pad, x as i32, y as i32, false);
|
||||
}
|
||||
}
|
||||
NavigationEvent::MouseButtonRelease { button, .. } => {
|
||||
if button == 1 || button == 272 {
|
||||
let mut state = clicked.lock().unwrap();
|
||||
state.clicked = false;
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "v1_18")]
|
||||
NavigationEvent::MouseScroll { x, y, delta_y, .. } => {
|
||||
if delta_y > 0.0 {
|
||||
zoom(mixer_sink_pad, x as i32, y as i32, true);
|
||||
} else if delta_y < 0.0 {
|
||||
zoom(mixer_sink_pad, x as i32, y as i32, false);
|
||||
}
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
|
||||
gst::PadProbeReturn::Ok
|
||||
});
|
||||
|
||||
pipeline
|
||||
.set_state(gst::State::Playing)
|
||||
.expect("Unable to set the pipeline to the `Playing` state");
|
||||
|
||||
let bus = pipeline.bus().unwrap();
|
||||
for msg in bus.iter_timed(gst::ClockTime::NONE) {
|
||||
use gst::MessageView;
|
||||
|
||||
match msg.view() {
|
||||
MessageView::Eos(..) => {
|
||||
println!("received eos");
|
||||
break;
|
||||
}
|
||||
MessageView::Error(err) => {
|
||||
println!(
|
||||
"Error from {:?}: {} ({:?})",
|
||||
err.src().map(|s| s.path_string()),
|
||||
err.error(),
|
||||
err.debug()
|
||||
);
|
||||
break;
|
||||
}
|
||||
_ => (),
|
||||
};
|
||||
}
|
||||
|
||||
pipeline
|
||||
.set_state(gst::State::Null)
|
||||
.expect("Unable to set the pipeline to the `Null` state");
|
||||
}
|
||||
|
||||
fn main() {
|
||||
// tutorials_common::run is only required to set up the application environment on macOS
|
||||
// (but not necessary in normal Cocoa applications where this is set up automatically)
|
||||
examples_common::run(example_main);
|
||||
}
|
|
@ -1,41 +1,6 @@
|
|||
/// macOS has a specific requirement that there must be a run loop running
|
||||
/// on the main thread in order to open windows and use OpenGL.
|
||||
|
||||
#[cfg(target_os = "macos")]
|
||||
mod runloop {
|
||||
use std::os::raw::c_void;
|
||||
#[repr(C)]
|
||||
pub struct CFRunLoop(*mut c_void);
|
||||
|
||||
#[link(name = "foundation", kind = "framework")]
|
||||
extern "C" {
|
||||
fn CFRunLoopRun();
|
||||
fn CFRunLoopGetMain() -> *mut c_void;
|
||||
fn CFRunLoopStop(l: *mut c_void);
|
||||
}
|
||||
|
||||
impl CFRunLoop {
|
||||
pub fn run() {
|
||||
unsafe {
|
||||
CFRunLoopRun();
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_main() -> CFRunLoop {
|
||||
unsafe {
|
||||
let r = CFRunLoopGetMain();
|
||||
assert!(!r.is_null());
|
||||
CFRunLoop(r)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn stop(&self) {
|
||||
unsafe { CFRunLoopStop(self.0) }
|
||||
}
|
||||
}
|
||||
|
||||
unsafe impl Send for CFRunLoop {}
|
||||
}
|
||||
/// macOS has a specific requirement that there must be a run loop running on the main thread in
|
||||
/// order to open windows and use OpenGL, and that the global NSApplication instance must be
|
||||
/// initialized.
|
||||
|
||||
/// On macOS this launches the callback function on a thread.
|
||||
/// On other platforms it's just executed immediately.
|
||||
|
@ -52,16 +17,72 @@ pub fn run<T, F: FnOnce() -> T + Send + 'static>(main: F) -> T
|
|||
where
|
||||
T: Send + 'static,
|
||||
{
|
||||
use std::thread;
|
||||
use std::{
|
||||
ffi::c_void,
|
||||
sync::mpsc::{channel, Sender},
|
||||
thread,
|
||||
};
|
||||
|
||||
let l = runloop::CFRunLoop::get_main();
|
||||
let t = thread::spawn(move || {
|
||||
let res = main();
|
||||
l.stop();
|
||||
res
|
||||
});
|
||||
use cocoa::{
|
||||
appkit::{NSApplication, NSWindow},
|
||||
base::id,
|
||||
delegate,
|
||||
};
|
||||
use objc::{
|
||||
class, msg_send,
|
||||
runtime::{Object, Sel},
|
||||
sel, sel_impl,
|
||||
};
|
||||
|
||||
runloop::CFRunLoop::run();
|
||||
unsafe {
|
||||
let app = cocoa::appkit::NSApp();
|
||||
let (send, recv) = channel::<()>();
|
||||
|
||||
t.join().unwrap()
|
||||
extern "C" fn on_finish_launching(this: &Object, _cmd: Sel, _notification: id) {
|
||||
let send = unsafe {
|
||||
let send_pointer = *this.get_ivar::<*const c_void>("send");
|
||||
let boxed = Box::from_raw(send_pointer as *mut Sender<()>);
|
||||
*boxed
|
||||
};
|
||||
send.send(()).unwrap();
|
||||
}
|
||||
|
||||
let delegate = delegate!("AppDelegate", {
|
||||
app: id = app,
|
||||
send: *const c_void = Box::into_raw(Box::new(send)) as *const c_void,
|
||||
(applicationDidFinishLaunching:) => on_finish_launching as extern fn(&Object, Sel, id)
|
||||
});
|
||||
app.setDelegate_(delegate);
|
||||
|
||||
let t = thread::spawn(move || {
|
||||
// Wait for the NSApp to launch to avoid possibly calling stop_() too early
|
||||
recv.recv().unwrap();
|
||||
|
||||
let res = main();
|
||||
|
||||
let app = cocoa::appkit::NSApp();
|
||||
app.stop_(cocoa::base::nil);
|
||||
|
||||
// Stopping the event loop requires an actual event
|
||||
let event = cocoa::appkit::NSEvent::otherEventWithType_location_modifierFlags_timestamp_windowNumber_context_subtype_data1_data2_(
|
||||
cocoa::base::nil,
|
||||
cocoa::appkit::NSEventType::NSApplicationDefined,
|
||||
cocoa::foundation::NSPoint { x: 0.0, y: 0.0 },
|
||||
cocoa::appkit::NSEventModifierFlags::empty(),
|
||||
0.0,
|
||||
0,
|
||||
cocoa::base::nil,
|
||||
cocoa::appkit::NSEventSubtype::NSApplicationActivatedEventType,
|
||||
0,
|
||||
0,
|
||||
);
|
||||
app.postEvent_atStart_(event, cocoa::base::YES);
|
||||
|
||||
res
|
||||
});
|
||||
|
||||
app.run();
|
||||
|
||||
t.join().unwrap()
|
||||
}
|
||||
}
|
||||
|
|
820
examples/src/glupload.rs
Normal file
820
examples/src/glupload.rs
Normal file
|
@ -0,0 +1,820 @@
|
|||
//! This example demonstrates how to output GL textures, within an EGL/X11 context provided by the
|
||||
//! application, and render those textures in the GL application.
|
||||
//!
|
||||
//! This example follow common patterns from `glutin`:
|
||||
//! <https://github.com/rust-windowing/glutin/blob/master/glutin_examples/src/lib.rs>
|
||||
|
||||
// {videotestsrc} - { glsinkbin }
|
||||
|
||||
use std::{
|
||||
ffi::{CStr, CString},
|
||||
mem,
|
||||
num::NonZeroU32,
|
||||
ptr,
|
||||
};
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use derive_more::{Display, Error};
|
||||
use glutin::{
|
||||
config::GetGlConfig as _,
|
||||
context::AsRawContext as _,
|
||||
display::{AsRawDisplay as _, GetGlDisplay as _},
|
||||
prelude::*,
|
||||
};
|
||||
use glutin_winit::GlWindow as _;
|
||||
use gst::element_error;
|
||||
use gst_gl::prelude::*;
|
||||
use raw_window_handle::HasRawWindowHandle as _;
|
||||
|
||||
#[derive(Debug, Display, Error)]
|
||||
#[display(fmt = "Received error from {src}: {error} (debug: {debug:?})")]
|
||||
struct ErrorMessage {
|
||||
src: glib::GString,
|
||||
error: glib::Error,
|
||||
debug: Option<glib::GString>,
|
||||
}
|
||||
|
||||
#[rustfmt::skip]
|
||||
static VERTICES: [f32; 20] = [
|
||||
1.0, 1.0, 0.0, 1.0, 0.0,
|
||||
-1.0, 1.0, 0.0, 0.0, 0.0,
|
||||
-1.0, -1.0, 0.0, 0.0, 1.0,
|
||||
1.0, -1.0, 0.0, 1.0, 1.0,
|
||||
];
|
||||
|
||||
static INDICES: [u16; 6] = [0, 1, 2, 0, 2, 3];
|
||||
|
||||
#[rustfmt::skip]
|
||||
static IDENTITY: [f32; 16] = [
|
||||
1.0, 0.0, 0.0, 0.0,
|
||||
0.0, 1.0, 0.0, 0.0,
|
||||
0.0, 0.0, 1.0, 0.0,
|
||||
0.0, 0.0, 0.0, 1.0,
|
||||
];
|
||||
|
||||
const VS_SRC: &[u8] = b"
|
||||
uniform mat4 u_transformation;
|
||||
attribute vec4 a_position;
|
||||
attribute vec2 a_texcoord;
|
||||
varying vec2 v_texcoord;
|
||||
|
||||
void main() {
|
||||
gl_Position = u_transformation * a_position;
|
||||
v_texcoord = a_texcoord;
|
||||
}
|
||||
\0";
|
||||
|
||||
const FS_SRC: &[u8] = b"
|
||||
#ifdef GL_ES
|
||||
precision mediump float;
|
||||
#endif
|
||||
varying vec2 v_texcoord;
|
||||
uniform sampler2D tex;
|
||||
|
||||
void main() {
|
||||
gl_FragColor = texture2D(tex, v_texcoord);
|
||||
}
|
||||
\0";
|
||||
|
||||
#[allow(clippy::unreadable_literal)]
|
||||
#[allow(clippy::unused_unit)]
|
||||
#[allow(clippy::too_many_arguments)]
|
||||
#[allow(clippy::manual_non_exhaustive)]
|
||||
#[allow(clippy::upper_case_acronyms)]
|
||||
pub(crate) mod gl {
|
||||
pub use self::Gles2 as Gl;
|
||||
include!(concat!(env!("OUT_DIR"), "/test_gl_bindings.rs"));
|
||||
}
|
||||
|
||||
struct Gl {
|
||||
gl: gl::Gl,
|
||||
program: gl::types::GLuint,
|
||||
attr_position: gl::types::GLint,
|
||||
attr_texture: gl::types::GLint,
|
||||
vao: Option<gl::types::GLuint>,
|
||||
vertex_buffer: gl::types::GLuint,
|
||||
vbo_indices: gl::types::GLuint,
|
||||
}
|
||||
|
||||
impl Gl {
|
||||
fn draw_frame(&self, texture_id: gl::types::GLuint) {
|
||||
unsafe {
|
||||
// render
|
||||
self.gl.ClearColor(0.0, 0.0, 0.0, 1.0);
|
||||
self.gl.Clear(gl::COLOR_BUFFER_BIT);
|
||||
|
||||
self.gl.BlendColor(0.0, 0.0, 0.0, 1.0);
|
||||
if self.gl.BlendFuncSeparate.is_loaded() {
|
||||
self.gl.BlendFuncSeparate(
|
||||
gl::SRC_ALPHA,
|
||||
gl::CONSTANT_COLOR,
|
||||
gl::ONE,
|
||||
gl::ONE_MINUS_SRC_ALPHA,
|
||||
);
|
||||
} else {
|
||||
self.gl.BlendFunc(gl::SRC_ALPHA, gl::CONSTANT_COLOR);
|
||||
}
|
||||
self.gl.BlendEquation(gl::FUNC_ADD);
|
||||
self.gl.Enable(gl::BLEND);
|
||||
|
||||
self.gl.UseProgram(self.program);
|
||||
|
||||
if self.gl.BindVertexArray.is_loaded() {
|
||||
self.gl.BindVertexArray(self.vao.unwrap());
|
||||
}
|
||||
|
||||
{
|
||||
self.gl
|
||||
.BindBuffer(gl::ELEMENT_ARRAY_BUFFER, self.vbo_indices);
|
||||
self.gl.BindBuffer(gl::ARRAY_BUFFER, self.vertex_buffer);
|
||||
|
||||
// Load the vertex position
|
||||
self.gl.VertexAttribPointer(
|
||||
self.attr_position as gl::types::GLuint,
|
||||
3,
|
||||
gl::FLOAT,
|
||||
gl::FALSE,
|
||||
(5 * mem::size_of::<f32>()) as gl::types::GLsizei,
|
||||
ptr::null(),
|
||||
);
|
||||
|
||||
// Load the texture coordinate
|
||||
self.gl.VertexAttribPointer(
|
||||
self.attr_texture as gl::types::GLuint,
|
||||
2,
|
||||
gl::FLOAT,
|
||||
gl::FALSE,
|
||||
(5 * mem::size_of::<f32>()) as gl::types::GLsizei,
|
||||
(3 * mem::size_of::<f32>()) as *const () as *const _,
|
||||
);
|
||||
|
||||
self.gl.EnableVertexAttribArray(self.attr_position as _);
|
||||
self.gl.EnableVertexAttribArray(self.attr_texture as _);
|
||||
}
|
||||
|
||||
self.gl.ActiveTexture(gl::TEXTURE0);
|
||||
self.gl.BindTexture(gl::TEXTURE_2D, texture_id);
|
||||
|
||||
let location = self
|
||||
.gl
|
||||
.GetUniformLocation(self.program, b"tex\0".as_ptr() as *const _);
|
||||
self.gl.Uniform1i(location, 0);
|
||||
|
||||
let location = self
|
||||
.gl
|
||||
.GetUniformLocation(self.program, b"u_transformation\0".as_ptr() as *const _);
|
||||
self.gl
|
||||
.UniformMatrix4fv(location, 1, gl::FALSE, IDENTITY.as_ptr() as *const _);
|
||||
|
||||
self.gl
|
||||
.DrawElements(gl::TRIANGLES, 6, gl::UNSIGNED_SHORT, ptr::null());
|
||||
|
||||
self.gl.BindTexture(gl::TEXTURE_2D, 0);
|
||||
self.gl.UseProgram(0);
|
||||
|
||||
if self.gl.BindVertexArray.is_loaded() {
|
||||
self.gl.BindVertexArray(0);
|
||||
}
|
||||
|
||||
{
|
||||
self.gl.BindBuffer(gl::ELEMENT_ARRAY_BUFFER, 0);
|
||||
self.gl.BindBuffer(gl::ARRAY_BUFFER, 0);
|
||||
|
||||
self.gl.DisableVertexAttribArray(self.attr_position as _);
|
||||
self.gl.DisableVertexAttribArray(self.attr_texture as _);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn resize(&self, size: winit::dpi::PhysicalSize<u32>) {
|
||||
unsafe {
|
||||
self.gl
|
||||
.Viewport(0, 0, size.width as i32, size.height as i32);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn load(gl_display: &impl glutin::display::GlDisplay) -> Gl {
|
||||
let gl = gl::Gl::load_with(|symbol| {
|
||||
let symbol = CString::new(symbol).unwrap();
|
||||
gl_display.get_proc_address(&symbol).cast()
|
||||
});
|
||||
|
||||
let version = unsafe {
|
||||
let version = gl.GetString(gl::VERSION);
|
||||
assert!(!version.is_null());
|
||||
let version = CStr::from_ptr(version.cast());
|
||||
version.to_string_lossy()
|
||||
};
|
||||
|
||||
println!("OpenGL version {version}");
|
||||
|
||||
let (program, attr_position, attr_texture, vao, vertex_buffer, vbo_indices) = unsafe {
|
||||
let vs = gl.CreateShader(gl::VERTEX_SHADER);
|
||||
gl.ShaderSource(vs, 1, [VS_SRC.as_ptr() as *const _].as_ptr(), ptr::null());
|
||||
gl.CompileShader(vs);
|
||||
|
||||
let fs = gl.CreateShader(gl::FRAGMENT_SHADER);
|
||||
gl.ShaderSource(fs, 1, [FS_SRC.as_ptr() as *const _].as_ptr(), ptr::null());
|
||||
gl.CompileShader(fs);
|
||||
|
||||
let program = gl.CreateProgram();
|
||||
gl.AttachShader(program, vs);
|
||||
gl.AttachShader(program, fs);
|
||||
gl.LinkProgram(program);
|
||||
|
||||
{
|
||||
let mut success = 1;
|
||||
gl.GetProgramiv(program, gl::LINK_STATUS, &mut success);
|
||||
assert_ne!(success, 0);
|
||||
assert_eq!(gl.GetError(), 0);
|
||||
}
|
||||
|
||||
let attr_position = gl.GetAttribLocation(program, b"a_position\0".as_ptr() as *const _);
|
||||
let attr_texture = gl.GetAttribLocation(program, b"a_texcoord\0".as_ptr() as *const _);
|
||||
|
||||
let vao = if gl.BindVertexArray.is_loaded() {
|
||||
let mut vao = mem::MaybeUninit::uninit();
|
||||
gl.GenVertexArrays(1, vao.as_mut_ptr());
|
||||
let vao = vao.assume_init();
|
||||
gl.BindVertexArray(vao);
|
||||
Some(vao)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let mut vertex_buffer = mem::MaybeUninit::uninit();
|
||||
gl.GenBuffers(1, vertex_buffer.as_mut_ptr());
|
||||
let vertex_buffer = vertex_buffer.assume_init();
|
||||
gl.BindBuffer(gl::ARRAY_BUFFER, vertex_buffer);
|
||||
gl.BufferData(
|
||||
gl::ARRAY_BUFFER,
|
||||
(VERTICES.len() * mem::size_of::<f32>()) as gl::types::GLsizeiptr,
|
||||
VERTICES.as_ptr() as *const _,
|
||||
gl::STATIC_DRAW,
|
||||
);
|
||||
|
||||
let mut vbo_indices = mem::MaybeUninit::uninit();
|
||||
gl.GenBuffers(1, vbo_indices.as_mut_ptr());
|
||||
let vbo_indices = vbo_indices.assume_init();
|
||||
gl.BindBuffer(gl::ELEMENT_ARRAY_BUFFER, vbo_indices);
|
||||
gl.BufferData(
|
||||
gl::ELEMENT_ARRAY_BUFFER,
|
||||
(INDICES.len() * mem::size_of::<u16>()) as gl::types::GLsizeiptr,
|
||||
INDICES.as_ptr() as *const _,
|
||||
gl::STATIC_DRAW,
|
||||
);
|
||||
|
||||
if gl.BindVertexArray.is_loaded() {
|
||||
gl.BindBuffer(gl::ELEMENT_ARRAY_BUFFER, vbo_indices);
|
||||
gl.BindBuffer(gl::ARRAY_BUFFER, vertex_buffer);
|
||||
|
||||
// Load the vertex position
|
||||
gl.VertexAttribPointer(
|
||||
attr_position as gl::types::GLuint,
|
||||
3,
|
||||
gl::FLOAT,
|
||||
gl::FALSE,
|
||||
(5 * mem::size_of::<f32>()) as gl::types::GLsizei,
|
||||
ptr::null(),
|
||||
);
|
||||
|
||||
// Load the texture coordinate
|
||||
gl.VertexAttribPointer(
|
||||
attr_texture as gl::types::GLuint,
|
||||
2,
|
||||
gl::FLOAT,
|
||||
gl::FALSE,
|
||||
(5 * mem::size_of::<f32>()) as gl::types::GLsizei,
|
||||
(3 * mem::size_of::<f32>()) as *const () as *const _,
|
||||
);
|
||||
|
||||
gl.EnableVertexAttribArray(attr_position as _);
|
||||
gl.EnableVertexAttribArray(attr_texture as _);
|
||||
|
||||
gl.BindVertexArray(0);
|
||||
}
|
||||
|
||||
gl.BindBuffer(gl::ELEMENT_ARRAY_BUFFER, 0);
|
||||
gl.BindBuffer(gl::ARRAY_BUFFER, 0);
|
||||
|
||||
assert_eq!(gl.GetError(), 0);
|
||||
|
||||
(
|
||||
program,
|
||||
attr_position,
|
||||
attr_texture,
|
||||
vao,
|
||||
vertex_buffer,
|
||||
vbo_indices,
|
||||
)
|
||||
};
|
||||
|
||||
Gl {
|
||||
gl,
|
||||
program,
|
||||
attr_position,
|
||||
attr_texture,
|
||||
vao,
|
||||
vertex_buffer,
|
||||
vbo_indices,
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
enum Message {
|
||||
Frame(gst_video::VideoInfo, gst::Buffer),
|
||||
BusEvent,
|
||||
}
|
||||
|
||||
pub(crate) struct App {
|
||||
pipeline: gst::Pipeline,
|
||||
appsink: gst_app::AppSink,
|
||||
bus: gst::Bus,
|
||||
event_loop: winit::event_loop::EventLoop<Message>,
|
||||
window: Option<winit::window::Window>,
|
||||
not_current_gl_context: Option<glutin::context::NotCurrentContext>,
|
||||
shared_context: gst_gl::GLContext,
|
||||
}
|
||||
|
||||
impl App {
|
||||
pub(crate) fn new(gl_element: Option<&gst::Element>) -> Result<App> {
|
||||
gst::init()?;
|
||||
|
||||
let (pipeline, appsink) = App::create_pipeline(gl_element)?;
|
||||
let bus = pipeline
|
||||
.bus()
|
||||
.context("Pipeline without bus. Shouldn't happen!")?;
|
||||
|
||||
let event_loop = winit::event_loop::EventLoopBuilder::with_user_event().build()?;
|
||||
|
||||
// Only Windows requires the window to be present before creating a `glutin::Display`. Other
|
||||
// platforms don't really need one (and on Android, none exists until `Event::Resumed`).
|
||||
let window_builder = cfg!(windows).then(|| {
|
||||
winit::window::WindowBuilder::new()
|
||||
.with_transparent(true)
|
||||
.with_title("GL rendering")
|
||||
});
|
||||
|
||||
let display_builder =
|
||||
glutin_winit::DisplayBuilder::new().with_window_builder(window_builder);
|
||||
// XXX on macOS/cgl only one config can be queried at a time. If transparency is needed,
|
||||
// add .with_transparency(true) to ConfigTemplateBuilder. EGL on X11 doesn't support
|
||||
// transparency at all.
|
||||
let template = glutin::config::ConfigTemplateBuilder::new().with_alpha_size(8);
|
||||
let (window, gl_config) = display_builder
|
||||
.build(&event_loop, template, |configs| {
|
||||
configs
|
||||
.reduce(|current, new_config| {
|
||||
let prefer_transparency =
|
||||
new_config.supports_transparency().unwrap_or(false)
|
||||
& !current.supports_transparency().unwrap_or(false);
|
||||
|
||||
if prefer_transparency || new_config.num_samples() > current.num_samples() {
|
||||
new_config
|
||||
} else {
|
||||
current
|
||||
}
|
||||
})
|
||||
.unwrap()
|
||||
})
|
||||
.expect("Failed to build display");
|
||||
println!(
|
||||
"Picked a config with {} samples and transparency {}. Pixel format: {:?}",
|
||||
gl_config.num_samples(),
|
||||
gl_config.supports_transparency().unwrap_or(false),
|
||||
gl_config.color_buffer_type()
|
||||
);
|
||||
println!("Config supports GL API(s) {:?}", gl_config.api());
|
||||
|
||||
// XXX The display could be obtained from any object created by it, so we can query it from
|
||||
// the config.
|
||||
let gl_display = gl_config.display();
|
||||
let raw_gl_display = gl_display.raw_display();
|
||||
|
||||
println!("Using raw display connection {:?}", raw_gl_display);
|
||||
|
||||
let raw_window_handle = window.as_ref().map(|window| window.raw_window_handle());
|
||||
|
||||
// The context creation part. It can be created before surface and that's how
|
||||
// it's expected in multithreaded + multiwindow operation mode, since you
|
||||
// can send NotCurrentContext, but not Surface.
|
||||
let context_attributes =
|
||||
glutin::context::ContextAttributesBuilder::new().build(raw_window_handle);
|
||||
|
||||
// Since glutin by default tries to create OpenGL core context, which may not be
|
||||
// present we should try gles.
|
||||
let fallback_context_attributes = glutin::context::ContextAttributesBuilder::new()
|
||||
.with_context_api(glutin::context::ContextApi::Gles(None))
|
||||
.build(raw_window_handle);
|
||||
|
||||
// There are also some old devices that support neither modern OpenGL nor GLES.
|
||||
// To support these we can try and create a 2.1 context.
|
||||
let legacy_context_attributes = glutin::context::ContextAttributesBuilder::new()
|
||||
.with_context_api(glutin::context::ContextApi::OpenGl(Some(
|
||||
glutin::context::Version::new(2, 1),
|
||||
)))
|
||||
.build(raw_window_handle);
|
||||
|
||||
let not_current_gl_context = unsafe {
|
||||
gl_display
|
||||
.create_context(&gl_config, &context_attributes)
|
||||
.or_else(|_| {
|
||||
gl_display
|
||||
.create_context(&gl_config, &fallback_context_attributes)
|
||||
.or_else(|_| {
|
||||
gl_display.create_context(&gl_config, &legacy_context_attributes)
|
||||
})
|
||||
})
|
||||
}
|
||||
.context("failed to create context")?;
|
||||
|
||||
let raw_gl_context = not_current_gl_context.raw_context();
|
||||
|
||||
println!("Using raw GL context {:?}", raw_gl_context);
|
||||
|
||||
#[cfg(not(target_os = "linux"))]
|
||||
compile_error!("This example only has Linux support");
|
||||
|
||||
let api = App::map_gl_api(gl_config.api());
|
||||
|
||||
let (raw_gl_context, gst_gl_display, platform) = match (raw_gl_display, raw_gl_context) {
|
||||
#[cfg(feature = "gst-gl-egl")]
|
||||
(
|
||||
glutin::display::RawDisplay::Egl(egl_display),
|
||||
glutin::context::RawContext::Egl(egl_context),
|
||||
) => {
|
||||
let gl_display =
|
||||
unsafe { gst_gl_egl::GLDisplayEGL::with_egl_display(egl_display as usize) }
|
||||
.context("Failed to create GLDisplayEGL from raw `EGLDisplay`")?
|
||||
.upcast::<gst_gl::GLDisplay>();
|
||||
|
||||
(egl_context as usize, gl_display, gst_gl::GLPlatform::EGL)
|
||||
}
|
||||
#[cfg(feature = "gst-gl-x11")]
|
||||
(
|
||||
glutin::display::RawDisplay::Glx(glx_display),
|
||||
glutin::context::RawContext::Glx(glx_context),
|
||||
) => {
|
||||
let gl_display =
|
||||
unsafe { gst_gl_x11::GLDisplayX11::with_display(glx_display as usize) }
|
||||
.context("Failed to create GLDisplayX11 from raw X11 `Display`")?
|
||||
.upcast::<gst_gl::GLDisplay>();
|
||||
(glx_context as usize, gl_display, gst_gl::GLPlatform::GLX)
|
||||
}
|
||||
#[allow(unreachable_patterns)]
|
||||
handler => anyhow::bail!("Unsupported platform: {handler:?}."),
|
||||
};
|
||||
|
||||
let shared_context = unsafe {
|
||||
gst_gl::GLContext::new_wrapped(&gst_gl_display, raw_gl_context, platform, api)
|
||||
}
|
||||
.context("Couldn't wrap GL context")?;
|
||||
|
||||
let gl_context = shared_context.clone();
|
||||
let event_proxy = event_loop.create_proxy();
|
||||
|
||||
#[allow(clippy::single_match)]
|
||||
bus.set_sync_handler(move |_, msg| {
|
||||
match msg.view() {
|
||||
gst::MessageView::NeedContext(ctxt) => {
|
||||
let context_type = ctxt.context_type();
|
||||
if context_type == *gst_gl::GL_DISPLAY_CONTEXT_TYPE {
|
||||
if let Some(el) =
|
||||
msg.src().map(|s| s.downcast_ref::<gst::Element>().unwrap())
|
||||
{
|
||||
let context = gst::Context::new(context_type, true);
|
||||
context.set_gl_display(&gst_gl_display);
|
||||
el.set_context(&context);
|
||||
}
|
||||
}
|
||||
if context_type == "gst.gl.app_context" {
|
||||
if let Some(el) =
|
||||
msg.src().map(|s| s.downcast_ref::<gst::Element>().unwrap())
|
||||
{
|
||||
let mut context = gst::Context::new(context_type, true);
|
||||
{
|
||||
let context = context.get_mut().unwrap();
|
||||
let s = context.structure_mut();
|
||||
s.set("context", &gl_context);
|
||||
}
|
||||
el.set_context(&context);
|
||||
}
|
||||
}
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
|
||||
if let Err(e) = event_proxy.send_event(Message::BusEvent) {
|
||||
eprintln!("Failed to send BusEvent to event proxy: {e}")
|
||||
}
|
||||
|
||||
gst::BusSyncReply::Pass
|
||||
});
|
||||
|
||||
Ok(App {
|
||||
pipeline,
|
||||
appsink,
|
||||
bus,
|
||||
event_loop,
|
||||
window,
|
||||
not_current_gl_context: Some(not_current_gl_context),
|
||||
shared_context,
|
||||
})
|
||||
}
|
||||
|
||||
/// Registers the `AppSink` callback that forwards every decoded GL frame to
/// the winit event loop as a [`Message::Frame`].
///
/// For each new sample the callback:
/// 1. extracts the `VideoInfo` from the sample caps,
/// 2. ensures a `GLSyncMeta` sync point is set on the buffer (adding the meta
///    first when missing) so the renderer can later wait for the producer's
///    GL commands,
/// 3. sends `(info, buffer)` through the event-loop proxy.
///
/// Failures inside the callback are reported via `element_error!` and mapped
/// to the matching `gst::FlowError`.
fn setup(&self, event_loop: &winit::event_loop::EventLoop<Message>) -> Result<()> {
    let event_proxy = event_loop.create_proxy();
    self.appsink.set_callbacks(
        gst_app::AppSinkCallbacks::builder()
            .new_sample(move |appsink| {
                let sample = appsink.pull_sample().map_err(|_| gst::FlowError::Eos)?;

                // Video format/dimensions come from the sample caps; without
                // them the frame cannot be interpreted.
                let info = sample
                    .caps()
                    .and_then(|caps| gst_video::VideoInfo::from_caps(caps).ok())
                    .ok_or_else(|| {
                        element_error!(
                            appsink,
                            gst::ResourceError::Failed,
                            ("Failed to get video info from sample")
                        );

                        gst::FlowError::NotNegotiated
                    })?;

                let mut buffer = sample.buffer_owned().unwrap();
                {
                    // The GL context is taken from the buffer's first memory,
                    // which must be a GLBaseMemory in a GL pipeline.
                    let context = match (buffer.n_memory() > 0)
                        .then(|| buffer.peek_memory(0))
                        .and_then(|m| m.downcast_memory_ref::<gst_gl::GLBaseMemory>())
                        .map(|m| m.context())
                    {
                        Some(context) => context.clone(),
                        None => {
                            element_error!(
                                appsink,
                                gst::ResourceError::Failed,
                                ("Failed to get GL context from buffer")
                            );

                            return Err(gst::FlowError::Error);
                        }
                    };

                    // Set a sync point so the consumer can wait until the
                    // producer's GL commands completed. `make_mut` is only
                    // needed on the path that adds the meta.
                    if let Some(meta) = buffer.meta::<gst_gl::GLSyncMeta>() {
                        meta.set_sync_point(&context);
                    } else {
                        let buffer = buffer.make_mut();
                        let meta = gst_gl::GLSyncMeta::add(buffer, &context);
                        meta.set_sync_point(&context);
                    }
                }

                // Hand the frame over to the event loop for rendering.
                event_proxy
                    .send_event(Message::Frame(info, buffer))
                    .map(|()| gst::FlowSuccess::Ok)
                    .map_err(|e| {
                        element_error!(
                            appsink,
                            gst::ResourceError::Failed,
                            ("Failed to send sample to event loop: {}", e)
                        );

                        gst::FlowError::Error
                    })
            })
            .build(),
    );

    Ok(())
}
|
||||
|
||||
/// Converts from <https://docs.rs/glutin/latest/glutin/config/struct.Api.html> to
|
||||
/// <https://gstreamer.freedesktop.org/documentation/gl/gstglapi.html?gi-language=c#GstGLAPI>.
|
||||
fn map_gl_api(api: glutin::config::Api) -> gst_gl::GLAPI {
|
||||
use glutin::config::Api;
|
||||
use gst_gl::GLAPI;
|
||||
|
||||
let mut gst_gl_api = GLAPI::empty();
|
||||
// In gstreamer:
|
||||
// GLAPI::OPENGL: Desktop OpenGL up to and including 3.1. The compatibility profile when the OpenGL version is >= 3.2
|
||||
// GLAPI::OPENGL3: Desktop OpenGL >= 3.2 core profile
|
||||
// In glutin, API::OPENGL is set for every context API, except EGL where it is set based on
|
||||
// EGL_RENDERABLE_TYPE containing EGL_OPENGL_BIT:
|
||||
// https://registry.khronos.org/EGL/sdk/docs/man/html/eglChooseConfig.xhtml
|
||||
gst_gl_api.set(GLAPI::OPENGL | GLAPI::OPENGL3, api.contains(Api::OPENGL));
|
||||
gst_gl_api.set(GLAPI::GLES1, api.contains(Api::GLES1));
|
||||
// OpenGL ES 2.x and 3.x
|
||||
gst_gl_api.set(GLAPI::GLES2, api.intersects(Api::GLES2 | Api::GLES3));
|
||||
|
||||
gst_gl_api
|
||||
}
|
||||
|
||||
fn create_pipeline(
|
||||
gl_element: Option<&gst::Element>,
|
||||
) -> Result<(gst::Pipeline, gst_app::AppSink)> {
|
||||
let pipeline = gst::Pipeline::default();
|
||||
let src = gst::ElementFactory::make("videotestsrc").build()?;
|
||||
|
||||
let caps = gst_video::VideoCapsBuilder::new()
|
||||
.features([gst_gl::CAPS_FEATURE_MEMORY_GL_MEMORY])
|
||||
.format(gst_video::VideoFormat::Rgba)
|
||||
.field("texture-target", "2D")
|
||||
.build();
|
||||
|
||||
let appsink = gst_app::AppSink::builder()
|
||||
.enable_last_sample(true)
|
||||
.max_buffers(1)
|
||||
.caps(&caps)
|
||||
.build();
|
||||
|
||||
if let Some(gl_element) = gl_element {
|
||||
let glupload = gst::ElementFactory::make("glupload").build()?;
|
||||
|
||||
pipeline.add_many([&src, &glupload])?;
|
||||
pipeline.add(gl_element)?;
|
||||
pipeline.add(&appsink)?;
|
||||
|
||||
src.link(&glupload)?;
|
||||
glupload.link(gl_element)?;
|
||||
gl_element.link(&appsink)?;
|
||||
|
||||
Ok((pipeline, appsink))
|
||||
} else {
|
||||
let sink = gst::ElementFactory::make("glsinkbin")
|
||||
.property("sink", &appsink)
|
||||
.build()?;
|
||||
|
||||
pipeline.add_many([&src, &sink])?;
|
||||
src.link(&sink)?;
|
||||
|
||||
Ok((pipeline, appsink))
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_messages(bus: &gst::Bus) -> Result<()> {
|
||||
use gst::MessageView;
|
||||
|
||||
for msg in bus.iter() {
|
||||
match msg.view() {
|
||||
MessageView::Eos(..) => break,
|
||||
MessageView::Error(err) => {
|
||||
return Err(ErrorMessage {
|
||||
src: msg
|
||||
.src()
|
||||
.map(|s| s.path_string())
|
||||
.unwrap_or_else(|| glib::GString::from("UNKNOWN")),
|
||||
error: err.error(),
|
||||
debug: err.debug(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
_ => (),
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
/// Runs the winit event loop, rendering GL frames received from the pipeline
/// until the window is closed or Escape is released.
pub(crate) fn main_loop(app: App) -> Result<()> {
    app.setup(&app.event_loop)?;

    let App {
        pipeline,
        bus,
        event_loop,
        mut window,
        mut not_current_gl_context,
        shared_context,
        ..
    } = app;

    // Most recently received video frame; re-drawn on every redraw.
    let mut curr_frame: Option<gst_gl::GLVideoFrame<gst_gl::gl_video_frame::Readable>> = None;

    // (GL function table, current GL context, window surface); populated once
    // on Event::Resumed when the window/surface exist.
    let mut running_state = None::<(
        Gl,
        glutin::context::PossiblyCurrentContext,
        glutin::surface::Surface<glutin::surface::WindowSurface>,
    )>;

    Ok(event_loop.run(move |event, window_target| {
        window_target.set_control_flow(winit::event_loop::ControlFlow::Wait);

        let mut needs_redraw = false;
        match event {
            winit::event::Event::LoopExiting => {
                // Shut the pipeline down cleanly before the loop exits.
                pipeline.send_event(gst::event::Eos::new());
                pipeline.set_state(gst::State::Null).unwrap();
            }
            winit::event::Event::WindowEvent { event, .. } => match event {
                winit::event::WindowEvent::CloseRequested
                | winit::event::WindowEvent::KeyboardInput {
                    event:
                        winit::event::KeyEvent {
                            state: winit::event::ElementState::Released,
                            logical_key:
                                winit::keyboard::Key::Named(winit::keyboard::NamedKey::Escape),
                            ..
                        },
                    ..
                } => window_target.exit(),
                winit::event::WindowEvent::Resized(size) => {
                    // Some platforms like EGL require resizing GL surface to update the size
                    // Notable platforms here are Wayland and macOS, other don't require it
                    // and the function is no-op, but it's wise to resize it for portability
                    // reasons.
                    if let Some((gl, gl_context, gl_surface)) = &running_state {
                        gl_surface.resize(
                            gl_context,
                            // XXX Ignore minimizing
                            NonZeroU32::new(size.width).unwrap(),
                            NonZeroU32::new(size.height).unwrap(),
                        );
                        gl.resize(size);
                    }
                }
                winit::event::WindowEvent::RedrawRequested => needs_redraw = true,
                _ => (),
            },
            // Receive a frame
            winit::event::Event::UserEvent(Message::Frame(info, buffer)) => {
                if let Ok(frame) = gst_gl::GLVideoFrame::from_buffer_readable(buffer, &info) {
                    curr_frame = Some(frame);
                    needs_redraw = true;
                }
            }
            // Handle all pending messages when we are awaken by set_sync_handler
            winit::event::Event::UserEvent(Message::BusEvent) => {
                App::handle_messages(&bus).unwrap();
            }
            winit::event::Event::Resumed => {
                let not_current_gl_context = not_current_gl_context
                    .take()
                    .expect("There must be a NotCurrentContext prior to Event::Resumed");

                let gl_config = not_current_gl_context.config();
                let gl_display = gl_config.display();

                // Create the window on first Resumed if it does not exist yet.
                let window = window.get_or_insert_with(|| {
                    let window_builder = winit::window::WindowBuilder::new().with_transparent(true);
                    glutin_winit::finalize_window(window_target, window_builder, &gl_config)
                        .unwrap()
                });

                let attrs = window.build_surface_attributes(<_>::default());
                let gl_surface = unsafe {
                    gl_config
                        .display()
                        .create_window_surface(&gl_config, &attrs)
                        .unwrap()
                };

                // Make it current.
                let gl_context = not_current_gl_context.make_current(&gl_surface).unwrap();

                // Tell GStreamer that the context has been made current (for borrowed contexts,
                // this does not try to make it current again)
                shared_context.activate(true).unwrap();

                shared_context
                    .fill_info()
                    .expect("Couldn't fill context info");

                // The context needs to be current for the Renderer to set up shaders and buffers.
                // It also performs function loading, which needs a current context on WGL.
                let gl = load(&gl_display);

                // Try setting vsync.
                if let Err(res) = gl_surface.set_swap_interval(
                    &gl_context,
                    glutin::surface::SwapInterval::Wait(std::num::NonZeroU32::new(1).unwrap()),
                ) {
                    eprintln!("Error setting vsync: {res:?}");
                }

                pipeline.set_state(gst::State::Playing).unwrap();

                // Resumed must not fire again while a running state is held.
                assert!(running_state
                    .replace((gl, gl_context, gl_surface))
                    .is_none());
            }
            _ => (),
        }

        if needs_redraw {
            if let Some((gl, gl_context, gl_surface)) = &running_state {
                if let Some(frame) = curr_frame.as_ref() {
                    // Wait for the producer's GL commands to complete before
                    // sampling the texture.
                    let sync_meta = frame.buffer().meta::<gst_gl::GLSyncMeta>().unwrap();
                    sync_meta.wait(&shared_context);
                    if let Ok(texture) = frame.texture_id(0) {
                        gl.draw_frame(texture as gl::types::GLuint);
                    }
                }

                gl_surface.swap_buffers(gl_context).unwrap();
            }
        }
    })?)
}
|
58
generator.py
58
generator.py
|
@ -1,58 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
from os import listdir
|
||||
from os.path import isfile, join
|
||||
from subprocess import call
|
||||
import sys
|
||||
|
||||
need_rebuild = False
|
||||
|
||||
def update_workspace():
    """Build the ``gir`` binary in release mode.

    Returns:
        bool: True if the build command could be spawned and exited with
        status 0, False otherwise.
    """
    try:
        # call() only raises if the command itself cannot be spawned; a
        # failing build is reported through a non-zero exit status, which the
        # original version silently ignored (it returned True regardless).
        return call(['bash', '-c', 'cd gir && cargo build --release']) == 0
    except Exception:
        # Narrowed from a bare ``except:`` which also swallowed
        # SystemExit/KeyboardInterrupt.
        return False
|
||||
|
||||
|
||||
# Initialize the gir submodule if it is not checked out yet.
if not isfile('./gir/src'):
    need_rebuild = True
    print('=> Initializing gir submodule...')
    call(['bash', '-c', 'git submodule update --init'])
    print('<= Done!')

# ``input`` evaluates the entered text on Python 2, so use ``raw_input``
# there. The original guarded only the first prompt; the second one below
# used bare ``input()`` and was unsafe on Python 2.
if sys.version_info[0] < 3:
    prompt = raw_input  # noqa: F821
else:
    prompt = input

question = 'Do you want to update gir submodule? [y/N] '
line = prompt(question).strip()
if line.lower() == 'y':
    need_rebuild = True
    print('=> Updating gir submodule...')
    call(['bash', '-c', 'cd gir && git reset --hard HEAD && git pull -f origin master'])
    print('<= Done!')

# BUG FIX: the original called ``os.path.isfile`` here, but ``os`` was never
# imported (only ``from os.path import isfile, join``), raising NameError
# whenever no rebuild had been requested yet.
if need_rebuild is True or not isfile('./gir/target/release/gir'):
    print('=> Building gir...')
    if update_workspace() is True:
        print('<= Done!')
    else:
        print('<= Failed...')
        sys.exit(1)

print('=> Regenerating crates...')
# Run gir on every Gir_Gst*.toml config file in the current directory.
for entry in [f for f in listdir('.') if isfile(join('.', f))]:
    if entry.startswith('Gir_Gst') and entry.endswith('.toml'):
        print('==> Regenerating "{}"...'.format(entry))
        try:
            call(['./gir/target/release/gir', '-c', entry])
        except Exception as err:
            print('The following error occurred: {}'.format(err))
            line = prompt('Do you want to continue? [y/N] ').strip().lower()
            if line != 'y':
                sys.exit(1)
        print('<== Done!')
call(['cargo', 'fmt'])
print('<= Done!')
print("Don't forget to check if everything has been correctly generated!")
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue