initial commit: oh this is going to be horrible
This commit is contained in:
commit
b02da2c6d0
22 changed files with 11664 additions and 0 deletions
2
.gitignore
vendored
Normal file
2
.gitignore
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
/target
|
||||
/.cargo
|
4744
Cargo.lock
generated
Normal file
4744
Cargo.lock
generated
Normal file
File diff suppressed because it is too large
Load diff
16
Cargo.toml
Normal file
16
Cargo.toml
Normal file
|
@ -0,0 +1,16 @@
|
|||
#![feature(let_chains)]
|
||||
[package]
|
||||
name = "reversed-rooms-launcher"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
|
||||
[dependencies]
|
||||
directories = "6.0.0"
|
||||
iced = { git = "https://github.com/iced-rs/iced.git", rev = "482d54118a733231cdceb4ab8eef2419fbec385e", features = ["smol", "tiny-skia", "image"], default-features = false }
|
||||
image = "0.25.6"
|
||||
serde = { version = "1.0.219", features = ["serde_derive"] }
|
||||
serde_json = "1.0.140"
|
||||
tempfile = "3.19.1"
|
||||
iced_video_player = {path = "./iced_video_player"}
|
||||
url = "2.5.4"
|
||||
rust-embed = "8.7.0"
|
1
iced_video_player/.envrc
Normal file
1
iced_video_player/.envrc
Normal file
|
@ -0,0 +1 @@
|
|||
use flake || use nix shell.nix
|
2
iced_video_player/.gitignore
vendored
Normal file
2
iced_video_player/.gitignore
vendored
Normal file
|
@ -0,0 +1,2 @@
|
|||
/target
|
||||
.direnv
|
4532
iced_video_player/Cargo.lock
generated
Normal file
4532
iced_video_player/Cargo.lock
generated
Normal file
File diff suppressed because it is too large
Load diff
47
iced_video_player/Cargo.toml
Normal file
47
iced_video_player/Cargo.toml
Normal file
|
@ -0,0 +1,47 @@
|
|||
[package]
|
||||
name = "iced_video_player"
|
||||
description = "A convenient video player widget for Iced"
|
||||
homepage = "https://github.com/jazzfool/iced_video_player"
|
||||
repository = "https://github.com/jazzfool/iced_video_player"
|
||||
readme = "README.md"
|
||||
keywords = ["gui", "iced", "video"]
|
||||
categories = ["gui", "multimedia"]
|
||||
version = "0.6.0"
|
||||
authors = ["jazzfool"]
|
||||
edition = "2024"
|
||||
resolver = "2"
|
||||
license = "MIT OR Apache-2.0"
|
||||
exclude = [
|
||||
".media/test.mp4"
|
||||
]
|
||||
|
||||
[dependencies]
|
||||
iced = { git = "https://github.com/iced-rs/iced.git", rev = "482d54118a733231cdceb4ab8eef2419fbec385e", features = ["smol", "wgpu", "image", "advanced"], default-features = false }
|
||||
iced_wgpu = { git = "https://github.com/iced-rs/iced.git", rev = "482d54118a733231cdceb4ab8eef2419fbec385e" }
|
||||
gstreamer = "0.23"
|
||||
gstreamer-app = "0.23" # appsink
|
||||
gstreamer-base = "0.23" # basesrc
|
||||
glib = "0.20" # gobject traits and error type
|
||||
log = "0.4"
|
||||
thiserror = "2.0.12"
|
||||
url = "2" # media uri
|
||||
|
||||
[package.metadata.nix]
|
||||
systems = ["x86_64-linux"]
|
||||
app = true
|
||||
build = true
|
||||
runtimeLibs = [
|
||||
"vulkan-loader",
|
||||
"wayland",
|
||||
"wayland-protocols",
|
||||
"libxkbcommon",
|
||||
"xorg.libX11",
|
||||
"xorg.libXrandr",
|
||||
"xorg.libXi", "gst_all_1.gstreamer", "gst_all_1.gstreamermm", "gst_all_1.gst-plugins-bad", "gst_all_1.gst-plugins-ugly", "gst_all_1.gst-plugins-good", "gst_all_1.gst-plugins-base",
|
||||
]
|
||||
buildInputs = ["libxkbcommon", "gst_all_1.gstreamer", "gst_all_1.gstreamermm", "gst_all_1.gst-plugins-bad", "gst_all_1.gst-plugins-ugly", "gst_all_1.gst-plugins-good", "gst_all_1.gst-plugins-base"]
|
||||
|
||||
[package.metadata.docs.rs]
|
||||
rustc-args = ["--cfg", "docsrs"]
|
||||
rustdoc-args = ["--cfg", "docsrs"]
|
||||
targets = ["wasm32-unknown-unknown"]
|
176
iced_video_player/LICENSE-APACHE
Normal file
176
iced_video_player/LICENSE-APACHE
Normal file
|
@ -0,0 +1,176 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
23
iced_video_player/LICENSE-MIT
Normal file
23
iced_video_player/LICENSE-MIT
Normal file
|
@ -0,0 +1,23 @@
|
|||
Permission is hereby granted, free of charge, to any
|
||||
person obtaining a copy of this software and associated
|
||||
documentation files (the "Software"), to deal in the
|
||||
Software without restriction, including without
|
||||
limitation the rights to use, copy, modify, merge,
|
||||
publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software
|
||||
is furnished to do so, subject to the following
|
||||
conditions:
|
||||
|
||||
The above copyright notice and this permission notice
|
||||
shall be included in all copies or substantial portions
|
||||
of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
|
||||
ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
|
||||
TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
|
||||
PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
|
||||
SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
|
||||
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
|
||||
OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
|
||||
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
|
||||
DEALINGS IN THE SOFTWARE.
|
64
iced_video_player/README.md
Normal file
64
iced_video_player/README.md
Normal file
|
@ -0,0 +1,64 @@
|
|||
# Iced Video Player Widget
|
||||
|
||||
Composable component to play videos in any Iced application built on the excellent GStreamer library.
|
||||
|
||||
<img src=".media/screenshot.png" width="50%" />
|
||||
|
||||
## Overview
|
||||
|
||||
In general, this supports anything that [`gstreamer/playbin`](https://gstreamer.freedesktop.org/documentation/playback/playbin.html?gi-language=c) supports.
|
||||
|
||||
Features:
|
||||
- Load video files from any file path **or URL** (support for streaming over network).
|
||||
- Video buffering when streaming on a network.
|
||||
- Audio support.
|
||||
- Programmatic control.
|
||||
- Can capture thumbnails from a set of timestamps.
|
||||
- Good performance (i.e., comparable to other video players). GStreamer (with the right plugins) will perform hardware-accelerated decoding, and the color space (YUV to RGB) is converted on the GPU whilst rendering the frame.
|
||||
|
||||
Limitations (hopefully to be fixed):
|
||||
- GStreamer is a bit annoying to set up on Windows.
|
||||
|
||||
The player **does not** come with any surrounding GUI controls, but they should be quite easy to implement should you need them.
|
||||
See the "minimal" example for a demonstration on how you could implement pausing, looping, and seeking.
|
||||
|
||||
## Example Usage
|
||||
|
||||
```rust
|
||||
use iced_video_player::{Video, VideoPlayer};
|
||||
|
||||
fn main() -> iced::Result {
|
||||
iced::run("Video Player", (), App::view)
|
||||
}
|
||||
|
||||
struct App {
|
||||
video: Video,
|
||||
}
|
||||
|
||||
impl Default for App {
|
||||
fn default() -> Self {
|
||||
App {
|
||||
video: Video::new(&url::Url::parse("file:///C:/my_video.mp4").unwrap()).unwrap(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl App {
|
||||
fn view(&self) -> iced::Element<()> {
|
||||
VideoPlayer::new(&self.video).into()
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Building
|
||||
|
||||
Follow the [GStreamer build instructions](https://github.com/sdroege/gstreamer-rs#installation). This should be able to compile on MSVC, MinGW, Linux, and MacOS.
|
||||
|
||||
## License
|
||||
|
||||
Licensed under either
|
||||
|
||||
- [Apache 2.0](https://www.apache.org/licenses/LICENSE-2.0)
|
||||
- [MIT](http://opensource.org/licenses/MIT)
|
||||
|
||||
at your option.
|
98
iced_video_player/flake.lock
Normal file
98
iced_video_player/flake.lock
Normal file
|
@ -0,0 +1,98 @@
|
|||
{
|
||||
"nodes": {
|
||||
"devshell": {
|
||||
"locked": {
|
||||
"lastModified": 1629275356,
|
||||
"narHash": "sha256-R17M69EKXP6q8/mNHaK53ECwjFo1pdF+XaJC9Qq8zjg=",
|
||||
"owner": "numtide",
|
||||
"repo": "devshell",
|
||||
"rev": "26f25a12265f030917358a9632cd600b51af1d97",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "numtide",
|
||||
"repo": "devshell",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"flakeCompat": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1627913399,
|
||||
"narHash": "sha256-hY8g6H2KFL8ownSiFeMOjwPC8P0ueXpCVEbxgda3pko=",
|
||||
"owner": "edolstra",
|
||||
"repo": "flake-compat",
|
||||
"rev": "12c64ca55c1014cdc1b16ed5a804aa8576601ff2",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "edolstra",
|
||||
"repo": "flake-compat",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixCargoIntegration": {
|
||||
"inputs": {
|
||||
"devshell": "devshell",
|
||||
"nixpkgs": [
|
||||
"nixpkgs"
|
||||
],
|
||||
"rustOverlay": "rustOverlay"
|
||||
},
|
||||
"locked": {
|
||||
"lastModified": 1629871751,
|
||||
"narHash": "sha256-QjnDg34ApcnjmXlNLnbHswT9OroCPY7Wip6r9Zkgkfo=",
|
||||
"owner": "yusdacra",
|
||||
"repo": "nix-cargo-integration",
|
||||
"rev": "4f164ecad242537d5893426eef02c47c9e5ced59",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "yusdacra",
|
||||
"repo": "nix-cargo-integration",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"nixpkgs": {
|
||||
"locked": {
|
||||
"lastModified": 1629618782,
|
||||
"narHash": "sha256-2K8SSXu3alo/URI3MClGdDSns6Gb4ZaW4LET53UWyKk=",
|
||||
"owner": "NixOS",
|
||||
"repo": "nixpkgs",
|
||||
"rev": "870959c7fb3a42af1863bed9e1756086a74eb649",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "NixOS",
|
||||
"ref": "nixos-unstable",
|
||||
"repo": "nixpkgs",
|
||||
"type": "github"
|
||||
}
|
||||
},
|
||||
"root": {
|
||||
"inputs": {
|
||||
"flakeCompat": "flakeCompat",
|
||||
"nixCargoIntegration": "nixCargoIntegration",
|
||||
"nixpkgs": "nixpkgs"
|
||||
}
|
||||
},
|
||||
"rustOverlay": {
|
||||
"flake": false,
|
||||
"locked": {
|
||||
"lastModified": 1629857564,
|
||||
"narHash": "sha256-dClWiHkbaCDaIl520Miri66UOA8OecWbaVTWJBajHyM=",
|
||||
"owner": "oxalica",
|
||||
"repo": "rust-overlay",
|
||||
"rev": "88848c36934318e16c86097f65dbf97a57968d81",
|
||||
"type": "github"
|
||||
},
|
||||
"original": {
|
||||
"owner": "oxalica",
|
||||
"repo": "rust-overlay",
|
||||
"type": "github"
|
||||
}
|
||||
}
|
||||
},
|
||||
"root": "root",
|
||||
"version": 7
|
||||
}
|
28
iced_video_player/flake.nix
Normal file
28
iced_video_player/flake.nix
Normal file
|
@ -0,0 +1,28 @@
|
|||
{
|
||||
inputs = {
|
||||
flakeCompat = {
|
||||
url = "github:edolstra/flake-compat";
|
||||
flake = false;
|
||||
};
|
||||
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
|
||||
nixCargoIntegration = {
|
||||
url = "github:yusdacra/nix-cargo-integration";
|
||||
inputs.nixpkgs.follows = "nixpkgs";
|
||||
};
|
||||
};
|
||||
|
||||
outputs = inputs:
|
||||
inputs.nixCargoIntegration.lib.makeOutputs {
|
||||
root = ./.;
|
||||
overrides = {
|
||||
shell = common: prev: {
|
||||
env = prev.env ++ [
|
||||
{
|
||||
name = "GST_PLUGIN_PATH";
|
||||
value = "${common.pkgs.gst_all_1.gstreamer}:${common.pkgs.gst_all_1.gst-plugins-bad}:${common.pkgs.gst_all_1.gst-plugins-ugly}:${common.pkgs.gst_all_1.gst-plugins-good}:${common.pkgs.gst_all_1.gst-plugins-base}";
|
||||
}
|
||||
];
|
||||
};
|
||||
};
|
||||
};
|
||||
}
|
12
iced_video_player/shell.nix
Normal file
12
iced_video_player/shell.nix
Normal file
|
@ -0,0 +1,12 @@
|
|||
# Flake's devShell for non-flake-enabled nix instances
|
||||
(import
|
||||
(
|
||||
let lock = builtins.fromJSON (builtins.readFile ./flake.lock);
|
||||
in
|
||||
fetchTarball {
|
||||
url =
|
||||
"https://github.com/edolstra/flake-compat/archive/${lock.nodes.flakeCompat.locked.rev}.tar.gz";
|
||||
sha256 = lock.nodes.flakeCompat.locked.narHash;
|
||||
}
|
||||
)
|
||||
{ src = ./.; }).shellNix.default
|
76
iced_video_player/src/lib.rs
Normal file
76
iced_video_player/src/lib.rs
Normal file
|
@ -0,0 +1,76 @@
|
|||
//! # Iced Video Player
|
||||
//!
|
||||
//! A convenient video player widget for Iced.
|
||||
//!
|
||||
//! To get started, load a video from a URI (e.g., a file path prefixed with `file:///`) using [`Video::new`](crate::Video::new),
|
||||
//! then use it like any other Iced widget in your `view` function by creating a [`VideoPlayer`].
|
||||
//!
|
||||
//! Example:
|
||||
//! ```rust
|
||||
//! use iced_video_player::{Video, VideoPlayer};
|
||||
//!
|
||||
//! fn main() -> iced::Result {
|
||||
//! iced::run("Video Player", (), App::view)
|
||||
//! }
|
||||
//!
|
||||
//! struct App {
|
||||
//! video: Video,
|
||||
//! }
|
||||
//!
|
||||
//! impl Default for App {
|
||||
//! fn default() -> Self {
|
||||
//! App {
|
||||
//! video: Video::new(&url::Url::parse("file:///C:/my_video.mp4").unwrap()).unwrap(),
|
||||
//! }
|
||||
//! }
|
||||
//! }
|
||||
//!
|
||||
//! impl App {
|
||||
//! fn view(&self) -> iced::Element<()> {
|
||||
//! VideoPlayer::new(&self.video).into()
|
||||
//! }
|
||||
//! }
|
||||
//! ```
|
||||
//!
|
||||
//! You can programmatically control the video (e.g., seek, pause, loop, grab thumbnails) by accessing various methods on [`Video`].
|
||||
|
||||
mod pipeline;
|
||||
mod video;
|
||||
mod video_player;
|
||||
|
||||
use gstreamer as gst;
|
||||
use thiserror::Error;
|
||||
|
||||
pub use video::Position;
|
||||
pub use video::Video;
|
||||
pub use video_player::VideoPlayer;
|
||||
|
||||
#[derive(Debug, Error)]
|
||||
pub enum Error {
|
||||
#[error("{0}")]
|
||||
Glib(#[from] glib::Error),
|
||||
#[error("{0}")]
|
||||
Bool(#[from] glib::BoolError),
|
||||
#[error("failed to get the gstreamer bus")]
|
||||
Bus,
|
||||
#[error("failed to get AppSink element with name='{0}' from gstreamer pipeline")]
|
||||
AppSink(String),
|
||||
#[error("{0}")]
|
||||
StateChange(#[from] gst::StateChangeError),
|
||||
#[error("failed to cast gstreamer element")]
|
||||
Cast,
|
||||
#[error("{0}")]
|
||||
Io(#[from] std::io::Error),
|
||||
#[error("invalid URI")]
|
||||
Uri,
|
||||
#[error("failed to get media capabilities")]
|
||||
Caps,
|
||||
#[error("failed to query media duration or position")]
|
||||
Duration,
|
||||
#[error("failed to sync with playback")]
|
||||
Sync,
|
||||
#[error("failed to lock internal sync primitive")]
|
||||
Lock,
|
||||
#[error("invalid framerate: {0}")]
|
||||
Framerate(f64),
|
||||
}
|
464
iced_video_player/src/pipeline.rs
Normal file
464
iced_video_player/src/pipeline.rs
Normal file
|
@ -0,0 +1,464 @@
|
|||
use crate::video::Frame;
|
||||
use iced::{wgpu::{self, PipelineCompilationOptions}, widget::shader::{Primitive, Storage, Viewport}};
|
||||
use std::{
|
||||
collections::{btree_map::Entry, BTreeMap},
|
||||
num::NonZero,
|
||||
sync::{
|
||||
atomic::{AtomicBool, AtomicUsize, Ordering},
|
||||
Arc, Mutex,
|
||||
},
|
||||
};
|
||||
|
||||
#[repr(C)]
|
||||
struct Uniforms {
|
||||
rect: [f32; 4],
|
||||
// because wgpu min_uniform_buffer_offset_alignment
|
||||
_pad: [u8; 240],
|
||||
}
|
||||
|
||||
struct VideoEntry {
|
||||
texture_y: wgpu::Texture,
|
||||
texture_uv: wgpu::Texture,
|
||||
instances: wgpu::Buffer,
|
||||
bg0: wgpu::BindGroup,
|
||||
alive: Arc<AtomicBool>,
|
||||
|
||||
prepare_index: AtomicUsize,
|
||||
render_index: AtomicUsize,
|
||||
}
|
||||
|
||||
struct VideoPipeline {
|
||||
pipeline: wgpu::RenderPipeline,
|
||||
bg0_layout: wgpu::BindGroupLayout,
|
||||
sampler: wgpu::Sampler,
|
||||
videos: BTreeMap<u64, VideoEntry>,
|
||||
}
|
||||
|
||||
impl VideoPipeline {
|
||||
fn new(device: &wgpu::Device, format: wgpu::TextureFormat) -> Self {
|
||||
let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
|
||||
label: Some("iced_video_player shader"),
|
||||
source: wgpu::ShaderSource::Wgsl(include_str!("shader.wgsl").into()),
|
||||
});
|
||||
|
||||
let bg0_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
|
||||
label: Some("iced_video_player bind group 0 layout"),
|
||||
entries: &[
|
||||
wgpu::BindGroupLayoutEntry {
|
||||
binding: 0,
|
||||
visibility: wgpu::ShaderStages::FRAGMENT,
|
||||
ty: wgpu::BindingType::Texture {
|
||||
sample_type: wgpu::TextureSampleType::Float { filterable: true },
|
||||
view_dimension: wgpu::TextureViewDimension::D2,
|
||||
multisampled: false,
|
||||
},
|
||||
count: None,
|
||||
},
|
||||
wgpu::BindGroupLayoutEntry {
|
||||
binding: 1,
|
||||
visibility: wgpu::ShaderStages::FRAGMENT,
|
||||
ty: wgpu::BindingType::Texture {
|
||||
sample_type: wgpu::TextureSampleType::Float { filterable: true },
|
||||
view_dimension: wgpu::TextureViewDimension::D2,
|
||||
multisampled: false,
|
||||
},
|
||||
count: None,
|
||||
},
|
||||
wgpu::BindGroupLayoutEntry {
|
||||
binding: 2,
|
||||
visibility: wgpu::ShaderStages::FRAGMENT,
|
||||
ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
|
||||
count: None,
|
||||
},
|
||||
wgpu::BindGroupLayoutEntry {
|
||||
binding: 3,
|
||||
visibility: wgpu::ShaderStages::VERTEX,
|
||||
ty: wgpu::BindingType::Buffer {
|
||||
ty: wgpu::BufferBindingType::Uniform,
|
||||
has_dynamic_offset: true,
|
||||
min_binding_size: None,
|
||||
},
|
||||
count: None,
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
let layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
|
||||
label: Some("iced_video_player pipeline layout"),
|
||||
bind_group_layouts: &[&bg0_layout],
|
||||
push_constant_ranges: &[],
|
||||
});
|
||||
|
||||
let pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
|
||||
label: Some("iced_video_player pipeline"),
|
||||
layout: Some(&layout),
|
||||
vertex: wgpu::VertexState {
|
||||
module: &shader,
|
||||
entry_point: Some("vs_main"),
|
||||
buffers: &[],
|
||||
compilation_options: PipelineCompilationOptions::default()
|
||||
},
|
||||
primitive: wgpu::PrimitiveState::default(),
|
||||
depth_stencil: None,
|
||||
multisample: wgpu::MultisampleState {
|
||||
count: 1,
|
||||
mask: !0,
|
||||
alpha_to_coverage_enabled: false,
|
||||
},
|
||||
fragment: Some(wgpu::FragmentState {
|
||||
module: &shader,
|
||||
entry_point: Some("fs_main"),
|
||||
targets: &[Some(wgpu::ColorTargetState {
|
||||
format,
|
||||
blend: None,
|
||||
write_mask: wgpu::ColorWrites::ALL,
|
||||
})],
|
||||
compilation_options: PipelineCompilationOptions::default(),
|
||||
}),
|
||||
multiview: None,
|
||||
cache: None,
|
||||
});
|
||||
|
||||
let sampler = device.create_sampler(&wgpu::SamplerDescriptor {
|
||||
label: Some("iced_video_player sampler"),
|
||||
address_mode_u: wgpu::AddressMode::ClampToEdge,
|
||||
address_mode_v: wgpu::AddressMode::ClampToEdge,
|
||||
address_mode_w: wgpu::AddressMode::ClampToEdge,
|
||||
mag_filter: wgpu::FilterMode::Linear,
|
||||
min_filter: wgpu::FilterMode::Linear,
|
||||
mipmap_filter: wgpu::FilterMode::Nearest,
|
||||
lod_min_clamp: 0.0,
|
||||
lod_max_clamp: 1.0,
|
||||
compare: None,
|
||||
anisotropy_clamp: 1,
|
||||
border_color: None,
|
||||
});
|
||||
|
||||
VideoPipeline {
|
||||
pipeline,
|
||||
bg0_layout,
|
||||
sampler,
|
||||
videos: BTreeMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
fn upload(
|
||||
&mut self,
|
||||
device: &wgpu::Device,
|
||||
queue: &wgpu::Queue,
|
||||
video_id: u64,
|
||||
alive: &Arc<AtomicBool>,
|
||||
(width, height): (u32, u32),
|
||||
frame: &[u8],
|
||||
) {
|
||||
if let Entry::Vacant(entry) = self.videos.entry(video_id) {
|
||||
let texture_y = device.create_texture(&wgpu::TextureDescriptor {
|
||||
label: Some("iced_video_player texture"),
|
||||
size: wgpu::Extent3d {
|
||||
width,
|
||||
height,
|
||||
depth_or_array_layers: 1,
|
||||
},
|
||||
mip_level_count: 1,
|
||||
sample_count: 1,
|
||||
dimension: wgpu::TextureDimension::D2,
|
||||
format: wgpu::TextureFormat::R8Unorm,
|
||||
usage: wgpu::TextureUsages::COPY_DST | wgpu::TextureUsages::TEXTURE_BINDING,
|
||||
view_formats: &[],
|
||||
});
|
||||
|
||||
let texture_uv = device.create_texture(&wgpu::TextureDescriptor {
|
||||
label: Some("iced_video_player texture"),
|
||||
size: wgpu::Extent3d {
|
||||
width: width / 2,
|
||||
height: height / 2,
|
||||
depth_or_array_layers: 1,
|
||||
},
|
||||
mip_level_count: 1,
|
||||
sample_count: 1,
|
||||
dimension: wgpu::TextureDimension::D2,
|
||||
format: wgpu::TextureFormat::Rg8Unorm,
|
||||
usage: wgpu::TextureUsages::COPY_DST | wgpu::TextureUsages::TEXTURE_BINDING,
|
||||
view_formats: &[],
|
||||
});
|
||||
|
||||
let view_y = texture_y.create_view(&wgpu::TextureViewDescriptor {
|
||||
label: Some("iced_video_player texture view"),
|
||||
format: None,
|
||||
dimension: None,
|
||||
aspect: wgpu::TextureAspect::All,
|
||||
base_mip_level: 0,
|
||||
mip_level_count: None,
|
||||
base_array_layer: 0,
|
||||
array_layer_count: None,
|
||||
usage: None,
|
||||
});
|
||||
|
||||
let view_uv = texture_uv.create_view(&wgpu::TextureViewDescriptor {
|
||||
label: Some("iced_video_player texture view"),
|
||||
format: None,
|
||||
dimension: None,
|
||||
aspect: wgpu::TextureAspect::All,
|
||||
base_mip_level: 0,
|
||||
mip_level_count: None,
|
||||
base_array_layer: 0,
|
||||
array_layer_count: None,
|
||||
usage: None,
|
||||
});
|
||||
|
||||
let instances = device.create_buffer(&wgpu::BufferDescriptor {
|
||||
label: Some("iced_video_player uniform buffer"),
|
||||
size: 256 * std::mem::size_of::<Uniforms>() as u64, // max 256 video players per frame
|
||||
usage: wgpu::BufferUsages::COPY_DST | wgpu::BufferUsages::UNIFORM,
|
||||
mapped_at_creation: false,
|
||||
});
|
||||
|
||||
let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
|
||||
label: Some("iced_video_player bind group"),
|
||||
layout: &self.bg0_layout,
|
||||
entries: &[
|
||||
wgpu::BindGroupEntry {
|
||||
binding: 0,
|
||||
resource: wgpu::BindingResource::TextureView(&view_y),
|
||||
},
|
||||
wgpu::BindGroupEntry {
|
||||
binding: 1,
|
||||
resource: wgpu::BindingResource::TextureView(&view_uv),
|
||||
},
|
||||
wgpu::BindGroupEntry {
|
||||
binding: 2,
|
||||
resource: wgpu::BindingResource::Sampler(&self.sampler),
|
||||
},
|
||||
wgpu::BindGroupEntry {
|
||||
binding: 3,
|
||||
resource: wgpu::BindingResource::Buffer(wgpu::BufferBinding {
|
||||
buffer: &instances,
|
||||
offset: 0,
|
||||
size: Some(NonZero::new(std::mem::size_of::<Uniforms>() as _).unwrap()),
|
||||
}),
|
||||
},
|
||||
],
|
||||
});
|
||||
|
||||
entry.insert(VideoEntry {
|
||||
texture_y,
|
||||
texture_uv,
|
||||
instances,
|
||||
bg0: bind_group,
|
||||
alive: Arc::clone(alive),
|
||||
|
||||
prepare_index: AtomicUsize::new(0),
|
||||
render_index: AtomicUsize::new(0),
|
||||
});
|
||||
}
|
||||
|
||||
let VideoEntry {
|
||||
texture_y,
|
||||
texture_uv,
|
||||
..
|
||||
} = self.videos.get(&video_id).unwrap();
|
||||
|
||||
queue.write_texture(
|
||||
wgpu::TexelCopyTextureInfo {
|
||||
texture: texture_y,
|
||||
mip_level: 0,
|
||||
origin: wgpu::Origin3d::ZERO,
|
||||
aspect: wgpu::TextureAspect::All,
|
||||
},
|
||||
&frame[..(width * height) as usize],
|
||||
wgpu::TexelCopyBufferLayout {
|
||||
offset: 0,
|
||||
bytes_per_row: Some(width),
|
||||
rows_per_image: Some(height),
|
||||
},
|
||||
wgpu::Extent3d {
|
||||
width,
|
||||
height,
|
||||
depth_or_array_layers: 1,
|
||||
},
|
||||
);
|
||||
|
||||
queue.write_texture(
|
||||
wgpu::TexelCopyTextureInfo {
|
||||
texture: texture_uv,
|
||||
mip_level: 0,
|
||||
origin: wgpu::Origin3d::ZERO,
|
||||
aspect: wgpu::TextureAspect::All,
|
||||
},
|
||||
&frame[(width * height) as usize..],
|
||||
wgpu::TexelCopyBufferLayout {
|
||||
offset: 0,
|
||||
bytes_per_row: Some(width),
|
||||
rows_per_image: Some(height / 2),
|
||||
},
|
||||
wgpu::Extent3d {
|
||||
width: width / 2,
|
||||
height: height / 2,
|
||||
depth_or_array_layers: 1,
|
||||
},
|
||||
);
|
||||
}
|
||||
|
||||
fn cleanup(&mut self) {
|
||||
let ids: Vec<_> = self
|
||||
.videos
|
||||
.iter()
|
||||
.filter_map(|(id, entry)| (!entry.alive.load(Ordering::SeqCst)).then_some(*id))
|
||||
.collect();
|
||||
for id in ids {
|
||||
if let Some(video) = self.videos.remove(&id) {
|
||||
video.texture_y.destroy();
|
||||
video.texture_uv.destroy();
|
||||
video.instances.destroy();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn prepare(&mut self, queue: &wgpu::Queue, video_id: u64, bounds: &iced::Rectangle) {
|
||||
if let Some(video) = self.videos.get_mut(&video_id) {
|
||||
let uniforms = Uniforms {
|
||||
rect: [
|
||||
bounds.x,
|
||||
bounds.y,
|
||||
bounds.x + bounds.width,
|
||||
bounds.y + bounds.height,
|
||||
],
|
||||
_pad: [0; 240],
|
||||
};
|
||||
queue.write_buffer(
|
||||
&video.instances,
|
||||
(video.prepare_index.load(Ordering::Relaxed) * std::mem::size_of::<Uniforms>())
|
||||
as u64,
|
||||
unsafe {
|
||||
std::slice::from_raw_parts(
|
||||
&uniforms as *const _ as *const u8,
|
||||
std::mem::size_of::<Uniforms>(),
|
||||
)
|
||||
},
|
||||
);
|
||||
video.prepare_index.fetch_add(1, Ordering::Relaxed);
|
||||
video.render_index.store(0, Ordering::Relaxed);
|
||||
}
|
||||
|
||||
self.cleanup();
|
||||
}
|
||||
|
||||
fn draw(
|
||||
&self,
|
||||
target: &wgpu::TextureView,
|
||||
encoder: &mut wgpu::CommandEncoder,
|
||||
clip: &iced::Rectangle<u32>,
|
||||
video_id: u64,
|
||||
) {
|
||||
if let Some(video) = self.videos.get(&video_id) {
|
||||
let mut pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
|
||||
label: Some("iced_video_player render pass"),
|
||||
color_attachments: &[Some(wgpu::RenderPassColorAttachment {
|
||||
view: target,
|
||||
resolve_target: None,
|
||||
ops: wgpu::Operations {
|
||||
load: wgpu::LoadOp::Load,
|
||||
store: wgpu::StoreOp::Store,
|
||||
},
|
||||
})],
|
||||
depth_stencil_attachment: None,
|
||||
timestamp_writes: None,
|
||||
occlusion_query_set: None,
|
||||
});
|
||||
|
||||
pass.set_pipeline(&self.pipeline);
|
||||
pass.set_bind_group(
|
||||
0,
|
||||
&video.bg0,
|
||||
&[
|
||||
(video.render_index.load(Ordering::Relaxed) * std::mem::size_of::<Uniforms>())
|
||||
as u32,
|
||||
],
|
||||
);
|
||||
pass.set_scissor_rect(clip.x as _, clip.y as _, clip.width as _, clip.height as _);
|
||||
pass.draw(0..6, 0..1);
|
||||
|
||||
video.prepare_index.store(0, Ordering::Relaxed);
|
||||
video.render_index.fetch_add(1, Ordering::Relaxed);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub(crate) struct VideoPrimitive {
|
||||
video_id: u64,
|
||||
alive: Arc<AtomicBool>,
|
||||
frame: Arc<Mutex<Frame>>,
|
||||
size: (u32, u32),
|
||||
upload_frame: bool,
|
||||
}
|
||||
|
||||
impl VideoPrimitive {
|
||||
pub fn new(
|
||||
video_id: u64,
|
||||
alive: Arc<AtomicBool>,
|
||||
frame: Arc<Mutex<Frame>>,
|
||||
size: (u32, u32),
|
||||
upload_frame: bool,
|
||||
) -> Self {
|
||||
VideoPrimitive {
|
||||
video_id,
|
||||
alive,
|
||||
frame,
|
||||
size,
|
||||
upload_frame,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Primitive for VideoPrimitive {
|
||||
fn prepare(
|
||||
&self,
|
||||
device: &wgpu::Device,
|
||||
queue: &wgpu::Queue,
|
||||
format: wgpu::TextureFormat,
|
||||
storage: &mut Storage,
|
||||
bounds: &iced::Rectangle,
|
||||
viewport: &Viewport,
|
||||
) {
|
||||
if !storage.has::<VideoPipeline>() {
|
||||
storage.store(VideoPipeline::new(device, format));
|
||||
}
|
||||
|
||||
let pipeline = storage.get_mut::<VideoPipeline>().unwrap();
|
||||
|
||||
if self.upload_frame {
|
||||
if let Some(readable) = self.frame.lock().expect("lock frame mutex").readable() {
|
||||
pipeline.upload(
|
||||
device,
|
||||
queue,
|
||||
self.video_id,
|
||||
&self.alive,
|
||||
self.size,
|
||||
readable.as_slice(),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
pipeline.prepare(
|
||||
queue,
|
||||
self.video_id,
|
||||
&(*bounds
|
||||
* iced::Transformation::orthographic(
|
||||
viewport.logical_size().width as _,
|
||||
viewport.logical_size().height as _,
|
||||
)),
|
||||
);
|
||||
}
|
||||
|
||||
fn render(
|
||||
&self,
|
||||
encoder: &mut wgpu::CommandEncoder,
|
||||
storage: &Storage,
|
||||
target: &wgpu::TextureView,
|
||||
clip_bounds: &iced::Rectangle<u32>,
|
||||
) {
|
||||
let pipeline = storage.get::<VideoPipeline>().unwrap();
|
||||
pipeline.draw(target, encoder, clip_bounds, self.video_id);
|
||||
}
|
||||
}
|
61
iced_video_player/src/shader.wgsl
Normal file
61
iced_video_player/src/shader.wgsl
Normal file
|
@ -0,0 +1,61 @@
|
|||
struct VertexOutput {
|
||||
@builtin(position) position: vec4<f32>,
|
||||
@location(0) uv: vec2<f32>,
|
||||
}
|
||||
|
||||
struct Uniforms {
|
||||
rect: vec4<f32>,
|
||||
}
|
||||
|
||||
@group(0) @binding(0)
|
||||
var tex_y: texture_2d<f32>;
|
||||
|
||||
@group(0) @binding(1)
|
||||
var tex_uv: texture_2d<f32>;
|
||||
|
||||
@group(0) @binding(2)
|
||||
var s: sampler;
|
||||
|
||||
@group(0) @binding(3)
|
||||
var<uniform> uniforms: Uniforms;
|
||||
|
||||
@vertex
|
||||
fn vs_main(@builtin(vertex_index) in_vertex_index: u32) -> VertexOutput {
|
||||
var quad = array<vec4<f32>, 6>(
|
||||
vec4<f32>(uniforms.rect.xy, 0.0, 0.0),
|
||||
vec4<f32>(uniforms.rect.zy, 1.0, 0.0),
|
||||
vec4<f32>(uniforms.rect.xw, 0.0, 1.0),
|
||||
vec4<f32>(uniforms.rect.zy, 1.0, 0.0),
|
||||
vec4<f32>(uniforms.rect.zw, 1.0, 1.0),
|
||||
vec4<f32>(uniforms.rect.xw, 0.0, 1.0),
|
||||
);
|
||||
|
||||
var out: VertexOutput;
|
||||
out.uv = quad[in_vertex_index].zw;
|
||||
out.position = vec4<f32>(quad[in_vertex_index].xy, 1.0, 1.0);
|
||||
return out;
|
||||
}
|
||||
|
||||
@fragment
|
||||
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
|
||||
let yuv2r = vec3<f32>(1.164, 0.0, 1.596);
|
||||
let yuv2g = vec3<f32>(1.164, -0.391, -0.813);
|
||||
let yuv2b = vec3<f32>(1.164, 2.018, 0.0);
|
||||
|
||||
var yuv = vec3<f32>(0.0);
|
||||
yuv.x = textureSample(tex_y, s, in.uv).r - 0.0625;
|
||||
yuv.y = textureSample(tex_uv, s, in.uv).r - 0.5;
|
||||
yuv.z = textureSample(tex_uv, s, in.uv).g - 0.5;
|
||||
|
||||
var rgb = vec3<f32>(0.0);
|
||||
rgb.x = dot(yuv, yuv2r);
|
||||
rgb.y = dot(yuv, yuv2g);
|
||||
rgb.z = dot(yuv, yuv2b);
|
||||
|
||||
let threshold = rgb <= vec3<f32>(0.04045);
|
||||
let hi = pow((rgb + vec3<f32>(0.055)) / vec3<f32>(1.055), vec3<f32>(2.4));
|
||||
let lo = rgb * vec3<f32>(1.0 / 12.92);
|
||||
rgb = select(hi, lo, threshold);
|
||||
|
||||
return vec4<f32>(rgb, 1.0);
|
||||
}
|
655
iced_video_player/src/video.rs
Normal file
655
iced_video_player/src/video.rs
Normal file
|
@ -0,0 +1,655 @@
|
|||
use crate::Error;
|
||||
use gstreamer as gst;
|
||||
use gstreamer_app as gst_app;
|
||||
use gstreamer_app::prelude::*;
|
||||
use iced::widget::image as img;
|
||||
use std::num::NonZeroU8;
|
||||
use std::ops::{Deref, DerefMut};
|
||||
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
|
||||
use std::sync::{Arc, Mutex, RwLock};
|
||||
use std::time::{Duration, Instant};
|
||||
|
||||
/// Position in the media.
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
|
||||
pub enum Position {
|
||||
/// Position based on time.
|
||||
///
|
||||
/// Not the most accurate format for videos.
|
||||
Time(Duration),
|
||||
/// Position based on nth frame.
|
||||
Frame(u64),
|
||||
}
|
||||
|
||||
impl From<Position> for gst::GenericFormattedValue {
|
||||
fn from(pos: Position) -> Self {
|
||||
match pos {
|
||||
Position::Time(t) => gst::ClockTime::from_nseconds(t.as_nanos() as _).into(),
|
||||
Position::Frame(f) => gst::format::Default::from_u64(f).into(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Duration> for Position {
|
||||
fn from(t: Duration) -> Self {
|
||||
Position::Time(t)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<u64> for Position {
|
||||
fn from(f: u64) -> Self {
|
||||
Position::Frame(f)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct Frame(gst::Sample);
|
||||
|
||||
impl Frame {
|
||||
pub fn empty() -> Self {
|
||||
Self(gst::Sample::builder().build())
|
||||
}
|
||||
|
||||
pub fn readable(&self) -> Option<gst::BufferMap<gst::buffer::Readable>> {
|
||||
self.0.buffer().and_then(|x| x.map_readable().ok())
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub(crate) struct Internal {
|
||||
pub(crate) id: u64,
|
||||
|
||||
pub(crate) bus: gst::Bus,
|
||||
pub(crate) source: gst::Pipeline,
|
||||
pub(crate) alive: Arc<AtomicBool>,
|
||||
pub(crate) worker: Option<std::thread::JoinHandle<()>>,
|
||||
|
||||
pub(crate) width: i32,
|
||||
pub(crate) height: i32,
|
||||
pub(crate) framerate: f64,
|
||||
pub(crate) duration: Duration,
|
||||
pub(crate) speed: f64,
|
||||
pub(crate) sync_av: bool,
|
||||
|
||||
pub(crate) frame: Arc<Mutex<Frame>>,
|
||||
pub(crate) upload_frame: Arc<AtomicBool>,
|
||||
pub(crate) last_frame_time: Arc<Mutex<Instant>>,
|
||||
pub(crate) looping: bool,
|
||||
pub(crate) is_eos: bool,
|
||||
pub(crate) restart_stream: bool,
|
||||
pub(crate) sync_av_avg: u64,
|
||||
pub(crate) sync_av_counter: u64,
|
||||
|
||||
pub(crate) subtitle_text: Arc<Mutex<Option<String>>>,
|
||||
pub(crate) upload_text: Arc<AtomicBool>,
|
||||
}
|
||||
|
||||
impl Internal {
|
||||
pub(crate) fn seek(&self, position: impl Into<Position>, accurate: bool) -> Result<(), Error> {
|
||||
let position = position.into();
|
||||
|
||||
// gstreamer complains if the start & end value types aren't the same
|
||||
match &position {
|
||||
Position::Time(_) => self.source.seek(
|
||||
self.speed,
|
||||
gst::SeekFlags::FLUSH
|
||||
| if accurate {
|
||||
gst::SeekFlags::ACCURATE
|
||||
} else {
|
||||
gst::SeekFlags::empty()
|
||||
},
|
||||
gst::SeekType::Set,
|
||||
gst::GenericFormattedValue::from(position),
|
||||
gst::SeekType::Set,
|
||||
gst::ClockTime::NONE,
|
||||
)?,
|
||||
Position::Frame(_) => self.source.seek(
|
||||
self.speed,
|
||||
gst::SeekFlags::FLUSH
|
||||
| if accurate {
|
||||
gst::SeekFlags::ACCURATE
|
||||
} else {
|
||||
gst::SeekFlags::empty()
|
||||
},
|
||||
gst::SeekType::Set,
|
||||
gst::GenericFormattedValue::from(position),
|
||||
gst::SeekType::Set,
|
||||
gst::format::Default::NONE,
|
||||
)?,
|
||||
};
|
||||
|
||||
*self.subtitle_text.lock().expect("lock subtitle_text") = None;
|
||||
self.upload_text.store(true, Ordering::SeqCst);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn set_speed(&mut self, speed: f64) -> Result<(), Error> {
|
||||
let Some(position) = self.source.query_position::<gst::ClockTime>() else {
|
||||
return Err(Error::Caps);
|
||||
};
|
||||
if speed > 0.0 {
|
||||
self.source.seek(
|
||||
speed,
|
||||
gst::SeekFlags::FLUSH | gst::SeekFlags::ACCURATE,
|
||||
gst::SeekType::Set,
|
||||
position,
|
||||
gst::SeekType::End,
|
||||
gst::ClockTime::from_seconds(0),
|
||||
)?;
|
||||
} else {
|
||||
self.source.seek(
|
||||
speed,
|
||||
gst::SeekFlags::FLUSH | gst::SeekFlags::ACCURATE,
|
||||
gst::SeekType::Set,
|
||||
gst::ClockTime::from_seconds(0),
|
||||
gst::SeekType::Set,
|
||||
position,
|
||||
)?;
|
||||
}
|
||||
self.speed = speed;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn restart_stream(&mut self) -> Result<(), Error> {
|
||||
self.is_eos = false;
|
||||
self.set_paused(false);
|
||||
self.seek(0, false)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn set_paused(&mut self, paused: bool) {
|
||||
self.source
|
||||
.set_state(if paused {
|
||||
gst::State::Paused
|
||||
} else {
|
||||
gst::State::Playing
|
||||
})
|
||||
.unwrap(/* state was changed in ctor; state errors caught there */);
|
||||
|
||||
// Set restart_stream flag to make the stream restart on the next Message::NextFrame
|
||||
if self.is_eos && !paused {
|
||||
self.restart_stream = true;
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn paused(&self) -> bool {
|
||||
self.source.state(gst::ClockTime::ZERO).1 == gst::State::Paused
|
||||
}
|
||||
|
||||
/// Syncs audio with video when there is (inevitably) latency presenting the frame.
|
||||
pub(crate) fn set_av_offset(&mut self, offset: Duration) {
|
||||
if self.sync_av {
|
||||
self.sync_av_counter += 1;
|
||||
self.sync_av_avg = self.sync_av_avg * (self.sync_av_counter - 1) / self.sync_av_counter
|
||||
+ offset.as_nanos() as u64 / self.sync_av_counter;
|
||||
if self.sync_av_counter % 128 == 0 {
|
||||
self.source
|
||||
.set_property("av-offset", -(self.sync_av_avg as i64));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A multimedia video loaded from a URI (e.g., a local file path or HTTP stream).
|
||||
#[derive(Debug)]
|
||||
pub struct Video(pub(crate) RwLock<Internal>);
|
||||
|
||||
impl Drop for Video {
|
||||
fn drop(&mut self) {
|
||||
let inner = self.0.get_mut().expect("failed to lock");
|
||||
|
||||
inner
|
||||
.source
|
||||
.set_state(gst::State::Null)
|
||||
.expect("failed to set state");
|
||||
|
||||
inner.alive.store(false, Ordering::SeqCst);
|
||||
if let Some(worker) = inner.worker.take() {
|
||||
if let Err(err) = worker.join() {
|
||||
match err.downcast_ref::<String>() {
|
||||
Some(e) => log::error!("Video thread panicked: {e}"),
|
||||
None => log::error!("Video thread panicked with unknown reason"),
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Video {
|
||||
/// Create a new video player from a given video which loads from `uri`.
|
||||
/// Note that live sources will report the duration to be zero.
|
||||
pub fn new(uri: url::Url) -> Result<Self, Error> {
|
||||
gst::init()?;
|
||||
|
||||
let pipeline = format!("playbin uri=\"{}\" text-sink=\"appsink name=iced_text sync=true drop=true\" video-sink=\"videoscale ! videoconvert ! appsink name=iced_video drop=true caps=video/x-raw,format=NV12,pixel-aspect-ratio=1/1\"", &uri.as_str());
|
||||
let pipeline = gst::parse::launch(pipeline.as_ref())?
|
||||
.downcast::<gst::Pipeline>()
|
||||
.map_err(|_| Error::Cast)?;
|
||||
|
||||
let video_sink: gst::Element = pipeline.property("video-sink");
|
||||
let pad = video_sink.pads().first().cloned().unwrap();
|
||||
let pad = pad.dynamic_cast::<gst::GhostPad>().unwrap();
|
||||
let bin = pad
|
||||
.parent_element()
|
||||
.unwrap()
|
||||
.downcast::<gst::Bin>()
|
||||
.unwrap();
|
||||
let video_sink = bin.by_name("iced_video").unwrap();
|
||||
let video_sink = video_sink.downcast::<gst_app::AppSink>().unwrap();
|
||||
|
||||
let text_sink: gst::Element = pipeline.property("text-sink");
|
||||
let text_sink = text_sink.downcast::<gst_app::AppSink>().unwrap();
|
||||
|
||||
Self::from_gst_pipeline(pipeline, video_sink, Some(text_sink))
|
||||
}
|
||||
|
||||
/// Creates a new video based on an existing GStreamer pipeline and appsink.
|
||||
/// Expects an `appsink` plugin with `caps=video/x-raw,format=NV12`.
|
||||
///
|
||||
/// An optional `text_sink` can be provided, which enables subtitle messages
|
||||
/// to be emitted.
|
||||
///
|
||||
/// **Note:** Many functions of [`Video`] assume a `playbin` pipeline.
|
||||
/// Non-`playbin` pipelines given here may not have full functionality.
|
||||
pub fn from_gst_pipeline(
|
||||
pipeline: gst::Pipeline,
|
||||
video_sink: gst_app::AppSink,
|
||||
text_sink: Option<gst_app::AppSink>,
|
||||
) -> Result<Self, Error> {
|
||||
gst::init()?;
|
||||
static NEXT_ID: AtomicU64 = AtomicU64::new(0);
|
||||
let id = NEXT_ID.fetch_add(1, Ordering::SeqCst);
|
||||
|
||||
// We need to ensure we stop the pipeline if we hit an error,
|
||||
// or else there may be audio left playing in the background.
|
||||
macro_rules! cleanup {
|
||||
($expr:expr) => {
|
||||
$expr.map_err(|e| {
|
||||
let _ = pipeline.set_state(gst::State::Null);
|
||||
e
|
||||
})
|
||||
};
|
||||
}
|
||||
|
||||
let pad = video_sink.pads().first().cloned().unwrap();
|
||||
|
||||
cleanup!(pipeline.set_state(gst::State::Playing))?;
|
||||
|
||||
// wait for up to 5 seconds until the decoder gets the source capabilities
|
||||
cleanup!(pipeline.state(gst::ClockTime::from_seconds(5)).0)?;
|
||||
|
||||
// extract resolution and framerate
|
||||
// TODO(jazzfool): maybe we want to extract some other information too?
|
||||
let caps = cleanup!(pad.current_caps().ok_or(Error::Caps))?;
|
||||
let s = cleanup!(caps.structure(0).ok_or(Error::Caps))?;
|
||||
let width = cleanup!(s.get::<i32>("width").map_err(|_| Error::Caps))?;
|
||||
let height = cleanup!(s.get::<i32>("height").map_err(|_| Error::Caps))?;
|
||||
// resolution should be mod4
|
||||
let width = ((width + 4 - 1) / 4) * 4;
|
||||
let framerate = cleanup!(s.get::<gst::Fraction>("framerate").map_err(|_| Error::Caps))?;
|
||||
let framerate = framerate.numer() as f64 / framerate.denom() as f64;
|
||||
|
||||
if framerate.is_nan()
|
||||
|| framerate.is_infinite()
|
||||
|| framerate < 0.0
|
||||
|| framerate.abs() < f64::EPSILON
|
||||
{
|
||||
let _ = pipeline.set_state(gst::State::Null);
|
||||
return Err(Error::Framerate(framerate));
|
||||
}
|
||||
|
||||
let duration = Duration::from_nanos(
|
||||
pipeline
|
||||
.query_duration::<gst::ClockTime>()
|
||||
.map(|duration| duration.nseconds())
|
||||
.unwrap_or(0),
|
||||
);
|
||||
|
||||
let sync_av = pipeline.has_property("av-offset", None);
|
||||
|
||||
// NV12 = 12bpp
|
||||
let frame = Arc::new(Mutex::new(Frame::empty()));
|
||||
let upload_frame = Arc::new(AtomicBool::new(false));
|
||||
let alive = Arc::new(AtomicBool::new(true));
|
||||
let last_frame_time = Arc::new(Mutex::new(Instant::now()));
|
||||
|
||||
let frame_ref = Arc::clone(&frame);
|
||||
let upload_frame_ref = Arc::clone(&upload_frame);
|
||||
let alive_ref = Arc::clone(&alive);
|
||||
let last_frame_time_ref = Arc::clone(&last_frame_time);
|
||||
|
||||
let subtitle_text = Arc::new(Mutex::new(None));
|
||||
let upload_text = Arc::new(AtomicBool::new(false));
|
||||
let subtitle_text_ref = Arc::clone(&subtitle_text);
|
||||
let upload_text_ref = Arc::clone(&upload_text);
|
||||
|
||||
let pipeline_ref = pipeline.clone();
|
||||
|
||||
let worker = std::thread::spawn(move || {
|
||||
let mut clear_subtitles_at = None;
|
||||
|
||||
while alive_ref.load(Ordering::Acquire) {
|
||||
if let Err(gst::FlowError::Error) = (|| -> Result<(), gst::FlowError> {
|
||||
let sample =
|
||||
if pipeline_ref.state(gst::ClockTime::ZERO).1 != gst::State::Playing {
|
||||
video_sink
|
||||
.try_pull_preroll(gst::ClockTime::from_mseconds(16))
|
||||
.ok_or(gst::FlowError::Eos)?
|
||||
} else {
|
||||
video_sink
|
||||
.try_pull_sample(gst::ClockTime::from_mseconds(16))
|
||||
.ok_or(gst::FlowError::Eos)?
|
||||
};
|
||||
|
||||
*last_frame_time_ref
|
||||
.lock()
|
||||
.map_err(|_| gst::FlowError::Error)? = Instant::now();
|
||||
|
||||
let frame_segment = sample.segment().cloned().ok_or(gst::FlowError::Error)?;
|
||||
let buffer = sample.buffer().ok_or(gst::FlowError::Error)?;
|
||||
let frame_pts = buffer.pts().ok_or(gst::FlowError::Error)?;
|
||||
let frame_duration = buffer.duration().ok_or(gst::FlowError::Error)?;
|
||||
{
|
||||
let mut frame_guard =
|
||||
frame_ref.lock().map_err(|_| gst::FlowError::Error)?;
|
||||
*frame_guard = Frame(sample);
|
||||
}
|
||||
|
||||
upload_frame_ref.swap(true, Ordering::SeqCst);
|
||||
|
||||
if let Some(at) = clear_subtitles_at {
|
||||
if frame_pts >= at {
|
||||
*subtitle_text_ref
|
||||
.lock()
|
||||
.map_err(|_| gst::FlowError::Error)? = None;
|
||||
upload_text_ref.store(true, Ordering::SeqCst);
|
||||
clear_subtitles_at = None;
|
||||
}
|
||||
}
|
||||
|
||||
let text = text_sink
|
||||
.as_ref()
|
||||
.and_then(|sink| sink.try_pull_sample(gst::ClockTime::from_seconds(0)));
|
||||
if let Some(text) = text {
|
||||
let text_segment = text.segment().ok_or(gst::FlowError::Error)?;
|
||||
let text = text.buffer().ok_or(gst::FlowError::Error)?;
|
||||
let text_pts = text.pts().ok_or(gst::FlowError::Error)?;
|
||||
let text_duration = text.duration().ok_or(gst::FlowError::Error)?;
|
||||
|
||||
let frame_running_time = frame_segment.to_running_time(frame_pts).value();
|
||||
let frame_running_time_end = frame_segment
|
||||
.to_running_time(frame_pts + frame_duration)
|
||||
.value();
|
||||
|
||||
let text_running_time = text_segment.to_running_time(text_pts).value();
|
||||
let text_running_time_end = text_segment
|
||||
.to_running_time(text_pts + text_duration)
|
||||
.value();
|
||||
|
||||
// see gst-plugins-base/ext/pango/gstbasetextoverlay.c (gst_base_text_overlay_video_chain)
|
||||
// as an example of how to correctly synchronize the text+video segments
|
||||
if text_running_time_end > frame_running_time
|
||||
&& frame_running_time_end > text_running_time
|
||||
{
|
||||
let duration = text.duration().unwrap_or(gst::ClockTime::ZERO);
|
||||
let map = text.map_readable().map_err(|_| gst::FlowError::Error)?;
|
||||
|
||||
let text = std::str::from_utf8(map.as_slice())
|
||||
.map_err(|_| gst::FlowError::Error)?
|
||||
.to_string();
|
||||
*subtitle_text_ref
|
||||
.lock()
|
||||
.map_err(|_| gst::FlowError::Error)? = Some(text);
|
||||
upload_text_ref.store(true, Ordering::SeqCst);
|
||||
|
||||
clear_subtitles_at = Some(text_pts + duration);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
})() {
|
||||
log::error!("error pulling frame");
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
Ok(Video(RwLock::new(Internal {
|
||||
id,
|
||||
|
||||
bus: pipeline.bus().unwrap(),
|
||||
source: pipeline,
|
||||
alive,
|
||||
worker: Some(worker),
|
||||
|
||||
width,
|
||||
height,
|
||||
framerate,
|
||||
duration,
|
||||
speed: 1.0,
|
||||
sync_av,
|
||||
|
||||
frame,
|
||||
upload_frame,
|
||||
last_frame_time,
|
||||
looping: false,
|
||||
is_eos: false,
|
||||
restart_stream: false,
|
||||
sync_av_avg: 0,
|
||||
sync_av_counter: 0,
|
||||
|
||||
subtitle_text,
|
||||
upload_text,
|
||||
})))
|
||||
}
|
||||
|
||||
    pub(crate) fn read(&self) -> impl Deref<Target = Internal> + '_ {
        self.0.read().expect("lock")
    }

    pub(crate) fn write(&self) -> impl DerefMut<Target = Internal> + '_ {
        self.0.write().expect("lock")
    }

    pub(crate) fn get_mut(&mut self) -> impl DerefMut<Target = Internal> + '_ {
        self.0.get_mut().expect("lock")
    }

    /// Get the size/resolution of the video as `(width, height)`.
    pub fn size(&self) -> (i32, i32) {
        (self.read().width, self.read().height)
    }

    /// Get the framerate of the video as frames per second.
    pub fn framerate(&self) -> f64 {
        self.read().framerate
    }

    /// Set the volume multiplier of the audio.
    /// `0.0` = 0% volume, `1.0` = 100% volume.
    ///
    /// This uses a linear scale, for example `0.5` is perceived as half as loud.
    pub fn set_volume(&mut self, volume: f64) {
        self.get_mut().source.set_property("volume", volume);
        self.set_muted(self.muted()); // for some reason gstreamer unmutes when changing volume?
    }

    /// Get the volume multiplier of the audio.
    pub fn volume(&self) -> f64 {
        self.read().source.property("volume")
    }

    /// Set if the audio is muted or not, without changing the volume.
    pub fn set_muted(&mut self, muted: bool) {
        self.get_mut().source.set_property("mute", muted);
    }

    /// Get if the audio is muted or not.
    pub fn muted(&self) -> bool {
        self.read().source.property("mute")
    }

    /// Get if the stream ended or not.
    pub fn eos(&self) -> bool {
        self.read().is_eos
    }

    /// Get if the media will loop or not.
    pub fn looping(&self) -> bool {
        self.read().looping
    }

    /// Set if the media will loop or not.
    pub fn set_looping(&mut self, looping: bool) {
        self.get_mut().looping = looping;
    }

    /// Set if the media is paused or not.
    pub fn set_paused(&mut self, paused: bool) {
        self.get_mut().set_paused(paused)
    }

    /// Get if the media is paused or not.
    pub fn paused(&self) -> bool {
        self.read().paused()
    }

    /// Jumps to a specific position in the media.
    ///
    /// Passing `true` to the `accurate` parameter results in more accurate seeking;
    /// however, it is also slower. For most seeks (e.g., scrubbing) this is not needed.
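    ///
    /// # Example
    ///
    /// Illustrative sketch (not compiled): this assumes `Position` is re-exported at the
    /// crate root and convertible from a `std::time::Duration`, as in upstream
    /// `iced_video_player`.
    ///
    /// ```ignore
    /// // Fast, approximate seek while the user is scrubbing:
    /// video.seek(std::time::Duration::from_secs(30), false)?;
    /// // Frame-accurate seek once the scrubber is released:
    /// video.seek(std::time::Duration::from_secs(30), true)?;
    /// ```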
    pub fn seek(&mut self, position: impl Into<Position>, accurate: bool) -> Result<(), Error> {
        self.get_mut().seek(position, accurate)
    }

    /// Set the playback speed of the media.
    /// The default speed is `1.0`.
    pub fn set_speed(&mut self, speed: f64) -> Result<(), Error> {
        self.get_mut().set_speed(speed)
    }

    /// Get the current playback speed.
    pub fn speed(&self) -> f64 {
        self.read().speed
    }

    /// Get the current playback position in time.
    pub fn position(&self) -> Duration {
        Duration::from_nanos(
            self.read()
                .source
                .query_position::<gst::ClockTime>()
                .map_or(0, |pos| pos.nseconds()),
        )
    }

    /// Get the media duration.
    pub fn duration(&self) -> Duration {
        self.read().duration
    }

    /// Restarts a stream; seeks to the first frame and unpauses, sets the `eos` flag to false.
    pub fn restart_stream(&mut self) -> Result<(), Error> {
        self.get_mut().restart_stream()
    }

    /// Set the subtitle URL to display.
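    ///
    /// # Example
    ///
    /// Illustrative sketch (not compiled); the subtitle path here is made up:
    ///
    /// ```ignore
    /// let subs = url::Url::from_file_path("/path/to/subtitles.srt").unwrap();
    /// video.set_subtitle_url(&subs)?;
    /// ```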
    pub fn set_subtitle_url(&mut self, url: &url::Url) -> Result<(), Error> {
        let paused = self.paused();
        let mut inner = self.get_mut();
        inner.source.set_state(gst::State::Ready)?;
        inner.source.set_property("suburi", url.as_str());
        inner.set_paused(paused);
        Ok(())
    }

    /// Get the current subtitle URL.
    pub fn subtitle_url(&self) -> Option<url::Url> {
        url::Url::parse(&self.read().source.property::<String>("suburi")).ok()
    }

    /// Get the underlying GStreamer pipeline.
    pub fn pipeline(&self) -> gst::Pipeline {
        self.read().source.clone()
    }

    /// Generates a list of thumbnails based on a set of positions in the media, downscaled by a given factor.
    ///
    /// Slow; only needs to be called once for each instance.
    /// It's best to call this at the very start of playback, otherwise the position may shift.
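    ///
    /// # Example
    ///
    /// Illustrative sketch (not compiled); assumes a `Position::Time` variant as in upstream
    /// `iced_video_player`:
    ///
    /// ```ignore
    /// use std::{num::NonZeroU8, time::Duration};
    ///
    /// // Four thumbnails at 0s, 10s, 20s and 30s, at quarter resolution:
    /// let thumbs = video.thumbnails(
    ///     (0..4).map(|i| Position::Time(Duration::from_secs(i * 10))),
    ///     NonZeroU8::new(4).unwrap(),
    /// )?;
    /// ```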
    pub fn thumbnails<I>(
        &mut self,
        positions: I,
        downscale: NonZeroU8,
    ) -> Result<Vec<img::Handle>, Error>
    where
        I: IntoIterator<Item = Position>,
    {
        let downscale = u8::from(downscale) as u32;

        let paused = self.paused();
        let muted = self.muted();
        let pos = self.position();

        self.set_paused(false);
        self.set_muted(true);

        let out = {
            let inner = self.read();
            let width = inner.width;
            let height = inner.height;
            positions
                .into_iter()
                .map(|pos| {
                    inner.seek(pos, true)?;
                    inner.upload_frame.store(false, Ordering::SeqCst);
                    while !inner.upload_frame.load(Ordering::SeqCst) {
                        std::hint::spin_loop();
                    }
                    let frame_guard = inner.frame.lock().map_err(|_| Error::Lock)?;
                    let frame = frame_guard.readable().ok_or(Error::Lock)?;

                    Ok(img::Handle::from_rgba(
                        inner.width as u32 / downscale,
                        inner.height as u32 / downscale,
                        yuv_to_rgba(frame.as_slice(), width as _, height as _, downscale),
                    ))
                })
                .collect()
        };

        self.set_paused(paused);
        self.set_muted(muted);
        self.seek(pos, true)?;

        out
    }
}

/// Converts an NV12 frame (a packed luma plane followed by an interleaved UV plane) into
/// RGBA8, skipping pixels to downscale by an integer factor.
fn yuv_to_rgba(yuv: &[u8], width: u32, height: u32, downscale: u32) -> Vec<u8> {
    // The UV plane starts right after the full-resolution Y plane.
    let uv_start = width * height;
    let mut rgba = vec![];

    for y in 0..height / downscale {
        for x in 0..width / downscale {
            let x_src = x * downscale;
            let y_src = y * downscale;

            // UV is subsampled 2x2 and interleaved, so each UV pair covers a 2x2 block.
            let uv_i = uv_start + width * (y_src / 2) + x_src / 2 * 2;

            let y = yuv[(y_src * width + x_src) as usize] as f32;
            // Chroma samples are centered around 128.
            let u = yuv[uv_i as usize] as f32 - 128.0;
            let v = yuv[(uv_i + 1) as usize] as f32 - 128.0;

            // BT.709 YUV -> RGB conversion, clamped to the valid byte range.
            let r = (y + 1.5748 * v).clamp(0.0, 255.0);
            let g = (y - 0.1873 * u - 0.4681 * v).clamp(0.0, 255.0);
            let b = (y + 1.8556 * u).clamp(0.0, 255.0);

            rgba.push(r as u8);
            rgba.push(g as u8);
            rgba.push(b as u8);
            rgba.push(0xFF);
        }
    }

    rgba
}
305
iced_video_player/src/video_player.rs
Normal file
@@ -0,0 +1,305 @@
use crate::{pipeline::VideoPrimitive, video::Video};
use gstreamer as gst;
use iced::{
    advanced::{self, layout, widget, Widget},
    Element,
};
use iced_wgpu::primitive::Renderer as PrimitiveRenderer;
use log::error;
use std::{marker::PhantomData, sync::atomic::Ordering};
use std::{sync::Arc, time::Instant};

/// Video player widget which displays the current frame of a [`Video`](crate::Video).
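///
/// # Example
///
/// Illustrative sketch (not compiled); `Message` and its `VideoEnded`/`VideoError` variants are
/// hypothetical application types, and `video` is a `Video` constructed elsewhere:
///
/// ```ignore
/// VideoPlayer::new(video)
///     .width(iced::Length::Fill)
///     .height(iced::Length::Fill)
///     .content_fit(iced::ContentFit::Cover)
///     .on_end_of_stream(Message::VideoEnded)
///     .on_error(|err| Message::VideoError(err.to_string()))
///     .into()
/// ```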
pub struct VideoPlayer<Message, Theme = iced::Theme, Renderer = iced::Renderer>
where
    Renderer: PrimitiveRenderer,
{
    video: Video,
    content_fit: iced::ContentFit,
    width: iced::Length,
    height: iced::Length,
    on_end_of_stream: Option<Message>,
    on_new_frame: Option<Message>,
    on_subtitle_text: Option<Box<dyn Fn(Option<String>) -> Message>>,
    on_error: Option<Box<dyn Fn(&glib::Error) -> Message>>,
    _phantom: PhantomData<(Theme, Renderer)>,
}

impl<Message, Theme, Renderer> VideoPlayer<Message, Theme, Renderer>
where
    Renderer: PrimitiveRenderer,
{
    /// Creates a new video player widget for a given video.
    pub fn new(video: Video) -> Self {
        VideoPlayer {
            video,
            content_fit: iced::ContentFit::default(),
            width: iced::Length::Shrink,
            height: iced::Length::Shrink,
            on_end_of_stream: None,
            on_new_frame: None,
            on_subtitle_text: None,
            on_error: None,
            _phantom: Default::default(),
        }
    }

    /// Sets the width of the `VideoPlayer` boundaries.
    pub fn width(self, width: impl Into<iced::Length>) -> Self {
        VideoPlayer {
            width: width.into(),
            ..self
        }
    }

    /// Sets the height of the `VideoPlayer` boundaries.
    pub fn height(self, height: impl Into<iced::Length>) -> Self {
        VideoPlayer {
            height: height.into(),
            ..self
        }
    }

    /// Sets the `ContentFit` of the `VideoPlayer`.
    pub fn content_fit(self, content_fit: iced::ContentFit) -> Self {
        VideoPlayer {
            content_fit,
            ..self
        }
    }

    /// Message to send when the video reaches the end of stream (i.e., the video ends).
    pub fn on_end_of_stream(self, on_end_of_stream: Message) -> Self {
        VideoPlayer {
            on_end_of_stream: Some(on_end_of_stream),
            ..self
        }
    }

    /// Message to send when the video receives a new frame.
    pub fn on_new_frame(self, on_new_frame: Message) -> Self {
        VideoPlayer {
            on_new_frame: Some(on_new_frame),
            ..self
        }
    }

    /// Message to send when the displayed subtitle text changes (`None` clears the subtitle).
    pub fn on_subtitle_text<F>(self, on_subtitle_text: F) -> Self
    where
        F: 'static + Fn(Option<String>) -> Message,
    {
        VideoPlayer {
            on_subtitle_text: Some(Box::new(on_subtitle_text)),
            ..self
        }
    }

    /// Message to send when the video playback encounters an error.
    pub fn on_error<F>(self, on_error: F) -> Self
    where
        F: 'static + Fn(&glib::Error) -> Message,
    {
        VideoPlayer {
            on_error: Some(Box::new(on_error)),
            ..self
        }
    }
}

impl<Message, Theme, Renderer> Widget<Message, Theme, Renderer>
    for VideoPlayer<Message, Theme, Renderer>
where
    Message: Clone,
    Renderer: PrimitiveRenderer,
{
    fn size(&self) -> iced::Size<iced::Length> {
        iced::Size {
            width: iced::Length::Shrink,
            height: iced::Length::Shrink,
        }
    }

    fn layout(
        &self,
        _tree: &mut widget::Tree,
        _renderer: &Renderer,
        limits: &layout::Limits,
    ) -> layout::Node {
        let (video_width, video_height) = self.video.size();

        // based on `Image::layout`
        let image_size = iced::Size::new(video_width as f32, video_height as f32);
        let raw_size = limits.resolve(self.width, self.height, image_size);
        let full_size = self.content_fit.fit(image_size, raw_size);
        let final_size = iced::Size {
            width: match self.width {
                iced::Length::Shrink => f32::min(raw_size.width, full_size.width),
                _ => raw_size.width,
            },
            height: match self.height {
                iced::Length::Shrink => f32::min(raw_size.height, full_size.height),
                _ => raw_size.height,
            },
        };

        layout::Node::new(final_size)
    }

    fn draw(
        &self,
        _tree: &widget::Tree,
        renderer: &mut Renderer,
        _theme: &Theme,
        _style: &advanced::renderer::Style,
        layout: advanced::Layout<'_>,
        _cursor: advanced::mouse::Cursor,
        _viewport: &iced::Rectangle,
    ) {
        let mut inner = self.video.write();

        // bounds based on `Image::draw`
        let image_size = iced::Size::new(inner.width as f32, inner.height as f32);
        let bounds = layout.bounds();
        let adjusted_fit = self.content_fit.fit(image_size, bounds.size());
        let scale = iced::Vector::new(
            adjusted_fit.width / image_size.width,
            adjusted_fit.height / image_size.height,
        );
        let final_size = image_size * scale;

        let position = match self.content_fit {
            iced::ContentFit::None => iced::Point::new(
                bounds.x + (image_size.width - adjusted_fit.width) / 2.0,
                bounds.y + (image_size.height - adjusted_fit.height) / 2.0,
            ),
            _ => iced::Point::new(
                bounds.center_x() - final_size.width / 2.0,
                bounds.center_y() - final_size.height / 2.0,
            ),
        };

        let drawing_bounds = iced::Rectangle::new(position, final_size);

        let upload_frame = inner.upload_frame.swap(false, Ordering::SeqCst);

        if upload_frame {
            let last_frame_time = inner
                .last_frame_time
                .lock()
                .map(|time| *time)
                .unwrap_or_else(|_| Instant::now());
            inner.set_av_offset(Instant::now() - last_frame_time);
        }

        let render = |renderer: &mut Renderer| {
            renderer.draw_primitive(
                drawing_bounds,
                VideoPrimitive::new(
                    inner.id,
                    Arc::clone(&inner.alive),
                    Arc::clone(&inner.frame),
                    (inner.width as _, inner.height as _),
                    upload_frame,
                ),
            );
        };

        if adjusted_fit.width > bounds.width || adjusted_fit.height > bounds.height {
            renderer.with_layer(bounds, render);
        } else {
            render(renderer);
        }
    }

    fn update(
        &mut self,
        _state: &mut widget::Tree,
        event: &iced::Event,
        _layout: advanced::Layout<'_>,
        _cursor: advanced::mouse::Cursor,
        _renderer: &Renderer,
        _clipboard: &mut dyn advanced::Clipboard,
        shell: &mut advanced::Shell<'_, Message>,
        _viewport: &iced::Rectangle,
    ) {
        let mut inner = self.video.write();

        if let iced::Event::Window(iced::window::Event::RedrawRequested(_)) = event {
            if inner.restart_stream || (!inner.is_eos && !inner.paused()) {
                let mut restart_stream = false;
                if inner.restart_stream {
                    restart_stream = true;
                    // Set flag to false to avoid potentially multiple seeks
                    inner.restart_stream = false;
                }
                let mut eos_pause = false;

                while let Some(msg) = inner
                    .bus
                    .pop_filtered(&[gst::MessageType::Error, gst::MessageType::Eos])
                {
                    match msg.view() {
                        gst::MessageView::Error(err) => {
                            error!("bus returned an error: {err}");
                            if let Some(ref on_error) = self.on_error {
                                shell.publish(on_error(&err.error()))
                            };
                        }
                        gst::MessageView::Eos(_eos) => {
                            if let Some(on_end_of_stream) = self.on_end_of_stream.clone() {
                                shell.publish(on_end_of_stream);
                            }
                            if inner.looping {
                                restart_stream = true;
                            } else {
                                eos_pause = true;
                            }
                        }
                        _ => {}
                    }
                }

                // Don't run eos_pause if restart_stream is true; fixes "pausing" after restarting a stream
                if restart_stream {
                    if let Err(err) = inner.restart_stream() {
                        error!("cannot restart stream (can't seek): {err:#?}");
                    }
                } else if eos_pause {
                    inner.is_eos = true;
                    inner.set_paused(true);
                }

                if inner.upload_frame.load(Ordering::SeqCst) {
                    if let Some(on_new_frame) = self.on_new_frame.clone() {
                        shell.publish(on_new_frame);
                    }
                }

                if let Some(on_subtitle_text) = &self.on_subtitle_text {
                    if inner.upload_text.swap(false, Ordering::SeqCst) {
                        if let Ok(text) = inner.subtitle_text.try_lock() {
                            shell.publish(on_subtitle_text(text.clone()));
                        }
                    }
                }

                shell.request_redraw();
            } else {
                shell.request_redraw();
            }
        }
    }
}

impl<Message, Theme, Renderer> From<VideoPlayer<Message, Theme, Renderer>>
    for Element<'_, Message, Theme, Renderer>
where
    Message: 'static + Clone,
    Theme: 'static,
    Renderer: 'static + PrimitiveRenderer,
{
    fn from(video_player: VideoPlayer<Message, Theme, Renderer>) -> Self {
        Self::new(video_player)
    }
}
BIN
resources/icon.png
Normal file
Binary file not shown. (Size: 317 KiB)
BIN
resources/placeholder.png
Normal file
Binary file not shown. (Size: 13 KiB)
BIN
resources/wutheringwaves-bg.mp4
Normal file
Binary file not shown.
358
src/main.rs
Normal file
@@ -0,0 +1,358 @@
#![feature(let_chains)]
use ::image::ImageReader;
use iced::{
    Alignment::Center,
    Color, Element, Length, Renderer, Size, Task, Theme,
    alignment::Vertical::Top,
    border, gradient,
    widget::{Column, Space, center, column, container, image, row, stack, text},
    window::{self, Settings, icon, settings::PlatformSpecific},
};
use iced_video_player::{Video, VideoPlayer};
use serde::{Deserialize, Serialize};
use std::{
    fs::{self, create_dir_all, read_to_string},
    io::{Cursor, Write},
    thread::sleep,
};
use tempfile::NamedTempFile;

#[derive(rust_embed::Embed)]
#[folder = "resources"]
struct Assets;

pub fn main() -> iced::Result {
    let icon_file = Assets::get("icon.png").unwrap();
    let icon_image = ImageReader::new(Cursor::new(icon_file.data))
        .with_guessed_format()
        .unwrap()
        .decode()
        .unwrap();
    let rgba_vec = icon_image.as_rgba8().unwrap().clone().into_vec();

    let settings = Settings {
        decorations: false,
        icon: Some(icon::from_rgba(rgba_vec, icon_image.width(), icon_image.height()).unwrap()),
        size: Size::new(2000.0, 1000.0),
        maximized: false,
        fullscreen: false,
        position: window::Position::Centered,
        max_size: None,
        min_size: None,
        visible: true,
        resizable: true,
        transparent: false,
        level: window::Level::Normal,
        platform_specific: PlatformSpecific {
            drag_and_drop: false,
            skip_taskbar: false,
            undecorated_shadow: false,
        },
        exit_on_close_request: true,
    };

    iced::application(Launcher::boot, Launcher::update, Launcher::view)
        // .subscription(Launcher::subscription)
        .title(Launcher::title)
        .window(settings)
        .window_size((1280.0, 760.0))
        .run()
}

#[derive(Debug, PartialEq, Clone, Default, Serialize, Deserialize)]
enum PossibleGames {
    #[default]
    WutheringWaves,
    ZenlessZoneZero,
    HonkaiStarRail,
    GenshinImpact,
}

#[derive(Debug)]
enum Launcher {
    Loading,
    Loaded(State),
}

#[derive(Debug, Default, Clone)]
struct State {
    selected_game: PossibleGames,
    installed_games: Vec<PossibleGames>,
    installed_game_servers: Vec<PossibleGames>,
    db_software_installed: bool,
}

#[derive(Debug, Default, Serialize, Deserialize)]
struct SavedState {
    installed_games: Vec<PossibleGames>,
    installed_game_servers: Vec<PossibleGames>,
    db_software_installed: bool,
}

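// For reference, a `SavedState` serialized with `serde_json::to_string_pretty` (as in
// `State::save` below) looks roughly like this; the values are made up for illustration:
//
//   {
//     "installed_games": ["WutheringWaves"],
//     "installed_game_servers": [],
//     "db_software_installed": false
//   }
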
#[derive(Debug, Clone)]
enum LoadError {
    File,
    Format,
}

#[derive(Debug)]
enum SaveError {
    Write,
    Format,
}

#[derive(Debug, Clone)]
enum Message {
    Loaded(Result<State, LoadError>),
    Tick,
    GameSelected(PossibleGames),
}

impl State {
    fn path() -> std::path::PathBuf {
        let mut path = if let Some(project_dirs) =
            directories::ProjectDirs::from("rs", "reversed-rooms", "launcher")
        {
            project_dirs.data_dir().into()
        } else {
            std::env::current_dir().unwrap_or_default()
        };

        path.push("launcher-state.json");

        path
    }

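    // Note on `Self::path()`: with the `directories` crate this typically resolves to the
    // platform data directory, e.g. `~/.local/share/launcher/launcher-state.json` on Linux
    // (the exact location differs per platform and environment).
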
    fn load() -> Result<State, LoadError> {
        let contents = read_to_string(Self::path()).map_err(|_| LoadError::File)?;

        let saved_state: SavedState =
            serde_json::from_str(&contents).map_err(|_| LoadError::Format)?;

        Ok(State {
            selected_game: PossibleGames::WutheringWaves,
            installed_games: saved_state.installed_games,
            installed_game_servers: saved_state.installed_game_servers,
            db_software_installed: saved_state.db_software_installed,
        })
    }

    async fn save(self) -> Result<(), SaveError> {
        let saved_state = SavedState {
            installed_games: self.installed_games,
            installed_game_servers: self.installed_game_servers,
            db_software_installed: self.db_software_installed,
        };

        let json = serde_json::to_string_pretty(&saved_state).map_err(|_| SaveError::Format)?;

        let path = Self::path();

        if let Some(dir) = path.parent() {
            create_dir_all(dir).map_err(|_| SaveError::Write)?;
        }

        {
            fs::write(path, json.as_bytes()).map_err(|_| SaveError::Write)?;
        }

        sleep(std::time::Duration::from_secs(2));

        Ok(())
    }
}

fn deg_to_rad(deg: f32) -> f32 {
    deg * std::f32::consts::PI / 180.0
}

fn style_container(direction: f32) -> container::Style {
    let angle = deg_to_rad(direction);
    container::Style {
        text_color: Color::from_rgba8(255, 255, 255, 1.0).into(),
        background: Some(
            gradient::Linear::new(angle)
                .add_stop(0.0, Color::from_rgba8(0, 0, 0, 0.0))
                .add_stop(1.0, Color::from_rgba8(0, 0, 0, 0.45))
                .into(),
        ),
        ..container::Style::default()
    }
}

impl Launcher {
    fn boot() -> (Self, Task<Message>) {
        (Self::Loaded(State::default()), Task::none())
    }

    fn title(&self) -> String {
        format!("RR Launcher v{}", env!("CARGO_PKG_VERSION"))
    }

    fn update(&mut self, message: Message) -> Task<Message> {
        match self {
            Launcher::Loading => match message {
                Message::Loaded(Ok(state)) => {
                    *self = Launcher::Loaded(state);
                    Task::none()
                }
                _ => Task::none(),
            },
            _ => Task::none(),
        }
    }

    fn view(&self) -> Element<Message> {
        let topbar = container(row![
            text("launcher... goog...").size(25),
            Space::new(Length::Fill, Length::Fixed(0.0)),
            text("rabbydevs").size(25),
        ])
        .width(Length::Fill)
        .style(move |_| style_container(0.0))
        .padding(10);

        let bottom_bar = container(row![
            text("insert game announcements").size(25),
            Space::new(Length::Fill, Length::Fixed(0.0)),
            container(text("Launch").size(25))
                .padding(10)
                .style(move |_| {
                    container::Style {
                        text_color: Color::from_rgba8(0, 0, 0, 1.0).into(),
                        background: Some(Color::from_rgba8(255, 255, 255, 1.0).into()),
                        border: border::rounded(5),
                        ..container::Style::default()
                    }
                })
        ])
        .width(Length::Fill)
        .style(move |_theme| style_container(180.0))
        .padding(20);

        let user_area: Column<Message, Theme, Renderer> =
            column![topbar, Space::new(Length::Fill, Length::Fill), bottom_bar].width(Length::Fill);

        let content = container(user_area).center(Length::Fill);

        let game_selector = container(
            row![
                text("test").size(25),
                text("test").size(25),
                text("test").size(25),
            ]
            .spacing(10),
        )
        .padding(10)
        .align_y(Top)
        .align_x(Center)
        .width(Length::Fill);

        fn get_game_background(game: &PossibleGames, _video: Option<Video>) -> iced::widget::Image<image::Handle> {
            match game {
                PossibleGames::WutheringWaves => panic!("wuwa doesnt have a image lmao?"),
                PossibleGames::ZenlessZoneZero => {
                    if let Some(img_file) = Assets::get("zenlesszonezero-bg.png") {
                        let img = ImageReader::new(Cursor::new(img_file.data))
                            .with_guessed_format()
                            .unwrap()
                            .decode()
                            .unwrap();
                        let handle = image::Handle::from_rgba(
                            img.width(),
                            img.height(),
                            img.to_rgba8().into_raw(),
                        );
                        return image(handle).content_fit(iced::ContentFit::Fill);
                    }
                }
                PossibleGames::HonkaiStarRail => {
                    if let Some(img_file) = Assets::get("honkaistarrail-bg.png") {
                        let img = ImageReader::new(Cursor::new(img_file.data))
                            .with_guessed_format()
                            .unwrap()
                            .decode()
                            .unwrap();
                        let handle = image::Handle::from_rgba(
                            img.width(),
                            img.height(),
                            img.to_rgba8().into_raw(),
                        );
                        return image(handle).content_fit(iced::ContentFit::Fill);
                    }
                }
                PossibleGames::GenshinImpact => {
                    if let Some(img_file) = Assets::get("genshinimpact-bg.png") {
                        let img = ImageReader::new(Cursor::new(img_file.data))
                            .with_guessed_format()
                            .unwrap()
                            .decode()
                            .unwrap();
                        let handle = image::Handle::from_rgba(
                            img.width(),
                            img.height(),
                            img.to_rgba8().into_raw(),
                        );
                        return image(handle).content_fit(iced::ContentFit::Fill);
                    }
                }
            }

            let bg_file = Assets::get("placeholder.png").unwrap();
            let bg_image = ImageReader::new(Cursor::new(bg_file.data))
                .with_guessed_format()
                .unwrap()
                .decode()
                .unwrap();
            let handle = image::Handle::from_rgba(
                bg_image.width(),
                bg_image.height(),
                bg_image.to_rgba8().into_raw(),
            );
            image(handle).content_fit(iced::ContentFit::Fill)
        }

println!("whuh");
|
||||
match self {
|
||||
Launcher::Loading => loading_message(),
|
||||
Launcher::Loaded(state) => {
|
||||
match state.selected_game {
|
||||
PossibleGames::WutheringWaves => {
|
||||
let video_file = Assets::get("wutheringwaves-bg.mp4").unwrap();
|
||||
let mut temp_file = NamedTempFile::new().unwrap();
|
||||
temp_file.write_all(&video_file.data).unwrap();
|
||||
temp_file.flush().unwrap();
|
||||
|
||||
let temp_path = temp_file.path().to_str().unwrap().to_string();
|
||||
let mut video =
|
||||
Video::new(url::Url::from_file_path(temp_path).unwrap()).unwrap();
|
||||
video.set_looping(true);
|
||||
|
||||
let game_video = video;
|
||||
let player = VideoPlayer::new(game_video);
|
||||
|
||||
stack![player, content, game_selector,].into()
|
||||
}
|
||||
PossibleGames::ZenlessZoneZero => {
|
||||
let bg_image = get_game_background(&state.selected_game, None);
|
||||
|
||||
stack![bg_image, content, game_selector].into()
|
||||
}
|
||||
PossibleGames::HonkaiStarRail => {
|
||||
let bg_image = get_game_background(&state.selected_game, None);
|
||||
|
||||
stack![bg_image, content, game_selector].into()
|
||||
}
|
||||
PossibleGames::GenshinImpact => {
|
||||
let bg_image = get_game_background(&state.selected_game, None);
|
||||
|
||||
stack![bg_image, content, game_selector].into()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn loading_message<'a>() -> Element<'a, Message> {
    center(text("Loading...").size(50)).into()
}