diff --git a/LuaSTG/LuaSTG/GameResource/ResourceManager.h b/LuaSTG/LuaSTG/GameResource/ResourceManager.h
index 431926cde..6d98bd058 100644
--- a/LuaSTG/LuaSTG/GameResource/ResourceManager.h
+++ b/LuaSTG/LuaSTG/GameResource/ResourceManager.h
@@ -98,6 +98,8 @@ namespace luastg
 		// 纹理
 		bool LoadTexture(const char* name, const char* path, bool mipmaps = true) noexcept;
 		bool CreateTexture(const char* name, int width, int height) noexcept;
+		// 视频纹理
+		bool LoadVideo(const char* name, const char* path) noexcept;
 		// 渲染目标
 		bool CreateRenderTarget(const char* name, int width = 0, int height = 0, bool depth_buffer = false) noexcept;
 		// 图片精灵
diff --git a/LuaSTG/LuaSTG/GameResource/ResourcePool.cpp b/LuaSTG/LuaSTG/GameResource/ResourcePool.cpp
index 66d237dd1..8ea4fafda 100644
--- a/LuaSTG/LuaSTG/GameResource/ResourcePool.cpp
+++ b/LuaSTG/LuaSTG/GameResource/ResourcePool.cpp
@@ -227,6 +227,44 @@ namespace luastg
 		return true;
 	}
 
+	bool ResourcePool::LoadVideo(const char* name, const char* path) noexcept
+	{
+		if (m_TexturePool.find(std::string_view(name)) != m_TexturePool.end())
+		{
+			if (ResourceMgr::GetResourceLoadingLog())
+			{
+				spdlog::warn("[luastg] LoadVideo: 纹理 '{}' 已存在,加载操作已取消", name);
+			}
+			return true;
+		}
+
+		core::SmartReference<core::ITexture2D> p_texture;
+		if (!LAPP.getGraphicsDevice()->createVideoTexture(path, p_texture.put()))
+		{
+			spdlog::error("[luastg] 从 '{}' 创建视频纹理 '{}' 失败", path, name);
+			return false;
+		}
+
+		try
+		{
+			core::SmartReference<IResourceTexture> tRes;
+			tRes.attach(new ResourceTextureImpl(name, p_texture.get()));
+			m_TexturePool.emplace(name, tRes);
+		}
+		catch (std::exception const& e)
+		{
+			spdlog::error("[luastg] LoadVideo: 创建视频纹理 '{}' 失败 ({})", name, e.what());
+			return false;
+		}
+
+		if (ResourceMgr::GetResourceLoadingLog())
+		{
+			spdlog::info("[luastg] LoadVideo: 已从 '{}' 加载视频 '{}' ({})", path, name, getResourcePoolTypeName());
+		}
+
+		return true;
+	}
+
 	bool ResourcePool::CreateTexture(const char* name, int width, int height) noexcept { if
(m_TexturePool.find(std::string_view(name)) != m_TexturePool.end())
diff --git a/LuaSTG/LuaSTG/LuaBinding/LW_ResourceMgr.cpp b/LuaSTG/LuaSTG/LuaBinding/LW_ResourceMgr.cpp
index cc00bf119..73785f1d6 100644
--- a/LuaSTG/LuaSTG/LuaBinding/LW_ResourceMgr.cpp
+++ b/LuaSTG/LuaSTG/LuaBinding/LW_ResourceMgr.cpp
@@ -1,6 +1,8 @@
 #include "LuaBinding/LuaWrapper.hpp"
 #include "lua/plus.hpp"
 #include "AppFrame.h"
+#include "d3d11/VideoTexture.hpp"
+#include "core/VideoDecoder.hpp"
 
 void luastg::binding::ResourceManager::Register(lua_State* L) noexcept
 {
@@ -52,6 +54,18 @@ void luastg::binding::ResourceManager::Register(lua_State* L) noexcept
 			return luaL_error(L, "can't load texture from file '%s'.", path);
 		return 0;
 	}
+	static int LoadVideo(lua_State* L) noexcept
+	{
+		const char* name = luaL_checkstring(L, 1);
+		const char* path = luaL_checkstring(L, 2);
+
+		ResourcePool* pActivedPool = LRES.GetActivedPool();
+		if (!pActivedPool)
+			return luaL_error(L, "can't load resource at this time.");
+		if (!pActivedPool->LoadVideo(name, path))
+			return luaL_error(L, "can't load video from file '%s'.", path);
+		return 0;
+	}
 	static int LoadSprite(lua_State* L) noexcept
 	{
 		const char* name = luaL_checkstring(L, 1);
@@ -671,6 +685,78 @@ void luastg::binding::ResourceManager::Register(lua_State* L) noexcept
 		LRES.CacheTTFFontString(luaL_checkstring(L, 1), str, len);
 		return 0;
 	}
+
+	// 视频控制函数
+	static core::IVideoDecoder* GetVideoDecoder(const char* name) noexcept {
+		auto tex = LRES.FindTexture(name);
+		if (!tex) return nullptr;
+
+		auto texture2d = tex->GetTexture();
+		if (!texture2d) return nullptr;
+
+		// 尝试转换为VideoTexture
+		auto video_texture = dynamic_cast<core::VideoTexture*>(texture2d);
+		if (!video_texture) return nullptr;
+
+		return video_texture->getVideoDecoder();
+	}
+
+	static int VideoSeek(lua_State* L) noexcept {
+		const char* name = luaL_checkstring(L, 1);
+		double time = luaL_checknumber(L, 2);
+		auto decoder = GetVideoDecoder(name);
+		if (!decoder)
+			return luaL_error(L, "video texture '%s' not
found.", name); + lua_pushboolean(L, decoder->seek(time)); + return 1; + } + + static int VideoSetLooping(lua_State* L) noexcept { + const char* name = luaL_checkstring(L, 1); + bool loop = lua_toboolean(L, 2) != 0; + auto decoder = GetVideoDecoder(name); + if (!decoder) + return luaL_error(L, "video texture '%s' not found.", name); + decoder->setLooping(loop); + return 0; + } + + static int VideoUpdate(lua_State* L) noexcept { + const char* name = luaL_checkstring(L, 1); + double time = luaL_checknumber(L, 2); + auto decoder = GetVideoDecoder(name); + if (!decoder) + return luaL_error(L, "video texture '%s' not found.", name); + lua_pushboolean(L, decoder->updateToTime(time)); + return 1; + } + + static int VideoGetInfo(lua_State* L) noexcept { + const char* name = luaL_checkstring(L, 1); + auto decoder = GetVideoDecoder(name); + if (!decoder) + return luaL_error(L, "video texture '%s' not found.", name); + + lua_createtable(L, 0, 5); + + lua_pushnumber(L, decoder->getDuration()); + lua_setfield(L, -2, "duration"); + + lua_pushnumber(L, decoder->getCurrentTime()); + lua_setfield(L, -2, "time"); + + lua_pushboolean(L, decoder->isLooping()); + lua_setfield(L, -2, "looping"); + + auto size = decoder->getVideoSize(); + lua_pushinteger(L, size.x); + lua_setfield(L, -2, "width"); + + lua_pushinteger(L, size.y); + lua_setfield(L, -2, "height"); + + return 1; + } }; luaL_Reg const lib[] = { @@ -678,6 +764,7 @@ void luastg::binding::ResourceManager::Register(lua_State* L) noexcept { "SetResourceStatus", &Wrapper::SetResourceStatus }, { "GetResourceStatus", &Wrapper::GetResourceStatus }, { "LoadTexture", &Wrapper::LoadTexture }, + { "LoadVideo", &Wrapper::LoadVideo }, { "LoadImage", &Wrapper::LoadSprite }, { "LoadAnimation", &Wrapper::LoadAnimation }, { "LoadPS", &Wrapper::LoadPS }, @@ -710,6 +797,13 @@ void luastg::binding::ResourceManager::Register(lua_State* L) noexcept { "SetFontState", &Wrapper::SetFontState }, { "CacheTTFString", &Wrapper::CacheTTFString }, + + // 
视频控制函数 + { "VideoSeek", &Wrapper::VideoSeek }, + { "VideoSetLooping", &Wrapper::VideoSetLooping }, + { "VideoUpdate", &Wrapper::VideoUpdate }, + { "VideoGetInfo", &Wrapper::VideoGetInfo }, + { NULL, NULL }, }; diff --git a/data/example/video_example.lua b/data/example/video_example.lua new file mode 100644 index 000000000..9c0352454 --- /dev/null +++ b/data/example/video_example.lua @@ -0,0 +1,26 @@ +-- 视频播放示例 +-- 使用 StopWatch 提供绝对时间,手动调用 VideoUpdate 更新画面 + +-- 在资源池中加载视频 +-- 参数:名称,路径 +LoadVideo('video1', 'test_video.mp4') + +-- 创建计时器并设置循环播放 +local video_clock = lstg.StopWatch() +VideoSetLooping('video1', true) + +-- 像普通纹理一样创建精灵 +LoadImage('video_sprite', 'video1', 0, 0, 640, 480) + +-- 在 RenderFunc 中渲染 +function RenderFunc() + -- 以秒表绝对时间驱动视频更新 + VideoUpdate('video1', video_clock:GetElapsed()) + Render('video_sprite', 100, 100) +end + +-- 可用控制: +-- 1. VideoSetLooping(name, bool) -- 设置是否循环播放 +-- 2. VideoSeek(name, time) -- 控制时间,但是不更新画面,适合进度条等场景,完成后需要调用 VideoUpdate 来刷新画面 +-- 3. VideoUpdate(name, absolute_time) -- 根据绝对时间更新视频画面,适合与 StopWatch 结合使用,确保视频播放与游戏时间同步 +-- 4. 
VideoGetInfo(name) -- 获取视频信息,如帧率、总时长等,返回一个表格 diff --git a/engine/graphics/CMakeLists.txt b/engine/graphics/CMakeLists.txt index 19c29b5e6..a9f2a4f25 100644 --- a/engine/graphics/CMakeLists.txt +++ b/engine/graphics/CMakeLists.txt @@ -46,6 +46,10 @@ target_link_libraries(${lib_name} PUBLIC dxgi.lib d3d11.lib d2d1.lib + # media foundation (for video) + mfplat.lib + mfreadwrite.lib + mfuuid.lib Microsoft.Windows.ImplementationLibrary Microsoft::DirectXTexMini libqoi diff --git a/engine/graphics/core/GraphicsDevice.hpp b/engine/graphics/core/GraphicsDevice.hpp index 6213e7978..ad59e79f5 100644 --- a/engine/graphics/core/GraphicsDevice.hpp +++ b/engine/graphics/core/GraphicsDevice.hpp @@ -3,6 +3,7 @@ #include "core/ImmutableString.hpp" #include "core/GraphicsBuffer.hpp" #include "core/Texture2D.hpp" +#include "core/VideoDecoder.hpp" #include "core/GraphicsSampler.hpp" #include "core/RenderTarget.hpp" #include "core/DepthStencilBuffer.hpp" @@ -108,6 +109,8 @@ namespace core { virtual bool createTextureFromFile(StringView path, bool mipmap, ITexture2D** out_texture) = 0; virtual bool createTextureFromImage(IImage* image, bool mipmap, ITexture2D** out_texture) = 0; virtual bool createTexture(Vector2U size, ITexture2D** out_texture) = 0; + virtual bool createVideoTexture(StringView path, ITexture2D** out_texture) = 0; + virtual bool createVideoDecoder(IVideoDecoder** out_decoder) = 0; virtual bool createSampler(const GraphicsSamplerInfo& info, IGraphicsSampler** out_sampler) = 0; diff --git a/engine/graphics/core/VideoDecoder.hpp b/engine/graphics/core/VideoDecoder.hpp new file mode 100644 index 000000000..c32b8df18 --- /dev/null +++ b/engine/graphics/core/VideoDecoder.hpp @@ -0,0 +1,42 @@ +#pragma once +#include "core/ReferenceCounted.hpp" +#include "core/Vector2.hpp" +#include "core/ImmutableString.hpp" + +namespace core { + CORE_INTERFACE IVideoDecoder : IReferenceCounted { + // 打开视频文件 + virtual bool open(StringView path) = 0; + + // 关闭视频 + virtual void close() = 0; + + 
// 获取状态 + virtual bool hasVideo() const noexcept = 0; + + // 视频信息 + virtual Vector2U getVideoSize() const noexcept = 0; + virtual double getDuration() const noexcept = 0; + virtual double getCurrentTime() const noexcept = 0; // 返回上次更新的帧时间 + + // 跳转到指定时间 + virtual bool seek(double time_in_seconds) = 0; + + // 循环播放设置 + virtual void setLooping(bool loop) = 0; + virtual bool isLooping() const noexcept = 0; + + // 手动更新到指定时间点 + // time_in_seconds: 目标时间(秒) + // 返回是否成功更新(失败可能是因为到达视频结尾且未循环) + virtual bool updateToTime(double time_in_seconds) = 0; + + // 获取用于渲染的纹理(返回 ID3D11Texture2D*) + virtual void* getNativeTexture() const noexcept = 0; + + // 获取 Shader Resource View(返回 ID3D11ShaderResourceView*) + virtual void* getNativeShaderResourceView() const noexcept = 0; + }; + + CORE_INTERFACE_ID(IVideoDecoder, "a4b5c6d7-e8f9-1234-5678-9abcdef01234") +} diff --git a/engine/graphics/d3d11/GraphicsDevice.hpp b/engine/graphics/d3d11/GraphicsDevice.hpp index d4450145d..b0e9ea87d 100644 --- a/engine/graphics/d3d11/GraphicsDevice.hpp +++ b/engine/graphics/d3d11/GraphicsDevice.hpp @@ -22,6 +22,8 @@ namespace core { bool createTextureFromFile(StringView path, bool mipmap, ITexture2D** out_texture) override; bool createTexture(Vector2U size, ITexture2D** out_texture) override; bool createTextureFromImage(IImage* image, bool mipmap, ITexture2D** out_texture) override; + bool createVideoTexture(StringView path, ITexture2D** out_texture) override; + bool createVideoDecoder(IVideoDecoder** out_decoder) override; bool createSampler(const GraphicsSamplerInfo& info, IGraphicsSampler** out_sampler) override; diff --git a/engine/graphics/d3d11/VideoDecoder.cpp b/engine/graphics/d3d11/VideoDecoder.cpp new file mode 100644 index 000000000..9bf79d82b --- /dev/null +++ b/engine/graphics/d3d11/VideoDecoder.cpp @@ -0,0 +1,499 @@ +#include "d3d11/VideoDecoder.hpp" +#include "core/Logger.hpp" +#include "utf8.hpp" +#include +#include + +#pragma comment(lib, "mfplat.lib") +#pragma comment(lib, 
"mfreadwrite.lib")
+#pragma comment(lib, "mfuuid.lib")
+
+namespace {
+	class MFInitializer {
+	public:
+		static MFInitializer& getInstance() {
+			static MFInitializer instance;
+			return instance;
+		}
+
+		bool isInitialized() const { return m_initialized; }
+
+	private:
+		MFInitializer() {
+			HRESULT hr = MFStartup(MF_VERSION, MFSTARTUP_FULL);
+			m_initialized = SUCCEEDED(hr);
+			if (!m_initialized) {
+				core::Logger::error("[core] [VideoDecoder] Failed to initialize MediaFoundation, hr = {:#x}", (uint32_t)hr);
+			}
+		}
+
+		~MFInitializer() {
+			if (m_initialized) {
+				MFShutdown();
+			}
+		}
+
+		bool m_initialized{ false };
+	};
+}
+
+namespace core {
+	VideoDecoder::VideoDecoder() = default;
+
+	VideoDecoder::~VideoDecoder() {
+		if (m_initialized && m_device) {
+			m_device->removeEventListener(this);
+		}
+		close();
+	}
+
+	bool VideoDecoder::initialize(IGraphicsDevice* device) {
+		if (!device) {
+			Logger::error("[core] [VideoDecoder] Invalid device");
+			return false;
+		}
+
+		if (!MFInitializer::getInstance().isInitialized()) {
+			Logger::error("[core] [VideoDecoder] MediaFoundation not initialized");
+			return false;
+		}
+
+		m_device = device;
+		m_initialized = true;
+		m_device->addEventListener(this);
+
+		return true;
+	}
+
+	bool VideoDecoder::open(StringView path) {
+		if (!m_initialized) {
+			Logger::error("[core] [VideoDecoder] Not initialized");
+			return false;
+		}
+
+		close();
+
+		HRESULT hr = S_OK;
+
+		std::wstring wide_path = utf8::to_wstring(path);
+
+		win32::com_ptr<IMFAttributes> attributes;
+		hr = MFCreateAttributes(attributes.put(), 1);
+		if (FAILED(hr)) {
+			Logger::error("[core] [VideoDecoder] Failed to create attributes, hr = {:#x}", (uint32_t)hr);
+			return false;
+		}
+
+		hr = attributes->SetUINT32(MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, TRUE);
+		hr = attributes->SetUINT32(MF_SOURCE_READER_ENABLE_VIDEO_PROCESSING, TRUE);
+
+		hr = MFCreateSourceReaderFromURL(wide_path.c_str(), attributes.get(), m_source_reader.put());
+		if (FAILED(hr)) {
+			Logger::error("[core] [VideoDecoder]
Failed to create source reader, hr = {:#x}", (uint32_t)hr); + return false; + } + + win32::com_ptr media_type; + hr = MFCreateMediaType(media_type.put()); + if (FAILED(hr)) { + Logger::error("[core] [VideoDecoder] Failed to create media type, hr = {:#x}", (uint32_t)hr); + return false; + } + + hr = media_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video); + + hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_ARGB32); + hr = m_source_reader->SetCurrentMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, nullptr, media_type.get()); + + if (FAILED(hr)) { + Logger::info("[core] [VideoDecoder] ARGB32 not supported, trying RGB32"); + hr = media_type->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32); + hr = m_source_reader->SetCurrentMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, nullptr, media_type.get()); + + if (FAILED(hr)) { + Logger::error("[core] [VideoDecoder] Failed to set media type (tried ARGB32 and RGB32), hr = {:#x}", (uint32_t)hr); + return false; + } + } + + hr = m_source_reader->GetCurrentMediaType((DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, m_media_type.put()); + if (FAILED(hr)) { + Logger::error("[core] [VideoDecoder] Failed to get media type, hr = {:#x}", (uint32_t)hr); + return false; + } + + GUID subtype = GUID_NULL; + hr = m_media_type->GetGUID(MF_MT_SUBTYPE, &subtype); + if (SUCCEEDED(hr)) { + if (subtype == MFVideoFormat_ARGB32) { + Logger::info("[core] [VideoDecoder] Using video format: ARGB32 (native BGRA)"); + } else if (subtype == MFVideoFormat_RGB32) { + Logger::info("[core] [VideoDecoder] Using video format: RGB32"); + } else { + Logger::warn("[core] [VideoDecoder] Using unknown video format, display may be incorrect"); + } + } + + UINT32 width = 0, height = 0; + hr = MFGetAttributeSize(m_media_type.get(), MF_MT_FRAME_SIZE, &width, &height); + if (FAILED(hr)) { + Logger::error("[core] [VideoDecoder] Failed to get frame size, hr = {:#x}", (uint32_t)hr); + return false; + } + + m_video_size = Vector2U{ width, height }; + m_target_size = 
m_video_size; + + PROPVARIANT var; + PropVariantInit(&var); + hr = m_source_reader->GetPresentationAttribute((DWORD)MF_SOURCE_READER_MEDIASOURCE, MF_PD_DURATION, &var); + if (SUCCEEDED(hr)) { + m_duration = var.hVal.QuadPart / 10000000.0; + PropVariantClear(&var); + } + + if (!createTexture()) { + close(); + return false; + } + + m_frame_pitch = m_target_size.x * 4; + + auto d3d_device = static_cast(m_device->getNativeDevice()); + if (d3d_device) { + d3d_device->GetImmediateContext(m_device_context.put()); + } + + m_current_time = 0.0; + m_last_requested_time = -1.0; + + bool first_frame_loaded = readFrameAtTime(0.0); + Logger::info("[core] [VideoDecoder] First frame loaded: {}", first_frame_loaded); + + Logger::info("[core] [VideoDecoder] Opened video: {}x{}, duration: {:.2f}s", + m_target_size.x, m_target_size.y, m_duration); + + return true; + } + + void VideoDecoder::close() { + m_source_reader.reset(); + m_media_type.reset(); + m_texture.reset(); + m_shader_resource_view.reset(); + m_device_context.reset(); + + m_video_size = Vector2U{}; + m_target_size = Vector2U{}; + m_duration = 0.0; + m_current_time = 0.0; + m_last_requested_time = -1.0; + m_frame_pitch = 0; + } + + bool VideoDecoder::seek(double time_in_seconds) { + if (!hasVideo()) { + return false; + } + + if (time_in_seconds < 0.0) { + time_in_seconds = 0.0; + } + if (time_in_seconds > m_duration) { + time_in_seconds = m_duration; + } + + PROPVARIANT var; + PropVariantInit(&var); + var.vt = VT_I8; + var.hVal.QuadPart = static_cast(time_in_seconds * 10000000.0); + + HRESULT hr = m_source_reader->SetCurrentPosition(GUID_NULL, var); + PropVariantClear(&var); + + if (FAILED(hr)) { + Logger::error("[core] [VideoDecoder] Failed to seek, hr = {:#x}", (uint32_t)hr); + return false; + } + + m_current_time = time_in_seconds; + return true; + } + + bool VideoDecoder::updateToTime(double time_in_seconds) { + if (!hasVideo()) { + return false; + } + + if (m_duration <= 0.0) { + m_current_time = 0.0; + return false; 
+ } + + if (time_in_seconds >= m_duration) { + if (m_looping) { + time_in_seconds = fmod(time_in_seconds, m_duration); + } else { + double const last_frame_time = std::max(0.0, m_duration - 1e-6); + bool const ok = readFrameAtTime(last_frame_time); + m_current_time = m_duration; + return ok; + } + } else if (time_in_seconds < 0.0) { + time_in_seconds = 0.0; + } + + constexpr double kTimeEpsilon = 1e-4; + constexpr double kBackwardTolerance = 1.0 / 120.0; + constexpr double kFrameTolerance = 1.0 / 24.0; + constexpr double kSeekThreshold = 0.25; + + bool const is_backward = (m_last_requested_time >= 0.0) && (time_in_seconds + kBackwardTolerance < m_last_requested_time); + bool const large_jump = (m_current_time + kSeekThreshold < time_in_seconds); + + m_last_requested_time = time_in_seconds; + + if (is_backward || large_jump) { + return readFrameAtTime(time_in_seconds); + } + + if (m_current_time + kFrameTolerance >= time_in_seconds) { + return true; + } + + for (int i = 0; i < 4; ++i) { + if (!readNextFrame()) { + return false; + } + if (m_current_time + kTimeEpsilon >= time_in_seconds) { + return true; + } + } + + return readFrameAtTime(time_in_seconds); + } + + bool VideoDecoder::readNextFrame() { + if (!hasVideo()) { + return false; + } + + HRESULT hr = S_OK; + win32::com_ptr sample; + DWORD stream_flags = 0; + LONGLONG timestamp = 0; + + hr = m_source_reader->ReadSample( + (DWORD)MF_SOURCE_READER_FIRST_VIDEO_STREAM, + 0, + nullptr, + &stream_flags, + ×tamp, + sample.put() + ); + + if (FAILED(hr)) { + Logger::error("[core] [VideoDecoder] Failed to read sample, hr = {:#x}", (uint32_t)hr); + return false; + } + + if (stream_flags & MF_SOURCE_READERF_ENDOFSTREAM) { + m_current_time = m_duration; + return false; + } + + if (!sample) { + return false; + } + + m_current_time = timestamp / 10000000.0; + if (m_current_time > m_duration) { + m_current_time = m_duration; + } + + return updateTextureFromSample(sample.get()); + } + + bool VideoDecoder::readFrameAtTime(double 
time_in_seconds) { + if (!hasVideo()) { + return false; + } + + if (!seek(time_in_seconds)) { + return false; + } + + constexpr double kTimeEpsilon = 1e-4; + constexpr int kMaxDecodeAfterSeek = 360; + + for (int i = 0; i < kMaxDecodeAfterSeek; ++i) { + if (!readNextFrame()) { + return false; + } + if (m_current_time + kTimeEpsilon >= time_in_seconds) { + return true; + } + } + + return false; + } + + void VideoDecoder::onGraphicsDeviceCreate() { + if (hasVideo()) { + createTexture(); + } + } + + void VideoDecoder::onGraphicsDeviceDestroy() { + m_texture.reset(); + m_shader_resource_view.reset(); + m_device_context.reset(); + } + + bool VideoDecoder::createTexture() { + if (!m_device || m_target_size.x == 0 || m_target_size.y == 0) { + return false; + } + + auto d3d_device = static_cast(m_device->getNativeDevice()); + if (!d3d_device) { + return false; + } + + D3D11_TEXTURE2D_DESC desc = {}; + desc.Width = m_target_size.x; + desc.Height = m_target_size.y; + desc.MipLevels = 1; + desc.ArraySize = 1; + desc.Format = DXGI_FORMAT_B8G8R8A8_UNORM; + desc.SampleDesc.Count = 1; + desc.SampleDesc.Quality = 0; + desc.Usage = D3D11_USAGE_DYNAMIC; + desc.BindFlags = D3D11_BIND_SHADER_RESOURCE; + desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE; + desc.MiscFlags = 0; + + HRESULT hr = d3d_device->CreateTexture2D(&desc, nullptr, m_texture.put()); + if (FAILED(hr)) { + Logger::error("[core] [VideoDecoder] Failed to create texture, hr = {:#x}", (uint32_t)hr); + return false; + } + + D3D11_SHADER_RESOURCE_VIEW_DESC srv_desc = {}; + srv_desc.Format = desc.Format; + srv_desc.ViewDimension = D3D11_SRV_DIMENSION_TEXTURE2D; + srv_desc.Texture2D.MipLevels = 1; + srv_desc.Texture2D.MostDetailedMip = 0; + + hr = d3d_device->CreateShaderResourceView(m_texture.get(), &srv_desc, m_shader_resource_view.put()); + if (FAILED(hr)) { + Logger::error("[core] [VideoDecoder] Failed to create shader resource view, hr = {:#x}", (uint32_t)hr); + m_texture.reset(); + return false; + } + + 
d3d_device->GetImmediateContext(m_device_context.put());
+
+		return true;
+	}
+
+	bool VideoDecoder::updateTextureFromSample(IMFSample* sample) {
+		if (!m_texture || !sample || !m_device_context) {
+			return false;
+		}
+
+		win32::com_ptr<IMFMediaBuffer> buffer;
+		HRESULT hr = sample->ConvertToContiguousBuffer(buffer.put());
+		if (FAILED(hr)) {
+			Logger::error("[core] [VideoDecoder] Failed to get buffer, hr = {:#x}", (uint32_t)hr);
+			return false;
+		}
+
+		win32::com_ptr<IMF2DBuffer> buffer_2d;
+		LONG source_pitch = m_frame_pitch;
+		BYTE* src_data = nullptr;
+		bool using_2d_buffer = false;
+
+		if (SUCCEEDED(buffer->QueryInterface(IID_PPV_ARGS(buffer_2d.put())))) {
+			hr = buffer_2d->Lock2D(&src_data, &source_pitch);
+			if (SUCCEEDED(hr)) {
+				using_2d_buffer = true;
+			}
+		}
+
+		if (!using_2d_buffer) {
+			DWORD max_length = 0, current_length = 0;
+			hr = buffer->Lock(&src_data, &max_length, &current_length);
+			if (FAILED(hr)) {
+				return false;
+			}
+		}
+
+		D3D11_MAPPED_SUBRESOURCE mapped;
+		hr = m_device_context->Map(m_texture.get(), 0, D3D11_MAP_WRITE_DISCARD, 0, &mapped);
+		if (FAILED(hr)) {
+			if (using_2d_buffer) {
+				buffer_2d->Unlock2D();
+			} else {
+				buffer->Unlock();
+			}
+			Logger::error("[core] [VideoDecoder] Failed to map texture, hr = {:#x}", (uint32_t)hr);
+			return false;
+		}
+
+		uint8_t* dst = static_cast<uint8_t*>(mapped.pData);
+		const uint8_t* src = src_data;
+		const size_t copy_size = m_frame_pitch;
+
+		GUID format = GUID_NULL;
+		bool force_opaque_alpha = false;
+		if (m_media_type && SUCCEEDED(m_media_type->GetGUID(MF_MT_SUBTYPE, &format))) {
+			if (format == MFVideoFormat_RGB32) {
+				force_opaque_alpha = true;
+			}
+		}
+
+		static bool first_copy = true;
+		if (first_copy) {
+			Logger::info("[core] [VideoDecoder] Texture update: source_pitch={}, mapped.RowPitch={}, copy_size={}, height={}, force_alpha={}",
+				source_pitch, mapped.RowPitch, copy_size, m_target_size.y, force_opaque_alpha);
+			bool all_zero = true;
+			for (size_t i = 0; i < std::min<size_t>(16, copy_size); ++i) {
+				if (src[i] != 0) {
+					all_zero = false;
+
break; + } + } + Logger::info("[core] [VideoDecoder] First 16 bytes all zero: {}", all_zero); + first_copy = false; + } + + if (!force_opaque_alpha) { + for (uint32_t y = 0; y < m_target_size.y; ++y) { + memcpy(dst, src, copy_size); + dst += mapped.RowPitch; + src += source_pitch; + } + } else { + for (uint32_t y = 0; y < m_target_size.y; ++y) { + memcpy(dst, src, copy_size); + for (uint32_t x = 0; x < m_target_size.x; ++x) { + dst[x * 4 + 3] = 0xFF; + } + dst += mapped.RowPitch; + src += source_pitch; + } + } + + m_device_context->Unmap(m_texture.get(), 0); + + if (using_2d_buffer) { + buffer_2d->Unlock2D(); + } else { + buffer->Unlock(); + } + + return true; + } + +} diff --git a/engine/graphics/d3d11/VideoDecoder.hpp b/engine/graphics/d3d11/VideoDecoder.hpp new file mode 100644 index 000000000..98940eb79 --- /dev/null +++ b/engine/graphics/d3d11/VideoDecoder.hpp @@ -0,0 +1,75 @@ +#pragma once +#include "core/VideoDecoder.hpp" +#include "core/GraphicsDevice.hpp" +#include "core/SmartReference.hpp" +#include "core/implement/ReferenceCounted.hpp" +#include "d3d11/pch.h" +#include +#include +#include +#include +#include + +namespace core { + class VideoDecoder final : + public implement::ReferenceCounted, + public IGraphicsDeviceEventListener { + public: + // IVideoDecoder + + bool open(StringView path) override; + void close() override; + + bool hasVideo() const noexcept override { return m_source_reader.get() != nullptr; } + + Vector2U getVideoSize() const noexcept override { return m_target_size; } + double getDuration() const noexcept override { return m_duration; } + double getCurrentTime() const noexcept override { return m_current_time; } + + bool seek(double time_in_seconds) override; + + void setLooping(bool loop) override { m_looping = loop; } + bool isLooping() const noexcept override { return m_looping; } + + bool updateToTime(double time_in_seconds) override; + void* getNativeTexture() const noexcept override { return m_texture.get(); } + void* 
getNativeShaderResourceView() const noexcept override { return m_shader_resource_view.get(); } + + // IGraphicsDeviceEventListener + + void onGraphicsDeviceCreate() override; + void onGraphicsDeviceDestroy() override; + + // VideoDecoder + + VideoDecoder(); + ~VideoDecoder(); + + bool initialize(IGraphicsDevice* device); + + private: + bool createTexture(); + bool updateTextureFromSample(IMFSample* sample); + bool readNextFrame(); + bool readFrameAtTime(double time_in_seconds); + + SmartReference m_device; + win32::com_ptr m_texture; + win32::com_ptr m_shader_resource_view; + win32::com_ptr m_device_context; + + win32::com_ptr m_source_reader; + win32::com_ptr m_media_type; + + Vector2U m_video_size{}; + Vector2U m_target_size{}; + double m_duration{ 0.0 }; + double m_current_time{ 0.0 }; // 当前帧时间 + double m_last_requested_time{ -1.0 }; + bool m_looping{ false }; + + uint32_t m_frame_pitch{ 0 }; + + bool m_initialized{ false }; + }; +} diff --git a/engine/graphics/d3d11/VideoTexture.cpp b/engine/graphics/d3d11/VideoTexture.cpp new file mode 100644 index 000000000..5fc1ace5d --- /dev/null +++ b/engine/graphics/d3d11/VideoTexture.cpp @@ -0,0 +1,102 @@ +#include "d3d11/VideoTexture.hpp" +#include "d3d11/VideoDecoder.hpp" +#include "d3d11/GraphicsDevice.hpp" +#include "core/Logger.hpp" +#include "core/SmartReference.hpp" + +namespace core { + VideoTexture::VideoTexture() = default; + + VideoTexture::~VideoTexture() { + if (m_initialized && static_cast(m_device)) { + m_device->removeEventListener(this); + } + } + + bool VideoTexture::initialize(IGraphicsDevice* device, StringView path) { + if (!device) { + Logger::error("[core] [VideoTexture] Invalid device"); + return false; + } + + m_device = device; + + // 创建视频解码器 + auto decoder = new VideoDecoder(); + if (!decoder->initialize(device)) { + Logger::error("[core] [VideoTexture] Failed to initialize video decoder"); + decoder->release(); + return false; + } + + // 打开视频文件 + if (!decoder->open(path)) { + 
Logger::error("[core] [VideoTexture] Failed to open video file: {}", path); + decoder->release(); + return false; + } + + m_decoder = decoder; + decoder->release(); + + m_initialized = true; + m_device->addEventListener(this); + + Logger::info("[core] [VideoTexture] Created video texture from: {}", path); + + return true; + } + + void* VideoTexture::getNativeResource() const noexcept { + if (m_decoder) { + return m_decoder->getNativeTexture(); // ID3D11Texture2D* + } + return nullptr; + } + + void* VideoTexture::getNativeView() const noexcept { + if (m_decoder) { + return m_decoder->getNativeShaderResourceView(); // ID3D11ShaderResourceView* + } + return nullptr; + } + + Vector2U VideoTexture::getSize() const noexcept { + if (m_decoder) { + return m_decoder->getVideoSize(); + } + return Vector2U{}; + } + + void VideoTexture::onGraphicsDeviceCreate() {} + + void VideoTexture::onGraphicsDeviceDestroy() {} + + // GraphicsDevice 扩展:视频功能 + + bool GraphicsDevice::createVideoTexture(StringView path, ITexture2D** out_texture) { + if (out_texture == nullptr) { + assert(false); return false; + } + SmartReference video_texture; + video_texture.attach(new VideoTexture); + if (!video_texture->initialize(this, path)) { + return false; + } + *out_texture = video_texture.detach(); + return true; + } + + bool GraphicsDevice::createVideoDecoder(IVideoDecoder** out_decoder) { + if (out_decoder == nullptr) { + assert(false); return false; + } + SmartReference decoder; + decoder.attach(new VideoDecoder); + if (!decoder->initialize(this)) { + return false; + } + *out_decoder = decoder.detach(); + return true; + } +} diff --git a/engine/graphics/d3d11/VideoTexture.hpp b/engine/graphics/d3d11/VideoTexture.hpp new file mode 100644 index 000000000..0cfb2741a --- /dev/null +++ b/engine/graphics/d3d11/VideoTexture.hpp @@ -0,0 +1,56 @@ +#pragma once +#include "core/Texture2D.hpp" +#include "core/VideoDecoder.hpp" +#include "core/GraphicsDevice.hpp" +#include "core/SmartReference.hpp" +#include 
"core/implement/ReferenceCounted.hpp" +#include "d3d11/pch.h" + +namespace core { + // VideoTexture 类:结合了 Texture2D 和 VideoDecoder 的功能 + // 可以像普通纹理一样使用,但内容来自视频 + class VideoTexture final : + public implement::ReferenceCounted, + public IGraphicsDeviceEventListener { + public: + // ITexture2D + + void* getNativeResource() const noexcept override; + void* getNativeView() const noexcept override; + + bool isDynamic() const noexcept override { return true; } + bool isPremultipliedAlpha() const noexcept override { return false; } + void setPremultipliedAlpha(bool v) override {} + Vector2U getSize() const noexcept override; + + bool setSize(Vector2U size) override { return false; } + bool update(RectU rect, void const* data, uint32_t row_pitch_in_bytes) override { return false; } + void setImage(IImage* image) override {} + + bool saveToFile(StringView path) override { return false; } + + void setSamplerState(IGraphicsSampler* sampler) override { m_sampler = sampler; } + IGraphicsSampler* getSamplerState() const noexcept override { return m_sampler.get(); } + + // IGraphicsDeviceEventListener + + void onGraphicsDeviceCreate() override; + void onGraphicsDeviceDestroy() override; + + // VideoTexture + + VideoTexture(); + ~VideoTexture(); + + bool initialize(IGraphicsDevice* device, StringView path); + + // 获取内部的视频解码器(用于控制播放等) + IVideoDecoder* getVideoDecoder() const noexcept { return m_decoder.get(); } + + private: + SmartReference m_device; + SmartReference m_decoder; + SmartReference m_sampler; + bool m_initialized{ false }; + }; +}