-
\ No newline at end of file
+
diff --git a/.run/CompileSwiftLib.run.xml b/.run/CompileSwiftLib.run.xml
index be4c4c5f..cb22f970 100644
--- a/.run/CompileSwiftLib.run.xml
+++ b/.run/CompileSwiftLib.run.xml
@@ -1,6 +1,6 @@
-
+
@@ -14,4 +14,4 @@
-
\ No newline at end of file
+
diff --git a/.run/CompileWinLib.run.xml b/.run/CompileWinLib.run.xml
index a225c499..0ef5e236 100644
--- a/.run/CompileWinLib.run.xml
+++ b/.run/CompileWinLib.run.xml
@@ -2,10 +2,10 @@
-
+
-
+
@@ -14,4 +14,4 @@
-
\ No newline at end of file
+
diff --git a/mediaplayer/build.gradle.kts b/mediaplayer/build.gradle.kts
index be40107b..502a319e 100644
--- a/mediaplayer/build.gradle.kts
+++ b/mediaplayer/build.gradle.kts
@@ -1,6 +1,6 @@
@file:OptIn(ExperimentalWasmDsl::class)
-import com.vanniktech.maven.publish.SonatypeHost
+import org.apache.tools.ant.taskdefs.condition.Os
import org.jetbrains.dokka.gradle.DokkaTask
import org.jetbrains.kotlin.gradle.ExperimentalWasmDsl
import org.jetbrains.kotlin.gradle.plugin.mpp.NativeBuildType
@@ -32,6 +32,7 @@ tasks.withType().configureEach {
kotlin {
jvmToolchain(17)
+ @Suppress("DEPRECATION")
androidTarget { publishLibraryVariants("release") }
jvm()
js {
@@ -66,7 +67,8 @@ kotlin {
framework {
baseName = "ComposeMediaPlayer"
isStatic = false
- transitiveExport = false // This is default.
+ @OptIn(org.jetbrains.kotlin.gradle.ExperimentalKotlinGradlePluginApi::class)
+ transitiveExport = false
}
// Maps custom Xcode configuration to NativeBuildType
@@ -159,39 +161,50 @@ android {
}
}
-val buildMacArm: TaskProvider = tasks.register("buildNativeMacArm") {
- onlyIf { System.getProperty("os.name").startsWith("Mac") }
- workingDir(rootDir)
- commandLine(
- "swiftc", "-emit-library", "-emit-module", "-module-name", "NativeVideoPlayer",
- "-target", "arm64-apple-macosx14.0",
- "-o", "mediaplayer/src/jvmMain/resources/darwin-aarch64/libNativeVideoPlayer.dylib",
- "mediaplayer/src/jvmMain/kotlin/io/github/kdroidfilter/composemediaplayer/mac/native/NativeVideoPlayer.swift",
- "-O", "-whole-module-optimization"
- )
+val nativeResourceDir = layout.projectDirectory.dir("src/jvmMain/resources")
+
+val buildNativeMacOs by tasks.registering(Exec::class) {
+ description = "Compiles the Swift native library into macOS dylibs (arm64 + x64)"
+ group = "build"
+ val hasPrebuilt = nativeResourceDir
+ .dir("darwin-aarch64")
+ .file("libNativeVideoPlayer.dylib")
+ .asFile
+ .exists()
+ enabled = Os.isFamily(Os.FAMILY_MAC) && !hasPrebuilt
+
+ val nativeDir = layout.projectDirectory.dir("src/jvmMain/native/macos")
+ inputs.dir(nativeDir)
+ outputs.dir(nativeResourceDir)
+ workingDir(nativeDir)
+ commandLine("bash", "build.sh")
}
-val buildMacX64: TaskProvider = tasks.register("buildNativeMacX64") {
- onlyIf { System.getProperty("os.name").startsWith("Mac") }
- workingDir(rootDir)
- commandLine(
- "swiftc", "-emit-library", "-emit-module", "-module-name", "NativeVideoPlayer",
- "-target", "x86_64-apple-macosx14.0",
- "-o", "mediaplayer/src/jvmMain/resources/darwin-x86-64/libNativeVideoPlayer.dylib",
- "mediaplayer/src/jvmMain/kotlin/io/github/kdroidfilter/composemediaplayer/mac/native/NativeVideoPlayer.swift",
- "-O", "-whole-module-optimization"
- )
+val buildNativeWindows by tasks.registering(Exec::class) {
+ description = "Compiles the C++ native library into Windows DLLs (x64 + ARM64)"
+ group = "build"
+ val hasPrebuilt = nativeResourceDir
+ .dir("win32-x86-64")
+ .file("NativeVideoPlayer.dll")
+ .asFile
+ .exists()
+ enabled = Os.isFamily(Os.FAMILY_WINDOWS) && !hasPrebuilt
+
+ val nativeDir = layout.projectDirectory.dir("src/jvmMain/native/windows")
+ inputs.dir(nativeDir)
+ outputs.dir(nativeResourceDir)
+ workingDir(nativeDir)
+ commandLine("cmd", "/c", nativeDir.file("build.bat").asFile.absolutePath)
}
-val buildWin: TaskProvider = tasks.register("buildNativeWin") {
- onlyIf { System.getProperty("os.name").startsWith("Windows") }
- workingDir(rootDir.resolve("winlib"))
- commandLine("cmd", "/c", "build.bat")
+tasks.named("jvmProcessResources") {
+ dependsOn(buildNativeMacOs, buildNativeWindows)
}
-// tâche d’agrégation
-tasks.register("buildNativeLibraries") {
- dependsOn(buildMacArm, buildMacX64, buildWin)
+tasks.configureEach {
+ if (name == "sourcesJar") {
+ dependsOn(buildNativeMacOs, buildNativeWindows)
+ }
}
@@ -230,7 +243,6 @@ mavenPublishing {
}
}
- publishToMavenCentral(SonatypeHost.CENTRAL_PORTAL)
-
+ publishToMavenCentral()
signAllPublications()
}
diff --git a/mediaplayer/src/jvmMain/kotlin/io/github/kdroidfilter/composemediaplayer/mac/native/NativeVideoPlayer.swift b/mediaplayer/src/jvmMain/native/macos/NativeVideoPlayer.swift
similarity index 100%
rename from mediaplayer/src/jvmMain/kotlin/io/github/kdroidfilter/composemediaplayer/mac/native/NativeVideoPlayer.swift
rename to mediaplayer/src/jvmMain/native/macos/NativeVideoPlayer.swift
diff --git a/mediaplayer/src/jvmMain/native/macos/build.sh b/mediaplayer/src/jvmMain/native/macos/build.sh
new file mode 100644
index 00000000..9d5f124f
--- /dev/null
+++ b/mediaplayer/src/jvmMain/native/macos/build.sh
@@ -0,0 +1,37 @@
+#!/bin/bash
+set -euo pipefail
+
+SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)"
+RESOURCES_DIR="$SCRIPT_DIR/../../resources"
+
+SWIFT_SOURCE="$SCRIPT_DIR/NativeVideoPlayer.swift"
+
+# Output directories (JNA resource path convention)
+ARM64_DIR="$RESOURCES_DIR/darwin-aarch64"
+X64_DIR="$RESOURCES_DIR/darwin-x86-64"
+
+mkdir -p "$ARM64_DIR" "$X64_DIR"
+
+echo "=== Building NativeVideoPlayer for macOS arm64 ==="
+swiftc -emit-library -emit-module -module-name NativeVideoPlayer \
+ -target arm64-apple-macosx14.0 \
+ -o "$ARM64_DIR/libNativeVideoPlayer.dylib" \
+ "$SWIFT_SOURCE" \
+ -O -whole-module-optimization
+
+echo "=== Building NativeVideoPlayer for macOS x86_64 ==="
+swiftc -emit-library -emit-module -module-name NativeVideoPlayer \
+ -target x86_64-apple-macosx14.0 \
+ -o "$X64_DIR/libNativeVideoPlayer.dylib" \
+ "$SWIFT_SOURCE" \
+ -O -whole-module-optimization
+
+# Clean up swift build artifacts
+rm -f "$ARM64_DIR"/NativeVideoPlayer.abi.json "$ARM64_DIR"/NativeVideoPlayer.swiftdoc \
+ "$ARM64_DIR"/NativeVideoPlayer.swiftmodule "$ARM64_DIR"/NativeVideoPlayer.swiftsourceinfo
+rm -f "$X64_DIR"/NativeVideoPlayer.abi.json "$X64_DIR"/NativeVideoPlayer.swiftdoc \
+ "$X64_DIR"/NativeVideoPlayer.swiftmodule "$X64_DIR"/NativeVideoPlayer.swiftsourceinfo
+
+echo "=== Build completed ==="
+echo "arm64: $ARM64_DIR/libNativeVideoPlayer.dylib"
+echo "x86_64: $X64_DIR/libNativeVideoPlayer.dylib"
diff --git a/mediaplayer/src/jvmMain/native/windows/AudioManager.cpp b/mediaplayer/src/jvmMain/native/windows/AudioManager.cpp
new file mode 100644
index 00000000..f83af94d
--- /dev/null
+++ b/mediaplayer/src/jvmMain/native/windows/AudioManager.cpp
@@ -0,0 +1,340 @@
+// AudioManager_improved.cpp – full rewrite with tighter A/V synchronisation
+// -----------------------------------------------------------------------------
+// * Keeps the original public API so that existing call‑sites still compile.
+// * Uses an event‑driven render loop instead of busy‑wait polling where possible.
+// * Measures drift between the WASAPI render clock and the Media Foundation
+// presentation clock and corrects it gradually to avoid audible glitches.
+// * All sleeps are clamped to a minimum of 1 ms to keep the thread responsive.
+// * Volume scaling is done in place only when necessary and supports both
+// 16‑bit and 32‑bit (float) PCM formats.
+// -----------------------------------------------------------------------------
+
+#include "AudioManager.h"
+#include "VideoPlayerInstance.h"
+#include "Utils.h"
+#include "MediaFoundationManager.h"
+#include
+#include
+#include
+
+using namespace VideoPlayerUtils;
+
+namespace AudioManager {
+
+// ‑‑‑‑‑‑‑‑‑‑‑‑‑‑‑‑‑‑‑‑‑‑ Helper constants ‑‑‑‑‑‑‑‑‑‑‑‑‑‑‑‑‑‑‑‑‑‑‑‑‑‑
+constexpr REFERENCE_TIME kTargetBufferDuration100ns = 2'000'000; // 200 ms
+constexpr REFERENCE_TIME kMinSleepUs = 1'000; // 1 ms
+constexpr double kDriftPositiveThresholdMs = 15.0; // audio ahead → wait
+constexpr double kDriftNegativeThresholdMs = -50.0; // audio behind → drop
+
+// ------------------------------------------------------------------------------------
+// InitWASAPI – initialises the shared WASAPI client for the default render endpoint
+// ------------------------------------------------------------------------------------
+HRESULT InitWASAPI(VideoPlayerInstance* inst, const WAVEFORMATEX* srcFmt)
+{
+ if (!inst) return E_INVALIDARG;
+
+ // Re‑use previously initialised client if still valid
+ if (inst->pAudioClient && inst->pRenderClient) {
+ inst->bAudioInitialized = TRUE;
+ return S_OK;
+ }
+
+ HRESULT hr = S_OK;
+ WAVEFORMATEX* deviceMixFmt = nullptr;
+
+ // 1. Get the default render device
+ IMMDeviceEnumerator* enumerator = MediaFoundation::GetDeviceEnumerator();
+ if (!enumerator) return E_FAIL;
+
+ hr = enumerator->GetDefaultAudioEndpoint(eRender, eConsole, &inst->pDevice);
+ if (FAILED(hr)) return hr;
+
+ // 2. Activate IAudioClient + IAudioEndpointVolume
+ hr = inst->pDevice->Activate(__uuidof(IAudioClient), CLSCTX_ALL, nullptr,
+ reinterpret_cast(&inst->pAudioClient));
+ if (FAILED(hr)) return hr;
+
+ hr = inst->pDevice->Activate(__uuidof(IAudioEndpointVolume), CLSCTX_ALL, nullptr,
+ reinterpret_cast(&inst->pAudioEndpointVolume));
+ if (FAILED(hr)) return hr;
+
+ // 3. Determine the format that will be rendered
+ if (!srcFmt) {
+ hr = inst->pAudioClient->GetMixFormat(&deviceMixFmt);
+ if (FAILED(hr)) return hr;
+ srcFmt = deviceMixFmt; // use mix format as fall‑back
+ }
+ inst->pSourceAudioFormat = reinterpret_cast(CoTaskMemAlloc(srcFmt->cbSize + sizeof(WAVEFORMATEX)));
+ memcpy(inst->pSourceAudioFormat, srcFmt, srcFmt->cbSize + sizeof(WAVEFORMATEX));
+
+ // 4. Create (or re‑use) the render‑ready event
+ if (!inst->hAudioSamplesReadyEvent) {
+ inst->hAudioSamplesReadyEvent = CreateEvent(nullptr, FALSE, FALSE, nullptr);
+ if (!inst->hAudioSamplesReadyEvent) {
+ hr = HRESULT_FROM_WIN32(GetLastError());
+ goto cleanup;
+ }
+ }
+
+ // 5. Initialise the audio client in shared, event‑callback mode
+ hr = inst->pAudioClient->Initialize(AUDCLNT_SHAREMODE_SHARED,
+ AUDCLNT_STREAMFLAGS_EVENTCALLBACK,
+ kTargetBufferDuration100ns, // buffer dur
+ 0, // periodicity → let system decide
+ srcFmt,
+ nullptr);
+ if (FAILED(hr)) goto cleanup;
+
+ hr = inst->pAudioClient->SetEventHandle(inst->hAudioSamplesReadyEvent);
+ if (FAILED(hr)) goto cleanup;
+
+ // 6. Grab the render‑client service interface
+ hr = inst->pAudioClient->GetService(__uuidof(IAudioRenderClient),
+ reinterpret_cast(&inst->pRenderClient));
+ if (FAILED(hr)) goto cleanup;
+
+ inst->bAudioInitialized = TRUE;
+
+cleanup:
+ if (deviceMixFmt) CoTaskMemFree(deviceMixFmt);
+ return hr;
+}
+
+// ----------------------------------------------------------------------------
+// AudioThreadProc – feeds decoded audio samples into the WASAPI render client
+// ----------------------------------------------------------------------------
+DWORD WINAPI AudioThreadProc(LPVOID lpParam)
+{
+ auto* inst = static_cast(lpParam);
+ if (!inst || !inst->pAudioClient || !inst->pRenderClient || !inst->pSourceReaderAudio)
+ return 0;
+
+ // Pre‑warm the audio engine so that GetBufferSize() is valid
+ UINT32 engineBufferFrames = 0;
+ if (FAILED(inst->pAudioClient->GetBufferSize(&engineBufferFrames)))
+ return 0;
+
+ if (inst->hAudioReadyEvent)
+ WaitForSingleObject(inst->hAudioReadyEvent, INFINITE);
+
+ const UINT32 blockAlign = inst->pSourceAudioFormat ? inst->pSourceAudioFormat->nBlockAlign : 4;
+
+ // Main render loop – wait for "ready" event, then push as many frames as possible
+ while (inst->bAudioThreadRunning) {
+ DWORD signalled = WaitForSingleObject(inst->hAudioSamplesReadyEvent, 10);
+ if (signalled != WAIT_OBJECT_0) continue; // timeout ⇒ loop back
+
+ // Handle seek / pause concurrently with the decoder thread
+ {
+ EnterCriticalSection(&inst->csClockSync);
+ bool suspended = inst->bSeekInProgress || inst->llPauseStart != 0;
+ LeaveCriticalSection(&inst->csClockSync);
+ if (suspended) {
+ PreciseSleepHighRes(5);
+ continue;
+ }
+ }
+
+ // How many frames are currently available for writing?
+ UINT32 framesPadding = 0;
+ if (FAILED(inst->pAudioClient->GetCurrentPadding(&framesPadding)))
+ break;
+ UINT32 framesFree = engineBufferFrames - framesPadding;
+ if (framesFree == 0) continue; // buffer full – wait for next event
+
+ // Read one decoded sample from MF (non‑blocking)
+ IMFSample* sample = nullptr;
+ DWORD flags = 0;
+ LONGLONG ts100n = 0;
+ HRESULT hr = inst->pSourceReaderAudio->ReadSample(MF_SOURCE_READER_FIRST_AUDIO_STREAM,
+ 0, nullptr, &flags, &ts100n, &sample);
+ if (FAILED(hr)) break;
+ if (!sample) continue; // decoder starved – wait for more data
+ if (flags & MF_SOURCE_READERF_ENDOFSTREAM) {
+ sample->Release();
+ break;
+ }
+
+ // Measure drift between sample PTS and wall clock (real elapsed time)
+ // This ensures audio and video are synchronized to the same time reference
+ double driftMs = 0.0;
+ if (inst->bUseClockSync && inst->llPlaybackStartTime != 0 && ts100n > 0) {
+ // Calculate elapsed time since playback started (in milliseconds)
+ LONGLONG currentTimeMs = GetCurrentTimeMs();
+ LONGLONG elapsedMs = currentTimeMs - inst->llPlaybackStartTime - inst->llTotalPauseTime;
+
+ // Apply playback speed to elapsed time
+ double adjustedElapsedMs = elapsedMs * inst->playbackSpeed;
+
+ // Convert sample timestamp from 100ns units to milliseconds
+ double sampleTimeMs = ts100n / 10000.0;
+
+ // Calculate drift: positive means audio is ahead, negative means audio is late
+ driftMs = sampleTimeMs - adjustedElapsedMs;
+ }
+
+ if (driftMs > kDriftPositiveThresholdMs) {
+ // Audio ahead → delay feed to renderer
+ PreciseSleepHighRes(std::min(driftMs, 100.0));
+ } else if (driftMs < kDriftNegativeThresholdMs) {
+ // Audio too late → drop sample completely (skip)
+ sample->Release();
+ continue;
+ }
+
+ // Copy contiguous audio buffer into render buffer – may span multiple GetBuffer() calls
+ IMFMediaBuffer* mediaBuf = nullptr;
+ if (FAILED(sample->ConvertToContiguousBuffer(&mediaBuf)) || !mediaBuf) {
+ sample->Release();
+ continue;
+ }
+
+ BYTE* srcData = nullptr;
+ DWORD srcSize = 0, srcMax = 0;
+ if (FAILED(mediaBuf->Lock(&srcData, &srcMax, &srcSize))) {
+ mediaBuf->Release();
+ sample->Release();
+ continue;
+ }
+
+ UINT32 totalFrames = srcSize / blockAlign;
+ UINT32 offsetFrames = 0;
+
+ while (offsetFrames < totalFrames) {
+ UINT32 framesWanted = std::min(totalFrames - offsetFrames, framesFree);
+ if (framesWanted == 0) {
+ // Renderer is full → wait for next event
+ WaitForSingleObject(inst->hAudioSamplesReadyEvent, 5);
+ if (FAILED(inst->pAudioClient->GetCurrentPadding(&framesPadding))) break;
+ framesFree = engineBufferFrames - framesPadding;
+ continue;
+ }
+
+ BYTE* dstData = nullptr;
+ if (FAILED(inst->pRenderClient->GetBuffer(framesWanted, &dstData)) || !dstData) break;
+
+ const BYTE* chunkStart = srcData + (offsetFrames * blockAlign);
+ memcpy(dstData, chunkStart, framesWanted * blockAlign);
+
+ // Apply per‑instance volume in‑place (16‑bit PCM or IEEE‑float)
+ if (inst->instanceVolume < 0.999f) {
+ if (inst->pSourceAudioFormat->wFormatTag == WAVE_FORMAT_PCM &&
+ inst->pSourceAudioFormat->wBitsPerSample == 16) {
+ auto* s = reinterpret_cast(dstData);
+ size_t n = (framesWanted * blockAlign) / sizeof(int16_t);
+ for (size_t i = 0; i < n; ++i) s[i] = static_cast(s[i] * inst->instanceVolume);
+ } else if (inst->pSourceAudioFormat->wFormatTag == WAVE_FORMAT_IEEE_FLOAT &&
+ inst->pSourceAudioFormat->wBitsPerSample == 32) {
+ auto* s = reinterpret_cast(dstData);
+ size_t n = (framesWanted * blockAlign) / sizeof(float);
+ for (size_t i = 0; i < n; ++i) s[i] *= inst->instanceVolume;
+ }
+ }
+
+ inst->pRenderClient->ReleaseBuffer(framesWanted, 0);
+ offsetFrames += framesWanted;
+
+ // Recompute free frames for potential second iteration in this loop
+ if (FAILED(inst->pAudioClient->GetCurrentPadding(&framesPadding))) break;
+ framesFree = engineBufferFrames - framesPadding;
+ }
+
+ mediaBuf->Unlock();
+ mediaBuf->Release();
+ sample->Release();
+ }
+
+ inst->pAudioClient->Stop();
+ return 0;
+}
+
+// -------------------------------------------------------------
+// Thread management helpers
+// -------------------------------------------------------------
+HRESULT StartAudioThread(VideoPlayerInstance* inst)
+{
+ if (!inst || !inst->bHasAudio || !inst->bAudioInitialized)
+ return E_INVALIDARG;
+
+ // Terminate any previous thread first
+ if (inst->hAudioThread) {
+ WaitForSingleObject(inst->hAudioThread, 5000);
+ CloseHandle(inst->hAudioThread);
+ inst->hAudioThread = nullptr;
+ }
+
+ inst->bAudioThreadRunning = TRUE;
+ inst->hAudioThread = CreateThread(nullptr, 0, AudioThreadProc, inst, 0, nullptr);
+ if (!inst->hAudioThread) {
+ inst->bAudioThreadRunning = FALSE;
+ return HRESULT_FROM_WIN32(GetLastError());
+ }
+
+ if (inst->hAudioReadyEvent) SetEvent(inst->hAudioReadyEvent);
+ return S_OK;
+}
+
+void StopAudioThread(VideoPlayerInstance* inst)
+{
+ if (!inst) return;
+
+ inst->bAudioThreadRunning = FALSE;
+ if (inst->hAudioThread) {
+ if (WaitForSingleObject(inst->hAudioThread, 1000) == WAIT_TIMEOUT)
+ TerminateThread(inst->hAudioThread, 0);
+ CloseHandle(inst->hAudioThread);
+ inst->hAudioThread = nullptr;
+ }
+
+ if (inst->pAudioClient) inst->pAudioClient->Stop();
+}
+
+// -----------------------------------------
+// Per‑instance volume helpers (0.0 – 1.0)
+// -----------------------------------------
+HRESULT SetVolume(VideoPlayerInstance* inst, float vol)
+{
+ if (!inst) return E_INVALIDARG;
+ inst->instanceVolume = std::clamp(vol, 0.0f, 1.0f);
+ return S_OK;
+}
+
+HRESULT GetVolume(const VideoPlayerInstance* inst, float* out)
+{
+ if (!inst || !out) return E_INVALIDARG;
+ *out = inst->instanceVolume;
+ return S_OK;
+}
+
+// -------------------------------------------
+// Peak‑meter (endpoint) level in percentage
+// -------------------------------------------
+HRESULT GetAudioLevels(const VideoPlayerInstance* inst, float* left, float* right)
+{
+ if (!inst || !left || !right) return E_INVALIDARG;
+ if (!inst->pDevice) return E_FAIL;
+
+ IAudioMeterInformation* meter = nullptr;
+ HRESULT hr = inst->pDevice->Activate(__uuidof(IAudioMeterInformation), CLSCTX_ALL, nullptr,
+ reinterpret_cast(&meter));
+ if (FAILED(hr)) return hr;
+
+ std::array peaks = {0.f, 0.f};
+ hr = meter->GetChannelsPeakValues(2, peaks.data());
+ meter->Release();
+ if (FAILED(hr)) return hr;
+
+ auto toPercent = [](float level) {
+ if (level <= 0.f) return 0.f;
+ float db = 20.f * log10(level);
+ float pct = std::clamp((db + 60.f) / 60.f, 0.f, 1.f);
+ return pct * 100.f;
+ };
+
+ *left = toPercent(peaks[0]);
+ *right = toPercent(peaks[1]);
+ return S_OK;
+}
+
+} // namespace AudioManager
diff --git a/mediaplayer/src/jvmMain/native/windows/AudioManager.h b/mediaplayer/src/jvmMain/native/windows/AudioManager.h
new file mode 100644
index 00000000..0b17eee1
--- /dev/null
+++ b/mediaplayer/src/jvmMain/native/windows/AudioManager.h
@@ -0,0 +1,72 @@
+#pragma once
+
+#include
+#include
+#include
+#include
+#include
+
+// Error code definitions
+#define OP_E_NOT_INITIALIZED ((HRESULT)0x80000001L)
+#define OP_E_ALREADY_INITIALIZED ((HRESULT)0x80000002L)
+#define OP_E_INVALID_PARAMETER ((HRESULT)0x80000003L)
+
+// Forward declarations
+struct VideoPlayerInstance;
+
+namespace AudioManager {
+
+/**
+ * @brief Initializes WASAPI for audio playback.
+ * @param pInstance Pointer to the video player instance.
+ * @param pSourceFormat Optional source audio format.
+ * @return S_OK on success, or an error code.
+ */
+HRESULT InitWASAPI(VideoPlayerInstance* pInstance, const WAVEFORMATEX* pSourceFormat = nullptr);
+
+/**
+ * @brief Audio processing thread procedure.
+ * @param lpParam Pointer to the video player instance.
+ * @return Thread exit code.
+ */
+DWORD WINAPI AudioThreadProc(LPVOID lpParam);
+
+/**
+ * @brief Starts the audio thread for a video player instance.
+ * @param pInstance Pointer to the video player instance.
+ * @return S_OK on success, or an error code.
+ */
+HRESULT StartAudioThread(VideoPlayerInstance* pInstance);
+
+/**
+ * @brief Stops the audio thread for a video player instance.
+ * @param pInstance Pointer to the video player instance.
+ */
+void StopAudioThread(VideoPlayerInstance* pInstance);
+
+/**
+ * @brief Sets the audio volume for a video player instance.
+ * @param pInstance Pointer to the video player instance.
+ * @param volume Volume level (0.0 to 1.0).
+ * @return S_OK on success, or an error code.
+ */
+HRESULT SetVolume(VideoPlayerInstance* pInstance, float volume);
+
+/**
+ * @brief Gets the audio volume for a video player instance.
+ * @param pInstance Pointer to the video player instance.
+ * @param volume Pointer to receive the volume level.
+ * @return S_OK on success, or an error code.
+ */
+HRESULT GetVolume(const VideoPlayerInstance* pInstance, float* volume);
+
+/**
+ * @brief Gets the audio levels for a video player instance.
+ * @param pInstance Pointer to the video player instance.
+ * @param pLeftLevel Pointer to receive the left channel level.
+ * @param pRightLevel Pointer to receive the right channel level.
+ * @return S_OK on success, or an error code.
+ */
+HRESULT GetAudioLevels(const VideoPlayerInstance* pInstance, float* pLeftLevel, float* pRightLevel);
+
+} // namespace AudioManager
diff --git a/mediaplayer/src/jvmMain/native/windows/CMakeLists.txt b/mediaplayer/src/jvmMain/native/windows/CMakeLists.txt
new file mode 100644
index 00000000..a036a3ac
--- /dev/null
+++ b/mediaplayer/src/jvmMain/native/windows/CMakeLists.txt
@@ -0,0 +1,72 @@
+cmake_minimum_required(VERSION 3.15)
+project(NativeVideoPlayer LANGUAGES CXX)
+
+set(CMAKE_CXX_STANDARD 17)
+
+# Check target architecture
+if(CMAKE_GENERATOR_PLATFORM STREQUAL "x64" OR CMAKE_GENERATOR_PLATFORM STREQUAL "")
+ set(TARGET_ARCH "x64")
+ set(OUTPUT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/../../resources/win32-x86-64")
+ add_compile_options("/arch:AVX2")
+elseif(CMAKE_GENERATOR_PLATFORM STREQUAL "ARM64")
+ set(TARGET_ARCH "ARM64")
+ set(OUTPUT_DIR "${CMAKE_CURRENT_SOURCE_DIR}/../../resources/win32-arm64")
+ add_compile_options("/arch:arm64")
+else()
+ message(FATAL_ERROR "Unsupported architecture: ${CMAKE_GENERATOR_PLATFORM}")
+endif()
+
+# Ensure output directory exists
+file(MAKE_DIRECTORY ${OUTPUT_DIR})
+
+# Define the target
+add_library(NativeVideoPlayer SHARED
+ NativeVideoPlayer.cpp
+ NativeVideoPlayer.h
+ VideoPlayerInstance.h
+ Utils.cpp
+ Utils.h
+ MediaFoundationManager.cpp
+ MediaFoundationManager.h
+ AudioManager.cpp
+ AudioManager.h
+)
+
+# Compilation definitions
+target_compile_definitions(NativeVideoPlayer PRIVATE
+ WIN32_LEAN_AND_MEAN
+ NOMINMAX
+ NATIVEVIDEOPLAYER_EXPORTS
+)
+
+# Linked libraries
+target_link_libraries(NativeVideoPlayer PRIVATE
+ mf
+ mfplat
+ mfreadwrite
+ mfuuid
+ wmcodecdspuuid
+ ole32
+ oleaut32
+ avrt
+ mfsensorgroup
+ dxva2
+ d3d11
+ dxgi
+ evr
+)
+
+# Configure output directory
+set_target_properties(NativeVideoPlayer PROPERTIES
+ OUTPUT_NAME "NativeVideoPlayer"
+ LIBRARY_OUTPUT_DIRECTORY "${OUTPUT_DIR}"
+ LIBRARY_OUTPUT_DIRECTORY_DEBUG "${OUTPUT_DIR}"
+ LIBRARY_OUTPUT_DIRECTORY_RELEASE "${OUTPUT_DIR}"
+ RUNTIME_OUTPUT_DIRECTORY "${OUTPUT_DIR}"
+ RUNTIME_OUTPUT_DIRECTORY_DEBUG "${OUTPUT_DIR}"
+ RUNTIME_OUTPUT_DIRECTORY_RELEASE "${OUTPUT_DIR}"
+)
+
+# Display target architecture and output directory
+message(STATUS "Target architecture: ${TARGET_ARCH}")
+message(STATUS "Output directory: ${OUTPUT_DIR}")
diff --git a/mediaplayer/src/jvmMain/native/windows/MediaFoundationManager.cpp b/mediaplayer/src/jvmMain/native/windows/MediaFoundationManager.cpp
new file mode 100644
index 00000000..74e1f933
--- /dev/null
+++ b/mediaplayer/src/jvmMain/native/windows/MediaFoundationManager.cpp
@@ -0,0 +1,130 @@
+#include "MediaFoundationManager.h"
+#include
+#include
+#include
+
+namespace MediaFoundation {
+
+// Global resources shared across all instances
+static bool g_bMFInitialized = false;
+static ID3D11Device* g_pD3DDevice = nullptr;
+static IMFDXGIDeviceManager* g_pDXGIDeviceManager = nullptr;
+static UINT32 g_dwResetToken = 0;
+static IMMDeviceEnumerator* g_pEnumerator = nullptr;
+static int g_instanceCount = 0;
+
+HRESULT Initialize() {
+ if (g_bMFInitialized)
+ return OP_E_ALREADY_INITIALIZED;
+
+ HRESULT hr = CoInitializeEx(nullptr, COINIT_MULTITHREADED);
+ if (SUCCEEDED(hr))
+ hr = MFStartup(MF_VERSION);
+ if (FAILED(hr))
+ return hr;
+
+ hr = CreateDX11Device();
+ if (FAILED(hr)) {
+ MFShutdown();
+ return hr;
+ }
+
+ hr = MFCreateDXGIDeviceManager(&g_dwResetToken, &g_pDXGIDeviceManager);
+ if (SUCCEEDED(hr))
+ hr = g_pDXGIDeviceManager->ResetDevice(g_pD3DDevice, g_dwResetToken);
+ if (FAILED(hr)) {
+ if (g_pD3DDevice) {
+ g_pD3DDevice->Release();
+ g_pD3DDevice = nullptr;
+ }
+ MFShutdown();
+ return hr;
+ }
+
+ g_bMFInitialized = true;
+ return S_OK;
+}
+
+HRESULT Shutdown() {
+ if (g_instanceCount > 0)
+ return E_FAIL; // Instances still active
+
+ HRESULT hr = S_OK;
+
+ // Release DXGI and D3D resources
+ if (g_pDXGIDeviceManager) {
+ g_pDXGIDeviceManager->Release();
+ g_pDXGIDeviceManager = nullptr;
+ }
+
+ if (g_pD3DDevice) {
+ g_pD3DDevice->Release();
+ g_pD3DDevice = nullptr;
+ }
+
+ // Release audio enumerator
+ if (g_pEnumerator) {
+ g_pEnumerator->Release();
+ g_pEnumerator = nullptr;
+ }
+
+ // Shutdown Media Foundation last
+ if (g_bMFInitialized) {
+ hr = MFShutdown();
+ g_bMFInitialized = false;
+ }
+
+ // Uninitialize COM
+ CoUninitialize();
+ return hr;
+}
+
+HRESULT CreateDX11Device() {
+ HRESULT hr = D3D11CreateDevice(nullptr, D3D_DRIVER_TYPE_HARDWARE, nullptr,
+ D3D11_CREATE_DEVICE_VIDEO_SUPPORT, nullptr, 0,
+ D3D11_SDK_VERSION, &g_pD3DDevice, nullptr, nullptr);
+ if (FAILED(hr))
+ return hr;
+
+ ID3D10Multithread* pMultithread = nullptr;
+ if (SUCCEEDED(g_pD3DDevice->QueryInterface(__uuidof(ID3D10Multithread), reinterpret_cast(&pMultithread)))) {
+ pMultithread->SetMultithreadProtected(TRUE);
+ pMultithread->Release();
+ }
+
+ return hr;
+}
+
+ID3D11Device* GetD3DDevice() {
+ return g_pD3DDevice;
+}
+
+IMFDXGIDeviceManager* GetDXGIDeviceManager() {
+ return g_pDXGIDeviceManager;
+}
+
+IMMDeviceEnumerator* GetDeviceEnumerator() {
+ if (!g_pEnumerator) {
+ CoCreateInstance(__uuidof(MMDeviceEnumerator), nullptr, CLSCTX_ALL,
+ IID_PPV_ARGS(&g_pEnumerator));
+ }
+ return g_pEnumerator;
+}
+
+void IncrementInstanceCount() {
+ g_instanceCount++;
+}
+
+void DecrementInstanceCount() {
+ g_instanceCount--;
+}
+
+bool IsInitialized() {
+ return g_bMFInitialized;
+}
+
+int GetInstanceCount() {
+ return g_instanceCount;
+}
+
+} // namespace MediaFoundation
\ No newline at end of file
diff --git a/mediaplayer/src/jvmMain/native/windows/MediaFoundationManager.h b/mediaplayer/src/jvmMain/native/windows/MediaFoundationManager.h
new file mode 100644
index 00000000..495e04eb
--- /dev/null
+++ b/mediaplayer/src/jvmMain/native/windows/MediaFoundationManager.h
@@ -0,0 +1,74 @@
+#pragma once
+
+#include
+#include
+#include
+#include
+#include
+
+// Error code definitions
+#define OP_E_NOT_INITIALIZED ((HRESULT)0x80000001L)
+#define OP_E_ALREADY_INITIALIZED ((HRESULT)0x80000002L)
+#define OP_E_INVALID_PARAMETER ((HRESULT)0x80000003L)
+
+namespace MediaFoundation {
+
+/**
+ * @brief Initializes Media Foundation, Direct3D11, and the DXGI device manager.
+ * @return S_OK on success, or an error code.
+ */
+HRESULT Initialize();
+
+/**
+ * @brief Shuts down Media Foundation and releases global resources.
+ * @return S_OK on success, or an error code.
+ */
+HRESULT Shutdown();
+
+/**
+ * @brief Creates a Direct3D11 device with video support.
+ * @return S_OK on success, or an error code.
+ */
+HRESULT CreateDX11Device();
+
+/**
+ * @brief Gets the D3D11 device.
+ * @return Pointer to the D3D11 device.
+ */
+ID3D11Device* GetD3DDevice();
+
+/**
+ * @brief Gets the DXGI device manager.
+ * @return Pointer to the DXGI device manager.
+ */
+IMFDXGIDeviceManager* GetDXGIDeviceManager();
+
+/**
+ * @brief Gets the device enumerator for audio devices.
+ * @return Pointer to the device enumerator.
+ */
+IMMDeviceEnumerator* GetDeviceEnumerator();
+
+/**
+ * @brief Increments the instance count.
+ */
+void IncrementInstanceCount();
+
+/**
+ * @brief Decrements the instance count.
+ */
+void DecrementInstanceCount();
+
+/**
+ * @brief Checks if Media Foundation is initialized.
+ * @return True if initialized, false otherwise.
+ */
+bool IsInitialized();
+
+/**
+ * @brief Gets the current instance count.
+ * @return The number of active instances.
+ */
+int GetInstanceCount();
+
+} // namespace MediaFoundation
diff --git a/mediaplayer/src/jvmMain/native/windows/NativeVideoPlayer.cpp b/mediaplayer/src/jvmMain/native/windows/NativeVideoPlayer.cpp
new file mode 100644
index 00000000..893e5cba
--- /dev/null
+++ b/mediaplayer/src/jvmMain/native/windows/NativeVideoPlayer.cpp
@@ -0,0 +1,1349 @@
+// NativeVideoPlayer.cpp
+#include "NativeVideoPlayer.h"
+#include "VideoPlayerInstance.h"
+#include "Utils.h"
+#include "MediaFoundationManager.h"
+#include "AudioManager.h"
+#include <mfapi.h>         // NOTE(review): include targets were lost in extraction; reconstructed -- confirm
+#include <mfreadwrite.h>   // IMFSourceReader
+#include <new>             // std::nothrow
+#include <algorithm>       // std::min (original may also have had <cstdio> for fprintf in debug builds)
+
+// For IMF2DBuffer and IMF2DBuffer2 interfaces
+#include <mfobjects.h>
+
+using namespace VideoPlayerUtils;
+using namespace MediaFoundation;
+using namespace AudioManager;
+
+// Error code definitions from header
+#define OP_E_NOT_INITIALIZED ((HRESULT)0x80000001L)      // duplicates MediaFoundationManager.h -- kept identical
+#define OP_E_ALREADY_INITIALIZED ((HRESULT)0x80000002L)
+#define OP_E_INVALID_PARAMETER ((HRESULT)0x80000003L)
+
+// Debug print macro
+#ifdef _DEBUG
+#define PrintHR(msg, hr) fprintf(stderr, "%s (hr=0x%08x)\n", msg, static_cast<unsigned int>(hr))
+#else
+#define PrintHR(msg, hr) ((void)0)
+#endif
+
+// API Implementation
+NATIVEVIDEOPLAYER_API HRESULT InitMediaFoundation() {
+    return Initialize();  // thin exported wrapper over MediaFoundation::Initialize()
+}
+
+NATIVEVIDEOPLAYER_API HRESULT CreateVideoPlayerInstance(VideoPlayerInstance** ppInstance) {
+    // Parameter validation
+    if (!ppInstance)
+        return E_INVALIDARG;
+
+    // Ensure Media Foundation is initialized (lazy init on first instance)
+    if (!IsInitialized()) {
+        HRESULT hr = Initialize();
+        if (FAILED(hr))
+            return hr;
+    }
+
+    // Allocate and initialize a new instance (nothrow: report E_OUTOFMEMORY instead of throwing)
+    auto* pInstance = new (std::nothrow) VideoPlayerInstance();
+    if (!pInstance)
+        return E_OUTOFMEMORY;
+
+    // Initialize critical section for synchronization
+    InitializeCriticalSection(&pInstance->csClockSync);
+
+    pInstance->bUseClockSync = TRUE;
+
+    // Create audio synchronization event (auto-reset, initially non-signaled)
+    pInstance->hAudioReadyEvent = CreateEvent(nullptr, FALSE, FALSE, nullptr);
+    if (!pInstance->hAudioReadyEvent) {
+        DeleteCriticalSection(&pInstance->csClockSync);
+        delete pInstance;
+        return HRESULT_FROM_WIN32(GetLastError());
+    }
+
+    // Increment instance count and return the instance
+    IncrementInstanceCount();
+    *ppInstance = pInstance;
+    return S_OK;
+}
+
+NATIVEVIDEOPLAYER_API void DestroyVideoPlayerInstance(VideoPlayerInstance* pInstance) {
+    if (pInstance) {
+        // Ensure all media resources are released
+        CloseMedia(pInstance);
+
+        // Double-check that cached sample is released
+        // This is already done in CloseMedia, but we do it again as a safety measure
+        if (pInstance->pCachedSample) {
+            pInstance->pCachedSample->Release();
+            pInstance->pCachedSample = nullptr;
+        }
+
+        // Delete critical section
+        DeleteCriticalSection(&pInstance->csClockSync);
+
+        // Delete instance and decrement counter (mirrors CreateVideoPlayerInstance)
+        delete pInstance;
+        DecrementInstanceCount();
+    }
+}
+
+NATIVEVIDEOPLAYER_API HRESULT OpenMedia(VideoPlayerInstance* pInstance, const wchar_t* url, BOOL startPlayback) {
+    // Parameter validation
+    if (!pInstance || !url)
+        return OP_E_INVALID_PARAMETER;
+    if (!IsInitialized())
+        return OP_E_NOT_INITIALIZED;
+
+    // Close previous media and reset state
+    CloseMedia(pInstance);
+    pInstance->bEOF = FALSE;
+    pInstance->videoWidth = pInstance->videoHeight = 0;
+    pInstance->bHasAudio = FALSE;
+
+    // Initialize frame caching for paused state
+    pInstance->bHasInitialFrame = FALSE;
+    if (pInstance->pCachedSample) {
+        pInstance->pCachedSample->Release();
+        pInstance->pCachedSample = nullptr;
+    }
+
+    HRESULT hr = S_OK;
+
+    // Helper function to safely release COM objects
+    auto safeRelease = [](IUnknown* obj) { if (obj) obj->Release(); };
+
+    // 1. Configure and open media source with both audio and video streams
+    // ------------------------------------------------------------------
+    IMFAttributes* pAttributes = nullptr;
+    hr = MFCreateAttributes(&pAttributes, 5);
+    if (FAILED(hr))
+        return hr;
+
+    // Configure attributes for hardware acceleration
+    pAttributes->SetUINT32(MF_READWRITE_ENABLE_HARDWARE_TRANSFORMS, TRUE);
+    pAttributes->SetUINT32(MF_SOURCE_READER_DISABLE_DXVA, FALSE);
+    pAttributes->SetUnknown(MF_SOURCE_READER_D3D_MANAGER, GetDXGIDeviceManager());
+
+    // Enable advanced video processing for better synchronization
+    pAttributes->SetUINT32(MF_SOURCE_READER_ENABLE_ADVANCED_VIDEO_PROCESSING, TRUE);
+
+    // Create source reader for both audio and video
+    hr = MFCreateSourceReaderFromURL(url, pAttributes, &pInstance->pSourceReader);
+    safeRelease(pAttributes);
+    if (FAILED(hr))
+        return hr;
+
+    // 2. Configure video stream
+    // ------------------------------------------
+    // Enable video stream
+    hr = pInstance->pSourceReader->SetStreamSelection(MF_SOURCE_READER_ALL_STREAMS, FALSE);
+    if (SUCCEEDED(hr))
+        hr = pInstance->pSourceReader->SetStreamSelection(MF_SOURCE_READER_FIRST_VIDEO_STREAM, TRUE);
+    if (FAILED(hr))
+        return hr;
+
+    // Configure video format (RGB32)
+    IMFMediaType* pType = nullptr;
+    hr = MFCreateMediaType(&pType);
+    if (SUCCEEDED(hr)) {
+        hr = pType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video);
+        if (SUCCEEDED(hr))
+            hr = pType->SetGUID(MF_MT_SUBTYPE, MFVideoFormat_RGB32);
+        if (SUCCEEDED(hr))
+            hr = pInstance->pSourceReader->SetCurrentMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM, nullptr, pType);
+        safeRelease(pType);
+    }
+    if (FAILED(hr))
+        return hr;
+
+    // Get video dimensions
+    IMFMediaType* pCurrent = nullptr;
+    hr = pInstance->pSourceReader->GetCurrentMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM, &pCurrent);
+    if (SUCCEEDED(hr)) {
+        hr = MFGetAttributeSize(pCurrent, MF_MT_FRAME_SIZE, &pInstance->videoWidth, &pInstance->videoHeight);
+        safeRelease(pCurrent);
+    }
+
+    // 3. Configure audio stream (if available)
+    // ------------------------------------------
+    // Try to enable audio stream
+    hr = pInstance->pSourceReader->SetStreamSelection(MF_SOURCE_READER_FIRST_AUDIO_STREAM, TRUE);
+    if (SUCCEEDED(hr)) {
+        // Configure audio format (PCM 16-bit stereo 48kHz)
+        IMFMediaType* pWantedType = nullptr;
+        hr = MFCreateMediaType(&pWantedType);
+        if (SUCCEEDED(hr)) {
+            pWantedType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
+            pWantedType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM);
+            pWantedType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, 2);
+            pWantedType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, 48000);
+            pWantedType->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, 4);
+            pWantedType->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, 192000);
+            pWantedType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, 16);
+            hr = pInstance->pSourceReader->SetCurrentMediaType(MF_SOURCE_READER_FIRST_AUDIO_STREAM, nullptr, pWantedType);
+            safeRelease(pWantedType);
+        }
+
+        if (SUCCEEDED(hr)) {
+            // Get the actual audio format for WASAPI
+            IMFMediaType* pActualType = nullptr;
+            hr = pInstance->pSourceReader->GetCurrentMediaType(MF_SOURCE_READER_FIRST_AUDIO_STREAM, &pActualType);
+            if (SUCCEEDED(hr) && pActualType) {
+                WAVEFORMATEX* pWfx = nullptr;
+                UINT32 size = 0;
+                hr = MFCreateWaveFormatExFromMFMediaType(pActualType, &pWfx, &size);
+                if (SUCCEEDED(hr) && pWfx) {
+                    hr = InitWASAPI(pInstance, pWfx);
+                    if (FAILED(hr)) {
+                        PrintHR("InitWASAPI failed", hr);
+                        if (pWfx) CoTaskMemFree(pWfx);
+                        // FIX: pActualType is released once below; releasing it here too double-Released it on this failure path
+                    } else {
+                        if (pInstance->pSourceAudioFormat)
+                            CoTaskMemFree(pInstance->pSourceAudioFormat);
+                        pInstance->pSourceAudioFormat = pWfx;
+                        pInstance->bHasAudio = TRUE;
+                    }
+                }
+                safeRelease(pActualType);
+            }
+        }
+
+        // Create a separate audio source reader for the audio thread
+        // This is needed even with automatic synchronization
+        hr = MFCreateSourceReaderFromURL(url, nullptr, &pInstance->pSourceReaderAudio);
+        if (SUCCEEDED(hr)) {
+            // Select only audio stream
+            hr = pInstance->pSourceReaderAudio->SetStreamSelection(MF_SOURCE_READER_ALL_STREAMS, FALSE);
+            if (SUCCEEDED(hr))
+                hr = pInstance->pSourceReaderAudio->SetStreamSelection(MF_SOURCE_READER_FIRST_AUDIO_STREAM, TRUE);
+
+            if (SUCCEEDED(hr)) {
+                // Configure audio format (same as main reader)
+                IMFMediaType* pWantedAudioType = nullptr;
+                hr = MFCreateMediaType(&pWantedAudioType);
+                if (SUCCEEDED(hr)) {
+                    pWantedAudioType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio);
+                    pWantedAudioType->SetGUID(MF_MT_SUBTYPE, MFAudioFormat_PCM);
+                    pWantedAudioType->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, 2);
+                    pWantedAudioType->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, 48000);
+                    pWantedAudioType->SetUINT32(MF_MT_AUDIO_BLOCK_ALIGNMENT, 4);
+                    pWantedAudioType->SetUINT32(MF_MT_AUDIO_AVG_BYTES_PER_SECOND, 192000);
+                    pWantedAudioType->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, 16);
+                    hr = pInstance->pSourceReaderAudio->SetCurrentMediaType(MF_SOURCE_READER_FIRST_AUDIO_STREAM, nullptr, pWantedAudioType);
+                    safeRelease(pWantedAudioType);
+                }
+            }
+
+            if (FAILED(hr)) {
+                PrintHR("Failed to configure audio source reader", hr);
+                safeRelease(pInstance->pSourceReaderAudio);
+                pInstance->pSourceReaderAudio = nullptr;
+            }
+        } else {
+            PrintHR("Failed to create audio source reader", hr);
+        }
+    }
+
+    if (pInstance->bUseClockSync) {
+        // 4. Set up presentation clock for synchronization
+        // ----------------------------------------------------------
+        // Get the media source from the source reader
+        hr = pInstance->pSourceReader->GetServiceForStream(
+            MF_SOURCE_READER_MEDIASOURCE,
+            GUID_NULL,
+            IID_PPV_ARGS(&pInstance->pMediaSource));
+
+        if (SUCCEEDED(hr)) {
+            // Create the presentation clock
+            hr = MFCreatePresentationClock(&pInstance->pPresentationClock);
+            if (SUCCEEDED(hr)) {
+                // Create a system time source
+                IMFPresentationTimeSource* pTimeSource = nullptr;
+                hr = MFCreateSystemTimeSource(&pTimeSource);
+                if (SUCCEEDED(hr)) {
+                    // Set the time source on the presentation clock
+                    hr = pInstance->pPresentationClock->SetTimeSource(pTimeSource);
+                    if (SUCCEEDED(hr)) {
+                        // Set the rate control on the presentation clock
+                        IMFRateControl* pRateControl = nullptr;
+                        hr = pInstance->pPresentationClock->QueryInterface(IID_PPV_ARGS(&pRateControl));
+                        if (SUCCEEDED(hr)) {
+                            // Explicitly set rate to 1.0 to ensure correct initial playback speed
+                            hr = pRateControl->SetRate(FALSE, 1.0f);
+                            if (FAILED(hr)) {
+                                PrintHR("Failed to set initial presentation clock rate", hr);
+                            }
+                            pRateControl->Release();
+                        }
+
+                        // Get the media sink from the media source
+                        IMFMediaSink* pMediaSink = nullptr;
+                        hr = pInstance->pMediaSource->QueryInterface(IID_PPV_ARGS(&pMediaSink));  // NOTE(review): media *sources* do not normally implement IMFMediaSink; this QI likely always fails -- confirm intent
+                        if (SUCCEEDED(hr)) {
+                            // Set the presentation clock on the media sink
+                            IMFClockStateSink* pClockStateSink = nullptr;
+                            hr = pMediaSink->QueryInterface(IID_PPV_ARGS(&pClockStateSink));
+                            if (SUCCEEDED(hr)) {
+                                // Start the presentation clock only if startPlayback is TRUE
+                                // This allows the player to be initialized in a paused state
+                                // when InitialPlayerState.PAUSE is specified in the Kotlin code
+                                if (startPlayback) {
+                                    hr = pInstance->pPresentationClock->Start(0);
+                                    if (FAILED(hr)) {
+                                        PrintHR("Failed to start presentation clock", hr);
+                                    }
+                                } else {
+                                    // If not starting playback, initialize the clock but don't start it
+                                    // This keeps the player in a paused state until explicitly started
+                                    hr = pInstance->pPresentationClock->Pause();
+                                    if (FAILED(hr)) {
+                                        PrintHR("Failed to pause presentation clock", hr);
+                                        // Continue even if pause fails - this is not a critical error
+                                        // The player will still be usable, just not in the ideal initial state
+                                    }
+                                }
+                                pClockStateSink->Release();
+                            }
+                            pMediaSink->Release();
+                        } else {
+                            PrintHR("Failed to get media sink from media source", hr);
+                        }
+                    }
+                    safeRelease(pTimeSource);
+                }
+            }
+        }
+    }
+
+    // 5. Initialize playback timing and start audio thread
+    // ----------------------------------------------------
+    if (startPlayback) {
+        // IMPORTANT: Initialize llPlaybackStartTime when starting playback
+        // This is crucial for A/V synchronization - without this, the sync code won't work
+        pInstance->llPlaybackStartTime = GetCurrentTimeMs();
+        pInstance->llTotalPauseTime = 0;
+        pInstance->llPauseStart = 0;
+
+        // Start audio thread if audio is available
+        if (pInstance->bHasAudio && pInstance->bAudioInitialized && pInstance->pSourceReaderAudio) {
+            hr = StartAudioThread(pInstance);
+            if (FAILED(hr)) {
+                PrintHR("StartAudioThread failed", hr);
+            }
+        }
+    }
+
+    return S_OK;
+}
+
+NATIVEVIDEOPLAYER_API HRESULT ReadVideoFrame(VideoPlayerInstance* pInstance, BYTE** pData, DWORD* pDataSize) {
+    if (!pInstance || !pInstance->pSourceReader || !pData || !pDataSize)
+        return OP_E_NOT_INITIALIZED;
+
+    if (pInstance->pLockedBuffer)
+        UnlockVideoFrame(pInstance);  // a previous frame left locked would leak the buffer
+
+    if (pInstance->bEOF) {
+        *pData = nullptr;
+        *pDataSize = 0;
+        return S_FALSE;  // S_FALSE signals "no frame" (EOF), distinct from failure HRESULTs
+    }
+
+    // Check if player is paused
+    BOOL isPaused = (pInstance->llPauseStart != 0);  // nonzero llPauseStart marks the paused state
+    IMFSample* pSample = nullptr;
+    HRESULT hr = S_OK;
+    DWORD streamIndex = 0, dwFlags = 0;
+    LONGLONG llTimestamp = 0;
+
+    if (isPaused) {
+        // Player is paused - check if we need to read an initial frame
+        if (!pInstance->bHasInitialFrame) {
+            // Read one frame when paused and cache it
+            hr = pInstance->pSourceReader->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, &streamIndex, &dwFlags, &llTimestamp, &pSample);
+            if (FAILED(hr))
+                return hr;
+
+            if (dwFlags & MF_SOURCE_READERF_ENDOFSTREAM) {
+                pInstance->bEOF = TRUE;
+                if (pSample) pSample->Release();
+                *pData = nullptr;
+                *pDataSize = 0;
+                return S_FALSE;
+            }
+
+            if (!pSample) {
+                *pData = nullptr;
+                *pDataSize = 0;
+                return S_OK;
+            }
+
+            // Store the frame for future use
+            if (pInstance->pCachedSample) {
+                pInstance->pCachedSample->Release();
+                pInstance->pCachedSample = nullptr;
+            }
+            pSample->AddRef(); // Add reference for the cached sample
+            pInstance->pCachedSample = pSample;
+            pInstance->bHasInitialFrame = TRUE;
+
+            // Don't update position when paused - keep the current position
+        } else {
+            // Already have an initial frame, use the cached sample
+            if (pInstance->pCachedSample) {
+                pSample = pInstance->pCachedSample;
+                pSample->AddRef(); // Add reference for this function's use
+                // Don't update position when paused
+                // (llTimestamp stays 0 here, so the sync block below is skipped)
+            } else {
+                // No cached sample available (shouldn't happen)
+                *pData = nullptr;
+                *pDataSize = 0;
+                return S_OK;
+            }
+        }
+    } else {
+        // Player is playing - read a new frame
+        hr = pInstance->pSourceReader->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, &streamIndex, &dwFlags, &llTimestamp, &pSample);
+        if (FAILED(hr))
+            return hr;
+
+        if (dwFlags & MF_SOURCE_READERF_ENDOFSTREAM) {
+            pInstance->bEOF = TRUE;
+            if (pSample) pSample->Release();
+            *pData = nullptr;
+            *pDataSize = 0;
+            return S_FALSE;
+        }
+
+        if (!pSample) {
+            *pData = nullptr;
+            *pDataSize = 0;
+            return S_OK;
+        }
+
+        // Update cached sample for future paused state
+        if (pInstance->pCachedSample) {
+            pInstance->pCachedSample->Release();
+            pInstance->pCachedSample = nullptr;
+        }
+        pSample->AddRef(); // Add reference for the cached sample
+        pInstance->pCachedSample = pSample;
+
+        // Store current position when playing
+        pInstance->llCurrentPosition = llTimestamp;
+    }
+
+    // Synchronization using wall clock time (real elapsed time since playback started)
+    // This is more reliable than the presentation clock which is not tied to the source reader
+    if (pInstance->bUseClockSync && pInstance->llPlaybackStartTime != 0 && llTimestamp > 0) {
+        // Calculate elapsed time since playback started (in milliseconds)
+        LONGLONG currentTimeMs = GetCurrentTimeMs();
+        LONGLONG elapsedMs = currentTimeMs - pInstance->llPlaybackStartTime - pInstance->llTotalPauseTime;
+
+        // Apply playback speed to elapsed time
+        double adjustedElapsedMs = elapsedMs * pInstance->playbackSpeed;
+
+        // Convert frame timestamp from 100ns units to milliseconds
+        double frameTimeMs_ts = llTimestamp / 10000.0;
+
+        // Calculate frame rate for skip threshold
+        UINT frameRateNum = 60, frameRateDenom = 1;  // defaults used if GetVideoFrameRate fails
+        GetVideoFrameRate(pInstance, &frameRateNum, &frameRateDenom);
+        double frameIntervalMs = 1000.0 * frameRateDenom / frameRateNum;
+
+        // Calculate difference: positive means frame is ahead, negative means frame is late
+        double diffMs = frameTimeMs_ts - adjustedElapsedMs;
+
+        // If frame is very late (more than 3 frames behind), skip it
+        if (diffMs < -frameIntervalMs * 3) {
+            pSample->Release();
+            *pData = nullptr;
+            *pDataSize = 0;
+            return S_OK;
+        }
+        // If frame is ahead of schedule, wait to maintain correct frame rate
+        else if (diffMs > 1.0) {
+            // Limit maximum wait time to avoid freezing if timestamps are far apart
+            double waitTime = std::min(diffMs, frameIntervalMs * 2);
+            PreciseSleepHighRes(waitTime);
+        }
+    }
+
+    IMFMediaBuffer* pBuffer = nullptr;
+    DWORD bufferCount = 0;
+    hr = pSample->GetBufferCount(&bufferCount);
+    if (SUCCEEDED(hr) && bufferCount == 1) {
+        hr = pSample->GetBufferByIndex(0, &pBuffer);  // single buffer: avoid the copy ConvertToContiguousBuffer may do
+    } else {
+        hr = pSample->ConvertToContiguousBuffer(&pBuffer);
+    }
+    if (FAILED(hr)) {
+        PrintHR("Failed to get contiguous buffer", hr);
+        pSample->Release();
+        return hr;
+    }
+
+    BYTE* pBytes = nullptr;
+    DWORD cbMax = 0, cbCurr = 0;
+    hr = pBuffer->Lock(&pBytes, &cbMax, &cbCurr);
+    if (FAILED(hr)) {
+        PrintHR("Buffer->Lock failed", hr);
+        pBuffer->Release();
+        pSample->Release();
+        return hr;
+    }
+
+    // Buffer stays locked until the caller invokes UnlockVideoFrame()
+    pInstance->pLockedBuffer = pBuffer;
+    pInstance->pLockedBytes = pBytes;
+    pInstance->lockedMaxSize = cbMax;
+    pInstance->lockedCurrSize = cbCurr;
+    *pData = pBytes;
+    *pDataSize = cbCurr;
+    pSample->Release();
+    return S_OK;
+}
+
+NATIVEVIDEOPLAYER_API HRESULT UnlockVideoFrame(VideoPlayerInstance* pInstance) {
+    if (!pInstance)
+        return E_INVALIDARG;
+    if (pInstance->pLockedBuffer) {
+        // Release the buffer lock taken in ReadVideoFrame
+        pInstance->pLockedBuffer->Unlock();
+        pInstance->pLockedBuffer->Release();
+        pInstance->pLockedBuffer = nullptr;
+    }
+    pInstance->pLockedBytes = nullptr;
+    pInstance->lockedMaxSize = pInstance->lockedCurrSize = 0;
+    return S_OK;
+}
+
+NATIVEVIDEOPLAYER_API HRESULT ReadVideoFrameInto(
+    VideoPlayerInstance* pInstance,
+    BYTE* pDst,
+    DWORD dstRowBytes,
+    DWORD dstCapacity,
+    LONGLONG* pTimestamp) {
+    if (!pInstance || !pDst || dstRowBytes == 0 || dstCapacity == 0) {
+        return OP_E_INVALID_PARAMETER;
+    }
+
+    if (!pInstance->pSourceReader)
+        return OP_E_NOT_INITIALIZED;
+
+    if (pInstance->pLockedBuffer)
+        UnlockVideoFrame(pInstance);
+
+    if (pInstance->bEOF) {
+        if (pTimestamp) *pTimestamp = pInstance->llCurrentPosition;
+        return S_FALSE;
+    }
+
+    // Check if player is paused
+    BOOL isPaused = (pInstance->llPauseStart != 0);
+    IMFSample* pSample = nullptr;
+    HRESULT hr = S_OK;
+    DWORD streamIndex = 0, dwFlags = 0;
+    LONGLONG llTimestamp = 0;
+
+    if (isPaused) {
+        if (!pInstance->bHasInitialFrame) {
+            hr = pInstance->pSourceReader->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, &streamIndex, &dwFlags, &llTimestamp, &pSample);
+            if (FAILED(hr)) return hr;
+
+            if (dwFlags & MF_SOURCE_READERF_ENDOFSTREAM) {
+                pInstance->bEOF = TRUE;
+                if (pSample) pSample->Release();
+                if (pTimestamp) *pTimestamp = pInstance->llCurrentPosition;
+                return S_FALSE;
+            }
+
+            if (!pSample) {
+                if (pTimestamp) *pTimestamp = pInstance->llCurrentPosition;
+                return S_OK;
+            }
+
+            if (pInstance->pCachedSample) {
+                pInstance->pCachedSample->Release();
+                pInstance->pCachedSample = nullptr;
+            }
+            pSample->AddRef();  // keep a cached reference for repeated paused reads
+            pInstance->pCachedSample = pSample;
+            pInstance->bHasInitialFrame = TRUE;
+        } else {
+            if (pInstance->pCachedSample) {
+                pSample = pInstance->pCachedSample;
+                pSample->AddRef();
+            } else {
+                if (pTimestamp) *pTimestamp = pInstance->llCurrentPosition;
+                return S_OK;
+            }
+        }
+    } else {
+        hr = pInstance->pSourceReader->ReadSample(MF_SOURCE_READER_FIRST_VIDEO_STREAM, 0, &streamIndex, &dwFlags, &llTimestamp, &pSample);
+        if (FAILED(hr)) return hr;
+
+        if (dwFlags & MF_SOURCE_READERF_ENDOFSTREAM) {
+            pInstance->bEOF = TRUE;
+            if (pSample) pSample->Release();
+            if (pTimestamp) *pTimestamp = pInstance->llCurrentPosition;
+            return S_FALSE;
+        }
+
+        if (!pSample) {
+            if (pTimestamp) *pTimestamp = pInstance->llCurrentPosition;
+            return S_OK;
+        }
+
+        if (pInstance->pCachedSample) {
+            pInstance->pCachedSample->Release();
+            pInstance->pCachedSample = nullptr;
+        }
+        pSample->AddRef();
+        pInstance->pCachedSample = pSample;
+        pInstance->llCurrentPosition = llTimestamp;
+    }
+
+    // Frame timing synchronization
+    if (pInstance->bUseClockSync && pInstance->llPlaybackStartTime != 0 && llTimestamp > 0) {
+        LONGLONG currentTimeMs = GetCurrentTimeMs();
+        LONGLONG elapsedMs = currentTimeMs - pInstance->llPlaybackStartTime - pInstance->llTotalPauseTime;
+        double adjustedElapsedMs = elapsedMs * pInstance->playbackSpeed;
+        double frameTimeMs_ts = llTimestamp / 10000.0;  // 100 ns units -> ms
+
+        UINT frameRateNum = 60, frameRateDenom = 1;
+        GetVideoFrameRate(pInstance, &frameRateNum, &frameRateDenom);
+        double frameIntervalMs = 1000.0 * frameRateDenom / frameRateNum;
+
+        double diffMs = frameTimeMs_ts - adjustedElapsedMs;
+
+        if (diffMs < -frameIntervalMs * 3) {
+            pSample->Release();  // frame too late: drop it (still cached above)
+            if (pTimestamp) *pTimestamp = pInstance->llCurrentPosition;
+            return S_OK;
+        }
+        else if (diffMs > 1.0) {
+            double waitTime = std::min(diffMs, frameIntervalMs * 2);
+            PreciseSleepHighRes(waitTime);
+        }
+    }
+
+    if (pTimestamp) {
+        *pTimestamp = pInstance->llCurrentPosition;
+    }
+
+    const UINT32 width = pInstance->videoWidth;
+    const UINT32 height = pInstance->videoHeight;
+    if (width == 0 || height == 0) {
+        pSample->Release();
+        return S_FALSE;
+    }
+
+    const DWORD requiredDst = dstRowBytes * height;  // NOTE(review): DWORD multiply could overflow for huge frames -- confirm acceptable
+    if (dstCapacity < requiredDst) {
+        pSample->Release();
+        return OP_E_INVALID_PARAMETER;
+    }
+
+    // Try to use IMF2DBuffer2 for optimized zero-copy access
+    IMFMediaBuffer* pBuffer = nullptr;
+    hr = pSample->ConvertToContiguousBuffer(&pBuffer);
+    if (FAILED(hr)) {
+        pSample->Release();
+        return hr;
+    }
+
+    // Attempt IMF2DBuffer2 for direct 2D access (most efficient)
+    IMF2DBuffer2* p2DBuffer2 = nullptr;
+    IMF2DBuffer* p2DBuffer = nullptr;
+    BYTE* pScanline0 = nullptr;
+    LONG srcPitch = 0;
+    BYTE* pBufferStart = nullptr;
+    DWORD cbBufferLength = 0;
+    bool usedDirect2D = false;
+
+    hr = pBuffer->QueryInterface(IID_PPV_ARGS(&p2DBuffer2));
+    if (SUCCEEDED(hr) && p2DBuffer2) {
+        // Use Lock2DSize for optimal access - avoids internal copies
+        hr = p2DBuffer2->Lock2DSize(MF2DBuffer_LockFlags_Read, &pScanline0, &srcPitch, &pBufferStart, &cbBufferLength);
+        if (SUCCEEDED(hr)) {
+            usedDirect2D = true;
+            const DWORD srcRowBytes = width * 4;  // RGB32: 4 bytes per pixel
+
+            // Zero-copy path: if strides match exactly, use memcpy for the entire buffer
+            // (template arguments below were lost in extraction; restored as LONG to match srcPitch)
+            if (static_cast<LONG>(dstRowBytes) == srcPitch && static_cast<LONG>(srcRowBytes) == srcPitch) {
+                memcpy(pDst, pScanline0, srcRowBytes * height);
+            } else {
+                // Strides differ - must copy row by row but still more efficient than MFCopyImage
+                BYTE* pSrc = pScanline0;
+                BYTE* pDstRow = pDst;
+                const DWORD copyBytes = std::min(srcRowBytes, dstRowBytes);
+                for (UINT32 y = 0; y < height; y++) {
+                    memcpy(pDstRow, pSrc, copyBytes);
+                    pSrc += srcPitch;
+                    pDstRow += dstRowBytes;
+                }
+            }
+            p2DBuffer2->Unlock2D();
+        }
+        p2DBuffer2->Release();
+    }
+
+    // Fallback to IMF2DBuffer if IMF2DBuffer2 failed
+    if (!usedDirect2D) {
+        hr = pBuffer->QueryInterface(IID_PPV_ARGS(&p2DBuffer));
+        if (SUCCEEDED(hr) && p2DBuffer) {
+            hr = p2DBuffer->Lock2D(&pScanline0, &srcPitch);
+            if (SUCCEEDED(hr)) {
+                usedDirect2D = true;
+                const DWORD srcRowBytes = width * 4;
+
+                if (static_cast<LONG>(dstRowBytes) == srcPitch && static_cast<LONG>(srcRowBytes) == srcPitch) {
+                    memcpy(pDst, pScanline0, srcRowBytes * height);
+                } else {
+                    BYTE* pSrc = pScanline0;
+                    BYTE* pDstRow = pDst;
+                    const DWORD copyBytes = std::min(srcRowBytes, dstRowBytes);
+                    for (UINT32 y = 0; y < height; y++) {
+                        memcpy(pDstRow, pSrc, copyBytes);
+                        pSrc += srcPitch;
+                        pDstRow += dstRowBytes;
+                    }
+                }
+                p2DBuffer->Unlock2D();
+            }
+            p2DBuffer->Release();
+        }
+    }
+
+    // Ultimate fallback to standard buffer lock
+    if (!usedDirect2D) {
+        BYTE* pBytes = nullptr;
+        DWORD cbMax = 0, cbCurr = 0;
+        hr = pBuffer->Lock(&pBytes, &cbMax, &cbCurr);
+        if (SUCCEEDED(hr)) {
+            const DWORD srcRowBytes = width * 4;
+            const DWORD requiredSrc = srcRowBytes * height;
+            if (cbCurr >= requiredSrc) {
+                // Use MFCopyImage as last resort
+                MFCopyImage(pDst, dstRowBytes, pBytes, srcRowBytes, srcRowBytes, height);
+            }
+            pBuffer->Unlock();
+        }
+    }
+
+    pBuffer->Release();
+    pSample->Release();
+    return S_OK;
+}
+
+NATIVEVIDEOPLAYER_API BOOL IsEOF(const VideoPlayerInstance* pInstance) {
+    if (!pInstance)
+        return FALSE;  // null instance reported as "not at EOF"
+    return pInstance->bEOF;
+}
+
+NATIVEVIDEOPLAYER_API void GetVideoSize(const VideoPlayerInstance* pInstance, UINT32* pWidth, UINT32* pHeight) {
+    if (!pInstance)
+        return;  // out-params left untouched when instance is null
+    if (pWidth) *pWidth = pInstance->videoWidth;
+    if (pHeight) *pHeight = pInstance->videoHeight;
+}
+
+NATIVEVIDEOPLAYER_API HRESULT GetVideoFrameRate(const VideoPlayerInstance* pInstance, UINT* pNum, UINT* pDenom) {
+    if (!pInstance || !pInstance->pSourceReader || !pNum || !pDenom)
+        return OP_E_NOT_INITIALIZED;
+
+    // Query the current video media type and extract MF_MT_FRAME_RATE as a ratio
+    IMFMediaType* pType = nullptr;
+    HRESULT hr = pInstance->pSourceReader->GetCurrentMediaType(MF_SOURCE_READER_FIRST_VIDEO_STREAM, &pType);
+    if (SUCCEEDED(hr)) {
+        hr = MFGetAttributeRatio(pType, MF_MT_FRAME_RATE, pNum, pDenom);
+        pType->Release();
+    }
+    return hr;
+}
+
+NATIVEVIDEOPLAYER_API HRESULT SeekMedia(VideoPlayerInstance* pInstance, LONGLONG llPositionIn100Ns) {
+    if (!pInstance || !pInstance->pSourceReader)
+        return OP_E_NOT_INITIALIZED;
+
+    EnterCriticalSection(&pInstance->csClockSync);
+    pInstance->bSeekInProgress = TRUE;
+    LeaveCriticalSection(&pInstance->csClockSync);
+
+    if (pInstance->llPauseStart != 0) {
+        pInstance->llTotalPauseTime += (GetCurrentTimeMs() - pInstance->llPauseStart);
+        pInstance->llPauseStart = GetCurrentTimeMs();
+    }
+
+    if (pInstance->pLockedBuffer)
+        UnlockVideoFrame(pInstance);
+
+    // Release cached sample when seeking
+    if (pInstance->pCachedSample) {
+        pInstance->pCachedSample->Release();
+        pInstance->pCachedSample = nullptr;
+    }
+
+    // Reset initial frame flag to ensure we read a new frame at the new position
+    pInstance->bHasInitialFrame = FALSE;
+
+    PROPVARIANT var;
+    PropVariantInit(&var);
+    var.vt = VT_I8;
+    var.hVal.QuadPart = llPositionIn100Ns;
+
+    // FIX: compute the playing state unconditionally; previously it was only set when
+    bool wasPlaying = (pInstance->llPauseStart == 0);  // audio was present, so seeking an audio-less video left it marked "paused"
+    if (pInstance->bHasAudio && pInstance->pAudioClient) {
+        pInstance->pAudioClient->Stop();
+        Sleep(5);
+    }
+
+    // Stop the presentation clock
+    if (pInstance->bUseClockSync && pInstance->pPresentationClock) {
+        pInstance->pPresentationClock->Stop();
+    }
+
+    // Seek the main source reader
+    HRESULT hr = pInstance->pSourceReader->SetCurrentPosition(GUID_NULL, var);
+    if (FAILED(hr)) {
+        EnterCriticalSection(&pInstance->csClockSync);
+        pInstance->bSeekInProgress = FALSE;
+        LeaveCriticalSection(&pInstance->csClockSync);
+        PropVariantClear(&var);
+        return hr;
+    }
+
+    // Also seek the audio source reader if available
+    if (pInstance->pSourceReaderAudio) {
+        PROPVARIANT varAudio;
+        PropVariantInit(&varAudio);
+        varAudio.vt = VT_I8;
+        varAudio.hVal.QuadPart = llPositionIn100Ns;
+
+        HRESULT hrAudio = pInstance->pSourceReaderAudio->SetCurrentPosition(GUID_NULL, varAudio);
+        if (FAILED(hrAudio)) {
+            PrintHR("Failed to seek audio source reader", hrAudio);
+        }
+        PropVariantClear(&varAudio);
+    }
+
+
+    // Reset audio client if needed
+    if (pInstance->bHasAudio && pInstance->pRenderClient && pInstance->pAudioClient) {
+        UINT32 bufferFrameCount = 0;
+        if (SUCCEEDED(pInstance->pAudioClient->GetBufferSize(&bufferFrameCount))) {
+            pInstance->pAudioClient->Reset();
+        }
+    }
+
+    PropVariantClear(&var);
+
+    // Update position and state
+    EnterCriticalSection(&pInstance->csClockSync);
+    pInstance->llCurrentPosition = llPositionIn100Ns;
+    pInstance->bSeekInProgress = FALSE;
+    LeaveCriticalSection(&pInstance->csClockSync);
+
+    pInstance->bEOF = FALSE;
+
+    // IMPORTANT: Reset timing for A/V sync after seek
+    // We adjust llPlaybackStartTime so that the elapsed time calculation matches the seek position
+    // Formula: elapsedMs should equal seekPositionMs after seek
+    // elapsedMs = currentTimeMs - llPlaybackStartTime - llTotalPauseTime
+    // So: llPlaybackStartTime = currentTimeMs - seekPositionMs / playbackSpeed
+    if (pInstance->bUseClockSync) {
+        double seekPositionMs = llPositionIn100Ns / 10000.0;
+        double adjustedSeekMs = seekPositionMs / static_cast<double>(pInstance->playbackSpeed);
+        pInstance->llPlaybackStartTime = GetCurrentTimeMs() - static_cast<LONGLONG>(adjustedSeekMs);
+        pInstance->llTotalPauseTime = 0;
+
+        // If paused, set pause start to now so pause time accounting works correctly
+        if (!wasPlaying) {
+            pInstance->llPauseStart = GetCurrentTimeMs();
+        } else {
+            pInstance->llPauseStart = 0;
+        }
+    }
+
+    // Restart the presentation clock at the new position
+    if (pInstance->bUseClockSync && pInstance->pPresentationClock) {
+        hr = pInstance->pPresentationClock->Start(llPositionIn100Ns);
+        if (FAILED(hr)) {
+            PrintHR("Failed to restart presentation clock after seek", hr);
+        }
+    }
+
+    // Restart audio if it was playing
+    if (pInstance->bHasAudio && pInstance->pAudioClient && wasPlaying) {
+        Sleep(5);
+        pInstance->pAudioClient->Start();
+    }
+
+    // Signal audio thread to continue
+    if (pInstance->hAudioReadyEvent)
+        SetEvent(pInstance->hAudioReadyEvent);
+
+    return S_OK;
+}
+
+NATIVEVIDEOPLAYER_API HRESULT GetMediaDuration(const VideoPlayerInstance* pInstance, LONGLONG* pDuration) {
+    if (!pInstance || !pInstance->pSourceReader || !pDuration)
+        return OP_E_NOT_INITIALIZED;
+
+    // Duration lives on the presentation descriptor of the underlying media source
+    IMFMediaSource* pMediaSource = nullptr;
+    IMFPresentationDescriptor* pPresentationDescriptor = nullptr;
+    HRESULT hr = pInstance->pSourceReader->GetServiceForStream(MF_SOURCE_READER_MEDIASOURCE, GUID_NULL, IID_PPV_ARGS(&pMediaSource));
+    if (SUCCEEDED(hr)) {
+        hr = pMediaSource->CreatePresentationDescriptor(&pPresentationDescriptor);
+        if (SUCCEEDED(hr)) {
+            hr = pPresentationDescriptor->GetUINT64(MF_PD_DURATION, reinterpret_cast<UINT64*>(pDuration));  // duration in 100 ns units; cast restored (lost in extraction)
+            pPresentationDescriptor->Release();
+        }
+        pMediaSource->Release();
+    }
+    return hr;
+}
+
+NATIVEVIDEOPLAYER_API HRESULT GetMediaPosition(const VideoPlayerInstance* pInstance, LONGLONG* pPosition) {
+    if (!pInstance || !pPosition)
+        return OP_E_NOT_INITIALIZED;
+
+    // Last timestamp recorded by the frame readers / seek, in 100 ns units
+    *pPosition = pInstance->llCurrentPosition;
+    return S_OK;
+}
+
+NATIVEVIDEOPLAYER_API HRESULT SetPlaybackState(VideoPlayerInstance* pInstance, BOOL bPlaying, BOOL bStop) {
+    if (!pInstance)
+        return OP_E_NOT_INITIALIZED;
+
+    HRESULT hr = S_OK;
+
+    if (bStop && !bPlaying) {
+        // Stop playback completely
+        if (pInstance->llPlaybackStartTime != 0) {
+            pInstance->llTotalPauseTime = 0;
+            pInstance->llPauseStart = 0;
+            pInstance->llPlaybackStartTime = 0;
+
+            // Stop presentation clock
+            if (pInstance->bUseClockSync && pInstance->pPresentationClock) {
+                pInstance->pPresentationClock->Stop();
+            }
+
+            // Stop audio thread if running
+            if (pInstance->bAudioThreadRunning) {
+                StopAudioThread(pInstance);
+            }
+
+            // Reset initial frame flag when stopping
+            pInstance->bHasInitialFrame = FALSE;
+
+            // Release cached sample when stopping
+            if (pInstance->pCachedSample) {
+                pInstance->pCachedSample->Release();
+                pInstance->pCachedSample = nullptr;
+            }
+        }
+    } else if (bPlaying) {
+        // Start or resume playback
+        if (pInstance->llPlaybackStartTime == 0) {
+            // First start
+            pInstance->llPlaybackStartTime = GetCurrentTimeMs();
+        } else if (pInstance->llPauseStart != 0) {
+            // Resume from pause
+            pInstance->llTotalPauseTime += (GetCurrentTimeMs() - pInstance->llPauseStart);
+            pInstance->llPauseStart = 0;
+        }
+
+        // Reset initial frame flag when switching to playing state
+        pInstance->bHasInitialFrame = FALSE;
+
+        // Start audio client if available
+        if (pInstance->pAudioClient && pInstance->bAudioInitialized) {
+            hr = pInstance->pAudioClient->Start();
+            if (FAILED(hr)) {
+                PrintHR("Failed to start audio client", hr);
+            }
+        }
+
+        // IMPORTANT: Start the audio thread if it is not already running
+        // This is crucial for the case where playback starts paused and play() is called later
+        if (pInstance->bHasAudio && pInstance->bAudioInitialized && pInstance->pSourceReaderAudio) {
+            if (!pInstance->bAudioThreadRunning || pInstance->hAudioThread == nullptr) {
+                hr = StartAudioThread(pInstance);
+                if (FAILED(hr)) {
+                    PrintHR("Failed to start audio thread on play", hr);
+                    // Continue anyway - video can still play without audio
+                }
+            }
+        }
+
+        // Start or resume presentation clock
+        if (pInstance->bUseClockSync && pInstance->pPresentationClock) {
+            // IMPORTANT: Start from the stored current position
+            hr = pInstance->pPresentationClock->Start(pInstance->llCurrentPosition);
+            if (FAILED(hr)) {
+                PrintHR("Failed to start presentation clock", hr);
+            }
+        }
+
+        // Signal audio thread to continue if it was waiting
+        if (pInstance->hAudioReadyEvent) {
+            SetEvent(pInstance->hAudioReadyEvent);
+        }
+    } else {
+        // Pause playback
+        if (pInstance->llPauseStart == 0) {
+            pInstance->llPauseStart = GetCurrentTimeMs();
+        }
+
+        // Reset initial frame flag when switching to paused state
+        pInstance->bHasInitialFrame = FALSE;
+
+        // Pause audio client if available
+        if (pInstance->pAudioClient && pInstance->bAudioInitialized) {
+            pInstance->pAudioClient->Stop();
+        }
+
+        // Pause presentation clock
+        if (pInstance->bUseClockSync && pInstance->pPresentationClock) {
+            hr = pInstance->pPresentationClock->Pause();
+            if (FAILED(hr)) {
+                PrintHR("Failed to pause presentation clock", hr);
+            }
+        }
+
+        // Note: the audio thread is NOT stopped while paused; it keeps running
+        // and simply waits on the synchronization events
+    }
+    return hr;
+}
+
+// Global teardown entry point exported to the JVM side. Delegates to the
+// internal Shutdown() helper (defined elsewhere in this file). Per the header
+// contract, call only after all player instances have been destroyed.
+NATIVEVIDEOPLAYER_API HRESULT ShutdownMediaFoundation() {
+    return Shutdown();
+}
+
+// Releases every per-instance resource associated with the currently opened
+// media (audio thread, locked buffers, COM interfaces, event handles) and
+// resets playback state. Safe to call with a null instance or when nothing is
+// open. The instance struct itself is NOT freed here; see
+// DestroyVideoPlayerInstance.
+NATIVEVIDEOPLAYER_API void CloseMedia(VideoPlayerInstance* pInstance) {
+    if (!pInstance)
+        return;
+
+    // Stop audio thread first so nothing touches the resources released below
+    StopAudioThread(pInstance);
+
+    // Release video buffer still locked by a pending ReadVideoFrame
+    if (pInstance->pLockedBuffer) {
+        UnlockVideoFrame(pInstance);
+    }
+
+    // Release cached sample (frame kept for redisplay while paused)
+    if (pInstance->pCachedSample) {
+        pInstance->pCachedSample->Release();
+        pInstance->pCachedSample = nullptr;
+    }
+
+    // Reset initial frame flag
+    pInstance->bHasInitialFrame = FALSE;
+
+    // Macro for safely releasing COM interfaces (null-checks and clears)
+    #define SAFE_RELEASE(obj) if (obj) { obj->Release(); obj = nullptr; }
+
+    // Stop and release audio resources
+    if (pInstance->pAudioClient) {
+        pInstance->pAudioClient->Stop();
+        SAFE_RELEASE(pInstance->pAudioClient);
+    }
+
+    // Stop and release presentation clock
+    if (pInstance->pPresentationClock) {
+        pInstance->pPresentationClock->Stop();
+        SAFE_RELEASE(pInstance->pPresentationClock);
+    }
+
+    // Release media source
+    SAFE_RELEASE(pInstance->pMediaSource);
+
+    // Release other COM resources
+    SAFE_RELEASE(pInstance->pRenderClient);
+    SAFE_RELEASE(pInstance->pDevice);
+    SAFE_RELEASE(pInstance->pAudioEndpointVolume);
+    SAFE_RELEASE(pInstance->pSourceReader);
+    SAFE_RELEASE(pInstance->pSourceReaderAudio);
+
+    // Release audio format (allocated by COM, hence CoTaskMemFree)
+    if (pInstance->pSourceAudioFormat) {
+        CoTaskMemFree(pInstance->pSourceAudioFormat);
+        pInstance->pSourceAudioFormat = nullptr;
+    }
+
+    // Close event handles
+    #define SAFE_CLOSE_HANDLE(handle) if (handle) { CloseHandle(handle); handle = nullptr; }
+
+    SAFE_CLOSE_HANDLE(pInstance->hAudioSamplesReadyEvent);
+    SAFE_CLOSE_HANDLE(pInstance->hAudioReadyEvent);
+
+    // Reset state variables so a subsequent OpenMedia starts from a clean slate
+    pInstance->bEOF = FALSE;
+    pInstance->videoWidth = pInstance->videoHeight = 0;
+    pInstance->bHasAudio = FALSE;
+    pInstance->bAudioInitialized = FALSE;
+    pInstance->llPlaybackStartTime = 0;
+    pInstance->llTotalPauseTime = 0;
+    pInstance->llPauseStart = 0;
+    pInstance->llCurrentPosition = 0;
+    pInstance->bSeekInProgress = FALSE;
+    pInstance->playbackSpeed = 1.0f;
+
+    #undef SAFE_RELEASE
+    #undef SAFE_CLOSE_HANDLE
+}
+
+// Thin exported wrapper; the implementation lives in SetVolume (defined
+// elsewhere in this file). Expected range per the header: 0.0 .. 1.0.
+NATIVEVIDEOPLAYER_API HRESULT SetAudioVolume(VideoPlayerInstance* pInstance, float volume) {
+    return SetVolume(pInstance, volume);
+}
+
+// Thin exported wrapper; the implementation lives in GetVolume (defined
+// elsewhere in this file). *volume receives a value in 0.0 .. 1.0 per the
+// header contract.
+NATIVEVIDEOPLAYER_API HRESULT GetAudioVolume(const VideoPlayerInstance* pInstance, float* volume) {
+    return GetVolume(pInstance, volume);
+}
+
+// Thin exported wrapper; delegates to the AudioManager helper to report the
+// current left/right channel levels for this instance.
+NATIVEVIDEOPLAYER_API HRESULT GetAudioLevels(const VideoPlayerInstance* pInstance, float* pLeftLevel, float* pRightLevel) {
+    return AudioManager::GetAudioLevels(pInstance, pLeftLevel, pRightLevel);
+}
+
+// Sets the playback speed for one instance, clamped to [0.5, 2.0]. The value
+// is always stored on the instance; when clock synchronization is enabled the
+// Media Foundation presentation clock rate is updated as well.
+NATIVEVIDEOPLAYER_API HRESULT SetPlaybackSpeed(VideoPlayerInstance* pInstance, float speed) {
+    if (!pInstance)
+        return OP_E_NOT_INITIALIZED;
+
+    // Limit speed between 0.5 and 2.0
+    speed = std::max(0.5f, std::min(speed, 2.0f));
+
+    // Store speed in instance
+    pInstance->playbackSpeed = speed;
+
+    // Update the presentation clock rate
+    if (pInstance->bUseClockSync && pInstance->pPresentationClock) {
+        // Get the rate control interface from the presentation clock
+        IMFRateControl* pRateControl = nullptr;
+        HRESULT hr = pInstance->pPresentationClock->QueryInterface(IID_PPV_ARGS(&pRateControl));
+        if (SUCCEEDED(hr)) {
+            // Set the playback rate (first arg FALSE = not thinned)
+            hr = pRateControl->SetRate(FALSE, speed);
+            if (FAILED(hr)) {
+                PrintHR("Failed to set presentation clock rate", hr);
+            }
+            pRateControl->Release();
+        }
+    }
+
+    // NOTE(review): clock-rate failures are logged but not propagated -- the
+    // function returns S_OK regardless, and the stored playbackSpeed still
+    // takes effect. Confirm this best-effort contract is intended before
+    // changing the return value.
+    return S_OK;
+}
+
+// Returns the per-instance playback speed last stored by SetPlaybackSpeed
+// (1.0 by default, per VideoPlayerInstance's initializer).
+NATIVEVIDEOPLAYER_API HRESULT GetPlaybackSpeed(const VideoPlayerInstance* pInstance, float* pSpeed) {
+    if (!pInstance || !pSpeed)
+        return OP_E_INVALID_PARAMETER;
+
+    // Return instance-specific playback speed
+    *pSpeed = pInstance->playbackSpeed;
+
+    return S_OK;
+}
+
+// Collects all available metadata for the currently opened media into
+// *pMetadata. Every field is paired with a has* flag; fields that cannot be
+// determined stay zeroed with the flag FALSE. Falls back to instance state
+// (dimensions, frame rate, duration, audio format) when the media source does
+// not provide a value. Individual lookups fail soft, so the function returns
+// S_OK whenever the source reader exists.
+//
+// NOTE: the template arguments on static_cast below were stripped in the
+// checked-in patch (angle-bracket contents lost) and have been restored --
+// without them this translation unit cannot compile.
+NATIVEVIDEOPLAYER_API HRESULT GetVideoMetadata(const VideoPlayerInstance* pInstance, VideoMetadata* pMetadata) {
+    if (!pInstance || !pMetadata)
+        return OP_E_INVALID_PARAMETER;
+    if (!pInstance->pSourceReader)
+        return OP_E_NOT_INITIALIZED;
+
+    // Initialize metadata structure with default values
+    ZeroMemory(pMetadata, sizeof(VideoMetadata));
+
+    HRESULT hr = S_OK;
+
+    // Get media source for property access
+    IMFMediaSource* pMediaSource = nullptr;
+    IMFPresentationDescriptor* pPresentationDescriptor = nullptr;
+
+    // Get media source from source reader
+    hr = pInstance->pSourceReader->GetServiceForStream(
+        MF_SOURCE_READER_MEDIASOURCE,
+        GUID_NULL,
+        IID_PPV_ARGS(&pMediaSource));
+
+    if (SUCCEEDED(hr) && pMediaSource) {
+        // Get presentation descriptor
+        hr = pMediaSource->CreatePresentationDescriptor(&pPresentationDescriptor);
+
+        if (SUCCEEDED(hr) && pPresentationDescriptor) {
+            // Get duration (100-ns units)
+            UINT64 duration = 0;
+            if (SUCCEEDED(pPresentationDescriptor->GetUINT64(MF_PD_DURATION, &duration))) {
+                pMetadata->duration = static_cast<LONGLONG>(duration);
+                pMetadata->hasDuration = TRUE;
+            }
+
+            // Get stream descriptors to access more metadata
+            DWORD streamCount = 0;
+            hr = pPresentationDescriptor->GetStreamDescriptorCount(&streamCount);
+
+            if (SUCCEEDED(hr)) {
+                // Try to get title and other metadata from attributes
+                IMFAttributes* pAttributes = nullptr;
+                if (SUCCEEDED(pPresentationDescriptor->QueryInterface(IID_PPV_ARGS(&pAttributes)))) {
+                    // Some metadata attributes cannot be accessed directly due
+                    // to missing definitions, so the title stays unavailable.
+                    if (pInstance->pSourceReader) {
+                        pMetadata->hasTitle = FALSE;
+                    }
+
+                    // Bitrate is likewise not derivable here. (Local renamed
+                    // from 'duration' to avoid shadowing the outer variable.)
+                    UINT64 attrDuration = 0;
+                    if (SUCCEEDED(pPresentationDescriptor->GetUINT64(MF_PD_DURATION, &attrDuration)) && attrDuration > 0) {
+                        // Bitrate could be estimated later from individual streams
+                        pMetadata->hasBitrate = FALSE;
+                    }
+
+                    pAttributes->Release();
+                }
+
+                // Process each stream to get more metadata
+                for (DWORD i = 0; i < streamCount; i++) {
+                    BOOL selected = FALSE;
+                    IMFStreamDescriptor* pStreamDescriptor = nullptr;
+
+                    if (SUCCEEDED(pPresentationDescriptor->GetStreamDescriptorByIndex(i, &selected, &pStreamDescriptor))) {
+                        // Get media type handler
+                        IMFMediaTypeHandler* pHandler = nullptr;
+                        if (SUCCEEDED(pStreamDescriptor->GetMediaTypeHandler(&pHandler))) {
+                            // Get major type to determine if video or audio
+                            GUID majorType;
+                            if (SUCCEEDED(pHandler->GetMajorType(&majorType))) {
+                                if (majorType == MFMediaType_Video) {
+                                    // Get current media type
+                                    IMFMediaType* pMediaType = nullptr;
+                                    if (SUCCEEDED(pHandler->GetCurrentMediaType(&pMediaType))) {
+                                        // Get video dimensions
+                                        UINT32 width = 0, height = 0;
+                                        if (SUCCEEDED(MFGetAttributeSize(pMediaType, MF_MT_FRAME_SIZE, &width, &height))) {
+                                            pMetadata->width = width;
+                                            pMetadata->height = height;
+                                            pMetadata->hasWidth = TRUE;
+                                            pMetadata->hasHeight = TRUE;
+                                        }
+
+                                        // Get frame rate (stored as a rational)
+                                        UINT32 numerator = 0, denominator = 1;
+                                        if (SUCCEEDED(MFGetAttributeRatio(pMediaType, MF_MT_FRAME_RATE, &numerator, &denominator))) {
+                                            if (denominator > 0) {
+                                                pMetadata->frameRate = static_cast<float>(numerator) / static_cast<float>(denominator);
+                                                pMetadata->hasFrameRate = TRUE;
+                                            }
+                                        }
+
+                                        // Get subtype (format) for mime type
+                                        GUID subtype;
+                                        if (SUCCEEDED(pMediaType->GetGUID(MF_MT_SUBTYPE, &subtype))) {
+                                            // Convert subtype to mime type string
+                                            if (subtype == MFVideoFormat_H264) {
+                                                wcscpy_s(pMetadata->mimeType, L"video/h264");
+                                                pMetadata->hasMimeType = TRUE;
+                                            }
+                                            else if (subtype == MFVideoFormat_HEVC) {
+                                                wcscpy_s(pMetadata->mimeType, L"video/hevc");
+                                                pMetadata->hasMimeType = TRUE;
+                                            }
+                                            else if (subtype == MFVideoFormat_MPEG2) {
+                                                wcscpy_s(pMetadata->mimeType, L"video/mpeg2");
+                                                pMetadata->hasMimeType = TRUE;
+                                            }
+                                            else if (subtype == MFVideoFormat_WMV3) {
+                                                wcscpy_s(pMetadata->mimeType, L"video/wmv");
+                                                pMetadata->hasMimeType = TRUE;
+                                            }
+                                            else {
+                                                wcscpy_s(pMetadata->mimeType, L"video/unknown");
+                                                pMetadata->hasMimeType = TRUE;
+                                            }
+                                        }
+
+                                        pMediaType->Release();
+                                    }
+                                }
+                                else if (majorType == MFMediaType_Audio) {
+                                    // Get current media type
+                                    IMFMediaType* pMediaType = nullptr;
+                                    if (SUCCEEDED(pHandler->GetCurrentMediaType(&pMediaType))) {
+                                        // Get audio channels
+                                        UINT32 channels = 0;
+                                        if (SUCCEEDED(pMediaType->GetUINT32(MF_MT_AUDIO_NUM_CHANNELS, &channels))) {
+                                            pMetadata->audioChannels = channels;
+                                            pMetadata->hasAudioChannels = TRUE;
+                                        }
+
+                                        // Get audio sample rate
+                                        UINT32 sampleRate = 0;
+                                        if (SUCCEEDED(pMediaType->GetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, &sampleRate))) {
+                                            pMetadata->audioSampleRate = sampleRate;
+                                            pMetadata->hasAudioSampleRate = TRUE;
+                                        }
+
+                                        pMediaType->Release();
+                                    }
+                                }
+                            }
+                            pHandler->Release();
+                        }
+                        pStreamDescriptor->Release();
+                    }
+                }
+            }
+            pPresentationDescriptor->Release();
+        }
+        pMediaSource->Release();
+    }
+
+    // Fallback: dimensions from the instance state
+    if (!pMetadata->hasWidth || !pMetadata->hasHeight) {
+        if (pInstance->videoWidth > 0 && pInstance->videoHeight > 0) {
+            pMetadata->width = pInstance->videoWidth;
+            pMetadata->height = pInstance->videoHeight;
+            pMetadata->hasWidth = TRUE;
+            pMetadata->hasHeight = TRUE;
+        }
+    }
+
+    // Fallback: frame rate via GetVideoFrameRate
+    if (!pMetadata->hasFrameRate) {
+        UINT numerator = 0, denominator = 1;
+        if (SUCCEEDED(GetVideoFrameRate(pInstance, &numerator, &denominator)) && denominator > 0) {
+            pMetadata->frameRate = static_cast<float>(numerator) / static_cast<float>(denominator);
+            pMetadata->hasFrameRate = TRUE;
+        }
+    }
+
+    // Fallback: duration via GetMediaDuration
+    if (!pMetadata->hasDuration) {
+        LONGLONG duration = 0;
+        if (SUCCEEDED(GetMediaDuration(pInstance, &duration))) {
+            pMetadata->duration = duration;
+            pMetadata->hasDuration = TRUE;
+        }
+    }
+
+    // Fallback: audio channel/sample-rate info from the captured source format
+    if (!pMetadata->hasAudioChannels && pInstance->bHasAudio) {
+        if (pInstance->pSourceAudioFormat) {
+            pMetadata->audioChannels = pInstance->pSourceAudioFormat->nChannels;
+            pMetadata->hasAudioChannels = TRUE;
+
+            pMetadata->audioSampleRate = pInstance->pSourceAudioFormat->nSamplesPerSec;
+            pMetadata->hasAudioSampleRate = TRUE;
+        }
+    }
+
+    return S_OK;
+}
diff --git a/mediaplayer/src/jvmMain/native/windows/NativeVideoPlayer.h b/mediaplayer/src/jvmMain/native/windows/NativeVideoPlayer.h
new file mode 100644
index 00000000..fc060fc7
--- /dev/null
+++ b/mediaplayer/src/jvmMain/native/windows/NativeVideoPlayer.h
@@ -0,0 +1,239 @@
+// NativeVideoPlayer.h
+#pragma once
+#ifndef NATIVE_VIDEO_PLAYER_H
+#define NATIVE_VIDEO_PLAYER_H
+
+// NOTE(review): the include targets below were stripped in the checked-in
+// patch (angle-bracket contents lost). Restored to the headers the
+// declarations in this file require -- confirm against the original sources.
+#include <windows.h>
+#include <mfapi.h>
+#include <mfidl.h>
+#include <mfreadwrite.h>
+#include <mferror.h>
+#include <audioclient.h>
+
+// Structure to hold video metadata
+typedef struct VideoMetadata {
+    wchar_t title[256];        // Title of the video (empty if not available)
+    LONGLONG duration;         // Duration in 100-ns units
+    UINT32 width;              // Width in pixels
+    UINT32 height;             // Height in pixels
+    LONGLONG bitrate;          // Bitrate in bits per second
+    float frameRate;           // Frame rate in frames per second
+    wchar_t mimeType[64];      // MIME type of the video
+    UINT32 audioChannels;      // Number of audio channels
+    UINT32 audioSampleRate;    // Audio sample rate in Hz
+    BOOL hasTitle;             // TRUE if title is available
+    BOOL hasDuration;          // TRUE if duration is available
+    BOOL hasWidth;             // TRUE if width is available
+    BOOL hasHeight;            // TRUE if height is available
+    BOOL hasBitrate;           // TRUE if bitrate is available
+    BOOL hasFrameRate;         // TRUE if frame rate is available
+    BOOL hasMimeType;          // TRUE if MIME type is available
+    BOOL hasAudioChannels;     // TRUE if audio channels is available
+    BOOL hasAudioSampleRate;   // TRUE if audio sample rate is available
+} VideoMetadata;
+
+// Export macro for the Windows DLL
+#ifdef _WIN32
+#ifdef NATIVEVIDEOPLAYER_EXPORTS
+#define NATIVEVIDEOPLAYER_API __declspec(dllexport)
+#else
+#define NATIVEVIDEOPLAYER_API __declspec(dllimport)
+#endif
+#else
+#define NATIVEVIDEOPLAYER_API
+#endif
+
+// Custom error codes
+#define OP_E_NOT_INITIALIZED ((HRESULT)0x80000001L)
+#define OP_E_ALREADY_INITIALIZED ((HRESULT)0x80000002L)
+#define OP_E_INVALID_PARAMETER ((HRESULT)0x80000003L)
+
+// Structure encapsulating the state of a video player instance (opaque here)
+struct VideoPlayerInstance;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+// ====================================================================
+// Exported functions for instance management and media playback
+// ====================================================================
+
+/**
+ * @brief Initializes Media Foundation, Direct3D11 and the DXGI manager (once for all instances).
+ * @return S_OK on success, or an error code.
+ */
+NATIVEVIDEOPLAYER_API HRESULT InitMediaFoundation();
+
+/**
+ * @brief Creates a new video player instance.
+ * @param ppInstance Receives the handle of the created instance.
+ * @return S_OK on success, or an error code.
+ */
+NATIVEVIDEOPLAYER_API HRESULT CreateVideoPlayerInstance(VideoPlayerInstance** ppInstance);
+
+/**
+ * @brief Destroys a video player instance and releases its resources.
+ * @param pInstance Handle of the instance to destroy.
+ */
+NATIVEVIDEOPLAYER_API void DestroyVideoPlayerInstance(VideoPlayerInstance* pInstance);
+
+/**
+ * @brief Opens a media (file or URL) and prepares hardware-accelerated decoding for a specific instance.
+ * @param pInstance Instance handle.
+ * @param url Path or URL of the media (wide string).
+ * @param startPlayback TRUE to start playback immediately, FALSE to stay paused.
+ * @return S_OK on success, or an error code.
+ */
+NATIVEVIDEOPLAYER_API HRESULT OpenMedia(VideoPlayerInstance* pInstance, const wchar_t* url, BOOL startPlayback = TRUE);
+
+/**
+ * @brief Reads the next video frame in RGB32 format for a specific instance.
+ * @param pInstance Instance handle.
+ * @param pData Receives a pointer to the frame data (do not free).
+ * @param pDataSize Receives the buffer size in bytes.
+ * @return S_OK if a frame was read, S_FALSE at end of stream, or an error code.
+ */
+NATIVEVIDEOPLAYER_API HRESULT ReadVideoFrame(VideoPlayerInstance* pInstance, BYTE** pData, DWORD* pDataSize);
+
+/**
+ * @brief Unlocks the video frame buffer previously locked for a specific instance.
+ * @param pInstance Instance handle.
+ * @return S_OK on success.
+ */
+NATIVEVIDEOPLAYER_API HRESULT UnlockVideoFrame(VideoPlayerInstance* pInstance);
+
+/*
+ * Reads the next video frame and copies it into a destination buffer.
+ * pTimestamp receives the 100ns timestamp when available.
+ */
+NATIVEVIDEOPLAYER_API HRESULT ReadVideoFrameInto(
+    VideoPlayerInstance* pInstance,
+    BYTE* pDst,
+    DWORD dstRowBytes,
+    DWORD dstCapacity,
+    LONGLONG* pTimestamp);
+
+/**
+ * @brief Closes the media and releases the associated resources for a specific instance.
+ * @param pInstance Instance handle.
+ */
+NATIVEVIDEOPLAYER_API void CloseMedia(VideoPlayerInstance* pInstance);
+
+/**
+ * @brief Indicates whether the end of the media stream has been reached for a specific instance.
+ * @param pInstance Instance handle.
+ * @return TRUE at end of stream, FALSE otherwise.
+ */
+NATIVEVIDEOPLAYER_API BOOL IsEOF(const VideoPlayerInstance* pInstance);
+
+/**
+ * @brief Retrieves the video dimensions for a specific instance.
+ * @param pInstance Instance handle.
+ * @param pWidth Receives the width in pixels.
+ * @param pHeight Receives the height in pixels.
+ */
+NATIVEVIDEOPLAYER_API void GetVideoSize(const VideoPlayerInstance* pInstance, UINT32* pWidth, UINT32* pHeight);
+
+/**
+ * @brief Retrieves the video frame rate for a specific instance.
+ * @param pInstance Instance handle.
+ * @param pNum Receives the numerator.
+ * @param pDenom Receives the denominator.
+ * @return S_OK on success, or an error code.
+ */
+NATIVEVIDEOPLAYER_API HRESULT GetVideoFrameRate(const VideoPlayerInstance* pInstance, UINT* pNum, UINT* pDenom);
+
+/**
+ * @brief Seeks to a specific position in the media for a specific instance.
+ * @param pInstance Instance handle.
+ * @param llPosition Position (in 100-ns units) to reach.
+ * @return S_OK on success, or an error code.
+ */
+NATIVEVIDEOPLAYER_API HRESULT SeekMedia(VideoPlayerInstance* pInstance, LONGLONG llPosition);
+
+/**
+ * @brief Gets the total duration of the media for a specific instance.
+ * @param pInstance Instance handle.
+ * @param pDuration Receives the duration (in 100-ns units).
+ * @return S_OK on success, or an error code.
+ */
+NATIVEVIDEOPLAYER_API HRESULT GetMediaDuration(const VideoPlayerInstance* pInstance, LONGLONG* pDuration);
+
+/**
+ * @brief Gets the current playback position for a specific instance.
+ * @param pInstance Instance handle.
+ * @param pPosition Receives the position (in 100-ns units).
+ * @return S_OK on success, or an error code.
+ */
+NATIVEVIDEOPLAYER_API HRESULT GetMediaPosition(const VideoPlayerInstance* pInstance, LONGLONG* pPosition);
+
+/**
+ * @brief Sets the playback state (playing or paused) for a specific instance.
+ * @param pInstance Instance handle.
+ * @param bPlaying TRUE to play, FALSE to pause.
+ * @param bStop TRUE for a full stop, FALSE for a simple pause.
+ * @return S_OK on success, or an error code.
+ */
+NATIVEVIDEOPLAYER_API HRESULT SetPlaybackState(VideoPlayerInstance* pInstance, BOOL bPlaying, BOOL bStop = FALSE);
+
+/**
+ * @brief Shuts down Media Foundation and releases global resources (after all instances are destroyed).
+ * @return S_OK on success, or an error code.
+ */
+NATIVEVIDEOPLAYER_API HRESULT ShutdownMediaFoundation();
+
+/**
+ * @brief Sets the audio volume level for a specific instance.
+ * @param pInstance Instance handle.
+ * @param volume Volume level (0.0 to 1.0).
+ * @return S_OK on success, or an error code.
+ */
+NATIVEVIDEOPLAYER_API HRESULT SetAudioVolume(VideoPlayerInstance* pInstance, float volume);
+
+/**
+ * @brief Gets the current audio volume level for a specific instance.
+ * @param pInstance Instance handle.
+ * @param volume Receives the volume level (0.0 to 1.0).
+ * @return S_OK on success, or an error code.
+ */
+NATIVEVIDEOPLAYER_API HRESULT GetAudioVolume(const VideoPlayerInstance* pInstance, float* volume);
+
+/**
+ * @brief Gets the audio levels for the left and right channels for a specific instance.
+ * @param pInstance Instance handle.
+ * @param pLeftLevel Receives the left channel level.
+ * @param pRightLevel Receives the right channel level.
+ * @return S_OK on success, or an error code.
+ */
+NATIVEVIDEOPLAYER_API HRESULT GetAudioLevels(const VideoPlayerInstance* pInstance, float* pLeftLevel, float* pRightLevel);
+
+/**
+ * @brief Sets the playback speed for a specific instance.
+ * @param pInstance Instance handle.
+ * @param speed Playback speed (0.5 to 2.0, where 1.0 is normal speed).
+ * @return S_OK on success, or an error code.
+ */
+NATIVEVIDEOPLAYER_API HRESULT SetPlaybackSpeed(VideoPlayerInstance* pInstance, float speed);
+
+/**
+ * @brief Gets the current playback speed for a specific instance.
+ * @param pInstance Instance handle.
+ * @param pSpeed Receives the playback speed.
+ * @return S_OK on success, or an error code.
+ */
+NATIVEVIDEOPLAYER_API HRESULT GetPlaybackSpeed(const VideoPlayerInstance* pInstance, float* pSpeed);
+
+/**
+ * @brief Retrieves all available metadata for the current media.
+ * @param pInstance Handle to the instance.
+ * @param pMetadata Pointer to receive the metadata structure.
+ * @return S_OK on success, or an error code.
+ */
+NATIVEVIDEOPLAYER_API HRESULT GetVideoMetadata(const VideoPlayerInstance* pInstance, VideoMetadata* pMetadata);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif // NATIVE_VIDEO_PLAYER_H
diff --git a/mediaplayer/src/jvmMain/native/windows/Utils.cpp b/mediaplayer/src/jvmMain/native/windows/Utils.cpp
new file mode 100644
index 00000000..78724730
--- /dev/null
+++ b/mediaplayer/src/jvmMain/native/windows/Utils.cpp
@@ -0,0 +1,24 @@
+#include "Utils.h"
+// NOTE(review): the two include targets below were stripped in the checked-in
+// patch; restored to the headers this translation unit uses directly --
+// confirm against the original sources.
+#include <thread>
+#include <chrono>
+
+namespace VideoPlayerUtils {
+
+// High-resolution sleep built on a Windows waitable timer. Requests of
+// 0.1 ms or less are treated as no-ops; if the timer cannot be created, falls
+// back to std::this_thread::sleep_for.
+void PreciseSleepHighRes(double ms) {
+    if (ms <= 0.1)
+        return;
+
+    // Use a single static timer for all sleep operations.
+    // NOTE(review): the static handle is shared by every caller and never
+    // closed -- concurrent callers would re-arm each other's timer. Confirm
+    // this is only used from one thread, or make the timer per-call.
+    static HANDLE hTimer = CreateWaitableTimer(nullptr, TRUE, nullptr);
+    if (!hTimer) {
+        std::this_thread::sleep_for(std::chrono::duration<double, std::milli>(ms));
+        return;
+    }
+
+    LARGE_INTEGER liDueTime;
+    // Negative due time means a relative interval, in 100-ns units (ms * 10,000).
+    liDueTime.QuadPart = -static_cast<LONGLONG>(ms * 10000.0);
+    SetWaitableTimer(hTimer, &liDueTime, 0, nullptr, nullptr, FALSE);
+    WaitForSingleObject(hTimer, INFINITE);
+}
+
+} // namespace VideoPlayerUtils
\ No newline at end of file
diff --git a/mediaplayer/src/jvmMain/native/windows/Utils.h b/mediaplayer/src/jvmMain/native/windows/Utils.h
new file mode 100644
index 00000000..81f2aaca
--- /dev/null
+++ b/mediaplayer/src/jvmMain/native/windows/Utils.h
@@ -0,0 +1,23 @@
+#pragma once
+
+// NOTE(review): the two include targets below were stripped in the checked-in
+// patch; restored (windows.h for ULONGLONG/HANDLE, chrono for steady_clock) --
+// confirm against the original sources.
+#include <windows.h>
+#include <chrono>
+
+namespace VideoPlayerUtils {
+
+/**
+ * @brief Gets the current time in milliseconds.
+ *
+ * Based on std::chrono::steady_clock, so the value is monotonic and only
+ * meaningful for measuring intervals, not as wall-clock time.
+ * @return Current time in milliseconds.
+ */
+inline ULONGLONG GetCurrentTimeMs() {
+    return static_cast<ULONGLONG>(std::chrono::duration_cast<std::chrono::milliseconds>(
+        std::chrono::steady_clock::now().time_since_epoch()).count());
+}
+
+/**
+ * @brief Performs a high-resolution sleep for the specified duration.
+ * @param ms Sleep duration in milliseconds.
+ */
+void PreciseSleepHighRes(double ms);
+
+} // namespace VideoPlayerUtils
\ No newline at end of file
diff --git a/mediaplayer/src/jvmMain/native/windows/VideoPlayerInstance.h b/mediaplayer/src/jvmMain/native/windows/VideoPlayerInstance.h
new file mode 100644
index 00000000..02581efa
--- /dev/null
+++ b/mediaplayer/src/jvmMain/native/windows/VideoPlayerInstance.h
@@ -0,0 +1,59 @@
+#pragma once
+
+// NOTE(review): the seven include targets below were stripped in the
+// checked-in patch. Restored to the headers that declare the types used by
+// this struct (IMFSourceReader/IMFSample -> mfreadwrite/mfidl, IAudioClient/
+// IAudioRenderClient -> audioclient, IMMDevice -> mmdeviceapi,
+// IAudioEndpointVolume -> endpointvolume) -- confirm against the originals.
+#include <windows.h>
+#include <mfapi.h>
+#include <mfidl.h>
+#include <mfreadwrite.h>
+#include <audioclient.h>
+#include <mmdeviceapi.h>
+#include <endpointvolume.h>
+
+/**
+ * @brief Structure to encapsulate the state of a video player instance.
+ */
+struct VideoPlayerInstance {
+    // Video related members
+    IMFSourceReader* pSourceReader = nullptr;
+    IMFMediaBuffer* pLockedBuffer = nullptr;   // buffer currently locked by ReadVideoFrame
+    BYTE* pLockedBytes = nullptr;
+    DWORD lockedMaxSize = 0;
+    DWORD lockedCurrSize = 0;
+    UINT32 videoWidth = 0;
+    UINT32 videoHeight = 0;
+    BOOL bEOF = FALSE;
+
+    // Frame caching for paused state
+    IMFSample* pCachedSample = nullptr;    // Cached sample for paused state
+    BOOL bHasInitialFrame = FALSE;         // Whether we've read an initial frame when paused
+
+    // Audio related members
+    IMFSourceReader* pSourceReaderAudio = nullptr;
+    BOOL bHasAudio = FALSE;
+    BOOL bAudioInitialized = FALSE;
+    IAudioClient* pAudioClient = nullptr;
+    IAudioRenderClient* pRenderClient = nullptr;
+    IMMDevice* pDevice = nullptr;
+    WAVEFORMATEX* pSourceAudioFormat = nullptr;
+    HANDLE hAudioSamplesReadyEvent = nullptr;
+    HANDLE hAudioThread = nullptr;
+    BOOL bAudioThreadRunning = FALSE;
+    HANDLE hAudioReadyEvent = nullptr;
+    IAudioEndpointVolume* pAudioEndpointVolume = nullptr;
+
+    // Media Foundation clock for synchronization
+    IMFPresentationClock* pPresentationClock = nullptr;
+    IMFMediaSource* pMediaSource = nullptr;
+    BOOL bUseClockSync = FALSE;
+
+    // Timing and synchronization (times in ms unless noted; position in 100-ns units)
+    LONGLONG llCurrentPosition = 0;
+    ULONGLONG llPlaybackStartTime = 0;
+    ULONGLONG llTotalPauseTime = 0;
+    ULONGLONG llPauseStart = 0;
+    CRITICAL_SECTION csClockSync{};
+    BOOL bSeekInProgress = FALSE;
+
+    // Playback control
+    float instanceVolume = 1.0f;   // Volume specific to this instance (1.0 = 100%)
+    float playbackSpeed = 1.0f;    // Playback speed (1.0 = 100%)
+};
diff --git a/mediaplayer/src/jvmMain/native/windows/build.bat b/mediaplayer/src/jvmMain/native/windows/build.bat
new file mode 100644
index 00000000..18f0ab9b
--- /dev/null
+++ b/mediaplayer/src/jvmMain/native/windows/build.bat
@@ -0,0 +1,49 @@
+@echo off
+setlocal
+
+rem Builds the NativeVideoPlayer DLL for both x64 and ARM64 with CMake.
+rem Each architecture is configured and compiled in its own out-of-tree
+rem directory (build-x64 / build-arm64); any failing step aborts with the
+rem CMake exit code. Build directories are deleted again on success.
+echo === Starting compilation for x64 and ARM64 ===
+
+echo.
+echo === x64 Configuration ===
+cmake -B build-x64 -A x64 .
+if %ERRORLEVEL% neq 0 (
+    echo Error during x64 configuration
+    exit /b %ERRORLEVEL%
+)
+
+echo.
+echo === x64 Compilation ===
+cmake --build build-x64 --config Release
+if %ERRORLEVEL% neq 0 (
+    echo Error during x64 compilation
+    exit /b %ERRORLEVEL%
+)
+
+echo.
+echo === ARM64 Configuration ===
+cmake -B build-arm64 -A ARM64 .
+if %ERRORLEVEL% neq 0 (
+    echo Error during ARM64 configuration
+    exit /b %ERRORLEVEL%
+)
+
+echo.
+echo === ARM64 Compilation ===
+cmake --build build-arm64 --config Release
+if %ERRORLEVEL% neq 0 (
+    echo Error during ARM64 compilation
+    exit /b %ERRORLEVEL%
+)
+
+echo.
+echo === Compilation completed successfully for both architectures ===
+echo.
+
+rem Clean up build directories
+if exist build-x64 rmdir /s /q build-x64
+if exist build-arm64 rmdir /s /q build-arm64
+
+rem The output DLL paths below are configured by the CMakeLists, not here.
+echo x64 DLL: ..\..\resources\win32-x86-64\NativeVideoPlayer.dll
+echo ARM64 DLL: ..\..\resources\win32-arm64\NativeVideoPlayer.dll
+
+endlocal
diff --git a/mediaplayer/src/jvmMain/resources/darwin-aarch64/libNativeVideoPlayer.dylib b/mediaplayer/src/jvmMain/resources/darwin-aarch64/libNativeVideoPlayer.dylib
deleted file mode 100755
index b1bfe7f0..00000000
Binary files a/mediaplayer/src/jvmMain/resources/darwin-aarch64/libNativeVideoPlayer.dylib and /dev/null differ
diff --git a/mediaplayer/src/jvmMain/resources/darwin-x86-64/libNativeVideoPlayer.dylib b/mediaplayer/src/jvmMain/resources/darwin-x86-64/libNativeVideoPlayer.dylib
deleted file mode 100755
index c553e75e..00000000
Binary files a/mediaplayer/src/jvmMain/resources/darwin-x86-64/libNativeVideoPlayer.dylib and /dev/null differ
diff --git a/mediaplayer/src/jvmMain/resources/win32-arm64/NativeVideoPlayer.dll b/mediaplayer/src/jvmMain/resources/win32-arm64/NativeVideoPlayer.dll
deleted file mode 100644
index 119ecf7e..00000000
Binary files a/mediaplayer/src/jvmMain/resources/win32-arm64/NativeVideoPlayer.dll and /dev/null differ
diff --git a/mediaplayer/src/jvmMain/resources/win32-x86-64/NativeVideoPlayer.dll b/mediaplayer/src/jvmMain/resources/win32-x86-64/NativeVideoPlayer.dll
deleted file mode 100644
index 20e025d6..00000000
Binary files a/mediaplayer/src/jvmMain/resources/win32-x86-64/NativeVideoPlayer.dll and /dev/null differ
diff --git a/sample/composeApp/build.gradle.kts b/sample/composeApp/build.gradle.kts
index 08c92fce..a8e84c10 100644
--- a/sample/composeApp/build.gradle.kts
+++ b/sample/composeApp/build.gradle.kts
@@ -15,6 +15,7 @@ plugins {
kotlin {
jvmToolchain(17)
+ @Suppress("DEPRECATION")
androidTarget()
jvm()
js(IR) {
diff --git a/winlib b/winlib
deleted file mode 160000
index a505e660..00000000
--- a/winlib
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit a505e660530bd8cbc721769343014cf79fb661d9