// SPDX-FileCopyrightText: Copyright 2020 yuzu Emulator Project
// SPDX-License-Identifier: GPL-2.0-or-later

#pragma once

#include <algorithm>
#include <atomic>
#include <condition_variable>
#include <cstring>
#include <deque>
#include <functional>
#include <memory>
#include <mutex>
#include <stop_token>
#include <string>
#include <thread>
#include <queue>

#include "common/common_types.h"
#include "common/microprofile.h"
#include "common/scope_exit.h"
#include "common/settings.h"
#include "common/thread.h"
#include "video_core/delayed_destruction_ring.h"
#include "video_core/gpu.h"
#include "video_core/host1x/host1x.h"
#include "video_core/host1x/syncpoint_manager.h"
#include "video_core/rasterizer_interface.h"

namespace VideoCommon {
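
/// Base class for backend fences; 'is_stubbed' marks fences that were created without a backing
/// fence object in the host GPU backend.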
class FenceBase {
public:
    explicit FenceBase(bool is_stubbed_) : is_stubbed{is_stubbed_} {}

    bool IsStubbed() const {
        return is_stubbed;
    }

protected:
    bool is_stubbed;
};

template <typename Traits>
class FenceManager {
    using TFence = typename Traits::FenceType;
    using TTextureCache = typename Traits::TextureCacheType;
    using TBufferCache = typename Traits::BufferCacheType;
    using TQueryCache = typename Traits::QueryCacheType;
    static constexpr bool can_async_check = Traits::HAS_ASYNC_CHECK;

public:
    /// Notify the fence manager about a new frame
    void TickFrame() {
        std::unique_lock lock(ring_guard);
        delayed_destruction_ring.Tick();
    }

    // Unlike other fences, this one doesn't queue a backend fence; it only lets the buffer
    // cache accumulate its pending flushes.
    void SignalOrdering() {
        if constexpr (!can_async_check) {
            TryReleasePendingFences<false>();
        }
        std::scoped_lock lock{buffer_cache.mutex};
        buffer_cache.AccumulateFlushes();
    }
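
    /// Signals a fence with an empty payload, used purely as a synchronization reference point.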
    void SignalReference() {
        std::function<void()> do_nothing([] {});
        SignalFence(std::move(do_nothing));
    }
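
    /// Registers an operation to be executed once the next fence is signaled and released.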
    void SyncOperation(std::function<void()>&& func) {
        uncommitted_operations.emplace_back(std::move(func));
    }
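
    /// Commits pending cache flushes and queues a new backend fence. The uncommitted operations
    /// are attached to that fence; when GPU accuracy is high, 'func' is deferred until the fence
    /// is released, otherwise it runs immediately.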
    void SignalFence(std::function<void()>&& func) {
        bool delay_fence = Settings::IsGPULevelHigh();
        if constexpr (!can_async_check) {
            TryReleasePendingFences<false>();
        }
        const bool should_flush = ShouldFlush();
        CommitAsyncFlushes();
        TFence new_fence = CreateFence(!should_flush);
        if constexpr (can_async_check) {
            guard.lock();
        }
        if (delay_fence) {
            uncommitted_operations.emplace_back(std::move(func));
        }
        pending_operations.emplace_back(std::move(uncommitted_operations));
        QueueFence(new_fence);
        if (!delay_fence) {
            func();
        }
        fences.push(std::move(new_fence));
        if (should_flush) {
            rasterizer.FlushCommands();
        }
        if constexpr (can_async_check) {
            guard.unlock();
            cv.notify_all();
        }
        rasterizer.InvalidateGPUCache();
    }
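
    /// Increments the guest syncpoint immediately and signals a fence whose payload increments
    /// the matching host syncpoint.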
    void SignalSyncPoint(u32 value) {
        syncpoint_manager.IncrementGuest(value);
        std::function<void()> func([this, value] { syncpoint_manager.IncrementHost(value); });
        SignalFence(std::move(func));
    }
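
    /// Releases all pending fences, waiting on the ones that have not been signaled yet. With
    /// asynchronous checking, the call only blocks when 'force' is true, until the release
    /// thread has drained the queue.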
    void WaitPendingFences([[maybe_unused]] bool force) {
        if constexpr (!can_async_check) {
            TryReleasePendingFences<true>();
        } else {
            if (!force) {
                return;
            }
            std::mutex wait_mutex;
            std::condition_variable wait_cv;
            std::atomic<bool> wait_finished{};
            std::function<void()> func([&] {
                std::scoped_lock lk(wait_mutex);
                wait_finished.store(true, std::memory_order_relaxed);
                wait_cv.notify_all();
            });
            SignalFence(std::move(func));
            std::unique_lock lk(wait_mutex);
            wait_cv.wait(
                lk, [&wait_finished] { return wait_finished.load(std::memory_order_relaxed); });
        }
    }
protected:
    explicit FenceManager(VideoCore::RasterizerInterface& rasterizer_, Tegra::GPU& gpu_,
                          TTextureCache& texture_cache_, TBufferCache& buffer_cache_,
                          TQueryCache& query_cache_)
        : rasterizer{rasterizer_}, gpu{gpu_}, syncpoint_manager{gpu.Host1x().GetSyncpointManager()},
          texture_cache{texture_cache_}, buffer_cache{buffer_cache_}, query_cache{query_cache_} {
        if constexpr (can_async_check) {
            fence_thread =
                std::jthread([this](std::stop_token token) { ReleaseThreadFunc(token); });
        }
    }

    virtual ~FenceManager() {
        if constexpr (can_async_check) {
            fence_thread.request_stop();
            cv.notify_all();
            fence_thread.join();
        }
    }

    /// Creates a fence interface; does not create a backend fence if 'is_stubbed' is true.
    virtual TFence CreateFence(bool is_stubbed) = 0;
    /// Queues a fence into the backend if the fence isn't stubbed.
    virtual void QueueFence(TFence& fence) = 0;
    /// Returns true when the backend fence has been signaled/reached in the host GPU.
    virtual bool IsFenceSignaled(TFence& fence) const = 0;
    /// Waits until a fence has been signaled by the host GPU.
    virtual void WaitFence(TFence& fence) = 0;

    VideoCore::RasterizerInterface& rasterizer;
    Tegra::GPU& gpu;
    Tegra::Host1x::SyncpointManager& syncpoint_manager;
    TTextureCache& texture_cache;
    TBufferCache& buffer_cache;
    TQueryCache& query_cache;

private:
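    /// Releases fences whose work has completed, running their pending operations. When
    /// 'force_wait' is true, blocks on fences that are not yet signaled instead of returning.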
    template <bool force_wait>
    void TryReleasePendingFences() {
        while (!fences.empty()) {
            TFence& current_fence = fences.front();
            if (ShouldWait() && !IsFenceSignaled(current_fence)) {
                if constexpr (force_wait) {
                    WaitFence(current_fence);
                } else {
                    return;
                }
            }
            PopAsyncFlushes();
            auto operations = std::move(pending_operations.front());
            pending_operations.pop_front();
            for (auto& operation : operations) {
                operation();
            }
            {
                std::unique_lock lock(ring_guard);
                delayed_destruction_ring.Push(std::move(current_fence));
            }
            fences.pop();
        }
    }
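
    /// Body of the asynchronous release thread: waits for queued fences, then runs their pending
    /// operations and hands each fence to the delayed destruction ring.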
    void ReleaseThreadFunc(std::stop_token stop_token) {
        std::string name = "GPUFencingThread";
        MicroProfileOnThreadCreate(name.c_str());

        // Cleanup
        SCOPE_EXIT({ MicroProfileOnThreadExit(); });

        Common::SetCurrentThreadName(name.c_str());
        Common::SetCurrentThreadPriority(Common::ThreadPriority::High);

        TFence current_fence;
        std::deque<std::function<void()>> current_operations;
        while (!stop_token.stop_requested()) {
            {
                std::unique_lock lock(guard);
                cv.wait(lock, [&] { return stop_token.stop_requested() || !fences.empty(); });
                if (stop_token.stop_requested()) [[unlikely]] {
                    return;
                }
                current_fence = std::move(fences.front());
                current_operations = std::move(pending_operations.front());
                fences.pop();
                pending_operations.pop_front();
            }
            if (!current_fence->IsStubbed()) {
                WaitFence(current_fence);
            }
            PopAsyncFlushes();
            for (auto& operation : current_operations) {
                operation();
            }
            {
                std::unique_lock lock(ring_guard);
                delayed_destruction_ring.Push(std::move(current_fence));
            }
        }
    }
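
    /// Returns true when the texture, buffer or query cache still has asynchronous flushes to
    /// wait on.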
    bool ShouldWait() const {
        std::scoped_lock lock{buffer_cache.mutex, texture_cache.mutex};
        return texture_cache.ShouldWaitAsyncFlushes() || buffer_cache.ShouldWaitAsyncFlushes() ||
               query_cache.ShouldWaitAsyncFlushes();
    }
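
    /// Returns true when the texture, buffer or query cache has uncommitted flushes.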
    bool ShouldFlush() const {
        std::scoped_lock lock{buffer_cache.mutex, texture_cache.mutex};
        return texture_cache.HasUncommittedFlushes() || buffer_cache.HasUncommittedFlushes() ||
               query_cache.HasUncommittedFlushes();
    }
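
    /// Pops the committed asynchronous flushes from the texture, buffer and query caches.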
    void PopAsyncFlushes() {
        {
            std::scoped_lock lock{buffer_cache.mutex, texture_cache.mutex};
            texture_cache.PopAsyncFlushes();
            buffer_cache.PopAsyncFlushes();
        }
        query_cache.PopAsyncFlushes();
    }
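
    /// Commits the pending asynchronous flushes of the texture, buffer and query caches.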
    void CommitAsyncFlushes() {
        {
            std::scoped_lock lock{buffer_cache.mutex, texture_cache.mutex};
            texture_cache.CommitAsyncFlushes();
            buffer_cache.CommitAsyncFlushes();
        }
        query_cache.CommitAsyncFlushes();
    }
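
    /// Operations queued through SyncOperation wait in 'uncommitted_operations' until the next
    /// SignalFence call moves them into 'pending_operations', paired with the queued fence.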
    std::queue<TFence> fences;
    std::deque<std::function<void()>> uncommitted_operations;
    std::deque<std::deque<std::function<void()>>> pending_operations;

    std::mutex guard;
    std::mutex ring_guard;
    std::condition_variable cv;

    std::jthread fence_thread;

    DelayedDestructionRing<TFence, 6> delayed_destruction_ring;
};

} // namespace VideoCommon