Some cleanup

This commit is contained in:
capitalistspz 2026-02-22 02:30:32 +00:00
parent 45c8e0f758
commit 2f7aaf28d4
4 changed files with 571 additions and 550 deletions

View File

@ -11,307 +11,310 @@
namespace camera
{
// Fixed camera geometry emulated by this module: 640x480 frames with a
// 768-byte row pitch (pitch > width leaves per-row padding).
constexpr unsigned CAMERA_WIDTH = 640;
constexpr unsigned CAMERA_HEIGHT = 480;
constexpr unsigned CAMERA_PITCH = 768;
// Status codes returned by the CAM* API; 0 is success, negatives are errors.
enum CAMStatus : sint32
{
    CAM_STATUS_SUCCESS = 0,
    CAM_STATUS_INVALID_ARG = -1,
    CAM_STATUS_INVALID_HANDLE = -2,
    CAM_STATUS_SURFACE_QUEUE_FULL = -4,
    CAM_STATUS_INSUFFICIENT_MEMORY = -5,
    CAM_STATUS_NOT_READY = -6,
    CAM_STATUS_UNINITIALIZED = -8,
    CAM_STATUS_UVC_ERROR = -9,
    CAM_STATUS_DECODER_INIT_INIT_FAILED = -10,
    CAM_STATUS_DEVICE_IN_USE = -12,
    CAM_STATUS_DECODER_SESSION_FAILED = -13,
    CAM_STATUS_INVALID_PROPERTY = -14,
    CAM_STATUS_SEGMENT_VIOLATION = -15
};
// NOTE(review): the duplicate definition below appears to be a diff-rendering
// artifact (old and new revision interleaved); a compilable file contains the
// enum only once.
enum CAMStatus : sint32
{
    CAM_STATUS_SUCCESS = 0,
    CAM_STATUS_INVALID_ARG = -1,
    CAM_STATUS_INVALID_HANDLE = -2,
    CAM_STATUS_SURFACE_QUEUE_FULL = -4,
    CAM_STATUS_INSUFFICIENT_MEMORY = -5,
    CAM_STATUS_NOT_READY = -6,
    CAM_STATUS_UNINITIALIZED = -8,
    CAM_STATUS_UVC_ERROR = -9,
    CAM_STATUS_DECODER_INIT_INIT_FAILED = -10,
    CAM_STATUS_DEVICE_IN_USE = -12,
    CAM_STATUS_DECODER_SESSION_FAILED = -13,
    CAM_STATUS_INVALID_PROPERTY = -14,
    CAM_STATUS_SEGMENT_VIOLATION = -15
};
// Frame-rate selector passed in CAMInitInfo_t (mapped to 15/30 fps in CAMInit).
enum class CAMFps : uint32
{
    _15 = 0,
    _30 = 1
};
// NOTE(review): each definition in this region appears twice — old/new diff
// copies interleaved; a compilable file contains each exactly once.
enum class CAMFps : uint32
{
    _15 = 0,
    _30 = 1
};
// Event kinds delivered to the guest callback via CAMDecodeEventParam.
enum class CAMEventType : uint32
{
    Decode = 0,
    Detached = 1
};
enum class CAMEventType : uint32
{
    Decode = 0,
    Detached = 1
};
// Display-forcing option from the init info; only None/DRC are accepted.
enum class CAMForceDisplay
{
    None = 0,
    DRC = 1
};
enum class CAMForceDisplay
{
    None = 0,
    DRC = 1
};
// Image type selector; only Default is accepted by CAMInit.
enum class CAMImageType : uint32
{
    Default = 0
};
enum class CAMImageType : uint32
{
    Default = 0
};
// Guest-visible image description (big-endian fields, 0x0C bytes on the wire).
struct CAMImageInfo
{
    betype<CAMImageType> type;
    uint32be height;
    uint32be width;
};
struct CAMImageInfo
{
    betype<CAMImageType> type;
    uint32be height;
    uint32be width;
};
static_assert(sizeof(CAMImageInfo) == 0x0C);
static_assert(sizeof(CAMImageInfo) == 0x0C);
// Parameters the guest passes to CAMInit. Layout is guest-visible (0x34 bytes,
// big-endian fields); 'unk' is unidentified padding/fields.
struct CAMInitInfo_t
{
    CAMImageInfo imageInfo;          // requested image format (must be Default)
    uint32be workMemorySize;         // size of guest-provided work memory
    MEMPTR<void> workMemoryData;     // guest work memory (must be non-null)
    MEMPTR<void> callback;           // guest event callback invoked by the worker
    betype<CAMForceDisplay> forceDisplay;
    betype<CAMFps> fps;              // CAMFps::_15 or CAMFps::_30
    uint32be threadFlags;            // low 3 bits select the worker thread affinity
    uint8 unk[0x10];
};
static_assert(sizeof(CAMInitInfo_t) == 0x34);
// NOTE(review): duplicate definition below is a diff-rendering artifact.
struct CAMInitInfo_t
{
    CAMImageInfo imageInfo;
    uint32be workMemorySize;
    MEMPTR<void> workMemoryData;
    MEMPTR<void> callback;
    betype<CAMForceDisplay> forceDisplay;
    betype<CAMFps> fps;
    uint32be threadFlags;
    uint8 unk[0x10];
};
// A guest-provided output surface for decoded NV12 frames (0x20 bytes).
struct CAMTargetSurface
{
    sint32be size;        // surface byte size; validated in CAMSubmitTargetSurface
    MEMPTR<uint8> data;   // guest pointer to the pixel storage
    uint8 unused[0x18];
};
static_assert(sizeof(CAMTargetSurface) == 0x20);
static_assert(sizeof(CAMInitInfo_t) == 0x34);
// Payload handed to the guest event callback (0x10 bytes, big-endian fields).
struct CAMDecodeEventParam
{
    betype<CAMEventType> type;  // Decode or Detached
    MEMPTR<void> data;          // the filled target surface, or null on error
    uint32be channel;
    uint32be errored;           // nonzero when no surface buffer was available
};
static_assert(sizeof(CAMDecodeEventParam) == 0x10);
// NOTE(review): duplicate definition below is a diff-rendering artifact.
struct CAMTargetSurface
{
    sint32be size;
    MEMPTR<uint8> data;
    uint8 unused[0x18];
};
// The module emulates a single camera instance; this is its only valid handle.
constexpr static int32_t CAM_HANDLE = 0;
static_assert(sizeof(CAMTargetSurface) == 0x20);
// Host-side state of the single emulated camera instance. 'mutex' is recursive
// because CAMExit calls CAMClose while already holding it.
struct
{
    std::recursive_mutex mutex{};
    bool initialized = false;             // set by CAMInit, cleared by CAMExit
    bool shouldTriggerCallback = false;
    std::atomic_bool isOpen = false;      // session open (CAMOpen/CAMClose)
    std::atomic_bool isExiting = false;   // tells the worker thread to shut down
    bool isWorking = false;
    unsigned fps = 30;                    // 15 or 30, derived from CAMInitInfo_t
    MEMPTR<void> eventCallback = nullptr; // guest callback invoked per frame
    RingBuffer<MEMPTR<uint8>, 20> inTargetBuffers{};  // surfaces submitted by guest
    RingBuffer<MEMPTR<uint8>, 20> outTargetBuffers{};
} s_instance;
// NOTE(review): duplicate definition below is a diff-rendering artifact.
struct CAMDecodeEventParam
{
    betype<CAMEventType> type;
    MEMPTR<void> data;
    uint32be channel;
    uint32be errored;
};
// Guest-memory allocations backing the worker thread and its event payload.
SysAllocator<CAMDecodeEventParam> s_cameraEventData;
SysAllocator<OSThread_t> s_cameraWorkerThread;
SysAllocator<uint8, 1024 * 64> s_cameraWorkerThreadStack; // 64 KiB stack
SysAllocator<CafeString<22>> s_cameraWorkerThreadNameBuffer;
SysAllocator<coreinit::OSEvent> s_cameraOpenEvent; // wakes the worker on CAMOpen
static_assert(sizeof(CAMDecodeEventParam) == 0x10);
void WorkerThread(PPCInterpreter_t*)
{
s_cameraEventData->type = CAMEventType::Decode;
s_cameraEventData->channel = 0;
s_cameraEventData->data = nullptr;
s_cameraEventData->errored = false;
PPCCoreCallback(s_instance.eventCallback, s_cameraEventData.GetMPTR());
constexpr static int32_t CAM_HANDLE = 0;
while (!s_instance.isExiting)
{
coreinit::OSWaitEvent(s_cameraOpenEvent);
while (true)
{
if (!s_instance.isOpen || s_instance.isExiting)
{
// Fill leftover buffers before stopping
if (!s_instance.inTargetBuffers.HasData())
break;
}
s_cameraEventData->type = CAMEventType::Decode;
s_cameraEventData->channel = 0;
struct
{
std::recursive_mutex mutex{};
bool initialized = false;
bool shouldTriggerCallback = false;
std::atomic_bool isOpen = false;
std::atomic_bool isExiting = false;
bool isWorking = false;
unsigned fps = 30;
MEMPTR<void> eventCallback = nullptr;
RingBuffer<MEMPTR<uint8>, 20> inTargetBuffers{};
RingBuffer<MEMPTR<uint8>, 20> outTargetBuffers{};
} s_instance;
const auto surfaceBuffer = s_instance.inTargetBuffers.Pop();
if (surfaceBuffer.IsNull())
{
s_cameraEventData->data = nullptr;
s_cameraEventData->errored = true;
}
else
{
CameraManager::FillNV12Buffer(surfaceBuffer.GetPtr());
s_cameraEventData->data = surfaceBuffer;
s_cameraEventData->errored = false;
}
PPCCoreCallback(s_instance.eventCallback, s_cameraEventData.GetMPTR());
coreinit::OSSleepTicks(Espresso::TIMER_CLOCK / (s_instance.fps - 1));
}
}
coreinit::OSExitThread(0);
}
SysAllocator<CAMDecodeEventParam> s_cameraEventData;
SysAllocator<OSThread_t> s_cameraWorkerThread;
SysAllocator<uint8, 1024 * 64> s_cameraWorkerThreadStack;
SysAllocator<CafeString<22>> s_cameraWorkerThreadNameBuffer;
SysAllocator<coreinit::OSEvent> s_cameraOpenEvent;
// Reports how much work memory the camera library requires for the given
// image description. This emulated implementation always requests 1 KiB.
sint32 CAMGetMemReq(const CAMImageInfo* info)
{
    if (info == nullptr)
        return CAM_STATUS_INVALID_ARG;
    constexpr sint32 requiredBytes = 1024; // fixed 1KB requirement
    return requiredBytes;
}
void WorkerThread(PPCInterpreter_t*)
{
s_cameraEventData->type = CAMEventType::Decode;
s_cameraEventData->channel = 0;
s_cameraEventData->data = nullptr;
s_cameraEventData->errored = false;
PPCCoreCallback(s_instance.eventCallback, s_cameraEventData.GetMPTR());
CAMStatus CAMCheckMemSegmentation(void* startAddr, uint32 size)
{
if (!startAddr || size == 0)
return CAM_STATUS_INVALID_ARG;
return CAM_STATUS_SUCCESS;
}
while (!s_instance.isExiting)
{
coreinit::OSWaitEvent(s_cameraOpenEvent);
while (true)
{
if (!s_instance.isOpen || s_instance.isExiting)
{
// Fill leftover buffers before stopping
if (!s_instance.inTargetBuffers.HasData())
break;
}
s_cameraEventData->type = CAMEventType::Decode;
s_cameraEventData->channel = 0;
sint32 CAMInit(uint32 cameraId, const CAMInitInfo_t* initInfo, betype<CAMStatus>* error)
{
*error = CAM_STATUS_SUCCESS;
std::scoped_lock lock(s_instance.mutex);
if (s_instance.initialized)
{
*error = CAM_STATUS_DEVICE_IN_USE;
return -1;
}
const auto surfaceBuffer = s_instance.inTargetBuffers.Pop();
if (surfaceBuffer.IsNull())
{
s_cameraEventData->data = nullptr;
s_cameraEventData->errored = true;
}
else
{
CameraManager::FillNV12Buffer(
std::span<uint8, CameraManager::CAMERA_NV12_BUFFER_SIZE>(
surfaceBuffer.GetPtr(), CameraManager::CAMERA_NV12_BUFFER_SIZE));
s_cameraEventData->data = surfaceBuffer;
s_cameraEventData->errored = false;
}
PPCCoreCallback(s_instance.eventCallback, s_cameraEventData.GetMPTR());
coreinit::OSSleepTicks(Espresso::TIMER_CLOCK / (s_instance.fps - 1));
}
}
coreinit::OSExitThread(0);
}
if (!initInfo || !initInfo->workMemoryData ||
!match_any_of(initInfo->forceDisplay, CAMForceDisplay::None, CAMForceDisplay::DRC) ||
!match_any_of(initInfo->fps, CAMFps::_15, CAMFps::_30) ||
initInfo->imageInfo.type != CAMImageType::Default)
{
*error = CAM_STATUS_INVALID_ARG;
return -1;
}
CameraManager::Init();
sint32 CAMGetMemReq(const CAMImageInfo* info)
{
if (!info)
return CAM_STATUS_INVALID_ARG;
return 1 * 1024; // always return 1KB
}
cemu_assert_debug(initInfo->forceDisplay != CAMForceDisplay::DRC);
cemu_assert_debug(initInfo->workMemorySize != 0);
cemu_assert_debug(initInfo->imageInfo.type == CAMImageType::Default);
// Validates the guest work-memory region. Only a non-null base pointer and a
// non-zero size are required; no further segmentation checks are performed.
CAMStatus CAMCheckMemSegmentation(void* startAddr, uint32 size)
{
    const bool rangeIsValid = (startAddr != nullptr) && (size != 0);
    return rangeIsValid ? CAM_STATUS_SUCCESS : CAM_STATUS_INVALID_ARG;
}
s_instance.isExiting = false;
s_instance.fps = initInfo->fps == CAMFps::_15 ? 15 : 30;
s_instance.initialized = true;
s_instance.eventCallback = initInfo->callback;
sint32 CAMInit(uint32 cameraId, const CAMInitInfo_t* initInfo, betype<CAMStatus>* error)
{
*error = CAM_STATUS_SUCCESS;
std::scoped_lock lock(s_instance.mutex);
if (s_instance.initialized)
{
*error = CAM_STATUS_DEVICE_IN_USE;
return -1;
}
coreinit::OSInitEvent(s_cameraOpenEvent, coreinit::OSEvent::EVENT_STATE::STATE_NOT_SIGNALED, coreinit::OSEvent::EVENT_MODE::MODE_AUTO);
if (!initInfo || !initInfo->workMemoryData ||
!match_any_of(initInfo->forceDisplay, CAMForceDisplay::None, CAMForceDisplay::DRC) ||
!match_any_of(initInfo->fps, CAMFps::_15, CAMFps::_30) ||
initInfo->imageInfo.type != CAMImageType::Default)
{
*error = CAM_STATUS_INVALID_ARG;
return -1;
}
CameraManager::Init();
coreinit::__OSCreateThreadType(
s_cameraWorkerThread, RPLLoader_MakePPCCallable(WorkerThread), 0, nullptr,
s_cameraWorkerThreadStack.GetPtr() + s_cameraWorkerThreadStack.GetByteSize(), s_cameraWorkerThreadStack.GetByteSize(),
0x10, initInfo->threadFlags & 7, OSThread_t::THREAD_TYPE::TYPE_DRIVER);
s_cameraWorkerThreadNameBuffer->assign("CameraWorkerThread");
coreinit::OSSetThreadName(s_cameraWorkerThread.GetPtr(), s_cameraWorkerThreadNameBuffer->c_str());
coreinit::OSResumeThread(s_cameraWorkerThread.GetPtr());
return CAM_STATUS_SUCCESS;
}
cemu_assert_debug(initInfo->forceDisplay != CAMForceDisplay::DRC);
cemu_assert_debug(initInfo->workMemorySize != 0);
cemu_assert_debug(initInfo->imageInfo.type == CAMImageType::Default);
// Stops an open camera session. Returns CAM_STATUS_INVALID_HANDLE for any
// handle other than CAM_HANDLE and CAM_STATUS_UNINITIALIZED when the library
// is not initialized or no session is open.
CAMStatus CAMClose(sint32 camHandle)
{
    if (camHandle != CAM_HANDLE)
        return CAM_STATUS_INVALID_HANDLE;
    {
        // Scope the lock so CameraManager::Close() runs without holding the
        // instance mutex.
        std::scoped_lock lock(s_instance.mutex);
        if (!s_instance.initialized || !s_instance.isOpen)
            return CAM_STATUS_UNINITIALIZED;
        s_instance.isOpen = false;
    }
    CameraManager::Close();
    return CAM_STATUS_SUCCESS;
}
s_instance.isExiting = false;
s_instance.fps = initInfo->fps == CAMFps::_15 ? 15 : 30;
s_instance.initialized = true;
s_instance.eventCallback = initInfo->callback;
// Opens the camera session for CAM_HANDLE: starts the host capture backend,
// marks the instance open and wakes the worker thread.
// Returns INVALID_HANDLE / UNINITIALIZED / DEVICE_IN_USE accordingly.
CAMStatus CAMOpen(sint32 camHandle)
{
    if (camHandle != CAM_HANDLE)
        return CAM_STATUS_INVALID_HANDLE;
    auto lock = std::scoped_lock(s_instance.mutex);
    if (!s_instance.initialized)
        return CAM_STATUS_UNINITIALIZED;
    if (s_instance.isOpen)
        return CAM_STATUS_DEVICE_IN_USE;
    CameraManager::Open();
    s_instance.isOpen = true;
    // Drop stale surface buffers from a previous session BEFORE signalling the
    // open event; the previous order signalled first, letting the worker pop a
    // buffer that was about to be cleared.
    s_instance.inTargetBuffers.Clear();
    s_instance.outTargetBuffers.Clear();
    coreinit::OSSignalEvent(s_cameraOpenEvent);
    return CAM_STATUS_SUCCESS;
}
coreinit::OSInitEvent(s_cameraOpenEvent, coreinit::OSEvent::EVENT_STATE::STATE_NOT_SIGNALED,
coreinit::OSEvent::EVENT_MODE::MODE_AUTO);
// Queues a guest NV12 target surface for the decode worker thread.
// Returns INVALID_HANDLE / INVALID_ARG / UNINITIALIZED / SURFACE_QUEUE_FULL.
CAMStatus CAMSubmitTargetSurface(sint32 camHandle, CAMTargetSurface* targetSurface)
{
    if (camHandle != CAM_HANDLE)
        return CAM_STATUS_INVALID_HANDLE;
    if (!targetSurface || targetSurface->data.IsNull() || targetSurface->size < 1)
        return CAM_STATUS_INVALID_ARG;
    // Surface should hold a full NV12 frame: pitch * height * 3/2 bytes.
    cemu_assert_debug(targetSurface->size >= ((CAMERA_HEIGHT * CAMERA_PITCH * 3) >> 1));
    auto lock = std::scoped_lock(s_instance.mutex);
    if (!s_instance.initialized)
        return CAM_STATUS_UNINITIALIZED;
    if (!s_instance.inTargetBuffers.Push(targetSurface->data))
        return CAM_STATUS_SURFACE_QUEUE_FULL;
    return CAM_STATUS_SUCCESS;
}
coreinit::__OSCreateThreadType(
s_cameraWorkerThread, RPLLoader_MakePPCCallable(WorkerThread), 0, nullptr,
s_cameraWorkerThreadStack.GetPtr() + s_cameraWorkerThreadStack.GetByteSize(),
s_cameraWorkerThreadStack.GetByteSize(),
0x10, initInfo->threadFlags & 7, OSThread_t::THREAD_TYPE::TYPE_DRIVER);
s_cameraWorkerThreadNameBuffer->assign("CameraWorkerThread");
coreinit::OSSetThreadName(s_cameraWorkerThread.GetPtr(), s_cameraWorkerThreadNameBuffer->c_str());
coreinit::OSResumeThread(s_cameraWorkerThread.GetPtr());
return CAM_STATUS_SUCCESS;
}
// Tears down the camera library: closes an open session, wakes the worker via
// the open event so it observes isExiting, then joins it.
void CAMExit(sint32 camHandle)
{
    if (camHandle != CAM_HANDLE)
        return;
    // Recursive mutex: CAMClose below re-locks it on this thread.
    std::scoped_lock lock(s_instance.mutex);
    if (!s_instance.initialized)
        return;
    s_instance.isExiting = true;
    if (s_instance.isOpen)
        CAMClose(camHandle);
    // Wake a worker blocked in OSWaitEvent so it can exit its loop.
    coreinit::OSSignalEvent(s_cameraOpenEvent.GetPtr());
    coreinit::OSJoinThread(s_cameraWorkerThread, nullptr);
    s_instance.initialized = false;
}
// Closes the camera session for the given handle; the backing CameraManager
// stream is released after the instance lock has been dropped.
CAMStatus CAMClose(sint32 camHandle)
{
    if (camHandle != CAM_HANDLE)
        return CAM_STATUS_INVALID_HANDLE;
    bool wasOpen = false;
    {
        std::scoped_lock lock(s_instance.mutex);
        wasOpen = s_instance.initialized && s_instance.isOpen;
        if (wasOpen)
            s_instance.isOpen = false;
    }
    if (!wasOpen)
        return CAM_STATUS_UNINITIALIZED;
    CameraManager::Close();
    return CAM_STATUS_SUCCESS;
}
void reset()
{
CAMExit(0);
}
// Opens the camera session for CAM_HANDLE: starts the host capture backend,
// marks the instance open and wakes the worker thread.
// Returns INVALID_HANDLE / UNINITIALIZED / DEVICE_IN_USE accordingly.
CAMStatus CAMOpen(sint32 camHandle)
{
    if (camHandle != CAM_HANDLE)
        return CAM_STATUS_INVALID_HANDLE;
    auto lock = std::scoped_lock(s_instance.mutex);
    if (!s_instance.initialized)
        return CAM_STATUS_UNINITIALIZED;
    if (s_instance.isOpen)
        return CAM_STATUS_DEVICE_IN_USE;
    CameraManager::Open();
    s_instance.isOpen = true;
    // Drop stale surface buffers from a previous session BEFORE signalling the
    // open event; the previous order signalled first, letting the worker pop a
    // buffer that was about to be cleared.
    s_instance.inTargetBuffers.Clear();
    s_instance.outTargetBuffers.Clear();
    coreinit::OSSignalEvent(s_cameraOpenEvent);
    return CAM_STATUS_SUCCESS;
}
class : public COSModule
{
public:
std::string_view GetName() override
{
return "camera";
}
// Queues a guest-provided NV12 target surface for the decode worker.
// Returns INVALID_HANDLE / INVALID_ARG / UNINITIALIZED / SURFACE_QUEUE_FULL.
CAMStatus CAMSubmitTargetSurface(sint32 camHandle, CAMTargetSurface* targetSurface)
{
    if (camHandle != CAM_HANDLE)
        return CAM_STATUS_INVALID_HANDLE;
    const bool surfaceUsable =
        targetSurface && !targetSurface->data.IsNull() && targetSurface->size >= 1;
    if (!surfaceUsable)
        return CAM_STATUS_INVALID_ARG;
    // The surface should hold a complete NV12 frame.
    cemu_assert_debug(targetSurface->size >= CameraManager::CAMERA_NV12_BUFFER_SIZE);
    std::scoped_lock lock(s_instance.mutex);
    if (!s_instance.initialized)
        return CAM_STATUS_UNINITIALIZED;
    const bool queued = s_instance.inTargetBuffers.Push(targetSurface->data);
    return queued ? CAM_STATUS_SUCCESS : CAM_STATUS_SURFACE_QUEUE_FULL;
}
void RPLMapped() override
{
cafeExportRegister("camera", CAMGetMemReq, LogType::Placeholder);
cafeExportRegister("camera", CAMCheckMemSegmentation, LogType::Placeholder);
cafeExportRegister("camera", CAMInit, LogType::Placeholder);
cafeExportRegister("camera", CAMExit, LogType::Placeholder);
cafeExportRegister("camera", CAMOpen, LogType::Placeholder);
cafeExportRegister("camera", CAMClose, LogType::Placeholder);
cafeExportRegister("camera", CAMSubmitTargetSurface, LogType::Placeholder);
};
// Tears down the camera library: closes an open session, wakes the worker via
// the open event so it observes isExiting, then joins it.
void CAMExit(sint32 camHandle)
{
    if (camHandle != CAM_HANDLE)
        return;
    // Recursive mutex: CAMClose below re-locks it on this thread.
    std::scoped_lock lock(s_instance.mutex);
    if (!s_instance.initialized)
        return;
    s_instance.isExiting = true;
    if (s_instance.isOpen)
        CAMClose(camHandle);
    // Wake a worker blocked in OSWaitEvent so it can exit its loop.
    coreinit::OSSignalEvent(s_cameraOpenEvent.GetPtr());
    coreinit::OSJoinThread(s_cameraWorkerThread, nullptr);
    s_instance.initialized = false;
}
void rpl_entry(uint32 moduleHandle, coreinit::RplEntryReason reason) override
{
if (reason == coreinit::RplEntryReason::Loaded)
{
reset();
}
else if (reason == coreinit::RplEntryReason::Unloaded)
{
// todo
}
}
}s_COScameraModule;
void reset()
{
CAMExit(0);
}
COSModule* GetModule()
{
return &s_COScameraModule;
}
// Anonymous COSModule registering the camera RPL exports with the OS loader.
class : public COSModule
{
public:
    // Module name as seen by the RPL loader.
    std::string_view GetName() override
    {
        return "camera";
    }
    // Registers every exported CAM* function when the RPL is mapped.
    void RPLMapped() override
    {
        cafeExportRegister("camera", CAMGetMemReq, LogType::Placeholder);
        cafeExportRegister("camera", CAMCheckMemSegmentation, LogType::Placeholder);
        cafeExportRegister("camera", CAMInit, LogType::Placeholder);
        cafeExportRegister("camera", CAMExit, LogType::Placeholder);
        cafeExportRegister("camera", CAMOpen, LogType::Placeholder);
        cafeExportRegister("camera", CAMClose, LogType::Placeholder);
        cafeExportRegister("camera", CAMSubmitTargetSurface, LogType::Placeholder);
    }; // note: the trailing ';' is a harmless empty declaration
    // Resets module state when the guest loads the RPL.
    void rpl_entry(uint32 moduleHandle, coreinit::RplEntryReason reason) override
    {
        if (reason == coreinit::RplEntryReason::Loaded)
        {
            reset();
        }
        else if (reason == coreinit::RplEntryReason::Unloaded)
        {
            // todo
        }
    }
} s_COScameraModule;
// Accessor for the camera OS-module singleton defined above.
COSModule* GetModule()
{
    COSModule* modulePtr = &s_COScameraModule;
    return modulePtr;
}
}

View File

@ -11,218 +11,227 @@
#include <openpnp-capture.h>
constexpr unsigned CAMERA_WIDTH = 640;
constexpr unsigned CAMERA_HEIGHT = 480;
constexpr unsigned CAMERA_PITCH = 768;
namespace CameraManager
{
std::mutex s_mutex;
CapContext s_ctx;
std::optional<CapDeviceID> s_device;
std::optional<CapStream> s_stream;
std::array<uint8, CAMERA_WIDTH * CAMERA_HEIGHT * 3> s_rgbBuffer;
std::array<uint8, CAMERA_PITCH * CAMERA_HEIGHT * 3 / 2> s_nv12Buffer;
int s_refCount = 0;
std::thread s_captureThread;
std::atomic_bool s_capturing = false;
std::atomic_bool s_running = false;
// Host capture backend state. s_mutex guards the frame buffers and
// device/stream handles; the atomics coordinate with the capture thread.
std::mutex s_mutex;
CapContext s_ctx;                       // openpnp-capture library context
std::optional<CapDeviceID> s_device;    // selected device, nullopt = none
std::optional<CapStream> s_stream;      // open capture stream, nullopt = closed
std::array<uint8, CAMERA_RGB_BUFFER_SIZE> s_rgbBuffer;   // latest RGB frame
std::array<uint8, CAMERA_NV12_BUFFER_SIZE> s_nv12Buffer; // latest NV12 conversion
int s_refCount = 0;                     // open references (Open/Close pairs)
std::thread s_captureThread;            // runs CaptureWorker
std::atomic_bool s_capturing = false;   // true while a stream is open
std::atomic_bool s_running = false;     // true between Init and Deinit
std::string FourCC(uint32le value)
{
return {
static_cast<char>((value >> 0) & 0xFF),
static_cast<char>((value >> 8) & 0xFF),
static_cast<char>((value >> 16) & 0xFF),
static_cast<char>((value >> 24) & 0xFF)};
}
// Decodes a packed little-endian FourCC value into its 4-character ASCII form.
std::string FourCC(uint32le value)
{
    const uint32 raw = value;
    std::string code;
    code.reserve(4);
    for (int shift = 0; shift <= 24; shift += 8)
        code.push_back(static_cast<char>((raw >> shift) & 0xFF));
    return code;
}
void CaptureLogFunction(uint32_t level, const char* string)
{
cemuLog_log(LogType::InputAPI, "OpenPNPCapture: {}: {}", level, string);
}
// Routes openpnp-capture's internal log messages into Cemu's log.
void CaptureLogFunction(uint32_t level, const char* string)
{
    cemuLog_log(LogType::InputAPI, "OpenPNPCapture: {}: {}", level, string);
}
std::optional<CapFormatID> FindCorrectFormat()
{
const auto device = *s_device;
cemuLog_log(LogType::InputAPI, "Video capture device '{}' available formats:", Cap_getDeviceName(s_ctx, device));
const auto formatCount = Cap_getNumFormats(s_ctx, device);
for (int32_t formatId = 0; formatId < formatCount; ++formatId)
{
CapFormatInfo formatInfo;
if (Cap_getFormatInfo(s_ctx, device, formatId, &formatInfo) != CAPRESULT_OK)
continue;
cemuLog_log(LogType::Force, "{}: {} {}x{} @ {} fps, {} bpp", formatId, FourCC(formatInfo.fourcc), formatInfo.width, formatInfo.height, formatInfo.fps, formatInfo.bpp);
if (formatInfo.width == CAMERA_WIDTH && formatInfo.height == CAMERA_HEIGHT)
{
cemuLog_log(LogType::Force, "Selected video format {}", formatId);
return formatId;
}
}
cemuLog_log(LogType::Force, "Failed to find suitable video format");
return std::nullopt;
}
// Scans the selected device's format list and returns the first format whose
// resolution matches CAMERA_WIDTH x CAMERA_HEIGHT, or nullopt if none does.
// Precondition: s_device holds a value (callers check before invoking).
std::optional<CapFormatID> FindCorrectFormat()
{
    const auto device = *s_device;
    cemuLog_log(LogType::InputAPI, "Video capture device '{}' available formats:",
                Cap_getDeviceName(s_ctx, device));
    const auto formatCount = Cap_getNumFormats(s_ctx, device);
    for (int32_t formatId = 0; formatId < formatCount; ++formatId)
    {
        CapFormatInfo formatInfo;
        // Skip formats the library cannot describe.
        if (Cap_getFormatInfo(s_ctx, device, formatId, &formatInfo) != CAPRESULT_OK)
            continue;
        // NOTE(review): the header line logs at InputAPI but the per-format
        // lines log at Force — confirm the mixed log levels are intentional.
        cemuLog_log(LogType::Force, "{}: {} {}x{} @ {} fps, {} bpp", formatId, FourCC(formatInfo.fourcc),
                    formatInfo.width, formatInfo.height, formatInfo.fps, formatInfo.bpp);
        if (formatInfo.width == CAMERA_WIDTH && formatInfo.height == CAMERA_HEIGHT)
        {
            cemuLog_log(LogType::Force, "Selected video format {}", formatId);
            return formatId;
        }
    }
    cemuLog_log(LogType::Force, "Failed to find suitable video format");
    return std::nullopt;
}
// Background capture thread: while a stream is open, polls roughly every
// 30 ms, converting any new RGB frame to NV12 under the buffer mutex; idles
// while no stream is open.
void CaptureWorker()
{
    SetThreadName("CameraManager");
    while (s_running)
    {
        while (s_capturing)
        {
            {
                // RAII lock instead of manual lock()/unlock(): the mutex is
                // released on every path, including if a capture call throws.
                std::scoped_lock lock(s_mutex);
                if (s_stream && Cap_hasNewFrame(s_ctx, *s_stream) &&
                    Cap_captureFrame(s_ctx, *s_stream, s_rgbBuffer.data(), s_rgbBuffer.size()) == CAPRESULT_OK)
                    Rgb2Nv12(s_rgbBuffer.data(), CAMERA_WIDTH, CAMERA_HEIGHT, s_nv12Buffer.data(), CAMERA_PITCH);
            }
            std::this_thread::sleep_for(std::chrono::milliseconds(30));
        }
        std::this_thread::sleep_for(std::chrono::seconds(1));
        std::this_thread::yield();
    }
}
// Opens a capture stream on the selected device using the first 640x480
// format; silently does nothing if no suitable format or the open fails.
void OpenStream()
{
    const auto formatId = FindCorrectFormat();
    if (!formatId)
        return;
    // Cap_openStream signals failure with -1.
    const auto stream = Cap_openStream(s_ctx, *s_device, *formatId);
    if (stream == -1)
        return;
    s_capturing = true;
    s_stream = stream;
}
// Stops frame capture and releases the capture stream if one is open.
void CloseStream()
{
    s_capturing = false;
    if (!s_stream)
        return;
    Cap_closeStream(s_ctx, *s_stream);
    s_stream.reset();
}
void ResetBuffers()
{
std::ranges::fill(s_rgbBuffer, 0);
constexpr auto pixCount = CAMERA_HEIGHT * CAMERA_PITCH;
std::ranges::fill_n(s_nv12Buffer.begin(), pixCount, 16);
std::ranges::fill_n(s_nv12Buffer.begin() + pixCount, (pixCount / 2), 128);
}
// Background capture thread: while a stream is open, polls roughly every
// 30 ms, converting any new RGB frame to NV12 under the buffer mutex; idles
// while no stream is open.
void CaptureWorker()
{
    SetThreadName("CameraManager");
    while (s_running)
    {
        while (s_capturing)
        {
            {
                // RAII lock instead of manual lock()/unlock(): the mutex is
                // released on every path, including if a capture call throws.
                std::scoped_lock lock(s_mutex);
                if (s_stream && Cap_hasNewFrame(s_ctx, *s_stream) &&
                    Cap_captureFrame(s_ctx, *s_stream, s_rgbBuffer.data(), s_rgbBuffer.size()) == CAPRESULT_OK)
                    Rgb2Nv12(s_rgbBuffer.data(), CAMERA_WIDTH, CAMERA_HEIGHT, s_nv12Buffer.data(), CAMERA_PITCH);
            }
            std::this_thread::sleep_for(std::chrono::milliseconds(30));
        }
        std::this_thread::sleep_for(std::chrono::seconds(1));
        std::this_thread::yield();
    }
}
void Init()
{
{
std::scoped_lock lock(s_mutex);
if (s_running)
return;
s_running = true;
s_ctx = Cap_createContext();
Cap_setLogLevel(4);
Cap_installCustomLogFunction(CaptureLogFunction);
}
void OpenStream()
{
const auto formatId = FindCorrectFormat();
if (!formatId)
return;
const auto stream = Cap_openStream(s_ctx, *s_device, *formatId);
if (stream == -1)
return;
s_capturing = true;
s_stream = stream;
}
s_captureThread = std::thread(&CaptureWorker);
void CloseStream()
{
s_capturing = false;
if (s_stream)
{
Cap_closeStream(s_ctx, *s_stream);
s_stream = std::nullopt;
}
}
const auto uniqueId = GetConfig().camera_id.GetValue();
if (!uniqueId.empty())
{
const auto devices = EnumerateDevices();
for (CapDeviceID deviceId = 0; deviceId < devices.size(); ++deviceId)
{
if (devices[deviceId].uniqueId == uniqueId)
{
s_device = deviceId;
return;
}
}
}
ResetBuffers();
}
// Shuts down the capture backend. The worker thread is stopped and joined
// BEFORE the capture context is released: the previous order released s_ctx
// while CaptureWorker could still be inside Cap_* calls that use it.
void Deinit()
{
    CloseStream();
    s_running = false;
    // Guard against Deinit without a prior Init (join on a non-joinable
    // thread would terminate the process).
    if (s_captureThread.joinable())
        s_captureThread.join(); // may wait up to the worker's 1 s idle sleep
    Cap_releaseContext(s_ctx);
}
void FillNV12Buffer(uint8* nv12Buffer)
{
std::scoped_lock lock(s_mutex);
std::ranges::copy(s_nv12Buffer, nv12Buffer);
}
// Clears both frame buffers: RGB to all zeros, NV12 to Y=16 / UV=128
// (video-range black with neutral chroma).
void ResetBuffers()
{
    std::ranges::fill(s_rgbBuffer, 0);
    // One luma byte per pixel of the padded (pitch-wide) frame.
    constexpr auto pixCount = CAMERA_HEIGHT * CAMERA_PITCH;
    std::ranges::fill_n(s_nv12Buffer.begin(), pixCount, 16);
    // Interleaved UV plane is half the luma plane's size.
    std::ranges::fill_n(s_nv12Buffer.begin() + pixCount, (pixCount / 2), 128);
}
void FillRGBBuffer(uint8* rgbBuffer)
{
std::scoped_lock lock(s_mutex);
std::ranges::copy(s_rgbBuffer, rgbBuffer);
}
void SetDevice(uint32 deviceNo)
{
std::scoped_lock lock(s_mutex);
CloseStream();
if (deviceNo == DEVICE_NONE)
{
s_device = std::nullopt;
ResetBuffers();
return;
}
s_device = deviceNo;
if (s_refCount != 0)
OpenStream();
}
void Open()
{
std::scoped_lock lock(s_mutex);
if (s_device && s_refCount == 0)
{
OpenStream();
}
s_refCount += 1;
}
void Close()
{
std::scoped_lock lock(s_mutex);
if (s_refCount == 0)
return;
s_refCount -= 1;
if (s_refCount != 0)
return;
CloseStream();
}
std::vector<DeviceInfo> EnumerateDevices()
{
std::scoped_lock lock(s_mutex);
std::vector<DeviceInfo> infos;
const auto deviceCount = Cap_getDeviceCount(s_ctx);
cemuLog_log(LogType::InputAPI, "Available video capture devices:");
for (CapDeviceID deviceNo = 0; deviceNo < deviceCount; ++deviceNo)
{
const auto uniqueId = Cap_getDeviceUniqueID(s_ctx, deviceNo);
const auto name = Cap_getDeviceName(s_ctx, deviceNo);
DeviceInfo info;
info.uniqueId = uniqueId;
void Init()
{
{
std::scoped_lock lock(s_mutex);
if (s_running)
return;
s_running = true;
s_ctx = Cap_createContext();
Cap_setLogLevel(4);
Cap_installCustomLogFunction(CaptureLogFunction);
}
if (name)
info.name = fmt::format("{}: {}", deviceNo, name);
else
info.name = fmt::format("{}: Unknown", deviceNo);
infos.push_back(info);
cemuLog_log(LogType::InputAPI, "{}", info.name);
}
if (infos.empty())
cemuLog_log(LogType::InputAPI, "No available video capture devices");
return infos;
}
void SaveDevice()
{
std::scoped_lock lock(s_mutex);
const std::string cameraId = s_device ? Cap_getDeviceUniqueID(s_ctx, *s_device) : "";
GetConfig().camera_id.SetValue(cameraId);
GetConfigHandle().Save();
}
s_captureThread = std::thread(&CaptureWorker);
std::optional<uint32> GetCurrentDevice()
{
return s_device;
}
} // namespace CameraManager
const auto uniqueId = GetConfig().camera_id.GetValue();
if (!uniqueId.empty())
{
const auto devices = EnumerateDevices();
for (CapDeviceID deviceId = 0; deviceId < devices.size(); ++deviceId)
{
if (devices[deviceId].uniqueId == uniqueId)
{
s_device = deviceId;
return;
}
}
}
ResetBuffers();
}
// Shuts down the capture backend. The worker thread is stopped and joined
// BEFORE the capture context is released: the previous order released s_ctx
// while CaptureWorker could still be inside Cap_* calls that use it.
void Deinit()
{
    CloseStream();
    s_running = false;
    // Guard against Deinit without a prior Init (join on a non-joinable
    // thread would terminate the process).
    if (s_captureThread.joinable())
        s_captureThread.join(); // may wait up to the worker's 1 s idle sleep
    Cap_releaseContext(s_ctx);
}
// Copies the most recent NV12 frame into the caller's fixed-size buffer.
void FillNV12Buffer(std::span<uint8, CAMERA_NV12_BUFFER_SIZE> nv12Buffer)
{
    // Lock out the capture thread while it may be rewriting s_nv12Buffer.
    std::scoped_lock lock(s_mutex);
    std::ranges::copy(s_nv12Buffer, nv12Buffer.data());
}
// Copies the most recent RGB frame into the caller's fixed-size buffer.
void FillRGBBuffer(std::span<uint8, CAMERA_RGB_BUFFER_SIZE> rgbBuffer)
{
    // Lock out the capture thread while it may be rewriting s_rgbBuffer.
    std::scoped_lock lock(s_mutex);
    std::ranges::copy(s_rgbBuffer, rgbBuffer.data());
}
// Switches the active capture device. DEVICE_NONE deselects and blanks the
// buffers; otherwise the new device's stream is reopened only if someone
// currently holds an open reference.
void SetDevice(uint32 deviceNo)
{
    std::scoped_lock lock(s_mutex);
    CloseStream();
    if (deviceNo == DEVICE_NONE)
    {
        s_device = std::nullopt;
        ResetBuffers();
        return;
    }
    s_device = deviceNo;
    // Only reopen if Open() has outstanding references.
    if (s_refCount != 0)
        OpenStream();
}
// Adds a reference to the capture backend; the stream is opened on the first
// reference, and only when a device has been selected.
void Open()
{
    std::scoped_lock lock(s_mutex);
    const bool firstReference = (s_refCount == 0);
    if (firstReference && s_device)
        OpenStream();
    ++s_refCount;
}
// Drops a reference to the capture backend; the stream is closed when the
// last reference goes away. Extra Close() calls are ignored.
void Close()
{
    std::scoped_lock lock(s_mutex);
    if (s_refCount == 0)
        return;
    --s_refCount;
    if (s_refCount == 0)
        CloseStream();
}
std::vector<DeviceInfo> EnumerateDevices()
{
std::scoped_lock lock(s_mutex);
std::vector<DeviceInfo> infos;
const auto deviceCount = Cap_getDeviceCount(s_ctx);
cemuLog_log(LogType::InputAPI, "Available video capture devices:");
for (CapDeviceID deviceNo = 0; deviceNo < deviceCount; ++deviceNo)
{
const auto uniqueId = Cap_getDeviceUniqueID(s_ctx, deviceNo);
const auto name = Cap_getDeviceName(s_ctx, deviceNo);
DeviceInfo info;
info.uniqueId = uniqueId;
if (name)
info.name = fmt::format("{}: {}", deviceNo, name);
else
info.name = fmt::format("{}: Unknown", deviceNo);
infos.push_back(info);
cemuLog_log(LogType::InputAPI, "{}", info.name);
}
if (infos.empty())
cemuLog_log(LogType::InputAPI, "No available video capture devices");
return infos;
}
// Persists the currently selected device's unique id into the config file
// (an empty string when no device is selected).
void SaveDevice()
{
    std::scoped_lock lock(s_mutex);
    const std::string cameraId = s_device ? Cap_getDeviceUniqueID(s_ctx, *s_device) : "";
    GetConfig().camera_id.SetValue(cameraId);
    GetConfigHandle().Save();
}
// Returns the index of the currently selected capture device, if any.
std::optional<uint32> GetCurrentDevice()
{
    const auto currentDevice = s_device;
    return currentDevice;
}
} // namespace CameraManager

View File

@ -5,11 +5,19 @@
namespace CameraManager
{
// Fixed capture geometry shared by the backend and its clients.
constexpr uint32 CAMERA_WIDTH = 640;
constexpr uint32 CAMERA_HEIGHT = 480;
constexpr uint32 CAMERA_PITCH = 768; // bytes per NV12 luma row (> width)
// NV12 frame: pitch-wide Y plane plus half-size interleaved UV plane.
constexpr uint32 CAMERA_NV12_BUFFER_SIZE = (CAMERA_HEIGHT * CAMERA_PITCH * 3) >> 1;
// Tightly-packed 24-bit RGB frame.
constexpr uint32 CAMERA_RGB_BUFFER_SIZE = CAMERA_HEIGHT * CAMERA_WIDTH * 3;
// Identity and display name of one enumerated capture device.
struct DeviceInfo
{
    std::string uniqueId; // stable id used to re-select the device from config
    std::string name;     // human-readable label shown in the UI
};
// Sentinel passed to SetDevice to deselect any device.
constexpr static uint32 DEVICE_NONE = std::numeric_limits<uint32>::max();
void Init();
@ -17,11 +25,11 @@ namespace CameraManager
void Open();
void Close();
void FillNV12Buffer(uint8* nv12Buffer);
void FillRGBBuffer(uint8* rgbBuffer);
void FillNV12Buffer(std::span<uint8, CAMERA_NV12_BUFFER_SIZE> nv12Buffer);
void FillRGBBuffer(std::span<uint8, CAMERA_RGB_BUFFER_SIZE> rgbBuffer);
void SetDevice(uint32 deviceNo);
std::vector<DeviceInfo> EnumerateDevices();
void SaveDevice();
std::optional<uint32> GetCurrentDevice();
} // namespace CameraManager
} // namespace CameraManager

View File

@ -6,100 +6,101 @@
#include <wx/dcbuffer.h>
#include <wx/rawbmp.h>
constexpr unsigned CAMERA_WIDTH = 640;
constexpr unsigned CAMERA_HEIGHT = 480;
CameraSettingsWindow::CameraSettingsWindow(wxWindow* parent)
: wxDialog(parent, wxID_ANY, _("Camera settings"), wxDefaultPosition),
m_imageBitmap(CAMERA_WIDTH, CAMERA_HEIGHT, 24), m_imageBuffer(CAMERA_WIDTH * CAMERA_HEIGHT * 3)
: wxDialog(parent, wxID_ANY, _("Camera settings"), wxDefaultPosition),
m_imageBitmap(CameraManager::CAMERA_WIDTH, CameraManager::CAMERA_HEIGHT, 24),
m_imageBuffer(CameraManager::CAMERA_RGB_BUFFER_SIZE)
{
CameraManager::Init();
CameraManager::Open();
auto* rootSizer = new wxBoxSizer(wxVERTICAL);
{
auto* topSizer = new wxBoxSizer(wxHORIZONTAL);
{
m_cameraChoice = new wxChoice(this, wxID_ANY, wxDefaultPosition, {300, -1});
m_cameraChoice->Bind(wxEVT_CHOICE, &CameraSettingsWindow::OnSelectCameraChoice, this);
m_cameraChoice->SetToolTip(_("Cameras are only listed if they support 640x480"));
CameraManager::Init();
CameraManager::Open();
auto* rootSizer = new wxBoxSizer(wxVERTICAL);
{
auto* topSizer = new wxBoxSizer(wxHORIZONTAL);
{
m_cameraChoice = new wxChoice(this, wxID_ANY, wxDefaultPosition, {300, -1});
m_cameraChoice->Bind(wxEVT_CHOICE, &CameraSettingsWindow::OnSelectCameraChoice, this);
m_cameraChoice->SetToolTip(_("Cameras are only listed if they support 640x480"));
m_refreshButton = new wxButton(this, wxID_ANY, wxString::FromUTF8(""));
m_refreshButton->Fit();
m_refreshButton->Bind(wxEVT_BUTTON, &CameraSettingsWindow::OnRefreshPressed, this);
wxQueueEvent(m_refreshButton, new wxCommandEvent{wxEVT_BUTTON});
m_refreshButton = new wxButton(this, wxID_ANY, wxString::FromUTF8(""));
m_refreshButton->Fit();
m_refreshButton->Bind(wxEVT_BUTTON, &CameraSettingsWindow::OnRefreshPressed, this);
wxQueueEvent(m_refreshButton, new wxCommandEvent{wxEVT_BUTTON});
topSizer->Add(m_cameraChoice);
topSizer->Add(m_refreshButton);
}
topSizer->Add(m_cameraChoice);
topSizer->Add(m_refreshButton);
}
m_imageWindow = new wxWindow(this, wxID_ANY, wxDefaultPosition,
{CameraManager::CAMERA_WIDTH, CameraManager::CAMERA_HEIGHT});
m_imageWindow->SetBackgroundStyle(wxBG_STYLE_PAINT);
m_imageWindow = new wxWindow(this, wxID_ANY, wxDefaultPosition, {CAMERA_WIDTH, CAMERA_HEIGHT});
m_imageWindow->SetBackgroundStyle(wxBG_STYLE_PAINT);
rootSizer->Add(topSizer);
rootSizer->Add(m_imageWindow, wxEXPAND);
}
SetSizerAndFit(rootSizer);
m_imageUpdateTimer.Bind(wxEVT_TIMER, &CameraSettingsWindow::UpdateImage, this);
m_imageWindow->Bind(wxEVT_PAINT, &CameraSettingsWindow::DrawImage, this);
this->Bind(wxEVT_CLOSE_WINDOW, &CameraSettingsWindow::OnClose, this);
rootSizer->Add(topSizer);
rootSizer->Add(m_imageWindow, wxEXPAND);
}
SetSizerAndFit(rootSizer);
m_imageUpdateTimer.Bind(wxEVT_TIMER, &CameraSettingsWindow::UpdateImage, this);
m_imageWindow->Bind(wxEVT_PAINT, &CameraSettingsWindow::DrawImage, this);
this->Bind(wxEVT_CLOSE_WINDOW, &CameraSettingsWindow::OnClose, this);
m_imageUpdateTimer.Start(33, wxTIMER_CONTINUOUS);
m_imageUpdateTimer.Start(33, wxTIMER_CONTINUOUS);
}
void CameraSettingsWindow::OnSelectCameraChoice(wxCommandEvent&)
{
const auto selection = m_cameraChoice->GetSelection();
if (selection < 0)
return;
if (selection == 0)
CameraManager::SetDevice(CameraManager::DEVICE_NONE);
else
CameraManager::SetDevice(selection - 1);
const auto selection = m_cameraChoice->GetSelection();
if (selection < 0)
return;
if (selection == 0)
CameraManager::SetDevice(CameraManager::DEVICE_NONE);
else
CameraManager::SetDevice(selection - 1);
}
void CameraSettingsWindow::OnRefreshPressed(wxCommandEvent&)
{
wxArrayString choices = {_("None")};
for (const auto& entry : CameraManager::EnumerateDevices())
{
choices.push_back(entry.name);
}
m_cameraChoice->Set(choices);
if (auto currentDevice = CameraManager::GetCurrentDevice())
m_cameraChoice->SetSelection(*currentDevice + 1);
wxArrayString choices = {_("None")};
for (const auto& entry : CameraManager::EnumerateDevices())
{
choices.push_back(entry.name);
}
m_cameraChoice->Set(choices);
if (auto currentDevice = CameraManager::GetCurrentDevice())
m_cameraChoice->SetSelection(*currentDevice + 1);
}
void CameraSettingsWindow::UpdateImage(const wxTimerEvent&)
{
CameraManager::FillRGBBuffer(m_imageBuffer.data());
wxNativePixelData data{m_imageBitmap};
if (!data)
return;
wxNativePixelData::Iterator p{data};
for (auto row = 0u; row < CAMERA_HEIGHT; ++row)
{
const auto* rowPtr = m_imageBuffer.data() + row * CAMERA_WIDTH * 3;
wxNativePixelData::Iterator rowStart = p;
for (auto col = 0u; col < CAMERA_WIDTH; ++col, ++p)
{
auto* colour = rowPtr + col * 3;
p.Red() = colour[0];
p.Green() = colour[1];
p.Blue() = colour[2];
}
p = rowStart;
p.OffsetY(data, 1);
}
m_imageWindow->Refresh();
CameraManager::FillRGBBuffer(std::span<uint8, CameraManager::CAMERA_RGB_BUFFER_SIZE>(m_imageBuffer));
wxNativePixelData data{m_imageBitmap};
if (!data)
return;
wxNativePixelData::Iterator p{data};
for (auto row = 0u; row < CameraManager::CAMERA_HEIGHT; ++row)
{
const auto* rowPtr = m_imageBuffer.data() + row * CameraManager::CAMERA_WIDTH * 3;
wxNativePixelData::Iterator rowStart = p;
for (auto col = 0u; col < CameraManager::CAMERA_WIDTH; ++col, ++p)
{
auto* colour = rowPtr + col * 3;
p.Red() = colour[0];
p.Green() = colour[1];
p.Blue() = colour[2];
}
p = rowStart;
p.OffsetY(data, 1);
}
m_imageWindow->Refresh();
}
void CameraSettingsWindow::DrawImage(const wxPaintEvent&)
{
wxAutoBufferedPaintDC dc{m_imageWindow};
dc.DrawBitmap(m_imageBitmap, 0, 0);
wxAutoBufferedPaintDC dc{m_imageWindow};
dc.DrawBitmap(m_imageBitmap, 0, 0);
}
// Close handler: releases the camera reference taken when the dialog opened
// and persists the currently selected device to the config.
void CameraSettingsWindow::OnClose(wxCloseEvent& event)
{
    CameraManager::Close();
    CameraManager::SaveDevice();
    event.Skip(); // let the default close handling proceed
}
CameraManager::Close();
CameraManager::SaveDevice();
event.Skip();
}