diff --git a/src/video_core/renderer_opengl/gl_rasterizer_cache.cpp b/src/video_core/renderer_opengl/gl_rasterizer_cache.cpp
index b94446428d..39dcf71ce2 100644
--- a/src/video_core/renderer_opengl/gl_rasterizer_cache.cpp
+++ b/src/video_core/renderer_opengl/gl_rasterizer_cache.cpp
@@ -564,6 +564,12 @@ void RasterizerCacheOpenGL::CopySurface(const Surface& src_surface, const Surfac
 CachedSurface::CachedSurface(const SurfaceParams& params)
     : params{params}, gl_target{SurfaceTargetToGL(params.target)},
       cached_size_in_bytes{params.size_in_bytes}, RasterizerCacheObject{params.host_ptr} {
+
+    const auto optional_cpu_addr{
+        Core::System::GetInstance().GPU().MemoryManager().GpuToCpuAddress(params.gpu_addr)};
+    ASSERT_MSG(optional_cpu_addr, "optional_cpu_addr is invalid");
+    cpu_addr = *optional_cpu_addr;
+
     texture.Create(gl_target);
 
     // TODO(Rodrigo): Using params.GetRect() returns a different size than using its Mip*(0)
@@ -603,20 +609,6 @@ CachedSurface::CachedSurface(const SurfaceParams& params)
     ApplyTextureDefaults(texture.handle, params.max_mip_level);
 
     OpenGL::LabelGLObject(GL_TEXTURE, texture.handle, params.gpu_addr, params.IdentityString());
-
-    // Clamp size to mapped GPU memory region
-    // TODO(bunnei): Super Mario Odyssey maps a 0x40000 byte region and then uses it for a 0x80000
-    // R32F render buffer. We do not yet know if this is a game bug or something else, but this
-    // check is necessary to prevent flushing from overwriting unmapped memory.
-
-    auto& memory_manager{Core::System::GetInstance().GPU().MemoryManager()};
-    // const u64 max_size{memory_manager.GetRegionEnd(params.gpu_addr) - params.gpu_addr};
-    // if (cached_size_in_bytes > max_size) {
-    //     LOG_ERROR(HW_GPU, "Surface size {} exceeds region size {}", params.size_in_bytes,
-    //               max_size); cached_size_in_bytes = max_size;
-    // }
-
-    cpu_addr = *memory_manager.GpuToCpuAddress(params.gpu_addr);
 }
 
 MICROPROFILE_DEFINE(OpenGL_SurfaceLoad, "OpenGL", "Surface Load", MP_RGB(128, 192, 64));
@@ -925,7 +917,7 @@ void RasterizerCacheOpenGL::LoadSurface(const Surface& surface) {
 }
 
 Surface RasterizerCacheOpenGL::GetSurface(const SurfaceParams& params, bool preserve_contents) {
-    if (params.gpu_addr == 0 || params.height * params.width == 0) {
+    if (!params.IsValid()) {
         return {};
     }
 
diff --git a/src/video_core/renderer_opengl/gl_rasterizer_cache.h b/src/video_core/renderer_opengl/gl_rasterizer_cache.h
index d76bc0ee7b..0efcafd077 100644
--- a/src/video_core/renderer_opengl/gl_rasterizer_cache.h
+++ b/src/video_core/renderer_opengl/gl_rasterizer_cache.h
@@ -109,6 +109,11 @@ struct SurfaceParams {
         return size;
     }
 
+    /// Returns true if the parameters constitute a valid rasterizer surface.
+    bool IsValid() const {
+        return gpu_addr && host_ptr && height && width;
+    }
+
     /// Returns the exact size of the memory occupied by a layer in a texture in VRAM, including
     /// mipmaps.
     std::size_t LayerMemorySize() const {
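
The sketch below is not part of the patch. It is a minimal, self-contained C++ illustration of the pattern the diff introduces: a SurfaceParams-style IsValid() predicate plus the early return that GetSurface() now performs instead of ad-hoc gpu_addr and width * height checks. FakeSurfaceParams, the placeholder surface type, and the literal values are invented for this example and do not exist in yuzu.

// Minimal illustration of the early-out pattern used by GetSurface() above.
// FakeSurfaceParams is a stripped-down stand-in for SurfaceParams, not the real type.
#include <cstdint>
#include <iostream>
#include <memory>

struct FakeSurfaceParams {
    std::uint64_t gpu_addr{};
    const std::uint8_t* host_ptr{};
    std::uint32_t width{};
    std::uint32_t height{};

    // Mirrors the new SurfaceParams::IsValid(): every field a surface needs must be non-zero.
    bool IsValid() const {
        return gpu_addr && host_ptr && height && width;
    }
};

// Stand-in for RasterizerCacheOpenGL::GetSurface(): reject invalid parameters up front
// rather than repeating "gpu_addr == 0 || width * height == 0" style checks at call sites.
std::shared_ptr<int> GetSurface(const FakeSurfaceParams& params) {
    if (!params.IsValid()) {
        return {}; // empty handle; the caller treats this as "no surface"
    }
    return std::make_shared<int>(0); // placeholder for a real cached surface
}

int main() {
    std::uint8_t backing_byte{};
    const FakeSurfaceParams bad{};                               // all fields zero -> rejected
    const FakeSurfaceParams good{0x1000, &backing_byte, 64, 64}; // plausible values -> accepted

    std::cout << std::boolalpha
              << "bad params accepted:  " << static_cast<bool>(GetSurface(bad)) << '\n'
              << "good params accepted: " << static_cast<bool>(GetSurface(good)) << '\n';
}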