#pragma warning(disable: 4127)

#include <glm/ext/matrix_clip_space.hpp>

#include "LightSystem.h"

#include "Rendering/ICapabilities.h"

#include "Services/Time.h"

#include "Scene/GetBounds.h"

#include "Systems/Core/TransformSystem.h"
#include "Systems/Core/CameraSystem.h"

#include "Renderer/CullingManager.h"
#include "Renderer/IRenderer.h"

#include "Resources/TextureManager.h"
#include "Resources/MaterialManager.h"

#include "Utilities/Parsing.h"
#include "Utilities/Math.h"

#include <glm/gtx/compatibility.hpp>
#include <glm/gtc/color_space.hpp>
    glm::mat4 rawInverseProjection;
    glm::mat4 rawInverseViewProjection;
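// corners[] presumably holds the eight clip-space (NDC) cube corners and edgeIndicesData the
// corner-index pairs forming the twelve cube edges; both are used below to reconstruct camera
// frusta in light space.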
static const glm::vec3 corners[] =

static const uint32_t edgeIndicesData[][2] = {
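// Clips a flat list of line-segment endpoints (pairs in E) against a plane, keeping (presumably)
// the portions of the frustum edges on the positive side of the plane; the trailing assert
// verifies the endpoint count is still even.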
static void clipLineSegments(std::vector<glm::vec3>& E, const glm::vec4 plane)

    for (size_t i = 0; i < E.size(); i += 2) {
        const auto a = E[i + 0];
        const auto b = E[i + 1];
        const float d0 = glm::dot(glm::vec4(a, 1.f), plane);
        const float d1 = glm::dot(glm::vec4(b, 1.f), plane);

        auto num = glm::dot(d, glm::vec3(plane));
        if (glm::abs(num) < 0.01f) {

        if (b0 == b1)
            continue;

        auto t = -glm::dot(glm::vec4(a, 1.f), plane) / num;

    assert((E.size() & 1) == 0);
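// Builds a rotation about the Z axis that maps the 2D direction d onto the +Y axis; c and s are
// presumably the components of d normalized by r = 1 / sqrt(l2).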
static glm::mat4 rotateVecToY(const glm::vec2& d)

    auto l2 = glm::dot(d, d);

    const auto r = 1.f / glm::sqrt(l2);

    return glm::mat4( c, s, 0, 0,
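// Builds the light view matrix as the inverse light rotation (conjugate quaternion), then picks
// world Y as an alignment axis, or world Z when world Z maps mostly onto light-space Y, and
// applies rotateVecToY so the chosen axis's light-space XY projection points along +Y.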
static glm::mat4 createLightViewMatrix(const glm::vec4&, const glm::quat& rotation)

    const glm::mat4 lightView = glm::mat4_cast(glm::conjugate(rotation));

    auto lz = glm::abs(euclidean(lightView * glm::vec4(0, 0, 1, 1)));

    glm::vec3 align_axis = glm::vec3(0, 1, 0);
    if (lz.y > lz.x && lz.y > lz.z) {

        align_axis = glm::vec3(0, 0, 1);

    auto cascadeAxisLW = glm::vec3(glm::vec2(glm::mat3(lightView) * glm::vec3(align_axis)), 0.f);
    return rotateVecToY(cascadeAxisLW) * lightView;
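// Transforms the unit basis vectors through M, picks the axis whose image has the smallest |z|
// component (the one lying closest to the light-space XY plane), and returns the rotateVecToY
// rotation aligning that image with +Y; used below to orient the cull-space bounding box.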
static glm::mat4 createRotaryAlignmentMatrix(const glm::mat4& M)

    auto ro = euclidean(M * glm::vec4(0, 0, 0, 1));
    auto rx = euclidean(M * glm::vec4(1, 0, 0, 1)) - ro;
    auto ry = euclidean(M * glm::vec4(0, 1, 0, 1)) - ro;
    auto rz = euclidean(M * glm::vec4(0, 0, 1, 1)) - ro;

    auto xz = glm::abs(rx.z);
    auto yz = glm::abs(ry.z);
    auto zz = glm::abs(rz.z);

    if (xz < yz && xz < zz) {

    return rotateVecToY(d);
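// Fits an orthographic cascade projection (rawCascadeProjectionMatrix) and a tighter culling
// projection (rawCascadeCullMatrix) around the [n, f] slice of every contributing camera frustum,
// expressed in light space; frustumSlack adds margin so a previously fitted projection can be
// reused while the view only moves slightly.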
static void createFrustumFitMatrix(Context* context,
                                   glm::mat4& rawCascadeProjectionMatrix,
                                   glm::mat4& rawCascadeCullMatrix,

                                   const std::vector<CamState>& camStates,
                                   const glm::mat4& lightView,
                                   const glm::vec4& cascadeLine,
                                   const float n, const float f,

                                   glm::uvec2& blueNoiseOffset)
    auto toCull = createRotaryAlignmentMatrix(lightView * refCamData->inverseViewMatrix);

    glm::vec3 min_lv(std::numeric_limits<float>::max());
    glm::vec3 min_cv = min_lv;
    glm::vec3 max_lv(-std::numeric_limits<float>::max());
    glm::vec3 max_cv = max_lv;

    for (const auto& camState : camStates) {
        const auto M = lightView * camState.rawInverseViewProjection;
        if (camState.cameraData == refCamData) {

            const auto zn = glm::dot(row2, glm::vec4(0, 0, -n, 1));
            const auto wn = glm::dot(row3, glm::vec4(0, 0, -n, 1));
            const auto zf = glm::dot(row2, glm::vec4(0, 0, -f, 1));
            const auto wf = glm::dot(row3, glm::vec4(0, 0, -f, 1));

            for (unsigned i = 0; i < 4; i++) {
                const auto pn = euclidean(M * glm::vec4(wn * glm::vec2(corners[i]), zn, wn));
                const auto pf = euclidean(M * glm::vec4(wf * glm::vec2(corners[i]), zf, wf));

                frustaPoints->points.push_back(pn);
                frustaPoints->points.push_back(pf);

                min_lv = glm::min(min_lv, glm::min(pn, pf));
                max_lv = glm::max(max_lv, glm::max(pn, pf));

                min_cv = glm::min(min_cv, glm::min(glm::vec3(toCull * glm::vec4(pn, 1)),
                                                   glm::vec3(toCull * glm::vec4(pf, 1))));
                max_cv = glm::max(max_cv, glm::max(glm::vec3(toCull * glm::vec4(pn, 1)),
                                                   glm::vec3(toCull * glm::vec4(pf, 1))));

            for (unsigned i = 0; i < 8; i++) {
                p[i] = euclidean(M * glm::vec4(corners[i], 1.f));

            std::vector<glm::vec3> E;
            for (const auto& e : edgeIndicesData) {
                E.push_back(p[e[0]]);
                E.push_back(p[e[1]]);

            clipLineSegments(E, lightView * glm::vec4(glm::vec3(cascadeLine), cascadeLine.w - n));
            clipLineSegments(E, -lightView * glm::vec4(glm::vec3(cascadeLine), cascadeLine.w - f));
            for (const auto& pp : E) {

                frustaPoints->points.push_back(pp);

                min_lv = glm::min(min_lv, pp);
                max_lv = glm::max(max_lv, pp);

                min_cv = glm::min(min_cv, glm::vec3(toCull * glm::vec4(pp, 1)));
                max_cv = glm::max(max_cv, glm::vec3(toCull * glm::vec4(pp, 1)));

    frustaPoints->offsets.push_back(unsigned(frustaPoints->points.size()));
    const float size = std::max((max_lv.x - min_lv.x),
                                (max_lv.y - min_lv.y));
    glm::vec2 center_lv = 0.5f * glm::vec2(min_lv + max_lv);
    const float zNear = -max_lv.z - std::max(0.1f, frustumSlack) * (max_lv.z - min_lv.z);
    const float zFar = -min_lv.z + frustumSlack * (max_lv.z - min_lv.z);
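    // Decide whether the previous cascade projection can be kept: sample the corners of the newly
    // fitted box and accept the old matrix only if they still land inside the slack bands of its
    // clip volume; zero slack always forces a refit.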
    bool needNewFrustum = frustumSlack == 0.f;
    const float factor = 0.5f * (1.f + frustumSlack);
    const float greaterThanValue = glm::max(0.f, 1.f - 2.f * frustumSlack);
    for (unsigned i = 0; i < 8u; i++) {
        glm::vec4 p = rawCascadeProjectionMatrix * glm::vec4(center_lv.x + 0.5 * (((i >> 0) & 1) ? -size : size),
                                                             center_lv.y + 0.5 * (((i >> 1) & 1) ? -size : size),
                                                             ((i >> 2) & 1) ? min_lv.z : max_lv.z,

        bool xOk = ((-p.w <= p.x) && (p.x <= -greaterThanValue * p.w)) || ((greaterThanValue * p.w <= p.x) && (p.x <= p.w));
        bool yOk = ((-p.w <= p.y) && (p.y <= -greaterThanValue * p.w)) || ((greaterThanValue * p.w <= p.y) && (p.y <= p.w));
        bool zOk = ((-p.w <= p.z) && (p.z <= -0.5f * p.w)) || ((0.5f * p.w <= p.z) && (p.z <= p.w));
        needNewFrustum = needNewFrustum || !(xOk && yOk && zOk);
    if (needNewFrustum) {
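        // Snap the cascade centre to a quantized texel size (quantizing the texel size itself in
        // steps of 2^(1/steppyness) keeps it stable as the fit changes), and derive a per-cascade
        // blue-noise offset from the snap position; the modulo-64 wrap assumes a 64x64 noise tile.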
        const float steppyness = 5.f;
        const float texelSize = std::exp2(std::ceil(log2(size / resolution) * steppyness) / steppyness);
        const glm::dvec3 origin = context->transformSystem->getOrigin();
        const glm::dvec3 offset = glm::dmat3(lightView) * origin;
        const double snappX = std::floor((center_lv.x + offset.x) / texelSize);
        const double snappY = std::floor((center_lv.y + offset.y) / texelSize);
        double snapX_ = std::fmod(snappX, 64.0);
        if (snapX_ < 0.0) snapX_ += 64;
        double snapY_ = std::fmod(snappY, 64.0);
        if (snapY_ < 0.0) snapY_ += 64;

        blueNoiseOffset = glm::uvec2(static_cast<uint32_t>(snapX_), static_cast<uint32_t>(snapY_));
        blueNoiseOffset = glm::uvec2(blueNoiseOffset.x, (64u - blueNoiseOffset.y) % 64u);
        const glm::vec2 viewportMin = center_lv - glm::vec2(factor * size);
        const glm::vec2 viewportMax = center_lv + glm::vec2(factor * size);

        frustaPoints->viewportMin = viewportMin;
        frustaPoints->viewportMax = viewportMax;

        rawCascadeProjectionMatrix = glm::ortho(viewportMin.x, viewportMax.x,
                                                viewportMin.y, viewportMax.y,

    rawCascadeCullMatrix = glm::ortho(min_cv.x, max_cv.x,

                                      zNear, zFar) * toCull;
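// Resolve the reference camera for the light (presumably its lodReference entity when set,
// otherwise the main camera) and gather the per-light shadow cameras into camStates, falling back
// to the reference camera when none are given; each entry caches the inverse raw view-projection.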
    refCamData = &context->cameraSystem->getData(c);

    refCamData = &context->cameraSystem->getMainCameraData();

    for (auto& we : light.cameras) {
        if (auto e = we.lock(); e) {

            camStates.emplace_back();
            camStates.back().cameraData = &context->cameraSystem->getData(c);

    if (camStates.empty() && refCamData) {
        camStates.emplace_back();
        camStates.back().cameraData = refCamData;

    for (auto& camState : camStates) {
        camState.rawInverseViewProjection = glm::inverse(camState.cameraData->rawViewProjection);
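// Dynamic cascade count heuristic: fs/ns equals zFar/zNear, so the number of cascades grows with
// how far the frustum extends relative to the near plane, clamped to [1, maxViewports].
// (`a` is presumably the half field-of-view angle derived from FOV.)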
static void calculateCascadeCount(LightData& lightData, float zNear, float zFar, float FOV)

    if (!lightData.dynamicCascadeCount)
        return;

    float tana = tanf(a);
    float ns = tana * zNear;
    float fs = tana * zFar;
    float r = fs / ns - 1.0f;
    lightData.numViewports = (uint16_t)ceil(r);
    lightData.numViewports = std::max(lightData.numViewports, (uint16_t)1);
    lightData.numViewports = std::min(lightData.numViewports, (uint16_t)lightData.maxViewports);
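// Places cascade split depths by blending a logarithmic distribution, zNear * (zFar/zNear)^t,
// with a uniform one, zNear + t * (zFar - zNear), weighted by "shadows.cascades.expFactor";
// neighbouring cascades are widened by "shadows.cascades.overlapFactor".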
static void calculateSplits(Context* context, LightData& lightData, float zNear, float zFar)

    const auto expFactor = context->variables->get("shadows.cascades.expFactor")->getFloat();
    const auto overlapFactor = 0.5f * context->variables->get("shadows.cascades.overlapFactor")->getFloat();

    const float cascadeWidth = 1.f / static_cast<float>(lightData.numViewports);
    if (lightData.numViewports == 1) {
        lightData.nearDepths[0] = zNear;
        lightData.farDepths[0] = zFar;

    for (int i = 0; i < lightData.numViewports; ++i) {
        const float iF = std::min(1.f, cascadeWidth * (static_cast<float>(i) + 1.f + overlapFactor));
        const float iN = std::max(0.f, cascadeWidth * (static_cast<float>(i) - overlapFactor));

        const float zExpNear = zNear * glm::pow(zFar / zNear, iN);
        const float zLinearNear = zNear + iN * (zFar - zNear);
        lightData.nearDepths[i] = glm::max(glm::lerp(zLinearNear, zExpNear, expFactor), zNear);
        assert(std::isfinite(lightData.nearDepths[i]));

        const float zExp = zNear * glm::pow(zFar / zNear, iF);
        const float zLinear = zNear + iF * (zFar - zNear);

        lightData.farDepths[i] = glm::lerp(zLinear, zExp, expFactor);
        assert(std::isfinite(lightData.farDepths[i]));
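// Per-light overload: derives the cascade split line (the reference camera's forward axis as a
// plane equation), extends the split range over every shadow camera's frustum, and, when
// tightShadowBounds is set, clips that range against the scene's shadow bounds as seen along the
// light direction. Returns the nearest depth used for cascade 0.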
static float calculateSplits(Context* context,
                             LightData& lightData,
                             const glm::mat4& rotation,
                             const CameraData* refCamData,
                             const std::vector<CamState>& camStates,
                             const float maxShadowDistance)

    const glm::mat4& M = refCamData->inverseViewMatrix;
    const glm::vec3 o = glm::vec3(M * glm::vec4(0, 0, 0, 1));
    const glm::vec3 a = glm::normalize(glm::vec3(M * glm::vec4(0, 0, -1, 0)));
    const float d = -glm::dot(o, a);
    assert(std::isfinite(d));
    lightData.cascadeLine = glm::vec4(a, d);
    auto zNear = refCamData->nearDistance;
    auto zFar = refCamData->farDistance;
    for (auto& camState : camStates) {
        for (auto c : corners) {
            auto q = camState.rawInverseViewProjection * glm::vec4(c, 1.f);
            if (std::numeric_limits<float>::epsilon() < q.w) {
                auto t = glm::dot(a, (1.f / q.w) * glm::vec3(q)) + d;
                assert(std::isfinite(t));

                zNear = glm::min(zNear, t);
                zFar = glm::max(zFar, t);

    if (!lightData.tightShadowBounds) {
        calculateCascadeCount(lightData, zNear, zFar, refCamData->fieldOfView);
        calculateSplits(context, lightData, refCamData->nearDistance, std::min(zFar, refCamData->nearDistance + maxShadowDistance));
        return std::max(zNear, refCamData->nearDistance - maxShadowDistance);
    const Cogs::Geometry::BoundingBox bbox = context->bounds->getShadowBounds(context);
    const glm::vec3 bbox_corners[] = {
        glm::vec3(bbox.min.x, bbox.min.y, bbox.min.z),
        glm::vec3(bbox.max.x, bbox.min.y, bbox.min.z),
        glm::vec3(bbox.max.x, bbox.max.y, bbox.min.z),
        glm::vec3(bbox.min.x, bbox.max.y, bbox.min.z),
        glm::vec3(bbox.min.x, bbox.min.y, bbox.max.z),
        glm::vec3(bbox.max.x, bbox.min.y, bbox.max.z),
        glm::vec3(bbox.max.x, bbox.max.y, bbox.max.z),
        glm::vec3(bbox.min.x, bbox.max.y, bbox.max.z),
    };

    const glm::vec2 viewport_corners[] = { {-1, -1}, {1, -1}, {1, 1}, {-1, 1} };

    float zmin = std::numeric_limits<float>::max();

    glm::vec3 lightDir = glm::mat3(rotation) * glm::vec3(0, 0, -1);

    glm::vec3 planeTangent = glm::cross(a, lightDir);
    glm::vec3 n = glm::normalize(glm::cross(lightDir, planeTangent));
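    // For each shadow camera: when the light direction, projected to NDC, falls inside the
    // viewport, the scene extent along the split axis bounds the range directly; otherwise rays
    // through the viewport corners are intersected with light-direction-aligned planes through
    // the scene-bounds corners, and the hits' split-axis values tighten zmin/zmax.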
    for (auto& camState : camStates) {
        const CameraData& cameraData = *camState.cameraData;
        const glm::vec3 ndcLightDir = euclidean(cameraData.rawProjectionMatrix * glm::vec4(glm::mat3(cameraData.viewMatrix) * lightDir, 1.0f));
        const float eps = 0.0001f;
        const bool along = ndcLightDir.z <= 0.0f;

        float azmin = std::numeric_limits<float>::max();
        float azmax = -std::numeric_limits<float>::max();
        for (const glm::vec3& p0 : bbox_corners) {
            float t = glm::dot(a, p0) + d;
            azmin = std::min(azmin, t);
            azmax = std::max(azmax, t);

        if (std::abs(ndcLightDir.x) - eps <= 1.0f && std::abs(ndcLightDir.y) - eps <= 1.0f) {

            if (along) zmin = std::max(zmin, azmin);
            else zmax = std::min(zmax, azmax);

        for (const glm::vec3& p0 : bbox_corners) {

            for (const glm::vec2& corn : viewport_corners) {

                glm::vec3 l0 = glm::vec3(cameraData.inverseViewMatrix * glm::vec4(0, 0, 0, 1));
                glm::vec3 l = glm::normalize(euclidean(camState.rawInverseViewProjection * glm::vec4(corn, 1.0f, 1.f)) - l0);

                float LdotN = glm::dot(l, n);
                float ld = glm::dot(p0 - l0, n) / LdotN;
                glm::vec3 p = l0 + ld * l;
                float t = glm::dot(a, p) + d;

                if (along) t = std::max(t, azmin);
                else t = std::min(t, azmax);

                t = std::max(0.0f, t);
                zmin = std::min(zmin, t);
                zmax = std::max(zmax, t);
    zNear = glm::max(zNear, zmin);
    zFar = glm::min(zFar, zmax);

    zFar = glm::max(zNear, zFar);

    zNear = std::max(zNear, refCamData->nearDistance);
    zFar = std::min(zFar, refCamData->nearDistance + maxShadowDistance);
    calculateCascadeCount(lightData, zNear, zFar, refCamData->fieldOfView);
    calculateSplits(context, lightData, zNear, zFar);
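// Presumably part of LightSystem::initialize: create the shared cascade shadow map array and the
// point-light cube shadow texture, falling back from a cube-map array to a single cube map when
// the device does not support texture cube arrays.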
    const bool originOnTop = context->device->getCapabilities()->getDeviceCapabilities().OriginOnTop;

    cascadeArray = context->textureManager->create();
    cascadeArray->setName("Light.ShadowCascades");

    bool useTextureCubeArrays = context->device->getCapabilities()->getDeviceCapabilities().TextureCubeArrays;

        useTextureCubeArrays = false;

    cubeArray = context->textureManager->create();
    if (useTextureCubeArrays)
        cubeArray->setName("Light.ShadowCubeArray");
    else
        cubeArray->setName("Light.ShadowCube");
    auto transformSystem = context->transformSystem;
    auto variables = context->variables.get();

    softShadows = parseEnum<SoftShadows>(variables->get("shadows.softShadows", "Default"));
    auto shadowUpdate = parseEnum<ShadowUpdate>(variables->get("shadows.update", "Default"));
    const auto pointShadowResolution = static_cast<unsigned>(std::max(1, variables->get("shadows.pointShadowResolution", 256)));

    unsigned cascadeShadowResolution = (unsigned)std::max(0, variables->get("shadows.cascadeShadowResolution", 1024));

    const float frustumSlack = glm::clamp(variables->get("shadows.frustumSlack", 0.1f), 0.f, 1.f);

    const auto pointShadowFormat = parseTextureFormat(variables->get("shadows.pointShadowFormat", "R32_TYPELESS"));
    const auto cascadeShadowFormat = parseTextureFormat(variables->get("shadows.cascadeShadowFormat", "R32_TYPELESS"));
    const bool shadowsEnabled = variables->get("renderer.shadowsEnabled", false);

    const bool lightSystemRun = variables->getOrAdd("lightSystem.run", true);
    if (!lightSystemRun) {

    bool anyChanged = false;
    for (const auto& light : pool) {

    lightsChanged |= anyChanged;
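    // Per-light pass: directional lights are assigned a block of layers in the shared cascade
    // array (layerCount tracks the running offset), while point lights get a slot in the cube
    // shadow texture (cubeInstances); each viewport that needs updating also receives a
    // CameraData describing its shadow render pass.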
    uint32_t cascadeInstances = 0;
    uint32_t layerCount = 0;

    uint32_t cubeInstances = 0;
    for (const auto& light : pool) {

        auto& lightData = getData(&light);
        lightData.enabled = light.enabled;

        lightData.lightColor = glm::vec4(glm::convertSRGBToLinear(glm::vec3(light.lightColor)),

            lightData.lightDirection = transformSystem->getLocalToWorld(transformComponent) * glm::vec4(0, 0, -1, 0);
            lightData.lightDirection = glm::normalize(lightData.lightDirection);

            lightData.lightPosition = glm::vec4(0, 0, 0, 0);

            if (lightData.castShadows) {

                uint32_t framesSinceDirty = context->time->getFrame() - context->engine->getLastDirtyFrame();
                if (framesSinceDirty <= lightData.maxViewports) {
                    context->engine->triggerUpdate();

                auto passOptions = &lightData.passOptions;
                passOptions->setFlag(RenderPassOptions::Flags::NoDepthClip);

                lightData.textureSize = cascadeShadowResolution;
                lightData.shadowUpdate = shadowUpdate;

                lightData.maxViewports = 4;
                lightData.numViewports = lightData.maxViewports;
                lightData.shadowTexture = cascadeArray;
                lightData.arrayOffset = layerCount;

                layerCount += lightData.maxViewports;
                std::vector<CamState> cameras;
                getCameras(context, refCamData, cameras, light);
                auto nearest = calculateSplits(context, lightData, transformSystem->getLocalToWorld(transformComponent), refCamData, cameras,
                                               context->variables->get("renderer.maxShadowDistance", 30000.0f));
                for (size_t i = 0; i < lightData.numViewports; ++i) {
                    auto frame = context->time->getFrame();
                    if (lightData.shadowUpdate == ShadowUpdate::Partial) {
                        lightData.frameMod[i] = (uint16_t)lightData.numViewports;
                        lightData.frameOffset[i] = (uint16_t)i;
                    }
                    else if (lightData.shadowUpdate == ShadowUpdate::Static) {
                        lightData.frameMod[i] = 0;
                        lightData.frameOffset[i] = 0;
                    }
                    else if (lightData.shadowUpdate == ShadowUpdate::StaticPartial) {
                        lightData.frameMod[i] = (uint16_t)lightData.numViewports;
                        lightData.frameOffset[i] = (uint16_t)i;
                    }
                    else if (lightData.shadowUpdate == ShadowUpdate::None) {
                        lightData.frameMod[i] = 1;
                        lightData.frameOffset[i] = static_cast<uint16_t>(-1);
                    }
                    else {
                        lightData.frameMod[i] = 0;
                        lightData.frameOffset[i] = 0;
                    }
                    if (lightData.frameMod[i] != 0) {
                        if ((frame % lightData.frameMod[i]) != lightData.frameOffset[i]) {

                    auto n = 0 < i ? lightData.nearDepths[i - 1] : nearest;
                    auto f = lightData.farDepths[i];

                    auto frustaPoints = lightData.frustaPointsCapture ? &lightData.frustaPoints[i] : nullptr;

                        frustaPoints->points.clear();
                        frustaPoints->offsets.clear();

                    glm::mat4 lightView = createLightViewMatrix(lightData.cascadeLine, transformComponent->rotation);
                    glm::uvec2 blueNoiseOffset;
                    glm::mat4 rawCascadeCullMatrix;
                    createFrustumFitMatrix(context, lightData.lightRawProjection[i], rawCascadeCullMatrix, frustaPoints, refCamData, cameras,
                                           lightView, lightData.cascadeLine, n, f, frustumSlack, lightData.textureSize, blueNoiseOffset);
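                    // Fill in the CameraData used by this cascade's shadow render pass;
                    // cascadeProjectionMatrix is presumably the renderer-adjusted version of
                    // lightData.lightRawProjection[i] (IRenderer::getProjectionMatrix), while the
                    // "raw" matrices keep the unadjusted projection.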
                    CameraData& lightCameraData = lightData.lightCameraData[i];

                    lightCameraData.layerMask = RenderLayers::Default;

                    lightCameraData.viewMatrix = lightView;
                    lightCameraData.projectionMatrix = cascadeProjectionMatrix;

                    lightCameraData.viewProjection = cascadeProjectionMatrix * lightView;
                    lightCameraData.inverseViewMatrix = glm::inverse(lightView);
                    lightCameraData.inverseViewProjectionMatrix = glm::inverse(lightCameraData.viewProjection);
                    lightCameraData.inverseProjectionMatrix = glm::inverse(cascadeProjectionMatrix);

                    lightCameraData.rawViewProjection = lightData.lightRawProjection[i] * lightView;
                    lightCameraData.rawViewCullMatrix = rawCascadeCullMatrix * lightView;

                    lightCameraData.passOptions = passOptions;

                    lightCameraData.viewportOrigin = { 0, 0 };

                    lightCameraData.viewportSize = { lightData.textureSize, lightData.textureSize };
                    lightCameraData.blueNoiseOffset = blueNoiseOffset;

                    lightCameraData.depthClamp = 0;

                    lightCameraData.frustum = Geometry::calculateFrustum<Geometry::Frustum, glm::mat4>(lightCameraData.viewProjection);
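            // Point lights: shadowed with a six-face cube map. Without texture-cube-array support
            // there is only a single cube map, so shadow casting is presumably limited to one
            // point light and disabled for the rest.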
            if (!useTextureCubeArrays && lightData.castShadows) {

                lightData.castShadows = false;

            glm::vec3 lightPosition = transformSystem->getLocalToWorld(transformComponent) * glm::vec4(0, 0, 0, 1);
            lightData.lightDirection = glm::vec4(0, 0, -1, 1);
            lightData.lightPosition = glm::vec4(lightPosition, 1);

            if (lightData.castShadows) {

                auto passOptions = &lightData.passOptions;
                passOptions->unsetFlag(RenderPassOptions::Flags::NoDepthClip);

                lightData.shadowUpdate = shadowUpdate;
                lightData.shadowTexture = cubeArray;
                lightData.maxViewports = 6;
                lightData.numViewports = lightData.maxViewports;
                lightData.arrayOffset = cubeInstances * lightData.numViewports;

                lightData.nearDepths[0] = glm::max(0.1f, light.range / 1000.0f);

                lightData.farDepths[0] = light.range;
                for (size_t i = 0; i < lightData.numViewports; ++i) {
                    auto frame = context->time->getFrame();
                    if (lightData.shadowUpdate == ShadowUpdate::Partial) {
                        lightData.frameMod[i] = (uint16_t)lightData.numViewports;
                        lightData.frameOffset[i] = (uint16_t)i;
                    }
                    else if (lightData.shadowUpdate == ShadowUpdate::Static) {
                        lightData.frameMod[i] = 0;
                        lightData.frameOffset[i] = 0;
                    }
                    else if (lightData.shadowUpdate == ShadowUpdate::StaticPartial) {
                        lightData.frameMod[i] = (uint16_t)lightData.numViewports;
                        lightData.frameOffset[i] = (uint16_t)i;
                    }
                    else if (lightData.shadowUpdate == ShadowUpdate::None) {
                        lightData.frameMod[i] = 1;
                        lightData.frameOffset[i] = static_cast<uint16_t>(-1);
                    }
                    else {
                        lightData.frameMod[i] = 0;
                        lightData.frameOffset[i] = 0;
                    }
                    if (lightData.frameMod[i] != 0) {
                        if ((frame % lightData.frameMod[i]) != lightData.frameOffset[i]) {

                    CameraData& lightCameraData = lightData.lightCameraData[i];
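                    // Each cube face renders with a 90-degree (pi/2) perspective projection over
                    // [nearDepths[0], farDepths[0]]; the extra matrix multiply presumably flips
                    // the Y axis for the cube-face convention, hence flipWindingOrder is set.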
                    glm::mat4 lightProjection = glm::perspective(glm::pi<float>() / 2.0f,

                                                                 lightData.nearDepths[0],
                                                                 lightData.farDepths[0]);

                    lightProjection = glm::mat4(1.f, 0.f, 0.f, 0.f,

                                                0.f, 0.f, 0.f, 1.f) * lightProjection;
                    lightCameraData.flipWindingOrder = true;

                    glm::vec3 directions[6] = {

                    const glm::mat4 lightView = glm::lookAt(lightPosition, lightPosition + directions[i], ups[i]);

                    lightCameraData.layerMask = RenderLayers::Default;
                    lightCameraData.viewMatrix = lightView;
                    lightCameraData.projectionMatrix = lightProjection;
                    lightCameraData.inverseViewMatrix = glm::inverse(lightView);
                    lightCameraData.viewProjection = lightProjection * lightView;
                    lightCameraData.inverseViewProjectionMatrix = glm::inverse(lightCameraData.viewProjection);
                    lightCameraData.inverseProjectionMatrix = glm::inverse(lightProjection);

                    lightCameraData.rawViewProjection = lightProjection * lightCameraData.viewMatrix;
                    lightCameraData.rawViewCullMatrix = lightCameraData.rawViewProjection;
                    lightCameraData.passOptions = passOptions;
                    lightCameraData.viewportOrigin = { 0, 0 };
                    lightCameraData.viewportSize = { pointShadowResolution, pointShadowResolution };
                    lightCameraData.depthClamp = -std::numeric_limits<float>::max();

                    lightCameraData.frustum = Geometry::calculateFrustum<Geometry::Frustum, glm::mat4>(lightCameraData.viewProjection);
    if (!cascadeInstances) cascadeInstances = 1;

    if (cascadeInstances && (layerCount != currentLayerCount ||
                             cascadeShadowResolution != currentTextureSize ||
                             cascadeShadowFormat != currentShadowFormat))

        assert(cascadeArray);

        cascadeArray->description.target = ResourceDimensions::Texture2DArray;
        cascadeArray->description.layers = layerCount;
        cascadeArray->description.width = cascadeShadowResolution;
        cascadeArray->description.height = cascadeShadowResolution;
        cascadeArray->description.format = cascadeShadowFormat;

        cascadeArray->setChanged();

        currentLayerCount = layerCount;
        currentTextureSize = cascadeShadowResolution;
        currentShadowFormat = cascadeShadowFormat;

    if (cubeInstances && (cubeInstances != currentCubeCount ||
                          pointShadowResolution != currentPointShadowResolution ||
                          pointShadowFormat != currentPointShadowFormat))

        cubeArray->description.target = useTextureCubeArrays ? ResourceDimensions::TextureCubeArray : ResourceDimensions::TextureCube;
        cubeArray->description.layers = useTextureCubeArrays ? cubeInstances : 1;
        cubeArray->description.faces = 6;
        cubeArray->description.width = pointShadowResolution;
        cubeArray->description.height = pointShadowResolution;
        cubeArray->description.format = pointShadowFormat;

        cubeArray->setChanged();

        currentCubeCount = cubeInstances;
        currentPointShadowResolution = pointShadowResolution;
        currentPointShadowFormat = pointShadowFormat;
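    // Presumably part of preRender: walk the shadow-casting lights again and perform per-viewport
    // work (for example culling) for each cascade or cube-face camera.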
    for (const auto& light : pool) {
        auto& lightData = getData(&light);

        if (!lightData.castShadows)
            continue;

        for (size_t i = 0; i < lightData.numViewports; ++i) {
            auto& lightCameraData = lightData.lightCameraData[i];

        for (size_t i = 0; i < 6; ++i) {
            auto& lightCameraData = lightData.lightCameraData[i];