#pragma warning(disable: 4127)

#include <glm/ext/matrix_clip_space.hpp>

#include "LightSystem.h"

#include "Rendering/ICapabilities.h"

#include "Services/Time.h"
#include "Scene/GetBounds.h"

#include "Systems/Core/TransformSystem.h"
#include "Systems/Core/CameraSystem.h"

#include "Renderer/CullingManager.h"
#include "Renderer/IRenderer.h"

#include "Resources/TextureManager.h"
#include "Resources/MaterialManager.h"

#include "Utilities/Parsing.h"
#include "Utilities/Math.h"

#include <glm/gtx/compatibility.hpp>
#include <glm/gtc/color_space.hpp>
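// Per-camera state used when fitting shadow cascades: the inverses of the camera's
// raw (renderer-unadjusted) projection and view-projection matrices.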
    glm::mat4 rawInverseProjection;
    glm::mat4 rawInverseViewProjection;
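// The eight corners of the clip-space cube and the index pairs forming its twelve edges;
// both are used below to turn camera frusta into light-space points.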
static const glm::vec3 corners[] =

static const uint32_t edgeIndicesData[][2] = {
static void clipLineSegments(std::vector<glm::vec3>& E, const glm::vec4 plane)
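    // Clips the line segments stored as consecutive point pairs in E against the given plane:
    // pairs with both endpoints on the same side are left alone (the early continue below),
    // while crossing pairs are cut at the plane using the standard line/plane intersection
    // parameter t = -dot(vec4(a, 1), plane) / dot(b - a, plane.xyz).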
    for (size_t i = 0; i < E.size(); i += 2) {
        const auto a = E[i + 0];
        const auto b = E[i + 1];
        const float d0 = glm::dot(glm::vec4(a, 1.f), plane);
        const float d1 = glm::dot(glm::vec4(b, 1.f), plane);

        auto num = glm::dot(d, glm::vec3(plane));
        if (glm::abs(num) < 0.01f) {

        if (b0 == b1)
            continue;

        auto t = -glm::dot(glm::vec4(a, 1.f), plane) / num;

    assert((E.size() & 1) == 0);
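// Builds a rotation matrix that maps the (normalized) 2D direction d onto the +Y axis in
// the XY plane.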
static glm::mat4 rotateVecToY(const glm::vec2& d)
    auto l2 = glm::dot(d, d);

    const auto r = 1.f / glm::sqrt(l2);

    return glm::mat4( c, s, 0, 0,
static glm::mat4 createLightViewMatrix(const glm::vec4&, const glm::quat& rotation)
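    // Builds the light's view matrix from its orientation (conjugate quaternion), then
    // re-rotates it in the view plane so that a stable world axis (Y by default, or Z when
    // world Z already maps mostly onto light-space up) lands on light-space +Y.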
    const glm::mat4 lightView = glm::mat4_cast(glm::conjugate(rotation));

    auto lz = glm::abs(euclidean(lightView * glm::vec4(0, 0, 1, 1)));

    glm::vec3 align_axis = glm::vec3(0, 1, 0);
    if (lz.y > lz.x && lz.y > lz.z) {
        align_axis = glm::vec3(0, 0, 1);

    auto cascadeAxisLW = glm::vec3(glm::vec2(glm::mat3(lightView) * glm::vec3(align_axis)), 0.f);
    return rotateVecToY(cascadeAxisLW) * lightView;
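// Given a transform M, finds which of its basis axes has the smallest component along Z
// and returns a rotation (via rotateVecToY) that presumably aligns that axis' XY projection
// with +Y; used to orient the cascade cull box more tightly around the camera frustum.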
static glm::mat4 createRotaryAlignmentMatrix(const glm::mat4& M)
    auto ro = euclidean(M * glm::vec4(0, 0, 0, 1));
    auto rx = euclidean(M * glm::vec4(1, 0, 0, 1)) - ro;
    auto ry = euclidean(M * glm::vec4(0, 1, 0, 1)) - ro;
    auto rz = euclidean(M * glm::vec4(0, 0, 1, 1)) - ro;

    auto xz = glm::abs(rx.z);
    auto yz = glm::abs(ry.z);
    auto zz = glm::abs(rz.z);

    if (xz < yz && xz < zz) {

    return rotateVecToY(d);
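// Fits an orthographic projection (and a separate, tighter cull matrix) around the slice of
// every relevant camera frustum between cascade depths n and f, expressed in light-view
// space. Also snaps the cascade to shadow-map texels, reports the matching blue-noise
// offset, and only rebuilds the projection when the previous frame's fit no longer contains
// the slice within the configured slack.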
static void createFrustumFitMatrix(Context* context,
                                   glm::mat4& rawCascadeProjectionMatrix,
                                   glm::mat4& rawCascadeCullMatrix,

                                   const std::vector<CamState>& camStates,
                                   const glm::mat4& lightView,
                                   const glm::vec4& cascadeLine,
                                   const float n,
                                   const float f,

                                   glm::uvec2& blueNoiseOffset)
    auto toCull = createRotaryAlignmentMatrix(lightView * refCamData->inverseViewMatrix);

    glm::vec3 min_lv(std::numeric_limits<float>::max());
    glm::vec3 min_cv = min_lv;
    glm::vec3 max_lv(-std::numeric_limits<float>::max());
    glm::vec3 max_cv = max_lv;

    for (const auto& camState : camStates) {
        const auto M = lightView * camState.rawInverseViewProjection;
        if (camState.cameraData == refCamData) {

            const auto zn = glm::dot(row2, glm::vec4(0, 0, -n, 1));
            const auto wn = glm::dot(row3, glm::vec4(0, 0, -n, 1));
            const auto zf = glm::dot(row2, glm::vec4(0, 0, -f, 1));
            const auto wf = glm::dot(row3, glm::vec4(0, 0, -f, 1));
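            // zn/wn and zf/wf are the clip-space z and w of the cascade's near and far depths
            // (row2/row3 presumably being the third and fourth rows of the reference camera's raw
            // projection); the loop below reconstructs the four corners of each slice plane from
            // them and accumulates light-view and cull-space bounds.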
            for (unsigned i = 0; i < 4; i++) {
                const auto pn = euclidean(M * glm::vec4(wn * glm::vec2(corners[i]), zn, wn));
                const auto pf = euclidean(M * glm::vec4(wf * glm::vec2(corners[i]), zf, wf));

                frustaPoints->points.push_back(pn);
                frustaPoints->points.push_back(pf);

                min_lv = glm::min(min_lv, glm::min(pn, pf));
                max_lv = glm::max(max_lv, glm::max(pn, pf));

                min_cv = glm::min(min_cv, glm::min(glm::vec3(toCull * glm::vec4(pn, 1)),
                                                   glm::vec3(toCull * glm::vec4(pf, 1))));
                max_cv = glm::max(max_cv, glm::max(glm::vec3(toCull * glm::vec4(pn, 1)),
                                                   glm::vec3(toCull * glm::vec4(pf, 1))));
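            // Other cameras: transform all eight frustum corners into light view, collect the
            // twelve frustum edges as line segments, clip them against the cascade's near and far
            // planes along the split axis, and accumulate whatever survives into the same bounds.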
            for (unsigned i = 0; i < 8; i++) {
                p[i] = euclidean(M * glm::vec4(corners[i], 1.f));

            std::vector<glm::vec3> E;
            for (const auto& e : edgeIndicesData) {
                E.push_back(p[e[0]]);
                E.push_back(p[e[1]]);

            clipLineSegments(E, lightView * glm::vec4(glm::vec3(cascadeLine), cascadeLine.w - n));
            clipLineSegments(E, -lightView * glm::vec4(glm::vec3(cascadeLine), cascadeLine.w - f));
            for (const auto& pp : E) {

                frustaPoints->points.push_back(pp);

                min_lv = glm::min(min_lv, pp);
                max_lv = glm::max(max_lv, pp);

                min_cv = glm::min(min_cv, glm::vec3(toCull * glm::vec4(pp, 1)));
                max_cv = glm::max(max_cv, glm::vec3(toCull * glm::vec4(pp, 1)));

        frustaPoints->offsets.push_back(unsigned(frustaPoints->points.size()));
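    // Fit a square, padded orthographic volume around the accumulated light-view bounds: the
    // footprint is the larger of the X/Y extents centred on the bounds, and the depth range is
    // padded by frustumSlack (with at least 10% extra towards the light) so nearby casters just
    // outside the slice still land in the shadow map.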
    const float size = std::max((max_lv.x - min_lv.x),
                                (max_lv.y - min_lv.y));
    glm::vec2 center_lv = 0.5f * glm::vec2(min_lv + max_lv);
    const float zNear = -max_lv.z - std::max(0.1f, frustumSlack) * (max_lv.z - min_lv.z);
    const float zFar = -min_lv.z + frustumSlack * (max_lv.z - min_lv.z);
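    // Decide whether last frame's cascade projection (passed in via rawCascadeProjectionMatrix)
    // can be reused: every corner of the newly fitted box must still land inside clip space, yet
    // close enough to its border that the old fit is not overly loose. A slack of zero forces a
    // refit every frame.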
    bool needNewFrustum = frustumSlack == 0.f;
    const float factor = 0.5f * (1.f + frustumSlack);
    const float greaterThanValue = glm::max(0.f, 1.f - 2.f * frustumSlack);
    for (unsigned i = 0; i < 8u; i++) {
        glm::vec4 p = rawCascadeProjectionMatrix * glm::vec4(center_lv.x + 0.5 * (((i >> 0) & 1) ? -size : size),
                                                             center_lv.y + 0.5 * (((i >> 1) & 1) ? -size : size),
                                                             ((i >> 2) & 1) ? min_lv.z : max_lv.z,
                                                             1.f);
        bool xOk = ((-p.w <= p.x) && (p.x <= -greaterThanValue * p.w)) || ((greaterThanValue * p.w <= p.x) && (p.x <= p.w));
        bool yOk = ((-p.w <= p.y) && (p.y <= -greaterThanValue * p.w)) || ((greaterThanValue * p.w <= p.y) && (p.y <= p.w));
        bool zOk = ((-p.w <= p.z) && (p.z <= -0.5f * p.w)) || ((0.5f * p.w <= p.z) && (p.z <= p.w));
        needNewFrustum = needNewFrustum || !(xOk && yOk && zOk);
    if (needNewFrustum) {
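        // Refit: quantize the texel size to discrete steps (steppyness) and snap the cascade
        // centre, expressed relative to the world origin, to whole shadow-map texels so the
        // cascade does not shimmer as the camera moves; the snapped texel coordinates modulo 64
        // become this cascade's blue-noise offset.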
        const float steppyness = 5.f;
        const float texelSize = std::exp2(std::ceil(std::log2(size / resolution) * steppyness) / steppyness);
        const glm::dvec3 origin = context->transformSystem->getOrigin();
        const glm::dvec3 offset = glm::dmat3(lightView) * origin;
        const double snappX = std::floor((center_lv.x + offset.x) / texelSize);
        const double snappY = std::floor((center_lv.y + offset.y) / texelSize);
        double snapX_ = std::fmod(snappX, 64.0);
        if (snapX_ < 0.0) snapX_ += 64.0;
        double snapY_ = std::fmod(snappY, 64.0);
        if (snapY_ < 0.0) snapY_ += 64.0;

        blueNoiseOffset = glm::uvec2(static_cast<uint32_t>(snapX_), static_cast<uint32_t>(snapY_));
        blueNoiseOffset = glm::uvec2(blueNoiseOffset.x, (64u - blueNoiseOffset.y) % 64u);
        const glm::vec2 viewportMin = center_lv - glm::vec2(factor * size);
        const glm::vec2 viewportMax = center_lv + glm::vec2(factor * size);

        frustaPoints->viewportMin = viewportMin;
        frustaPoints->viewportMax = viewportMax;

        rawCascadeProjectionMatrix = glm::ortho(viewportMin.x, viewportMax.x,
                                                viewportMin.y, viewportMax.y,
                                                zNear, zFar);

    rawCascadeCullMatrix = glm::ortho(min_cv.x, max_cv.x,
                                      min_cv.y, max_cv.y,
                                      zNear, zFar) * toCull;
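// Camera gathering (getCameras): resolve the reference camera, presumably from the light's
// lodReference entity when present and otherwise from the main camera, then collect a CamState
// for every camera listed on the light (or just the reference camera when none are given) and
// cache each camera's inverse raw view-projection matrix.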
    refCamData = &context->cameraSystem->getData(c);

    refCamData = &context->cameraSystem->getMainCameraData();

    for (auto& we : light.cameras) {
        if (auto e = we.lock(); e) {

            camStates.emplace_back();
            camStates.back().cameraData = &context->cameraSystem->getData(c);

    if (camStates.empty() && refCamData) {
        camStates.emplace_back();
        camStates.back().cameraData = refCamData;

    for (auto& camState : camStates) {
        camState.rawInverseViewProjection = glm::inverse(camState.cameraData->rawViewProjection);
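// Dynamically chooses how many cascades to render: the ratio between the frustum cross-section
// at zFar and at zNear (fs / ns - 1) gives the cascade count, clamped to [1, maxViewports].
// `a` is presumably the half field-of-view in radians.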
static void calculateCascadeCount(LightData& lightData, float zNear, float zFar, float FOV)
    if (!lightData.dynamicCascadeCount) return;

    float tana = tanf(a);
    float ns = tana * zNear;
    float fs = tana * zFar;
    float r = fs / ns - 1.0f;
    lightData.numViewports = (uint16_t)ceil(r);
    lightData.numViewports = std::max(lightData.numViewports, (uint16_t)1);
    lightData.numViewports = std::min(lightData.numViewports, (uint16_t)lightData.maxViewports);
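// Splits the [zNear, zFar] range into numViewports cascades. Each split blends the linear
// scheme z = zNear + s * (zFar - zNear) with the logarithmic scheme z = zNear * (zFar / zNear)^s
// using shadows.cascades.expFactor, and neighbouring cascades overlap by
// shadows.cascades.overlapFactor of a cascade's width.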
static void calculateSplits(Context* context, LightData& lightData, float zNear, float zFar)
    const auto expFactor = context->variables->get("shadows.cascades.expFactor")->getFloat();
    const auto overlapFactor = 0.5f * context->variables->get("shadows.cascades.overlapFactor")->getFloat();

    const float cascadeWidth = 1.f / static_cast<float>(lightData.numViewports);
    if (lightData.numViewports == 1) {
        lightData.nearDepths[0] = zNear;
        lightData.farDepths[0] = zFar;

    for (int i = 0; i < lightData.numViewports; ++i) {
        const float iF = std::min(1.f, cascadeWidth * (static_cast<float>(i) + 1.f + overlapFactor));
        const float iN = std::max(0.f, cascadeWidth * (static_cast<float>(i) - overlapFactor));

        const float zExpNear = zNear * glm::pow(zFar / zNear, iN);
        const float zLinearNear = zNear + iN * (zFar - zNear);
        lightData.nearDepths[i] = glm::max(glm::lerp(zLinearNear, zExpNear, expFactor), zNear);
        assert(std::isfinite(lightData.nearDepths[i]));

        const float zExp = zNear * glm::pow(zFar / zNear, iF);
        const float zLinear = zNear + iF * (zFar - zNear);

        lightData.farDepths[i] = glm::lerp(zLinear, zExp, expFactor);
        assert(std::isfinite(lightData.farDepths[i]));
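// Variant that also derives the usable [zNear, zFar] range along the cascade split axis. It
// records the reference camera's view axis as lightData.cascadeLine (a plane a*x + d), extends
// the range to cover every contributing camera frustum, and, when tightShadowBounds is set,
// clamps the range against the scene's shadow-casting bounding box as seen from the light.
// Returns the depth used as the first cascade's near plane.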
static float calculateSplits(Context* context,
                             LightData& lightData,
                             const glm::mat4& rotation,
                             const CameraData* refCamData,
                             const std::vector<CamState>& camStates,
                             const float maxShadowDistance)
    const glm::mat4& M = refCamData->inverseViewMatrix;
    const glm::vec3 o = glm::vec3(M * glm::vec4(0, 0, 0, 1));
    const glm::vec3 a = glm::normalize(glm::vec3(M * glm::vec4(0, 0, -1, 0)));
    const float d = -glm::dot(o, a);
    assert(std::isfinite(d));
    lightData.cascadeLine = glm::vec4(a, d);

    auto zNear = refCamData->nearDistance;
    auto zFar = refCamData->farDistance;
    for (auto& camState : camStates) {
        for (auto c : corners) {
            auto q = camState.rawInverseViewProjection * glm::vec4(c, 1.f);
            if (std::numeric_limits<float>::epsilon() < q.w) {
                auto t = glm::dot(a, (1.f / q.w) * glm::vec3(q)) + d;
                assert(std::isfinite(t));

                zNear = glm::min(zNear, t);
                zFar = glm::max(zFar, t);

    if (!lightData.tightShadowBounds) {
        calculateCascadeCount(lightData, zNear, zFar, refCamData->fieldOfView);
        calculateSplits(context, lightData, refCamData->nearDistance, std::min(zFar, refCamData->nearDistance + maxShadowDistance));
        return std::max(zNear, refCamData->nearDistance - maxShadowDistance);
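    // Tight bounds: clamp the split range against the scene's shadow bounding box. For each
    // camera, when the light direction projects inside the viewport in NDC, the box's own extent
    // along the split axis bounds the range directly; otherwise rays through the viewport corners
    // are intersected with planes through the box corners (planes containing the light direction)
    // to find how far along the split axis shadows can still reach the view.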
    const Cogs::Geometry::BoundingBox bbox = context->bounds->getShadowBounds(context);
    const glm::vec3 bbox_corners[] = {
        glm::vec3(bbox.min.x, bbox.min.y, bbox.min.z),
        glm::vec3(bbox.max.x, bbox.min.y, bbox.min.z),
        glm::vec3(bbox.max.x, bbox.max.y, bbox.min.z),
        glm::vec3(bbox.min.x, bbox.max.y, bbox.min.z),
        glm::vec3(bbox.min.x, bbox.min.y, bbox.max.z),
        glm::vec3(bbox.max.x, bbox.min.y, bbox.max.z),
        glm::vec3(bbox.max.x, bbox.max.y, bbox.max.z),
        glm::vec3(bbox.min.x, bbox.max.y, bbox.max.z),
    };
    const glm::vec2 viewport_corners[] = { {-1, -1}, {1, -1}, {1, 1}, {-1, 1} };

    float zmin = std::numeric_limits<float>::max();

    glm::vec3 lightDir = glm::mat3(rotation) * glm::vec3(0, 0, -1);

    glm::vec3 planeTangent = glm::cross(a, lightDir);
    glm::vec3 n = glm::normalize(glm::cross(lightDir, planeTangent));

    for (auto& camState : camStates) {
        const CameraData& cameraData = *camState.cameraData;
        const glm::vec3 ndcLightDir = euclidean(cameraData.rawProjectionMatrix * glm::vec4(glm::mat3(cameraData.viewMatrix) * lightDir, 1.0f));
        const float eps = 0.0001f;
        const bool along = ndcLightDir.z <= 0.0f;

        float azmin = std::numeric_limits<float>::max();
        float azmax = -std::numeric_limits<float>::max();
        for (const glm::vec3& p0 : bbox_corners) {
            float t = glm::dot(a, p0) + d;
            azmin = std::min(azmin, t);
            azmax = std::max(azmax, t);

        if (std::abs(ndcLightDir.x) - eps <= 1.0f && std::abs(ndcLightDir.y) - eps <= 1.0f) {

            if (along) zmin = std::max(zmin, azmin);
            else zmax = std::min(zmax, azmax);

        for (const glm::vec3& p0 : bbox_corners) {

            for (const glm::vec2& corn : viewport_corners) {

                glm::vec3 l0 = glm::vec3(cameraData.inverseViewMatrix * glm::vec4(0, 0, 0, 1));
                glm::vec3 l = glm::normalize(euclidean(camState.rawInverseViewProjection * glm::vec4(corn, 1.0f, 1.f)) - l0);

                float LdotN = glm::dot(l, n);
                float ld = glm::dot(p0 - l0, n) / LdotN;
                glm::vec3 p = l0 + ld * l;
                float t = glm::dot(a, p) + d;

                if (along) t = std::max(t, azmin);
                else t = std::min(t, azmax);

                t = std::max(0.0f, t);
                zmin = std::min(zmin, t);
                zmax = std::max(zmax, t);

    zNear = glm::max(zNear, zmin);
    zFar = glm::min(zFar, zmax);

    zFar = glm::max(zNear, zFar);

    zNear = std::max(zNear, refCamData->nearDistance);
    zFar = std::min(zFar, refCamData->nearDistance + maxShadowDistance);
    calculateCascadeCount(lightData, zNear, zFar, refCamData->fieldOfView);
    calculateSplits(context, lightData, zNear, zFar);
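// LightSystem::initialize: query device capabilities and create the shared shadow map
// resources, a 2D texture array for directional-light cascades and a cube map array for
// point lights. LightSystem::update then starts by reading the shadow configuration
// variables (soft shadows, update mode, resolutions, formats, slack, enable flags).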
    const bool originOnTop = context->device->getCapabilities()->getDeviceCapabilities().OriginOnTop;

    cascadeArray = context->textureManager->create();
    cascadeArray->setName("Light.ShadowCascades");

    const bool useTextureCubeArrays = context->device->getCapabilities()->getDeviceCapabilities().TextureCubeArrays;

    cubeArray = context->textureManager->create();
    cubeArray->setName("Light.ShadowCubeArray");
    auto transformSystem = context->transformSystem;
    auto variables = context->variables.get();

    softShadows = parseEnum<SoftShadows>(variables->get("shadows.softShadows", "Default"));
    auto shadowUpdate = parseEnum<ShadowUpdate>(variables->get("shadows.update", "Default"));
    const auto pointShadowResolution = static_cast<unsigned>(std::max(1, variables->get("shadows.pointShadowResolution", 256)));

    unsigned cascadeShadowResolution = (unsigned)std::max(0, variables->get("shadows.cascadeShadowResolution", 1024));

    const float frustumSlack = glm::clamp(variables->get("shadows.frustumSlack", 0.1f), 0.f, 1.f);

    const auto pointShadowFormat = parseTextureFormat(variables->get("shadows.pointShadowFormat", "R32_TYPELESS"));
    const auto cascadeShadowFormat = parseTextureFormat(variables->get("shadows.cascadeShadowFormat", "R32_TYPELESS"));
    const bool shadowsEnabled = variables->get("renderer.shadowsEnabled", false);

    const bool lightSystemRun = variables->getOrAdd("lightSystem.run", true);
    if (!lightSystemRun) {

    bool anyChanged = false;
    for (const auto& light : pool) {

    lightsChanged |= anyChanged;

    uint32_t cascadeInstances = 0;
    uint32_t layerCount = 0;

    uint32_t cubeInstances = 0;
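    // Main per-light pass: refresh derived light data (color, direction, position) and, for
    // shadow-casting lights, rebuild the per-viewport shadow cameras. Directional lights take
    // the cascade path below; point lights take the cube-map path further down.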
    for (const auto& light : pool) {

        auto& lightData = getData(&light);
        lightData.enabled = light.enabled;

        lightData.lightColor = glm::vec4(glm::convertSRGBToLinear(glm::vec3(light.lightColor)),

        lightData.lightDirection = transformSystem->getLocalToWorld(transformComponent) * glm::vec4(0, 0, -1, 0);
        lightData.lightDirection = glm::normalize(lightData.lightDirection);

        lightData.lightPosition = glm::vec4(0, 0, 0, 0);
        if (lightData.castShadows) {

            uint32_t framesSinceDirty = context->time->getFrame() - context->engine->getLastDirtyFrame();
            if (framesSinceDirty <= lightData.maxViewports) {
                context->engine->triggerUpdate();

            auto passOptions = &lightData.passOptions;
            passOptions->setFlag(RenderPassOptions::Flags::NoDepthClip);

            lightData.textureSize = cascadeShadowResolution;
            lightData.shadowUpdate = shadowUpdate;

            lightData.maxViewports = 4;
            lightData.numViewports = lightData.maxViewports;
            lightData.shadowTexture = cascadeArray;
            lightData.arrayOffset = layerCount;

            layerCount += lightData.maxViewports;
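            // Gather the relevant cameras, compute the cascade split depths, then rebuild each
            // cascade's shadow camera. frameMod/frameOffset stagger cascade updates across frames:
            // Partial and StaticPartial re-render one cascade per frame (frame % numViewports == i),
            // Static and the default leave frameMod at 0 which disables the modulo skip, and None
            // uses an offset that never matches so the cascade is never re-rendered.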
            std::vector<CamState> cameras;
            getCameras(context, refCamData, cameras, light);
            auto nearest = calculateSplits(context, lightData, transformSystem->getLocalToWorld(transformComponent), refCamData, cameras, context->variables->get("renderer.maxShadowDistance", 30000.0f));
            for (size_t i = 0; i < lightData.numViewports; ++i) {
                auto frame = context->time->getFrame();
                if (lightData.shadowUpdate == ShadowUpdate::Partial) {
                    lightData.frameMod[i] = (uint16_t)lightData.numViewports;
                    lightData.frameOffset[i] = (uint16_t)i;
                }
                else if (lightData.shadowUpdate == ShadowUpdate::Static) {
                    lightData.frameMod[i] = 0;
                    lightData.frameOffset[i] = 0;
                }
                else if (lightData.shadowUpdate == ShadowUpdate::StaticPartial) {
                    lightData.frameMod[i] = (uint16_t)lightData.numViewports;
                    lightData.frameOffset[i] = (uint16_t)i;
                }
                else if (lightData.shadowUpdate == ShadowUpdate::None) {
                    lightData.frameMod[i] = 1;
                    lightData.frameOffset[i] = static_cast<uint16_t>(-1);
                }
                else {
                    lightData.frameMod[i] = 0;
                    lightData.frameOffset[i] = 0;
                }

                if (lightData.frameMod[i] != 0) {
                    if ((frame % lightData.frameMod[i]) != lightData.frameOffset[i]) {

                auto n = 0 < i ? lightData.nearDepths[i - 1] : nearest;
                auto f = lightData.farDepths[i];

                auto frustaPoints = lightData.frustaPointsCapture ? &lightData.frustaPoints[i] : nullptr;

                frustaPoints->points.clear();
                frustaPoints->offsets.clear();

                glm::mat4 lightView = createLightViewMatrix(lightData.cascadeLine, transformComponent->rotation);
                glm::uvec2 blueNoiseOffset;
                glm::mat4 rawCascadeCullMatrix;
                createFrustumFitMatrix(context, lightData.lightRawProjection[i], rawCascadeCullMatrix, frustaPoints, refCamData, cameras,
                                       lightView, lightData.cascadeLine, n, f, frustumSlack, lightData.textureSize, blueNoiseOffset);
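                // Publish the cascade as a regular camera: view/projection pairs and their
                // inverses, the raw (renderer-unadjusted) view-projection plus the tighter cull
                // matrix, viewport, blue-noise offset and frustum. cascadeProjectionMatrix is
                // presumably the renderer-adjusted version of lightData.lightRawProjection[i].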
                CameraData& lightCameraData = lightData.lightCameraData[i];

                lightCameraData.layerMask = RenderLayers::Default;

                lightCameraData.viewMatrix = lightView;
                lightCameraData.projectionMatrix = cascadeProjectionMatrix;

                lightCameraData.viewProjection = cascadeProjectionMatrix * lightView;
                lightCameraData.inverseViewMatrix = glm::inverse(lightView);
                lightCameraData.inverseViewProjectionMatrix = glm::inverse(lightCameraData.viewProjection);
                lightCameraData.inverseProjectionMatrix = glm::inverse(cascadeProjectionMatrix);

                lightCameraData.rawViewProjection = lightData.lightRawProjection[i] * lightView;
                lightCameraData.rawViewCullMatrix = rawCascadeCullMatrix * lightView;

                lightCameraData.passOptions = passOptions;

                lightCameraData.viewportOrigin = { 0, 0 };

                lightCameraData.viewportSize = { lightData.textureSize, lightData.textureSize };
                lightCameraData.blueNoiseOffset = blueNoiseOffset;

                lightCameraData.depthClamp = 0;

                lightCameraData.frustum = Geometry::calculateFrustum<Geometry::Frustum, glm::mat4>(lightCameraData.viewProjection);
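        // Point lights: cube-map shadow casting requires TextureCubeArray support, otherwise the
        // light's shadows are disabled. The light position comes from its world transform and the
        // stored direction is a fixed local (0, 0, -1).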
        if (!useTextureCubeArrays && lightData.castShadows) {

            lightData.castShadows = false;

        glm::vec3 lightPosition = transformSystem->getLocalToWorld(transformComponent) * glm::vec4(0, 0, 0, 1);
        lightData.lightDirection = glm::vec4(0, 0, -1, 1);
        lightData.lightPosition = glm::vec4(lightPosition, 1);

        if (lightData.castShadows) {

            auto passOptions = &lightData.passOptions;
            passOptions->unsetFlag(RenderPassOptions::Flags::NoDepthClip);

            lightData.shadowUpdate = shadowUpdate;
            lightData.shadowTexture = cubeArray;
            lightData.maxViewports = 6;
            lightData.numViewports = lightData.maxViewports;
            lightData.arrayOffset = cubeInstances * lightData.numViewports;

            lightData.nearDepths[0] = glm::max(0.1f, light.range / 1000.0f);

            lightData.farDepths[0] = light.range;
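            // One shadow camera per cube face: the same frameMod/frameOffset scheduling as the
            // cascades, a 90-degree perspective projection from the light position along six axis
            // directions, and a winding-order flip paired with an axis-flipping matrix applied to
            // the projection.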
            for (size_t i = 0; i < lightData.numViewports; ++i) {
                auto frame = context->time->getFrame();
                if (lightData.shadowUpdate == ShadowUpdate::Partial) {
                    lightData.frameMod[i] = (uint16_t)lightData.numViewports;
                    lightData.frameOffset[i] = (uint16_t)i;
                }
                else if (lightData.shadowUpdate == ShadowUpdate::Static) {
                    lightData.frameMod[i] = 0;
                    lightData.frameOffset[i] = 0;
                }
                else if (lightData.shadowUpdate == ShadowUpdate::StaticPartial) {
                    lightData.frameMod[i] = (uint16_t)lightData.numViewports;
                    lightData.frameOffset[i] = (uint16_t)i;
                }
                else if (lightData.shadowUpdate == ShadowUpdate::None) {
                    lightData.frameMod[i] = 1;
                    lightData.frameOffset[i] = static_cast<uint16_t>(-1);
                }
                else {
                    lightData.frameMod[i] = 0;
                    lightData.frameOffset[i] = 0;
                }

                if (lightData.frameMod[i] != 0) {
                    if ((frame % lightData.frameMod[i]) != lightData.frameOffset[i]) {

                CameraData& lightCameraData = lightData.lightCameraData[i];

                glm::mat4 lightProjection = glm::perspective(glm::pi<float>() / 2.0f,

                                                             lightData.nearDepths[0],
                                                             lightData.farDepths[0]);

                lightProjection = glm::mat4(1.f, 0.f, 0.f, 0.f,

                                            0.f, 0.f, 0.f, 1.f) * lightProjection;
                lightCameraData.flipWindingOrder = true;

                glm::vec3 directions[6] = {

                const glm::mat4 lightView = glm::lookAt(lightPosition, lightPosition + directions[i], ups[i]);

                lightCameraData.layerMask = RenderLayers::Default;
                lightCameraData.viewMatrix = lightView;
                lightCameraData.projectionMatrix = lightProjection;
                lightCameraData.inverseViewMatrix = glm::inverse(lightView);
                lightCameraData.viewProjection = lightProjection * lightView;
                lightCameraData.inverseViewProjectionMatrix = glm::inverse(lightCameraData.viewProjection);
                lightCameraData.inverseProjectionMatrix = glm::inverse(lightProjection);

                lightCameraData.rawViewProjection = lightProjection * lightCameraData.viewMatrix;
                lightCameraData.rawViewCullMatrix = lightCameraData.rawViewProjection;
                lightCameraData.passOptions = passOptions;
                lightCameraData.viewportOrigin = { 0, 0 };
                lightCameraData.viewportSize = { pointShadowResolution, pointShadowResolution };
                lightCameraData.depthClamp = -std::numeric_limits<float>::max();

                lightCameraData.frustum = Geometry::calculateFrustum<Geometry::Frustum, glm::mat4>(lightCameraData.viewProjection);
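    // (Re)allocate the shared shadow textures when the layer count, resolution or format changed:
    // the cascade atlas is a 2D texture array with one layer per cascade, and the point-light
    // shadows use a cube map array (or a single cube map when arrays are unsupported).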
    if (!cascadeInstances) cascadeInstances = 1;

    if (cascadeInstances && (layerCount != currentLayerCount ||
                             cascadeShadowResolution != currentTextureSize ||
                             cascadeShadowFormat != currentShadowFormat))
    {
        assert(cascadeArray);

        cascadeArray->description.target = ResourceDimensions::Texture2DArray;
        cascadeArray->description.layers = layerCount;
        cascadeArray->description.width = cascadeShadowResolution;
        cascadeArray->description.height = cascadeShadowResolution;
        cascadeArray->description.format = cascadeShadowFormat;

        cascadeArray->setChanged();

        currentLayerCount = layerCount;
        currentTextureSize = cascadeShadowResolution;
        currentShadowFormat = cascadeShadowFormat;
    }

    if (cubeInstances && (cubeInstances != currentCubeCount ||
                          pointShadowResolution != currentPointShadowResolution ||
                          pointShadowFormat != currentPointShadowFormat))
    {
        cubeArray->description.target = useTextureCubeArrays ? ResourceDimensions::TextureCubeArray : ResourceDimensions::TextureCube;
        cubeArray->description.layers = useTextureCubeArrays ? cubeInstances : 1;
        cubeArray->description.faces = 6;
        cubeArray->description.width = pointShadowResolution;
        cubeArray->description.height = pointShadowResolution;
        cubeArray->description.format = pointShadowFormat;

        cubeArray->setChanged();

        currentCubeCount = cubeInstances;
        currentPointShadowResolution = pointShadowResolution;
        currentPointShadowFormat = pointShadowFormat;
    }
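    // Final pass over the lights: for each shadow-casting light, visit every shadow camera (one
    // per cascade, or one per cube face), presumably to register them with culling for rendering.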
    for (const auto& light : pool) {
        auto& lightData = getData(&light);

        if (!lightData.castShadows) continue;

        for (size_t i = 0; i < lightData.numViewports; ++i) {
            auto& lightCameraData = lightData.lightCameraData[i];

        for (size_t i = 0; i < 6; ++i) {
            auto& lightCameraData = lightData.lightCameraData[i];