1#include "ModelManager.h"
6#include "DataFetcherManager.h"
7#include "ResourceStore.h"
9#include "Services/TaskManager.h"
10#include "Services/Variables.h"
11#include "Services/Features.h"
13#include "Platform/Instrumentation.h"
15#include "Foundation/Logging/Logger.h"
16#include "Foundation/Platform/Timer.h"
23 constexpr Cogs::StringView forceFetchesInMainThreadName =
"resources.models.forceFetchesInMainThread";
24 constexpr Cogs::StringView timeLimitName =
"resources.models.mainThreadTimeLimitMs";
25 constexpr Cogs::StringView itemLimitName =
"resources.models.mainThreadItemLimit";
35 std::vector<Model *> deletionModels;
37 bool updateAsync =
false;
42 main(
std::this_thread::get_id()),
46 if (!context->
variables->exist(timeLimitName)) {
47 context->
variables->set(timeLimitName, 1.f);
49 if (!context->
variables->exist(itemLimitName)) {
50 context->
variables->set(itemLimitName, 10);
56 context->taskManager->wait(data->modelsGroup);
58 if (!context->variables->exist(forceFetchesInMainThreadName)) {
59 context->variables->set(forceFetchesInMainThreadName,
false);
70 loadInfo.modelFlags = flags;
72 if (context->variables->get(
"resources.models.autoReload",
false)) {
73 loadInfo.
loadFlags |= ResourceLoadFlags::AutoReload;
76 return loadResource(&loadInfo);
81 if (fetchedItems.empty())
return;
83 double timeLimitSeconds = 0.001 * context->variables->get(timeLimitName, 0.f);
84 int itemLimit = context->variables->get(itemLimitName, 0);
88 while (!fetchedItems.empty()) {
91 if (processFetchedItem(item.loadedLoader, item.loadInfo, std::move(item.data))) {
95 if ((0 < itemLimit) && (itemLimit <= modelsLoaded)) {
99 if ((0.f < timeLimitSeconds) && (timeLimitSeconds <= processTimer.elapsedSeconds())) {
107 if (!fetchedItems.empty()) {
109 context->engine->setDirty();
116 bool notCancelled =
false;
118 LockGuard guard(fetchIds.lock);
119 if (
auto it = fetchIds.map.find(
reinterpret_cast<size_t>(loadInfo->
handle.get())); it != fetchIds.map.end()) {
120 fetchIds.map.erase(it);
125 bool success =
false;
126 Model* model = get(loadInfo->
handle);
128 LOG_TRACE(logger,
"Abandoned model received in async callback, skipping further processing");
129 loadInfo->
handle->setFailedLoad();
138 model->setFailedLoad();
140 else if (loadedLoader->load(context, *loadInfo, std::move(data))) {
145 model->setFailedLoad();
147 setProcessed(loadInfo, !loadInfo->loadSync());
156 LOG_TRACE(logger,
"Abandoned model, skipping loading");
157 loadInfo->
handle->setFailedLoad();
158 setProcessed(loadInfo,
false);
163 if (!loadInfo->loadSync()
164 && context->features->prefers(PlatformPreference::AsyncFetch)
165 && !context->resourceStore->hasResource(loadInfo->
resourcePath)
166 && loadInfo->protocol != ResourceProtocol::Archive)
176 if (
size_t off = loadInfo->
resourcePath.find_last_of(
'.'); off != std::string::npos) {
178 bool stripSuffix =
true;
198 if (contentsHints != Cogs::FileContentsHints::None) {
203 if (loader = findLoader(loadInfo); loader) {
206 if (loadedLoader ==
nullptr) {
208 contentsHints = Cogs::FileContentsHints::None;
218 if (loadedLoader ==
nullptr) {
220 loader = findLoader(loadInfo);
221 if (loader ==
nullptr) {
222 LOG_ERROR(logger,
"No loader found for model %s.", loadInfo->
resourcePath.c_str());
224 model->setFailedLoad();
237 FileContents::Callback handleResult = [
this, ctx = context, loadedLoader, loadInfo](std::unique_ptr<FileContents> data)
240 if (main == std::this_thread::get_id()) {
241 fetchedItems.push(
FetchedItem{ .data = std::move(data), .loadInfo = loadInfo, .loadedLoader = loadedLoader });
242 ctx->engine->setDirty();
245 processFetchedItem(loadedLoader, loadInfo, std::move(data));
249 if (context->variables->get(forceFetchesInMainThreadName,
false)) {
250 FileContents::Callback handleResultInMainThread = [ctx = context, handleResult](std::unique_ptr<FileContents> data) {
251 auto task = [handleResult, dataPtr = data.release()]() { handleResult(std::unique_ptr<Cogs::FileContents>(dataPtr)); };
252 ctx->engine->runTaskInMainThread(std::move(task));
254 handleResult = handleResultInMainThread;
260 size_t modelKey =
reinterpret_cast<size_t>(loadInfo->
handle.get());
262 LockGuard guard(fetchIds.lock);
263 fetchIds.map[modelKey] = DataFetcherManager::NoFetchId;
266 DataFetcherManager::FetchId fetchId = DataFetcherManager::fetchAsync(context, path.empty() ? loadInfo->
resourcePath : path, handleResult, 0, 0,
true, contentsHints);
268 LockGuard guard(fetchIds.lock);
269 if (
auto it = fetchIds.map.find(modelKey); it != fetchIds.map.end()) {
271 it->second = fetchId;
282 if (!checkPreloaded(loadInfo)) {
291 if (loader ==
nullptr) {
292 loader = findLoader(loadInfo);
293 if (loader ==
nullptr) {
294 LOG_ERROR(logger,
"No loader found for model %s.", loadInfo->
resourceName.c_str());
296 model->setFailedLoad();
301 auto loadResource = [=,
this]()
mutable
305 LOG_DEBUG(logger,
"Abandoned model read, skipping further processing");
306 loadInfo->
handle->setFailedLoad();
309 bool success =
false;
311 loader = findLoader(loadInfo);
313 LOG_ERROR(logger,
"No loader found for model %s.", loadInfo->
resourceName.c_str());
318 success = loader->load(context, *loadInfo);
321 if (success ==
false) {
322 LOG_ERROR(logger,
"Error loading model from %s.", loadInfo->
resourceName.c_str());
323 model->setFailedLoad();
326 setProcessed(loadInfo, !loadInfo->loadSync());
329 if (loadInfo->loadSync() || !context->features->prefers(PlatformPreference::BackgroundTasks)) {
338void Cogs::Core::ModelManager::cancelModelLoad(
ModelHandle handle)
342 DataFetcherManager::FetchId
id = DataFetcherManager::NoFetchId;
344 LockGuard guard(fetchIds.lock);
345 if (
auto it = fetchIds.map.find(
reinterpret_cast<size_t>(handle.get())); it != fetchIds.map.end()) {
347 fetchIds.map.erase(it);
351 DataFetcherManager::cancelAsyncFetch(context,
id);
355void Cogs::Core::ModelManager::handleReload(ResourceHandleBase handle)
357 ModelHandle model(handle);
359 ModelLoadInfo & loadInfo = *createLoadInfo();
360 loadInfo.resourceId = model->getId();
361 loadInfo.resourcePath = model->getSource().to_string();
362 loadInfo.resourceName = model->getName().to_string();
364 loadInfo.handle = model;
366 loadResource(&loadInfo);
371 CpuInstrumentationScope(SCOPE_RESOURCES,
"ModelManager::processDeletion");
373 data->updateAsync = context->variables->get(
"resources.models.updateAsync",
false);
375 auto t = Timer::startNew();
377 ResourceManager::processDeletion();
379 if (!data->deletionModels.empty()) {
380 context->taskManager->enqueueChild(data->modelsGroup, [
this, deletionModels = std::move(data->deletionModels)]()
382 CpuInstrumentationScope(SCOPE_RESOURCES,
"ModelManager::processDeletionTask");
384 for (auto model : deletionModels) {
385 destroyLocked(model);
390 auto elapsed = t.elapsedSeconds();
392 if (elapsed > 0.005) {
393 LOG_DEBUG(logger,
"ModelManager::processDeletion elapsed: %f", elapsed);
399 if (!data->updateAsync) {
402 data->deletionModels.emplace_back(model);
406void Cogs::Core::ModelManager::destroyInternal(
ResourceBase * resource)
408 if (!data->updateAsync) {
409 ResourceManager::destroyInternal(resource);
A Context instance contains all the services, systems and runtime components needed to use Cogs.
std::unique_ptr< class TaskManager > taskManager
TaskManager service instance.
std::unique_ptr< class Variables > variables
Variables service instance.
ModelHandle loadModel(const StringView &resourceName, ResourceId resourceId, ModelLoadFlags flags)
Load a Model resource from the named resource given.
ModelManager(Context *context)
Constructs a ModelManager in the given context.
~ModelManager()
Destructs the ModelManager.
void postProcessLoading() override final
Hook for resource managers to run code at the tail of processLoading.
void handleLoad(ModelLoadInfo *loadInfo) override
Overridden to handle loading Model resources.
void handleDeletion(Model *model) override
Handler for deletion of resources.
void processDeletion() override
Process resources pending deallocation.
The generic resource manager provides a base implementation for specialized resource managers to build upon.
virtual void handleDeletion(ResourceType *)
Handler for deletion of resources.
static constexpr TaskQueueId ResourceQueue
Resource task queue.
Log implementation class.
Provides a weakly referenced view over the contents of a string.
size_t hashLowercase(size_t hashValue=Cogs::hash()) const noexcept
Get the hash code of the string converted to lowercase.
std::string to_string() const
String conversion method.
Contains the Engine, Renderer, resource managers and other systems needed to run Cogs.
ModelLoadFlags
Model loading flags. May be combined with resource loading flags.
ResourceLoadFlags
Flags for describing how to load a resource.
constexpr Log getLogger(const char(&name)[LEN]) noexcept
constexpr size_t hash() noexcept
Simple getter function that returns the initial value for fnv1a hashing.
@ BrotliDecompress
A hint that the contents are Brotli (Google) compressed and are allowed to be decompressed during transfer.
@ ZStdDecompress
A hint that the contents are Zstandard (Facebook) compressed and are allowed to be decompressed during transfer.
Model resources define a template for a set of connected entities, with resources such as meshes,...
Base class for engine resources.
uint32_t referenceCount() const
Get the current reference count.
std::string resourcePath
Resource path. Used to locate resource.
std::string resourceName
Desired resource name. If no name is given, a default name will be chosen.
ResourceId resourceId
Unique resource identifier. Must be unique among resources of the same kind.
ResourceHandleBase handle
Handle to resource structure for holding actual resource data.
ResourceLoadFlags loadFlags
Desired loading flags. Used to specify how the resource will be loaded.
Task id struct used to identify unique Task instances.