Cogs.Core
ModelManager.cpp
#include "ModelManager.h"

#include "Context.h"
#include "Model.h"

#include "DataFetcherManager.h"
#include "ResourceStore.h"

#include "Services/TaskManager.h"
#include "Services/Variables.h"
#include "Services/Features.h"

#include "Platform/Instrumentation.h"

#include "Foundation/Logging/Logger.h"
#include "Foundation/Platform/Timer.h"


namespace
{
    Cogs::Logging::Log logger = Cogs::Logging::getLogger("ModelManager");

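    // Keys for engine variables that throttle the per-frame main-thread work done in
    // postProcessLoading (a time budget in milliseconds and a fetched-model count), plus an
    // opt-in switch that forces all fetch callbacks onto the main thread.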
    constexpr Cogs::StringView forceFetchesInMainThreadName = "resources.models.forceFetchesInMainThread";
    constexpr Cogs::StringView timeLimitName = "resources.models.mainThreadTimeLimitMs";
    constexpr Cogs::StringView itemLimitName = "resources.models.mainThreadItemLimit";
}

namespace Cogs::Core
{
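    // Internal state for the manager: the task group used for background resource work,
    // models queued for deferred destruction, and the cached value of the
    // "resources.models.updateAsync" variable.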
    struct ModelManagerData
    {
        TaskId modelsGroup;

        std::vector<Model *> deletionModels;

        bool updateAsync = false;
    };
}

Cogs::Core::ModelManager::ModelManager(Context * context) :
    ResourceManager(context), // Assumed base-class initializer; the original line is not preserved in this listing.
    main(std::this_thread::get_id()),
    data(std::make_unique<ModelManagerData>())
{
    data->modelsGroup = context->taskManager->createGroup(TaskManager::ResourceQueue);
    if (!context->variables->exist(timeLimitName)) {
        context->variables->set(timeLimitName, 1.f);
    }
    if (!context->variables->exist(itemLimitName)) {
        context->variables->set(itemLimitName, 10);
    }
}
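
// Example (hypothetical application code, not part of this file): the per-frame budget can be
// tuned through the Variables service before models are loaded, e.g.
//
//     context->variables->set("resources.models.mainThreadTimeLimitMs", 4.f); // up to 4 ms per frame
//     context->variables->set("resources.models.mainThreadItemLimit", 25);    // or at most 25 fetched models per frame
//
// postProcessLoading() stops draining the fetch queue for the current frame once either limit is reached.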

Cogs::Core::ModelManager::~ModelManager()
{
    context->taskManager->wait(data->modelsGroup);

    if (!context->variables->exist(forceFetchesInMainThreadName)) {
        context->variables->set(forceFetchesInMainThreadName, false);
    }
    reportLeaks("Model");
}

Cogs::Core::ModelHandle Cogs::Core::ModelManager::loadModel(const StringView & resourceName, ResourceId id, ModelLoadFlags flags)
{
    ModelLoadInfo & loadInfo = *createLoadInfo();
    loadInfo.resourceId = id;
    loadInfo.resourcePath = resourceName.to_string();
    loadInfo.loadFlags = (ResourceLoadFlags)flags;
    loadInfo.modelFlags = flags;

    if (context->variables->get("resources.models.autoReload", false)) {
        loadInfo.loadFlags |= ResourceLoadFlags::AutoReload;
    }

    return loadResource(&loadInfo);
}
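
// Example (hypothetical caller; only loadModel's signature is taken from this file):
//
//     ModelHandle handle = modelManager->loadModel("assets/vehicle.cogsbin", resourceId, flags);
//
// Holding the returned handle keeps the reference count above one so the load is not abandoned.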

void Cogs::Core::ModelManager::postProcessLoading()
{
    if (fetchedItems.empty()) return;

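    // Drain the queue of completed fetches, but stay within the configured per-frame budget
    // so that processing many models does not stall the main thread.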
    double timeLimitSeconds = 0.001 * context->variables->get(timeLimitName, 0.f);
    int itemLimit = context->variables->get(itemLimitName, 0);

    int modelsLoaded = 0;
    Cogs::Timer processTimer = Cogs::Timer::startNew();
    while (!fetchedItems.empty()) {
        FetchedItem item = std::move(fetchedItems.front());
        fetchedItems.pop();
        if (processFetchedItem(item.loadedLoader, item.loadInfo, std::move(item.data))) {

            // Model loaded successfully
            modelsLoaded++;
            if ((0 < itemLimit) && (itemLimit <= modelsLoaded)) {
                // Hit item limit, stop processing this frame.
                break;
            }
            if ((0.f < timeLimitSeconds) && (timeLimitSeconds <= processTimer.elapsedSeconds())) {
                // Hit time limit, stop processing this frame.
                break;
            }
        }
    }

    // If we are not done, trigger a new frame so we can continue.
    if (!fetchedItems.empty()) {
        context->engine->setDirty();
    }
}

bool Cogs::Core::ModelManager::processFetchedItem(ILoadedModelLoader* loadedLoader, ModelLoadInfo* loadInfo, std::unique_ptr<FileContents> data)
{
    bool notCancelled = false;
    { // Remove this model's fetch id entry; if it is already gone, the load was cancelled.
        LockGuard guard(fetchIds.lock);
        if (auto it = fetchIds.map.find(reinterpret_cast<size_t>(loadInfo->handle.get())); it != fetchIds.map.end()) {
            fetchIds.map.erase(it);
            notCancelled = true;
        }
    }

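    // Three outcomes are handled below: the model has been abandoned by all other holders,
    // the fetch returned no data (failed or cancelled), or the data is handed to the loader.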
    bool success = false;
    Model* model = get(loadInfo->handle);
    if (loadInfo->handle->referenceCount() <= 1) {
        LOG_TRACE(logger, "Abandoned model received in async callback, skipping further processing");
        loadInfo->handle->setFailedLoad();
    }
    else if (!data) {
        if (notCancelled) {
            LOG_ERROR(logger, "Error loading model from %s.", loadInfo->resourceName.empty() ? loadInfo->resourcePath.c_str() : loadInfo->resourceName.c_str());
        }
        else {
            LOG_TRACE(logger, "Cancelled model from %s.", loadInfo->resourceName.empty() ? loadInfo->resourcePath.c_str() : loadInfo->resourceName.c_str());
        }
        model->setFailedLoad();
    }
    else if (loadedLoader->load(context, *loadInfo, std::move(data))) {
        success = true;
    }
    else {
        LOG_ERROR(logger, "Error loading model from %s.", loadInfo->resourceName.empty() ? loadInfo->resourcePath.c_str() : loadInfo->resourceName.c_str());
        model->setFailedLoad();
    }
    setProcessed(loadInfo, !loadInfo->loadSync());
    return success;
}


void Cogs::Core::ModelManager::handleLoad(ModelLoadInfo * loadInfo)
{
    // If no one holds a reference to this model except us, we just abandon it.
    if (loadInfo->handle->referenceCount() <= 1) {
        LOG_TRACE(logger, "Abandoned model, skipping loading");
        loadInfo->handle->setFailedLoad();
        setProcessed(loadInfo, false);
        return;
    }

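    // Take the asynchronous fetch path only for non-blocking loads, on platforms that prefer
    // async fetching, when the data is not already in the resource store and the resource is
    // not loaded from an archive.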
    IModelLoader* loader = nullptr;
    if (!loadInfo->loadSync()
        && context->features->prefers(PlatformPreference::AsyncFetch)
        && !context->resourceStore->hasResource(loadInfo->resourcePath)
        && loadInfo->protocol != ResourceProtocol::Archive)
    {
        // We will try to load this asynchronously if the loader supports it.

        std::string path;
        Cogs::FileContentsHints contentsHints = Cogs::FileContentsHints::None;
        ILoadedModelLoader* loadedLoader = nullptr;

        // If the model has a compression suffix we can automatically decompress,
        // we can try to load it without the suffix.
        if (size_t off = loadInfo->resourcePath.find_last_of('.'); off != std::string::npos) {

            bool stripSuffix = true;
            switch (Cogs::StringView(loadInfo->resourcePath.data() + off).hashLowercase()) {
            case Cogs::hash(".cogsbin"):
                // We allow .cogsbin files that are really .cogsbin.zst files inside asset hierarchies,
                // see Documentation/AssetSystem.md for more details.
                contentsHints = Cogs::FileContentsHints::ZStdDecompress; // Reconstructed line; the original is not preserved in this listing.
                stripSuffix = false;
                break;
            case Cogs::hash(".zst"):
            case Cogs::hash(".zstd"):
                contentsHints = Cogs::FileContentsHints::ZStdDecompress; // Reconstructed line; the original is not preserved in this listing.
                break;
            case Cogs::hash(".br"):
                contentsHints = Cogs::FileContentsHints::BrotliDecompress; // Reconstructed line; the original is not preserved in this listing.
                break;
            default:
                break;
            }

            // Try to find a loader that handles the file without the compression suffix.
            if (contentsHints != Cogs::FileContentsHints::None) {
                if (stripSuffix) {
                    path = std::move(loadInfo->resourcePath);
                    loadInfo->resourcePath = path.substr(0, off);
                }
                if (loader = findLoader(loadInfo); loader) {
                    loadedLoader = dynamic_cast<ILoadedModelLoader*>(loader);
                }
                if (loadedLoader == nullptr) {
                    // No supported loader, revert the changes.
                    contentsHints = Cogs::FileContentsHints::None;
                    if (stripSuffix) {
                        loadInfo->resourcePath = std::move(path);
                    }
                    path.clear();
                    loader = nullptr;
                }
            }
        }

        if (loadedLoader == nullptr) {
            // Try to find a loader using the full suffix.
            loader = findLoader(loadInfo);
            if (loader == nullptr) {
                LOG_ERROR(logger, "No loader found for model %s.", loadInfo->resourcePath.c_str());
                Model* model = get(loadInfo->handle);
                model->setFailedLoad();
                return;
            }
            loadedLoader = dynamic_cast<ILoadedModelLoader*>(loader);
        }

        if (loadedLoader) {

            // We have a loader that can take a memory blob instead of reading a file from disk or the resource store.

            // TODO: Investigate chain of data propagation. There is a copy in the emscripten fetch handler, a subsequent copy
            // when the model is loaded. And the loader may spawn tasks (though they are currently synchronous in emscripten), so
            // we really should take ownership somehow.
            FileContents::Callback handleResult = [this, ctx = context, loadedLoader, loadInfo](std::unique_ptr<FileContents> data)
            {
                // If we are on the main thread, queue the response to be processed during the engine update.
                if (main == std::this_thread::get_id()) {
                    fetchedItems.push(FetchedItem{ .data = std::move(data), .loadInfo = loadInfo, .loadedLoader = loadedLoader });
                    ctx->engine->setDirty();
                    return;
                }
                processFetchedItem(loadedLoader, loadInfo, std::move(data));
            };

            // Opt-in option to force fetches onto the main thread, useful for debugging that code path on desktop where threads are available.
            if (context->variables->get(forceFetchesInMainThreadName, false)) {
                FileContents::Callback handleResultInMainThread = [ctx = context, handleResult](std::unique_ptr<FileContents> data) {
                    auto task = [handleResult, dataPtr = data.release()]() { handleResult(std::unique_ptr<Cogs::FileContents>(dataPtr)); };
                    ctx->engine->runTaskInMainThread(std::move(task));
                };
                handleResult = handleResultInMainThread;
            }

            // We have a potential race condition since the callback that removes the
            // cancellation id can either run during the fetch call or after, so we add
            // an item now so we can detect and handle this situation.
            size_t modelKey = reinterpret_cast<size_t>(loadInfo->handle.get());
            {
                LockGuard guard(fetchIds.lock);
                fetchIds.map[modelKey] = DataFetcherManager::NoFetchId;
            }

            DataFetcherManager::FetchId fetchId = DataFetcherManager::fetchAsync(context, path.empty() ? loadInfo->resourcePath : path, handleResult, 0, 0, true, contentsHints);
            {
                LockGuard guard(fetchIds.lock);
                if (auto it = fetchIds.map.find(modelKey); it != fetchIds.map.end()) {
                    // The callback has not yet removed the entry, update it.
                    it->second = fetchId;
                }
            }

            return; // The handleResult continuation will update the resource state.
        }
        else {

#ifdef EMSCRIPTEN
            // We have a loader that requires reading from disk or the resource store,
            // check if we already have it, or trigger a fetch and re-queue the model.
            if (!checkPreloaded(loadInfo)) {
                return; // Not yet in the resource store.
            }
#endif

            // The model is in the resource store, load it with the code below.
        }
    }

    if (loader == nullptr) {
        loader = findLoader(loadInfo);
        if (loader == nullptr) {
            LOG_ERROR(logger, "No loader found for model %s.", loadInfo->resourceName.c_str());
            Model* model = get(loadInfo->handle);
            model->setFailedLoad();
            return;
        }
    }
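    // Fallback path: load through the regular IModelLoader interface, either synchronously or
    // as a task on the resource queue, depending on the load flags and platform preferences.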

    auto loadResource = [=, this]() mutable
    {
        Model* model = get(loadInfo->handle);
        if (loadInfo->handle->referenceCount() <= 1) {
            LOG_DEBUG(logger, "Abandoned model read, skipping further processing");
            loadInfo->handle->setFailedLoad();
        }
        else {
            bool success = false;
            if (!loader) {
                loader = findLoader(loadInfo);
                if (!loader) {
                    LOG_ERROR(logger, "No loader found for model %s.", loadInfo->resourceName.c_str());
                }
            }

            if (loader) {
                success = loader->load(context, *loadInfo);
            }

            if (success == false) {
                LOG_ERROR(logger, "Error loading model from %s.", loadInfo->resourceName.c_str());
                model->setFailedLoad();
            }
        }
        setProcessed(loadInfo, !loadInfo->loadSync());
    };

    if (loadInfo->loadSync() || !context->features->prefers(PlatformPreference::BackgroundTasks)) {
        loadResource();
    }
    else {
        context->taskManager->enqueue(TaskManager::ResourceQueue, loadResource);
    }
}

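
// Cancel an in-flight asynchronous fetch for the given model, if one is still registered.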
void Cogs::Core::ModelManager::cancelModelLoad(ModelHandle handle)
{
    if (!handle) return;

    DataFetcherManager::FetchId id = DataFetcherManager::NoFetchId;
    {
        LockGuard guard(fetchIds.lock);
        if (auto it = fetchIds.map.find(reinterpret_cast<size_t>(handle.get())); it != fetchIds.map.end()) {
            id = it->second;
            fetchIds.map.erase(it);
        }
        else return;
    }
    DataFetcherManager::cancelAsyncFetch(context, id);
}

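// Reload hook: rebuilds a ModelLoadInfo from the existing resource's id, source path and name,
// then runs it through loadResource again with the Reload flag.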
void Cogs::Core::ModelManager::handleReload(ResourceHandleBase handle)
{
    ModelHandle model(handle);

    ModelLoadInfo & loadInfo = *createLoadInfo();
    loadInfo.resourceId = model->getId();
    loadInfo.resourcePath = model->getSource().to_string();
    loadInfo.resourceName = model->getName().to_string();
    loadInfo.loadFlags = ResourceLoadFlags::Reload;
    loadInfo.handle = model;

    loadResource(&loadInfo);
}

void Cogs::Core::ModelManager::processDeletion()
{
    CpuInstrumentationScope(SCOPE_RESOURCES, "ModelManager::processDeletion");

    data->updateAsync = context->variables->get("resources.models.updateAsync", false);

    auto t = Timer::startNew();

    ResourceManager::processDeletion();

    if (!data->deletionModels.empty()) {
        context->taskManager->enqueueChild(data->modelsGroup, [this, deletionModels = std::move(data->deletionModels)]()
        {
            CpuInstrumentationScope(SCOPE_RESOURCES, "ModelManager::processDeletionTask");

            for (auto model : deletionModels) {
                destroyLocked(model);
            }
        });
    }

    auto elapsed = t.elapsedSeconds();

    if (elapsed > 0.005) {
        LOG_DEBUG(logger, "ModelManager::processDeletion elapsed: %f", elapsed);
    }
}

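// When "resources.models.updateAsync" is enabled, deletion is deferred: handleDeletion below
// queues models in deletionModels, processDeletion above destroys them on the resource task
// queue via destroyLocked, and destroyInternal skips the immediate base-class destruction.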
398{
399 if (!data->updateAsync) {
401 } else {
402 data->deletionModels.emplace_back(model);
403 }
404}

void Cogs::Core::ModelManager::destroyInternal(ResourceBase * resource)
{
    if (!data->updateAsync) {
        ResourceManager::destroyInternal(resource);
    }
}