BugFix: FLAMEGPU_ENABLE_GLM was broken
This was broken by #1007 earlier today.

Also removed a leftover file from the same PR, and improved the syntax of a few includes.

Closes #1011
Robadob committed Dec 12, 2022
1 parent 313e1dc commit 7878870
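The root cause, as far as the diff below shows, appears to be that #1007 left the <glm/glm.hpp> include (and its NVCC diagnostic-suppression pragmas) inside the flamegpu::detail namespace. Including a third-party header from inside a namespace wraps its declarations in that namespace, which would break any build configured with FLAMEGPU_ENABLE_GLM (and hence the FLAMEGPU_USE_GLM compile definition). Below is a minimal illustrative sketch of the corrected ordering, with the third-party header included at global scope before any project namespace opens; it is not FLAMEGPU code, and its names are made up for the example.

// Illustrative sketch (not FLAMEGPU code): include the third-party header at
// global scope, before opening any project namespace, so its symbols land in
// the namespace they expect (::glm) rather than a nested project namespace.
#include <glm/glm.hpp>

namespace flamegpu_example {
namespace detail {
// Project code can now refer to the global ::glm types freely.
using vec3f = glm::vec3;
}  // namespace detail
}  // namespace flamegpu_example

int main() {
    flamegpu_example::detail::vec3f v(1.0f, 2.0f, 3.0f);
    return static_cast<int>(v.x);  // 1
}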
Showing 4 changed files with 16 additions and 1,031 deletions.
24 changes: 13 additions & 11 deletions include/flamegpu/detail/type_decode.h
@@ -1,9 +1,21 @@
#ifndef INCLUDE_FLAMEGPU_DETAIL_TYPE_DECODE_H_
#define INCLUDE_FLAMEGPU_DETAIL_TYPE_DECODE_H_

#if defined(FLAMEGPU_USE_GLM) || defined(GLM_VERSION)
#ifndef GLM_VERSION
#ifdef __CUDACC__
#ifdef __NVCC_DIAG_PRAGMA_SUPPORT__
#pragma nv_diag_suppress = esa_on_defaulted_function_ignored
#else
#pragma diag_suppress = esa_on_defaulted_function_ignored
#endif // __NVCC_DIAG_PRAGMA_SUPPORT__
#endif // __CUDACC__
#include <glm/glm.hpp>
#endif
#endif

namespace flamegpu {
namespace detail {

/**
* This struct allows us to natively decode GLM types to their type + length
*/
@@ -16,16 +28,6 @@ struct type_decode {
};

#if defined(FLAMEGPU_USE_GLM) || defined(GLM_VERSION)
#ifndef GLM_VERSION
#ifdef __CUDACC__
#ifdef __NVCC_DIAG_PRAGMA_SUPPORT__
#pragma nv_diag_suppress = esa_on_defaulted_function_ignored
#else
#pragma diag_suppress = esa_on_defaulted_function_ignored
#endif // __NVCC_DIAG_PRAGMA_SUPPORT__
#endif // __CUDACC__
#include <glm/glm.hpp>
#endif
/**
* GLM specialisation, only enabled if GLM is present
*/
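The body of the GLM specialisation is unchanged context that this diff does not show. For illustration, here is a sketch of what such a trait specialisation typically looks like; the member names type_t and len_t are assumptions, not confirmed by the visible diff, and only the intent ("decode GLM types to their type + length") comes from the header's own comment.

// Hedged sketch of a type_decode-style trait with a GLM specialisation.
// The member names type_t and len_t are assumptions; only the intent comes
// from the header comment above ("decode GLM types to their type + length").
#include <type_traits>
#include <glm/glm.hpp>

namespace example {

// Primary template: a scalar decodes to itself, with length 1.
template <typename T>
struct type_decode {
    typedef T type_t;
    static constexpr unsigned int len_t = 1;
};

// GLM specialisation: glm::vec<N, T, Q> decodes to element type T and length N.
template <glm::length_t N, typename T, glm::qualifier Q>
struct type_decode<glm::vec<N, T, Q>> {
    typedef T type_t;
    static constexpr unsigned int len_t = N;
};

}  // namespace example

// Compile-time checks of the expected behaviour.
static_assert(std::is_same<example::type_decode<float>::type_t, float>::value, "scalar decodes to itself");
static_assert(example::type_decode<glm::vec3>::len_t == 3, "vec3 decodes to length 3");

int main() { return 0; }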
4 changes: 2 additions & 2 deletions include/flamegpu/model/SubModelDescription.h
@@ -3,8 +3,8 @@
#include <string>
#include <memory>

#include "LayerDescription.h"
#include "DependencyNode.h"
#include "flamegpu/model/LayerDescription.h"
#include "flamegpu/model/DependencyNode.h"

namespace flamegpu {

(Diffs for the remaining two changed files, including the removed leftover file, are not shown here.)
