Mirror of https://github.com/godotengine/godot.git, synced 2024-11-23 12:43:43 +00:00
Recast: Update to upstream commit 5a870d4 (2022)

(cherry picked from commit 4635f24322)

parent 723a65dd75
commit 6be616bd16
thirdparty/README.md (vendored): 2 changes
@@ -485,7 +485,7 @@ Files extracted from upstream source:
 ## recastnavigation
 
 - Upstream: https://github.com/recastnavigation/recastnavigation
-- Version: git (57610fa6ef31b39020231906f8c5d40eaa8294ae, 2019)
+- Version: git (5a870d427e47abd4a8e4ce58a95582ec049434d5, 2022)
 - License: zlib
 
 Files extracted from upstream source:
@@ -22,7 +22,7 @@
 #include <stddef.h>
 #include <stdint.h>
 
-#include <RecastAssert.h>
+#include "RecastAssert.h"
 
 /// Provides hint values to the memory allocator on how long the
 /// memory is expected to be used.
@@ -106,6 +106,8 @@ class rcVectorBase {
 	// Creates an array of the given size, copies all of this vector's data into it, and returns it.
 	T* allocate_and_copy(rcSizeType size);
 	void resize_impl(rcSizeType size, const T* value);
+	// Requires: min_capacity > m_cap.
+	rcSizeType get_new_capacity(rcSizeType min_capacity);
 public:
 	typedef rcSizeType size_type;
 	typedef T value_type;
@@ -196,8 +198,7 @@ void rcVectorBase<T, H>::push_back(const T& value) {
 		return;
 	}
 
-	rcAssert(RC_SIZE_MAX / 2 >= m_size);
-	rcSizeType new_cap = m_size ? 2*m_size : 1;
+	const rcSizeType new_cap = get_new_capacity(m_cap + 1);
 	T* data = allocate_and_copy(new_cap);
 	// construct between allocate and destroy+free in case value is
 	// in this vector.
@@ -208,25 +209,44 @@ void rcVectorBase<T, H>::push_back(const T& value) {
 	rcFree(m_data);
 	m_data = data;
 }
 
 template <typename T, rcAllocHint H>
+rcSizeType rcVectorBase<T, H>::get_new_capacity(rcSizeType min_capacity) {
+	rcAssert(min_capacity <= RC_SIZE_MAX);
+	if (rcUnlikely(m_cap >= RC_SIZE_MAX / 2))
+		return RC_SIZE_MAX;
+	return 2 * m_cap > min_capacity ? 2 * m_cap : min_capacity;
+}
+
+template <typename T, rcAllocHint H>
 void rcVectorBase<T, H>::resize_impl(rcSizeType size, const T* value) {
 	if (size < m_size) {
 		destroy_range(size, m_size);
 		m_size = size;
 	} else if (size > m_size) {
-		T* new_data = allocate_and_copy(size);
-		// We defer deconstructing/freeing old data until after constructing
-		// new elements in case "value" is there.
-		if (value) {
-			construct_range(new_data + m_size, new_data + size, *value);
-		} else {
-			construct_range(new_data + m_size, new_data + size);
-		}
-		destroy_range(0, m_size);
-		rcFree(m_data);
-		m_data = new_data;
-		m_cap = size;
-		m_size = size;
+		if (size <= m_cap) {
+			if (value) {
+				construct_range(m_data + m_size, m_data + size, *value);
+			} else {
+				construct_range(m_data + m_size, m_data + size);
+			}
+			m_size = size;
+		} else {
+			const rcSizeType new_cap = get_new_capacity(size);
+			T* new_data = allocate_and_copy(new_cap);
+			// We defer deconstructing/freeing old data until after constructing
+			// new elements in case "value" is there.
+			if (value) {
+				construct_range(new_data + m_size, new_data + size, *value);
+			} else {
+				construct_range(new_data + m_size, new_data + size);
+			}
+			destroy_range(0, m_size);
+			rcFree(m_data);
+			m_data = new_data;
+			m_cap = new_cap;
+			m_size = size;
+		}
 	}
 }
 
 template <typename T, rcAllocHint H>
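Not part of the diff, but for context: the hunks above centralize rcVectorBase's growth policy (this is RecastAlloc.h in the Recast sources). push_back() already doubled the capacity; the new get_new_capacity() keeps that doubling, clamps it near RC_SIZE_MAX, and is now also used by resize_impl(), which in addition constructs in place whenever the requested size still fits in the current capacity instead of reallocating an exactly-sized buffer. The sketch below illustrates the same "double, but at least min_capacity" policy in isolation; it is illustrative code, not Recast's, and the names SizeType, kSizeMax and new_capacity are made up.

// Standalone illustration of a "double or at least min_capacity" growth
// policy, in the spirit of rcVectorBase::get_new_capacity(). Not Recast code.
#include <cstdio>
#include <limits>

using SizeType = int;
constexpr SizeType kSizeMax = std::numeric_limits<SizeType>::max();

// Grow to twice the current capacity, but never less than min_capacity,
// and clamp once doubling would overflow.
SizeType new_capacity(SizeType current_cap, SizeType min_capacity)
{
	if (current_cap >= kSizeMax / 2)
		return kSizeMax;
	return 2 * current_cap > min_capacity ? 2 * current_cap : min_capacity;
}

int main()
{
	// Pushing one element at a time: capacity follows 1, 2, 4, 8, ...
	// so N pushes trigger only O(log N) reallocations.
	SizeType cap = 0;
	int reallocations = 0;
	for (SizeType size = 0; size < 1000; ++size)
	{
		if (size == cap) // full: grow as push_back() would, asking for size + 1
		{
			cap = new_capacity(cap, size + 1);
			++reallocations;
			printf("grew to capacity %d\n", cap);
		}
	}
	printf("1000 pushes -> %d reallocations\n", reallocations);
	return 0;
}

The practical effect visible in the diff is that a sequence of growing resize() calls is now amortized like push_back(), and a shrink followed by a regrow reuses the existing buffer instead of reallocating.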
@@ -303,6 +323,7 @@ public:
 	rcIntArray(int n) : m_impl(n, 0) {}
 	void push(int item) { m_impl.push_back(item); }
 	void resize(int size) { m_impl.resize(size); }
+	void clear() { m_impl.clear(); }
 	int pop()
 	{
 		int v = m_impl.back();
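Also not part of the diff: the new rcIntArray::clear() simply forwards to the underlying vector's clear(), which is what lets the remaining hunks below replace every resize(0) call with clear(). Both forms empty the container while leaving its allocation in place (as the resize_impl() hunk above shows, shrinking never frees the buffer), so the temporary arrays are still reused across loop iterations; clear() just states that intent directly instead of going through the resize path. A small stand-alone illustration, using std::vector<int> purely as a stand-in for the Recast containers:

// Illustration only: std::vector<int> stands in for rcIntArray / rcTempVector.
#include <cassert>
#include <vector>

int main()
{
	std::vector<int> stack;
	for (int i = 0; i < 64; ++i)
		stack.push_back(i);

	const std::size_t cap_before = stack.capacity();

	// resize(0) and clear() both destroy the elements and set the size to 0
	// while keeping the allocation, so the buffer is reused on the next pass.
	stack.resize(0);
	assert(stack.empty());
	assert(stack.capacity() == cap_before);

	stack.push_back(42);
	stack.clear();
	assert(stack.empty());
	assert(stack.capacity() == cap_before);
	return 0;
}

For int elements the two calls are interchangeable, so the substitutions that follow read as an upstream readability cleanup rather than a behavior change.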
@@ -921,8 +921,8 @@ bool rcBuildContours(rcContext* ctx, rcCompactHeightfield& chf,
 					continue;
 				const unsigned char area = chf.areas[i];
 				
-				verts.resize(0);
-				simplified.resize(0);
+				verts.clear();
+				simplified.clear();
 				
 				ctx->startTimer(RC_TIMER_BUILD_CONTOURS_TRACE);
 				walkContour(x, y, i, chf, flags, verts);
@@ -653,8 +653,8 @@ static bool buildPolyDetail(rcContext* ctx, const float* in, const int nin,
 	for (int i = 0; i < nin; ++i)
 		rcVcopy(&verts[i*3], &in[i*3]);
 	
-	edges.resize(0);
-	tris.resize(0);
+	edges.clear();
+	tris.clear();
 	
 	const float cs = chf.cs;
 	const float ics = 1.0f/cs;
@@ -803,7 +803,7 @@ static bool buildPolyDetail(rcContext* ctx, const float* in, const int nin,
 		int x1 = (int)ceilf(bmax[0]/sampleDist);
 		int z0 = (int)floorf(bmin[2]/sampleDist);
 		int z1 = (int)ceilf(bmax[2]/sampleDist);
-		samples.resize(0);
+		samples.clear();
 		for (int z = z0; z < z1; ++z)
 		{
 			for (int x = x0; x < x1; ++x)
@@ -864,8 +864,8 @@ static bool buildPolyDetail(rcContext* ctx, const float* in, const int nin,
 			
 			// Create new triangulation.
 			// TODO: Incremental add instead of full rebuild.
-			edges.resize(0);
-			tris.resize(0);
+			edges.clear();
+			tris.clear();
 			delaunayHull(ctx, nverts, verts, nhull, hull, tris, edges);
 		}
 	}
@@ -935,7 +935,7 @@ static void seedArrayWithPolyCenter(rcContext* ctx, const rcCompactHeightfield&
 	pcy /= npoly;
 	
 	// Use seeds array as a stack for DFS
-	array.resize(0);
+	array.clear();
 	array.push(startCellX);
 	array.push(startCellY);
 	array.push(startSpanIndex);
@@ -1001,7 +1001,7 @@ static void seedArrayWithPolyCenter(rcContext* ctx, const rcCompactHeightfield&
 		rcSwap(dirs[directDir], dirs[3]);
 	}
 	
-	array.resize(0);
+	array.clear();
 	// getHeightData seeds are given in coordinates with borders
 	array.push(cx+bs);
 	array.push(cy+bs);
@@ -1030,7 +1030,7 @@ static void getHeightData(rcContext* ctx, const rcCompactHeightfield& chf,
 	// Note: Reads to the compact heightfield are offset by border size (bs)
 	// since border size offset is already removed from the polymesh vertices.
 	
-	queue.resize(0);
+	queue.clear();
 	// Set all heights to RC_UNSET_HEIGHT.
 	memset(hp.data, 0xff, sizeof(unsigned short)*hp.width*hp.height);
 
@@ -650,7 +650,7 @@ static bool mergeRegions(rcRegion& rega, rcRegion& regb)
 		return false;
 	
 	// Merge neighbours.
-	rega.connections.resize(0);
+	rega.connections.clear();
 	for (int i = 0, ni = acon.size(); i < ni-1; ++i)
 		rega.connections.push(acon[(insa+1+i) % ni]);
 
@@ -876,8 +876,8 @@ static bool mergeAndFilterRegions(rcContext* ctx, int minRegionArea, int mergeRe
 		// Also keep track of the regions connects to a tile border.
 		bool connectsToBorder = false;
 		int spanCount = 0;
-		stack.resize(0);
-		trace.resize(0);
+		stack.clear();
+		trace.clear();
 		
 		reg.visited = true;
 		stack.push(i);
@@ -1068,7 +1068,7 @@ static bool mergeAndFilterLayerRegions(rcContext* ctx, int minRegionArea,
 		{
 			const rcCompactCell& c = chf.cells[x+y*w];
 			
-			lregs.resize(0);
+			lregs.clear();
 			
 			for (int i = (int)c.index, ni = (int)(c.index+c.count); i < ni; ++i)
 			{
@@ -1139,7 +1139,7 @@ static bool mergeAndFilterLayerRegions(rcContext* ctx, int minRegionArea,
 			// Start search.
 			root.id = layerId;
 			
-			stack.resize(0);
+			stack.clear();
 			stack.push(i);
 			
 			while (stack.size() > 0)