Mirror of https://github.com/CHOMPStation2/CHOMPStation2.git (synced 2025-12-11 10:43:20 +00:00)
Merge pull request #7360 from SkyMarshal/astar-fix
Fix/Cleanup for A* implementation
@@ -38,148 +38,144 @@ length to avoid portals or something i guess?? Not that they're counted right no

Old implementation (removed):

PriorityQueue
    var/L[]
    var/cmp

    New(compare)
        L = new()
        cmp = compare

    proc
        IsEmpty()
            return !L.len

        Enqueue(d)
            var/i
            var/j
            L.Add(d)
            i = L.len
            j = i>>1
            while(i > 1 && call(cmp)(L[j],L[i]) > 0)
                L.Swap(i,j)
                i = j
                j >>= 1

        Dequeue()
            if(!L.len) return 0
            . = L[1]
            Remove(1)

        Remove(i)
            if(i > L.len) return 0
            L.Swap(i,L.len)
            L.Cut(L.len)
            if(i < L.len)
                _Fix(i)

        _Fix(i)
            var/child = i + i
            var/item = L[i]
            while(child <= L.len)
                if(child + 1 <= L.len && call(cmp)(L[child],L[child + 1]) > 0)
                    child++
                if(call(cmp)(item,L[child]) > 0)
                    L[i] = L[child]
                    i = child
                else
                    break
                child = i + i
            L[i] = item

        List()
            var/ret[] = new()
            var/copy = L.Copy()
            while(!IsEmpty())
                ret.Add(Dequeue())
            L = copy
            return ret

        RemoveItem(i)
            var/ind = L.Find(i)
            if(ind)
                Remove(ind)

PathNode
    var/datum/source
    var/PathNode/prevNode
    var/f
    var/g
    var/h
    var/nt // Nodes traversed

    New(s,p,pg,ph,pnt)
        source = s
        prevNode = p
        g = pg
        h = ph
        f = g + h
        source.bestF = f
        nt = pnt

datum
    var/bestF

proc
    PathWeightCompare(PathNode/a, PathNode/b)
        return a.f - b.f

    AStar(start,end,adjacent,dist,maxnodes,maxnodedepth = 30,mintargetdist,minnodedist,id=null, var/turf/exclude=null)
        // world << "A*: [start] [end] [adjacent] [dist] [maxnodes] [maxnodedepth] [mintargetdist], [minnodedist] [id]"
        var/PriorityQueue/open = new /PriorityQueue(/proc/PathWeightCompare)
        var/closed[] = new()
        var/path[]
        start = get_turf(start)
        if(!start) return 0

        open.Enqueue(new /PathNode(start,null,0,call(start,dist)(end)))

        while(!open.IsEmpty() && !path)
            {
            var/PathNode/cur = open.Dequeue()
            closed.Add(cur.source)

            var/closeenough
            if(mintargetdist)
                closeenough = call(cur.source,dist)(end) <= mintargetdist

            if(cur.source == end || closeenough)
                path = new()
                path.Add(cur.source)
                while(cur.prevNode)
                    cur = cur.prevNode
                    path.Add(cur.source)

            var/L[] = call(cur.source,adjacent)(id)
            if(minnodedist && maxnodedepth)
                if(call(cur.source,minnodedist)(end) + cur.nt >= maxnodedepth)
                    continue
            else if(maxnodedepth)
                if(cur.nt >= maxnodedepth)
                    continue

            for(var/datum/d in L)
                if(d == exclude)
                    continue
                var/ng = cur.g + call(cur.source,dist)(d)
                if(d.bestF)
                    if(ng + call(d,dist)(end) < d.bestF)
                        for(var/i = 1; i <= open.L.len; i++)
                            var/PathNode/n = open.L[i]
                            if(n.source == d)
                                open.Remove(i)
                                break
                open.Enqueue(new /PathNode(d,cur,ng,call(d,dist)(end),cur.nt+1))
                if(maxnodes && open.L.len > maxnodes)
                    open.L.Cut(open.L.len)
            }

        var/PathNode/temp
        while(!open.IsEmpty())
            temp = open.Dequeue()
            temp.source.bestF = 0
        while(closed.len)
            temp = closed[closed.len]
            temp.bestF = 0
            closed.Cut(closed.len)

        if(path)
            for(var/i = 1; i <= path.len/2; i++)
                path.Swap(i,path.len-i+1)

        return path

New implementation (added):

PriorityQueue
    var/list/queue
    var/proc/comparison_function

    New(compare)
        queue = list()
        comparison_function = compare

    proc/IsEmpty()
        return !queue.len

    proc/Enqueue(var/data)
        queue.Add(data)
        var/index = queue.len
        //From what I can tell, this automagically sorts the added data into the correct location.
        while(index > 2 && call(comparison_function)(queue[index / 2], queue[index]) > 0)
            queue.Swap(index, index / 2)
            index /= 2

    proc/Dequeue()
        if(!queue.len)
            return 0
        return Remove(1)

    proc/Remove(var/index)
        if(index > queue.len)
            return 0

        var/thing = queue[index]
        queue.Swap(index, queue.len)
        queue.Cut(queue.len)
        if(index < queue.len)
            FixQueue(index)
        return thing

    proc/FixQueue(var/index)
        var/child = 2 * index
        var/item = queue[index]

        while(child <= queue.len)
            if(child < queue.len && call(comparison_function)(queue[child], queue[child + 1]) > 0)
                child++
            if(call(comparison_function)(item, queue[child]) > 0)
                queue[index] = queue[child]
                index = child
            else
                break
            child = 2 * index
        queue[index] = item

    proc/List()
        return queue.Copy()

    proc/Length()
        return queue.len

    proc/RemoveItem(data)
        var/index = queue.Find(data)
        if(index)
            return Remove(index)
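
Note: queue is treated as a 1-indexed binary heap ordered by comparison_function, so an entry at index i has its parent at index / 2 (rounded down) and its children at 2 * index and 2 * index + 1, which is exactly the arithmetic Enqueue (sift up) and FixQueue (sift down) rely on. A minimal sketch of that layout, using a hypothetical helper proc that is not part of this PR:

// Hypothetical demo proc, for illustration only; not part of the PR.
// Prints the heap relations that Enqueue and FixQueue use for a given index.
/proc/heap_index_demo(index)
    world.log << "parent of [index] is [round(index / 2)]"                   // round() floors, e.g. 5 -> 2
    world.log << "children of [index] are [2 * index] and [2 * index + 1]"   // e.g. 3 -> 6 and 7

Dequeue() always hands back queue[1] via Remove(1), so the comparison proc passed to New() decides what counts as the front of the queue.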

PathNode
    var/datum/position
    var/PathNode/previous_node

    var/best_estimated_cost
    var/estimated_cost
    var/known_cost
    var/cost
    var/nodes_traversed

    New(_position, _previous_node, _known_cost, _cost, _nodes_traversed)
        position = _position
        previous_node = _previous_node

        known_cost = _known_cost
        cost = _cost
        estimated_cost = cost + known_cost

        best_estimated_cost = estimated_cost
        nodes_traversed = _nodes_traversed

proc/PathWeightCompare(PathNode/a, PathNode/b)
    return a.estimated_cost - b.estimated_cost
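
Note: mapping this onto the old field names, known_cost plays the role of g (cost accumulated so far), cost plays the role of h (the estimate returned by the dist proc), and estimated_cost is their sum, the old f. A small worked example, again using a hypothetical demo proc that is not part of the PR:

// Hypothetical demo proc, for illustration only; not part of the PR.
/proc/path_weight_compare_demo()
    var/datum/somewhere = new()
    var/PathNode/a = new(somewhere, null, 4, 3, 1)  // estimated_cost = 3 + 4 = 7
    var/PathNode/b = new(somewhere, null, 2, 6, 1)  // estimated_cost = 6 + 2 = 8
    world.log << PathWeightCompare(a, b)            // 7 - 8 = -1, so a sorts ahead of b

A negative return value means the first argument should sit closer to the front of the queue, which is how the PriorityQueue's "> 0" checks treat it.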

proc/AStar(var/start, var/end, var/proc/adjacent, var/proc/dist, var/max_nodes, var/max_node_depth = 30, var/min_target_dist = 0, var/min_node_dist, var/id, var/datum/exclude)
    var/PriorityQueue/open = new /PriorityQueue(/proc/PathWeightCompare)
    var/list/closed = list()
    var/list/path
    var/list/path_node_by_position = list()
    start = get_turf(start)
    if(!start)
        return 0

    open.Enqueue(new /PathNode(start, null, 0, call(start, dist)(end), 0))

    while(!open.IsEmpty() && !path)
        var/PathNode/current = open.Dequeue()
        closed.Add(current.position)

        if(current.position == end || call(current.position, dist)(end) <= min_target_dist)
            path = new /list(current.nodes_traversed + 1)
            path[path.len] = current.position
            var/index = path.len - 1

            while(current.previous_node)
                current = current.previous_node
                path[index--] = current.position
            break

        if(min_node_dist && max_node_depth)
            if(call(current.position, min_node_dist)(end) + current.nodes_traversed >= max_node_depth)
                continue

        if(max_node_depth)
            if(current.nodes_traversed >= max_node_depth)
                continue

        for(var/datum/datum in call(current.position, adjacent)(id))
            if(datum == exclude)
                continue

            var/best_estimated_cost = current.estimated_cost + call(current.position, dist)(datum)

            //handle removal of sub-par positions
            if(datum in path_node_by_position)
                var/PathNode/target = path_node_by_position[datum]
                if(target.best_estimated_cost)
                    if(best_estimated_cost + call(datum, dist)(end) < target.best_estimated_cost)
                        open.RemoveItem(target)
                    else
                        continue

            var/PathNode/next_node = new (datum, current, best_estimated_cost, call(datum, dist)(end), current.nodes_traversed + 1)
            path_node_by_position[datum] = next_node
            open.Enqueue(next_node)

        if(max_nodes && open.Length() > max_nodes)
            open.Remove(open.Length())

    return path
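
A minimal call-site sketch follows, assuming hypothetical /turf/proc/Demo_AdjacentTurfs and /turf/proc/Demo_Distance helpers; real callers pass whatever adjacency and distance procs the codebase already provides, and the proc names here are illustrative only:

// Hypothetical demo procs, for illustration only; not part of the PR.
/turf/proc/Demo_AdjacentTurfs(id)
    var/list/adjacent = list()
    for(var/turf/T in orange(1, src))
        if(!T.density)
            adjacent += T
    return adjacent

/turf/proc/Demo_Distance(turf/target)
    return get_dist(src, target)

/proc/astar_demo(atom/movable/mover, turf/goal)
    // start, end, adjacent proc, dist proc, max_nodes, max_node_depth, min_target_dist
    var/list/path = AStar(mover, goal, /turf/proc/Demo_AdjacentTurfs, /turf/proc/Demo_Distance, 100, 30, 0)
    return path // list of turfs from the mover's turf to the goal, or null/0 when no path was found

AStar resolves the start with get_turf(), so anything standing on a turf can be passed directly as the starting point.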