Port current SSgarbage from /tg/
@@ -22,3 +22,21 @@
 #define USING_MAP_DATUM /datum/map
 #define MAP_OVERRIDE 1
 #endif
+
+///Used to find the sources of harddels, quite laggy, don't be surpised if it freezes your client for a good while
+//#define REFERENCE_TRACKING
+#ifdef REFERENCE_TRACKING
+
+///Should we be logging our findings or not
+#define REFERENCE_TRACKING_LOG
+
+///Used for doing dry runs of the reference finder, to test for feature completeness
+//#define REFERENCE_TRACKING_DEBUG
+
+///Run a lookup on things hard deleting by default.
+//#define GC_FAILURE_HARD_LOOKUP
+#ifdef GC_FAILURE_HARD_LOOKUP
+#define FIND_REF_NO_CHECK_TICK
+#endif //ifdef GC_FAILURE_HARD_LOOKUP
+
+#endif //ifdef REFERENCE_TRACKING
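Illustrative note (not part of the diff): these flags are opt-in; a hypothetical local debug build would simply uncomment them, e.g.

#define REFERENCE_TRACKING
#define REFERENCE_TRACKING_LOG
//#define REFERENCE_TRACKING_DEBUG

Enabling GC_FAILURE_HARD_LOOKUP as well defines FIND_REF_NO_CHECK_TICK, so the reference search stops yielding to the tick and runs to completion in one go.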
@@ -1,22 +1,36 @@
 //defines that give qdel hints. these can be given as a return in destory() or by calling
 
-#define QDEL_HINT_QUEUE 0 //qdel should queue the object for deletion.
-#define QDEL_HINT_LETMELIVE 1 //qdel should let the object live after calling destory.
-#define QDEL_HINT_IWILLGC 2 //functionally the same as the above. qdel should assume the object will gc on its own, and not check it.
-#define QDEL_HINT_HARDDEL 3 //qdel should assume this object won't gc, and queue a hard delete using a hard reference.
-#define QDEL_HINT_HARDDEL_NOW 4 //qdel should assume this object won't gc, and hard del it post haste.
-#define QDEL_HINT_FINDREFERENCE 5 //functionally identical to QDEL_HINT_QUEUE if TESTING is not enabled in _compiler_options.dm.
-//if TESTING is enabled, qdel will call this object's find_references() verb.
-//defines for the gc_destroyed var
-
-#define GC_QUEUE_PREQUEUE 1
-#define GC_QUEUE_CHECK 2
-#define GC_QUEUE_HARDDELETE 3
-#define GC_QUEUE_COUNT 3 //increase this when adding more steps.
+/// `qdel` should queue the object for deletion.
+#define QDEL_HINT_QUEUE 0
+/// `qdel` should let the object live after calling [/atom/proc/Destroy].
+#define QDEL_HINT_LETMELIVE 1
+/// Functionally the same as the above. `qdel` should assume the object will gc on its own, and not check it.
+#define QDEL_HINT_IWILLGC 2
+/// Qdel should assume this object won't GC, and queue a hard delete using a hard reference.
+#define QDEL_HINT_HARDDEL 3
+// Qdel should assume this object won't gc, and hard delete it posthaste.
+#define QDEL_HINT_HARDDEL_NOW 4
+
+#ifdef REFERENCE_TRACKING
+/** If REFERENCE_TRACKING is enabled, qdel will call this object's find_references() verb.
+ *
+ * Functionally identical to [QDEL_HINT_QUEUE] if [GC_FAILURE_HARD_LOOKUP] is not enabled in _compiler_options.dm.
+ */
+#define QDEL_HINT_FINDREFERENCE 5
+/// Behavior as [QDEL_HINT_FINDREFERENCE], but only if the GC fails and a hard delete is forced.
+#define QDEL_HINT_IFFAIL_FINDREFERENCE 6
+#endif
+
+#define GC_QUEUE_CHECK 1
+#define GC_QUEUE_HARDDELETE 2
+#define GC_QUEUE_COUNT 2 //increase this when adding more steps.
+
+#define QDEL_ITEM_ADMINS_WARNED (1<<0) //! Set when admins are told about lag causing qdels in this type.
+#define QDEL_ITEM_SUSPENDED_FOR_LAG (1<<1) //! Set when a type can no longer be hard deleted on failure because of lag it causes while this happens.
+
+// Defines for the [gc_destroyed][/datum/var/gc_destroyed] var.
 #define GC_QUEUED_FOR_QUEUING -1
-#define GC_QUEUED_FOR_HARD_DEL -2
-#define GC_CURRENTLY_BEING_QDELETED -3
+#define GC_CURRENTLY_BEING_QDELETED -2
 
 #define QDELING(X) (X.gc_destroyed)
 #define QDELETED(X) (!X || X.gc_destroyed)
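Illustrative note (not part of the diff): a Destroy() override picks one of these hints as its return value. A minimal hypothetical example:

/datum/cached_lookup // hypothetical type for illustration
	var/list/cache

/datum/cached_lookup/Destroy(force)
	cache = null // drop our references so the soft delete can succeed
	return QDEL_HINT_QUEUE // let SSgarbage verify collection; QDEL_HINT_HARDDEL if this type is known to never GC

Returning QDEL_HINT_LETMELIVE instead would cancel the deletion unless qdel() was called with force = TRUE.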
@@ -55,6 +55,7 @@ var/global/list/runlevel_flags = list(RUNLEVEL_LOBBY, RUNLEVEL_SETUP, RUNLEVEL_G
 #define INIT_ORDER_WEBHOOKS 50
 #define INIT_ORDER_DBCORE 41 //CHOMPEdit
 #define INIT_ORDER_SQLITE 40
+#define INIT_ORDER_GARBAGE 39
 #define INIT_ORDER_MEDIA_TRACKS 38 // Gotta get that lobby music up, yo
 #define INIT_ORDER_INPUT 37
 #define INIT_ORDER_CHEMISTRY 35
@@ -149,7 +149,6 @@
 		return
 	qdel(query_insert)
 
-
 /proc/log_emote(text, mob/speaker)
 	if (config.log_emote)
 		WRITE_LOG(diary, "EMOTE: [speaker.simple_info_line()]: [html_decode(text)]")
@@ -207,7 +206,6 @@
 	//GLOB.round_text_log += "<font size=1><span style=\"color:#7e668c\"><b>([time_stamp()])</b> (<b>[speaker]/[speaker.client]</b>) <u>DEADSAY:</u> - [text]</span></font>"
 	//CHOMPEdit End
 
-
 /proc/log_ghostemote(text, mob/speaker)
 	if (config.log_emote)
 		WRITE_LOG(diary, "DEADEMOTE: [speaker.simple_info_line()]: [html_decode(text)]")
@@ -233,6 +231,7 @@
 /proc/log_pda(text, mob/speaker)
 	if (config.log_pda)
 		WRITE_LOG(diary, "PDA: [speaker.simple_info_line()]: [html_decode(text)]")
+<<<<<<< HEAD
 		//CHOMPEdit Begin
 		if(speaker.client)
 			if(!SSdbcore.IsConnected())
@@ -251,6 +250,18 @@
 		//GLOB.round_text_log += "<b>([time_stamp()])</b> (<b>[speaker]/[speaker.client]</b>) <u>MSG:</u> - <span style=\"color:[COLOR_GREEN]\">[text]</span>"
 		//CHOMPEdit End
 
+||||||| parent of f69e6ff65b... Merge pull request #10979 from VOREStation/Arokha/tggarbo
+
+	speaker.dialogue_log += "<b>([time_stamp()])</b> (<b>[speaker]/[speaker.client]</b>) <u>MSG:</u> - <span style=\"color:[COLOR_GREEN]\">[text]</span>"
+	GLOB.round_text_log += "<b>([time_stamp()])</b> (<b>[speaker]/[speaker.client]</b>) <u>MSG:</u> - <span style=\"color:[COLOR_GREEN]\">[text]</span>"
+
+
+=======
+
+	speaker.dialogue_log += "<b>([time_stamp()])</b> (<b>[speaker]/[speaker.client]</b>) <u>MSG:</u> - <span style=\"color:[COLOR_GREEN]\">[text]</span>"
+	GLOB.round_text_log += "<b>([time_stamp()])</b> (<b>[speaker]/[speaker.client]</b>) <u>MSG:</u> - <span style=\"color:[COLOR_GREEN]\">[text]</span>"
+
+>>>>>>> f69e6ff65b... Merge pull request #10979 from VOREStation/Arokha/tggarbo
 /proc/log_to_dd(text)
 	to_world_log(text) //this comes before the config check because it can't possibly runtime
 	if(config.log_world_output)
@@ -274,6 +285,12 @@
 /proc/log_unit_test(text)
 	to_world_log("## UNIT_TEST: [text]")
 
+#ifdef REFERENCE_TRACKING_LOG
+#define log_reftracker(msg) log_world("## REF SEARCH [msg]")
+#else
+#define log_reftracker(msg)
+#endif
+
 /proc/log_tgui(user_or_client, text)
 	var/entry = ""
 	if(!user_or_client)
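Illustrative note (not part of the diff): because the fallback branch defines log_reftracker(msg) as empty, call sites can log unconditionally and compile away to nothing when REFERENCE_TRACKING_LOG is off, e.g.

/datum/proc/example_search_step() // hypothetical caller for illustration
	log_reftracker("Beginning search for references to a [type].")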
@@ -1,3 +1,6 @@
+#define MILISECOND * 0.01
+#define MILLISECONDS * 0.01
+
 #define SECOND *10
 #define SECONDS *10
 
@@ -20,6 +23,10 @@
 #define TICKS2DS(T) ((T) TICKS) // Convert ticks to deciseconds
 #define DS2NEARESTTICK(DS) TICKS2DS(-round(-(DS2TICKS(DS))))
 
+#define MS2DS(T) ((T) MILLISECONDS)
+
+#define DS2MS(T) ((T) * 100)
+
 var/world_startup_time
 
 /proc/get_game_time()
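Illustrative note (not part of the diff): like SECONDS, the new defines are unit suffixes/converters that expand to plain arithmetic on deciseconds, for example:

/proc/example_timing() // hypothetical, for illustration only
	var/timeout = 30 SECONDS         // 300 deciseconds
	var/cost_ms = DS2MS(0.5 SECONDS) // 5 deciseconds -> 500 milliseconds
	var/cost_ds = MS2DS(250)         // 250 milliseconds -> 2.5 deciseconds
	return list(timeout, cost_ms, cost_ds)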
@@ -1,33 +1,32 @@
-//
-// Garbage Collector Subsystem - Implements qdel() and the GC queue
-//
 SUBSYSTEM_DEF(garbage)
 	name = "Garbage"
 	priority = FIRE_PRIORITY_GARBAGE
 	wait = 2 SECONDS
 	flags = SS_POST_FIRE_TIMING|SS_BACKGROUND|SS_NO_INIT
 	runlevels = RUNLEVELS_DEFAULT | RUNLEVEL_LOBBY
+	init_order = INIT_ORDER_GARBAGE
 
-	var/list/collection_timeout = list(0, 2 MINUTES, 10 SECONDS) // deciseconds to wait before moving something up in the queue to the next level
+	var/list/collection_timeout = list(2 MINUTES, 10 SECONDS) // deciseconds to wait before moving something up in the queue to the next level
 
+	//Stat tracking
 	var/delslasttick = 0 // number of del()'s we've done this tick
 	var/gcedlasttick = 0 // number of things that gc'ed last tick
 	var/totaldels = 0
 	var/totalgcs = 0
 
-	var/highest_del_time = 0
-	var/highest_del_tickusage = 0
+	var/highest_del_ms = 0
+	var/highest_del_type_string = ""
 
 	var/list/pass_counts
 	var/list/fail_counts
 
 	var/list/items = list() // Holds our qdel_item statistics datums
 
-	// List of Queues
-	// Each queue is a list of refID's of things that should be garbage collected
-	// refID's are associated with the time at which they time out and need to be manually del()
-	// we do this so we aren't constantly locating them and preventing them from being gc'd
+	//Queue
 	var/list/queues
+	#ifdef REFERENCE_TRACKING
+	var/list/reference_find_on_fail = list()
+	#endif
 
 
 /datum/controller/subsystem/garbage/PreInit()
@@ -57,7 +56,7 @@ SUBSYSTEM_DEF(garbage)
 	msg += "TGR:[round((totalgcs/(totaldels+totalgcs))*100, 0.01)]%"
 	msg += " P:[pass_counts.Join(",")]"
 	msg += "|F:[fail_counts.Join(",")]"
-	..(msg)
+	return ..()
 
 /datum/controller/subsystem/garbage/Shutdown()
 	//Adds the del() log to the qdel log file
@@ -68,13 +67,18 @@
 	for(var/path in items)
 		var/datum/qdel_item/I = items[path]
 		dellog += "Path: [path]"
+		if (I.qdel_flags & QDEL_ITEM_SUSPENDED_FOR_LAG)
+			dellog += "\tSUSPENDED FOR LAG"
 		if (I.failures)
 			dellog += "\tFailures: [I.failures]"
 		dellog += "\tqdel() Count: [I.qdels]"
 		dellog += "\tDestroy() Cost: [I.destroy_time]ms"
 		if (I.hard_deletes)
-			dellog += "\tTotal Hard Deletes [I.hard_deletes]"
+			dellog += "\tTotal Hard Deletes: [I.hard_deletes]"
 			dellog += "\tTime Spent Hard Deleting: [I.hard_delete_time]ms"
+			dellog += "\tHighest Time Spent Hard Deleting: [I.hard_delete_max]ms"
+			if (I.hard_deletes_over_threshold)
+				dellog += "\tHard Deletes Over Threshold: [I.hard_deletes_over_threshold]"
 		if (I.slept_destroy)
 			dellog += "\tSleeps: [I.slept_destroy]"
 		if (I.no_respect_force)
@@ -85,41 +89,18 @@
 
 /datum/controller/subsystem/garbage/fire()
 	//the fact that this resets its processing each fire (rather then resume where it left off) is intentional.
-	var/queue = GC_QUEUE_PREQUEUE
+	var/queue = GC_QUEUE_CHECK
 
 	while (state == SS_RUNNING)
 		switch (queue)
-			if (GC_QUEUE_PREQUEUE)
-				HandlePreQueue()
-				queue = GC_QUEUE_PREQUEUE+1
 			if (GC_QUEUE_CHECK)
 				HandleQueue(GC_QUEUE_CHECK)
 				queue = GC_QUEUE_CHECK+1
 			if (GC_QUEUE_HARDDELETE)
 				HandleQueue(GC_QUEUE_HARDDELETE)
-				break
-
-	if (state == SS_PAUSED) //make us wait again before the next run.
-		state = SS_RUNNING
-
-//If you see this proc high on the profile, what you are really seeing is the garbage collection/soft delete overhead in byond.
-//Don't attempt to optimize, not worth the effort.
-/datum/controller/subsystem/garbage/proc/HandlePreQueue()
-	var/list/tobequeued = queues[GC_QUEUE_PREQUEUE]
-	var/static/count = 0
-	if (count)
-		var/c = count
-		count = 0 //so if we runtime on the Cut, we don't try again.
-		tobequeued.Cut(1,c+1)
-
-	for (var/ref in tobequeued)
-		count++
-		Queue(ref, GC_QUEUE_PREQUEUE+1)
-		if (MC_TICK_CHECK)
-			break
-	if (count)
-		tobequeued.Cut(1,count+1)
-		count = 0
+				if (state == SS_PAUSED) //make us wait again before the next run.
+					state = SS_RUNNING
+				break
 
 /datum/controller/subsystem/garbage/proc/HandleQueue(level = GC_QUEUE_CHECK)
 	if (level == GC_QUEUE_CHECK)
@@ -137,18 +118,21 @@
 
 	lastlevel = level
 
-	for (var/refID in queue)
-		if (!refID)
+	//We do this rather then for(var/refID in queue) because that sort of for loop copies the whole list.
+	//Normally this isn't expensive, but the gc queue can grow to 40k items, and that gets costly/causes overrun.
+	for (var/i in 1 to length(queue))
+		var/list/L = queue[i]
+		if (length(L) < 2)
 			count++
 			if (MC_TICK_CHECK)
-				break
+				return
 			continue
 
-		var/GCd_at_time = queue[refID]
+		var/GCd_at_time = L[1]
 		if(GCd_at_time > cut_off_time)
 			break // Everything else is newer, skip them
 		count++
+
+		var/refID = L[2]
 		var/datum/D
 		D = locate(refID)
@@ -156,50 +140,74 @@
 			++gcedlasttick
 			++totalgcs
 			pass_counts[level]++
+			#ifdef REFERENCE_TRACKING
+			reference_find_on_fail -= refID //It's deleted we don't care anymore.
+			#endif
 			if (MC_TICK_CHECK)
-				break
+				return
 			continue
 
 		// Something's still referring to the qdel'd object.
 		fail_counts[level]++
 
+		#ifdef REFERENCE_TRACKING
+		var/ref_searching = FALSE
+		#endif
+
 		switch (level)
 			if (GC_QUEUE_CHECK)
+				#ifdef REFERENCE_TRACKING
+				if(reference_find_on_fail[refID])
+					INVOKE_ASYNC(D, /datum/proc/find_references)
+					ref_searching = TRUE
 				#ifdef GC_FAILURE_HARD_LOOKUP
-				D.find_references()
+				else
+					INVOKE_ASYNC(D, /datum/proc/find_references)
+					ref_searching = TRUE
+				#endif
+				reference_find_on_fail -= refID
 				#endif
 				var/type = D.type
 				var/datum/qdel_item/I = items[type]
-				var/extrainfo = "--"
-				if(istype(D,/image))
-					var/image/img = D
-					var/icon/ico = img.icon
-					extrainfo = "L:[img.loc] -- I:[ico] -- IS:[img.icon_state] --"
-				testing("GC: -- \ref[D] | [type] was unable to be GC'd [extrainfo]")
+				log_world("## TESTING: GC: -- \ref[D] | [type] was unable to be GC'd --")
+				#ifdef TESTING
+				for(var/c in GLOB.admins) //Using testing() here would fill the logs with ADMIN_VV garbage
+					var/client/admin = c
+					if(!check_rights_for(admin, R_ADMIN))
+						continue
+					to_chat(admin, "## TESTING: GC: -- [ADMIN_VV(D)] | [type] was unable to be GC'd --")
+				#endif
 				I.failures++
+
+				if (I.qdel_flags & QDEL_ITEM_SUSPENDED_FOR_LAG)
+					#ifdef REFERENCE_TRACKING
+					if(ref_searching)
+						return //ref searching intentionally cancels all further fires while running so things that hold references don't end up getting deleted, so we want to return here instead of continue
+					#endif
+					continue
 			if (GC_QUEUE_HARDDELETE)
 				HardDelete(D)
 				if (MC_TICK_CHECK)
-					break
+					return
 				continue
 
 		Queue(D, level+1)
 
+		#ifdef REFERENCE_TRACKING
+		if(ref_searching)
+			return
+		#endif
+
 		if (MC_TICK_CHECK)
-			break
+			return
 	if (count)
 		queue.Cut(1,count+1)
 		count = 0
 
-/datum/controller/subsystem/garbage/proc/PreQueue(datum/D)
-	if (D.gc_destroyed == GC_CURRENTLY_BEING_QDELETED)
-		queues[GC_QUEUE_PREQUEUE] += D
-		D.gc_destroyed = GC_QUEUED_FOR_QUEUING
-
 /datum/controller/subsystem/garbage/proc/Queue(datum/D, level = GC_QUEUE_CHECK)
 	if (isnull(D))
 		return
-	if (D.gc_destroyed == GC_QUEUED_FOR_HARD_DEL)
-		level = GC_QUEUE_HARDDELETE
 	if (level > GC_QUEUE_COUNT)
 		HardDelete(D)
 		return
@@ -208,89 +216,88 @@
 
 	D.gc_destroyed = gctime
 	var/list/queue = queues[level]
-	if (queue[refid])
-		queue -= refid // Removing any previous references that were GC'd so that the current object will be at the end of the list.
 
-	queue[refid] = gctime
+	queue[++queue.len] = list(gctime, refid) // not += for byond reasons
 
 //this is mainly to separate things profile wise.
 /datum/controller/subsystem/garbage/proc/HardDelete(datum/D)
-	var/time = world.timeofday
-	var/tick = TICK_USAGE
-	var/ticktime = world.time
 	++delslasttick
 	++totaldels
 	var/type = D.type
 	var/refID = "\ref[D]"
 
+	var/tick_usage = TICK_USAGE
 	del(D)
-	tick = (TICK_USAGE-tick+((world.time-ticktime)/world.tick_lag*100))
+	tick_usage = TICK_USAGE_TO_MS(tick_usage)
 
 	var/datum/qdel_item/I = items[type]
 
 	I.hard_deletes++
-	I.hard_delete_time += TICK_DELTA_TO_MS(tick)
+	I.hard_delete_time += tick_usage
+	if (tick_usage > I.hard_delete_max)
+		I.hard_delete_max = tick_usage
+	if (tick_usage > highest_del_ms)
+		highest_del_ms = tick_usage
+		highest_del_type_string = "[type]"
 
+	var/time = MS2DS(tick_usage)
 
-	if (tick > highest_del_tickusage)
-		highest_del_tickusage = tick
-	time = world.timeofday - time
-	if (!time && TICK_DELTA_TO_MS(tick) > 1)
-		time = TICK_DELTA_TO_MS(tick)/100
-	if (time > highest_del_time)
-		highest_del_time = time
-	if (time > 20) //VOREStation Edit
-		log_game("Error: [type]([refID]) took longer than 2 seconds to delete (took [time/10] seconds to delete)") //VOREStation Edit
-		message_admins("Error: [type]([refID]) took longer than 2 seconds to delete (took [time/10] seconds to delete).") //VOREStation Edit
+	if (time > 0.1 SECONDS)
 		postpone(time)
-
-/datum/controller/subsystem/garbage/proc/HardQueue(datum/D)
-	if (D.gc_destroyed == GC_CURRENTLY_BEING_QDELETED)
-		queues[GC_QUEUE_PREQUEUE] += D
-		D.gc_destroyed = GC_QUEUED_FOR_HARD_DEL
+	var/threshold = 0.5 // Default, make a config
+	if (threshold && (time > threshold SECONDS))
+		if (!(I.qdel_flags & QDEL_ITEM_ADMINS_WARNED))
+			log_and_message_admins("Error: [type]([refID]) took longer than [threshold] seconds to delete (took [round(time/10, 0.1)] seconds to delete)")
+			I.qdel_flags |= QDEL_ITEM_ADMINS_WARNED
+		I.hard_deletes_over_threshold++
+		var/overrun_limit = 0 // Default, make a config
+		if (overrun_limit && I.hard_deletes_over_threshold >= overrun_limit)
+			I.qdel_flags |= QDEL_ITEM_SUSPENDED_FOR_LAG
 
 /datum/controller/subsystem/garbage/Recover()
 	if (istype(SSgarbage.queues))
 		for (var/i in 1 to SSgarbage.queues.len)
 			queues[i] |= SSgarbage.queues[i]
 
+/// Qdel Item: Holds statistics on each type that passes thru qdel
 /datum/qdel_item
-	var/name = ""
-	var/qdels = 0 //Total number of times it's passed thru qdel.
-	var/destroy_time = 0 //Total amount of milliseconds spent processing this type's Destroy()
-	var/failures = 0 //Times it was queued for soft deletion but failed to soft delete.
-	var/hard_deletes = 0 //Different from failures because it also includes QDEL_HINT_HARDDEL deletions
-	var/hard_delete_time = 0//Total amount of milliseconds spent hard deleting this type.
-	var/no_respect_force = 0//Number of times it's not respected force=TRUE
-	var/no_hint = 0 //Number of times it's not even bother to give a qdel hint
-	var/slept_destroy = 0 //Number of times it's slept in its destroy
+	var/name = "" //!Holds the type as a string for this type
+	var/qdels = 0 //!Total number of times it's passed thru qdel.
+	var/destroy_time = 0 //!Total amount of milliseconds spent processing this type's Destroy()
+	var/failures = 0 //!Times it was queued for soft deletion but failed to soft delete.
+	var/hard_deletes = 0 //!Different from failures because it also includes QDEL_HINT_HARDDEL deletions
+	var/hard_delete_time = 0 //!Total amount of milliseconds spent hard deleting this type.
+	var/hard_delete_max = 0 //!Highest time spent hard_deleting this in ms.
+	var/hard_deletes_over_threshold = 0 //!Number of times hard deletes took longer than the configured threshold
+	var/no_respect_force = 0 //!Number of times it's not respected force=TRUE
+	var/no_hint = 0 //!Number of times it's not even bother to give a qdel hint
+	var/slept_destroy = 0 //!Number of times it's slept in its destroy
+	var/qdel_flags = 0 //!Flags related to this type's trip thru qdel.
 
 /datum/qdel_item/New(mytype)
 	name = "[mytype]"
 
 
-// Should be treated as a replacement for the 'del' keyword.
-// Datums passed to this will be given a chance to clean up references to allow the GC to collect them.
-/proc/qdel(datum/D, force=FALSE)
+/// Should be treated as a replacement for the 'del' keyword.
+///
+/// Datums passed to this will be given a chance to clean up references to allow the GC to collect them.
+/proc/qdel(datum/D, force=FALSE, ...)
 	if(!istype(D))
 		del(D)
 		return
 
 	var/datum/qdel_item/I = SSgarbage.items[D.type]
 	if (!I)
 		I = SSgarbage.items[D.type] = new /datum/qdel_item(D.type)
 	I.qdels++
 
 
 	if(isnull(D.gc_destroyed))
-		if(SEND_SIGNAL(D, COMSIG_PARENT_PREQDELETED, force)) // Give the components a chance to prevent their parent from being deleted
+		if (SEND_SIGNAL(D, COMSIG_PARENT_PREQDELETED, force)) // Give the components a chance to prevent their parent from being deleted
 			return
 		D.gc_destroyed = GC_CURRENTLY_BEING_QDELETED
 		var/start_time = world.time
 		var/start_tick = world.tick_usage
 		SEND_SIGNAL(D, COMSIG_PARENT_QDELETING, force) // Let the (remaining) components know about the result of Destroy
-		var/hint = D.Destroy(force) // Let our friend know they're about to get fucked up.
+		var/hint = D.Destroy(arglist(args.Copy(2))) // Let our friend know they're about to get fucked up.
 		if(world.time != start_time)
 			I.slept_destroy++
 		else
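Illustrative note (not part of the diff): after this change each GC queue holds two-element list entries (timestamp, then soft reference) instead of an associative refID -> time map. A hypothetical debug helper shows the layout HandleQueue() relies on:

/proc/debug_peek_gc_queue(level = GC_QUEUE_CHECK) // hypothetical, for illustration only
	var/list/queue = SSgarbage.queues[level]
	for(var/list/entry in queue)
		if(length(entry) < 2)
			continue
		var/queued_at = entry[1] // world.time when the object was queued
		var/refID = entry[2]     // "\ref[D]" soft reference string
		to_world_log("GC queue [level]: [refID] queued at [queued_at]")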
@@ -299,7 +306,7 @@
 				return
 		switch(hint)
 			if (QDEL_HINT_QUEUE) //qdel should queue the object for deletion.
-				SSgarbage.PreQueue(D)
+				SSgarbage.Queue(D)
 			if (QDEL_HINT_IWILLGC)
 				D.gc_destroyed = world.time
 				return
@@ -311,7 +318,7 @@
 				// indicates the objects Destroy() does not respect force
 				#ifdef TESTING
 				if(!I.no_respect_force)
-					crash_with("[D.type] has been force deleted, but is \
+					testing("WARNING: [D.type] has been force deleted, but is \
 						returning an immortal QDEL_HINT, indicating it does \
 						not respect the force flag for qdel(). It has been \
 						placed in the queue, further instances of this type \
@@ -319,136 +326,25 @@
 				#endif
 				I.no_respect_force++
 
-				SSgarbage.PreQueue(D)
-			if (QDEL_HINT_HARDDEL) //qdel should assume this object won't gc, and queue a hard delete using a hard reference to save time from the locate()
-				SSgarbage.HardQueue(D)
+				SSgarbage.Queue(D)
+			if (QDEL_HINT_HARDDEL) //qdel should assume this object won't gc, and queue a hard delete
+				SSgarbage.Queue(D, GC_QUEUE_HARDDELETE)
 			if (QDEL_HINT_HARDDEL_NOW) //qdel should assume this object won't gc, and hard del it post haste.
 				SSgarbage.HardDelete(D)
-			if (QDEL_HINT_FINDREFERENCE)//qdel will, if TESTING is enabled, display all references to this object, then queue the object for deletion.
-				SSgarbage.PreQueue(D)
-				#ifdef TESTING
+			#ifdef REFERENCE_TRACKING
+			if (QDEL_HINT_FINDREFERENCE) //qdel will, if REFERENCE_TRACKING is enabled, display all references to this object, then queue the object for deletion.
+				SSgarbage.Queue(D)
 				D.find_references()
+			if (QDEL_HINT_IFFAIL_FINDREFERENCE) //qdel will, if REFERENCE_TRACKING is enabled and the object fails to collect, display all references to this object.
+				SSgarbage.Queue(D)
+				SSgarbage.reference_find_on_fail["\ref[D]"] = TRUE
 			#endif
 			else
 				#ifdef TESTING
 				if(!I.no_hint)
-					crash_with("[D.type] is not returning a qdel hint. It is being placed in the queue. Further instances of this type will also be queued.")
+					testing("WARNING: [D.type] is not returning a qdel hint. It is being placed in the queue. Further instances of this type will also be queued.")
 				#endif
 				I.no_hint++
-				SSgarbage.PreQueue(D)
+				SSgarbage.Queue(D)
 	else if(D.gc_destroyed == GC_CURRENTLY_BEING_QDELETED)
 		CRASH("[D.type] destroy proc was called multiple times, likely due to a qdel loop in the Destroy logic")
-
-#ifdef TESTING
-
-/datum/verb/find_refs()
-	set category = "Debug"
-	set name = "Find References"
-	set background = 1
-	set src in world
-
-	find_references(FALSE)
-
-/datum/proc/find_references(skip_alert)
-	running_find_references = type
-	if(usr && usr.client)
-		if(usr.client.running_find_references)
-			testing("CANCELLED search for references to a [usr.client.running_find_references].")
-			usr.client.running_find_references = null
-			running_find_references = null
-			//restart the garbage collector
-			SSgarbage.can_fire = 1
-			SSgarbage.next_fire = world.time + world.tick_lag
-			return
-
-		if(!skip_alert)
-			if(tgui_alert(usr, "Running this will lock everything up for about 5 minutes. Would you like to begin the search?", "Find References", list("Yes", "No")) == "No")
-				running_find_references = null
-				return
-
-	//this keeps the garbage collector from failing to collect objects being searched for in here
-	SSgarbage.can_fire = 0
-
-	if(usr && usr.client)
-		usr.client.running_find_references = type
-
-	testing("Beginning search for references to a [type].")
-	last_find_references = world.time
-
-	// DoSearchVar(GLOB) // If we ever implement GLOB this would be the place.
-	for(var/datum/thing in world) //atoms (don't beleive it's lies)
-		DoSearchVar(thing, "World -> [thing]")
-
-	for (var/datum/thing) //datums
-		DoSearchVar(thing, "World -> [thing]")
-
-	for (var/client/thing) //clients
-		DoSearchVar(thing, "World -> [thing]")
-
-	testing("Completed search for references to a [type].")
-	if(usr && usr.client)
-		usr.client.running_find_references = null
-	running_find_references = null
-
-	//restart the garbage collector
-	SSgarbage.can_fire = 1
-	SSgarbage.next_fire = world.time + world.tick_lag
-
-/datum/verb/qdel_then_find_references()
-	set category = "Debug"
-	set name = "qdel() then Find References"
-	set background = 1
-	set src in world
-
-	qdel(src)
-	if(!running_find_references)
-		find_references(TRUE)
-
-/datum/proc/DoSearchVar(X, Xname, recursive_limit = 64)
-	if(usr && usr.client && !usr.client.running_find_references)
-		return
-	if (!recursive_limit)
-		return
-
-	if(istype(X, /datum))
-		var/datum/D = X
-		if(D.last_find_references == last_find_references)
-			return
-
-		D.last_find_references = last_find_references
-		var/list/L = D.vars
-
-		for(var/varname in L)
-			if (varname == "vars")
-				continue
-			var/variable = L[varname]
-
-			if(variable == src)
-				testing("Found [src.type] \ref[src] in [D.type]'s [varname] var. [Xname]")
-
-			else if(islist(variable))
-				DoSearchVar(variable, "[Xname] -> list", recursive_limit-1)
-
-	else if(islist(X))
-		var/normal = IS_NORMAL_LIST(X)
-		for(var/I in X)
-			if (I == src)
-				testing("Found [src.type] \ref[src] in list [Xname].")
-
-			else if (I && !isnum(I) && normal && X[I] == src)
-				testing("Found [src.type] \ref[src] in list [Xname]\[[I]\]")
-
-			else if (islist(I))
-				DoSearchVar(I, "[Xname] -> list", recursive_limit-1)
-
-	#ifndef FIND_REF_NO_CHECK_TICK
-	CHECK_TICK
-	#endif
-
-#endif
-
-
-/image/Destroy()
-	..()
-	loc = null
-	return QDEL_HINT_QUEUE
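Illustrative note (not part of the diff): with the new /proc/qdel(datum/D, force=FALSE, ...) signature, any extra arguments are forwarded to Destroy() via arglist(args.Copy(2)). A hypothetical override that consumes one:

/obj/item/example_widget/Destroy(force, reason) // hypothetical type and extra argument
	if(reason)
		to_world_log("example_widget deleted: [reason]")
	return ..()

// caller side: qdel(widget, TRUE, "round cleanup")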
@@ -10,9 +10,9 @@ SUBSYSTEM_DEF(overlays)
 	var/list/overlay_icon_state_caches // Cache thing
 	var/list/overlay_icon_cache // Cache thing
 
-var/global/image/stringbro = new() // Temporarily super-global because of BYOND init order dumbness.
-var/global/image/iconbro = new() // Temporarily super-global because of BYOND init order dumbness.
-var/global/image/appearance_bro = new() // Temporarily super-global because of BYOND init order dumbness.
+	var/static/image/stringbro
+	var/static/image/iconbro
+	var/static/image/appearance_bro
 
 /datum/controller/subsystem/overlays/PreInit()
 	overlay_icon_state_caches = list()
@@ -20,6 +20,10 @@ var/global/image/appearance_bro = new() // Temporarily super-global because of B
 	queue = list()
 	stats = list()
 
+	stringbro = new()
+	iconbro = new()
+	appearance_bro = new()
+
 /datum/controller/subsystem/overlays/Initialize()
 	fire(mc_check = FALSE)
 	..()
@@ -71,12 +75,12 @@ var/global/image/appearance_bro = new() // Temporarily super-global because of B
 	var/cached_appearance = cached_icon["[iconstate]"]
 	if (cached_appearance)
 		return cached_appearance
-	stringbro.icon = icon
-	stringbro.icon_state = iconstate
+	SSoverlays.stringbro.icon = icon
+	SSoverlays.stringbro.icon_state = iconstate
 	if (!cached_icon) //not using the macro to save an associated lookup
 		cached_icon = list()
 		icon_states_cache[icon] = cached_icon
-	var/cached_appearance = stringbro.appearance
+	var/cached_appearance = SSoverlays.stringbro.appearance
 	cached_icon["[iconstate]"] = cached_appearance
 	return cached_appearance
 
@@ -85,8 +89,8 @@ var/global/image/appearance_bro = new() // Temporarily super-global because of B
 	var/list/icon_cache = SSoverlays.overlay_icon_cache
 	. = icon_cache[icon]
 	if (!.)
-		iconbro.icon = icon
-		. = iconbro.appearance
+		SSoverlays.iconbro.icon = icon
+		. = SSoverlays.iconbro.appearance
 		icon_cache[icon] = .
 
 /atom/proc/build_appearance_list(old_overlays)
@@ -106,11 +110,11 @@ var/global/image/appearance_bro = new() // Temporarily super-global because of B
 			var/atom/A = overlay
 			if (A.flags & OVERLAY_QUEUED)
 				COMPILE_OVERLAYS(A)
-		appearance_bro.appearance = overlay //this works for images and atoms too!
+		SSoverlays.appearance_bro.appearance = overlay //this works for images and atoms too!
 		if(!ispath(overlay))
 			var/image/I = overlay
-			appearance_bro.dir = I.dir
-		new_overlays += appearance_bro.appearance
+			SSoverlays.appearance_bro.dir = I.dir
+		new_overlays += SSoverlays.appearance_bro.appearance
 	return new_overlays
 
 #define NOT_QUEUED_ALREADY (!(flags & OVERLAY_QUEUED))
@@ -14,9 +14,13 @@
 	var/datum_flags = NONE
 	var/trigger_uid //CHOMPEdit
 
-	#ifdef TESTING
+	#ifdef REFERENCE_TRACKING
 	var/tmp/running_find_references
 	var/tmp/last_find_references = 0
+	#ifdef REFERENCE_TRACKING_DEBUG
+	///Stores info about where refs are found, used for sanity checks and testing
+	var/list/found_refs
+	#endif
 	#endif
 
 	// Default implementation of clean-up code.
code/datums/reference_tracking.dm (new file, 140 lines)
@@ -0,0 +1,140 @@
+#ifdef REFERENCE_TRACKING
+
+/datum/proc/find_references(skip_alert)
+	running_find_references = type
+	if(usr?.client)
+		if(usr.client.running_find_references)
+			log_reftracker("CANCELLED search for references to a [usr.client.running_find_references].")
+			usr.client.running_find_references = null
+			running_find_references = null
+			//restart the garbage collector
+			SSgarbage.can_fire = TRUE
+			SSgarbage.next_fire = world.time + world.tick_lag
+			return
+
+		if(!skip_alert && tgui_alert(usr,"Running this will lock everything up for about 5 minutes. Would you like to begin the search?", "Find References", list("Yes", "No")) != "Yes")
+			running_find_references = null
+			return
+
+	//this keeps the garbage collector from failing to collect objects being searched for in here
+	SSgarbage.can_fire = FALSE
+
+	if(usr?.client)
+		usr.client.running_find_references = type
+
+	log_reftracker("Beginning search for references to a [type].")
+
+	var/starting_time = world.time
+
+	//Time to search the whole game for our ref
+	DoSearchVar(GLOB, "GLOB") //globals
+	log_reftracker("Finished searching globals")
+
+	for(var/datum/thing in world) //atoms (don't beleive its lies)
+		DoSearchVar(thing, "World -> [thing.type]", search_time = starting_time)
+	log_reftracker("Finished searching atoms")
+
+	for(var/datum/thing) //datums
+		DoSearchVar(thing, "Datums -> [thing.type]", search_time = starting_time)
+	log_reftracker("Finished searching datums")
+
+	//Warning, attempting to search clients like this will cause crashes if done on live. Watch yourself
+	for(var/client/thing) //clients
+		DoSearchVar(thing, "Clients -> [thing.type]", search_time = starting_time)
+	log_reftracker("Finished searching clients")
+
+	log_reftracker("Completed search for references to a [type].")
+
+	if(usr?.client)
+		usr.client.running_find_references = null
+	running_find_references = null
+
+	//restart the garbage collector
+	SSgarbage.can_fire = TRUE
+	SSgarbage.next_fire = world.time + world.tick_lag
+
+/datum/proc/DoSearchVar(potential_container, container_name, recursive_limit = 64, search_time = world.time)
+	#ifdef REFERENCE_TRACKING_DEBUG
+	if(!found_refs)
+		found_refs = list()
+	#endif
+
+	if(usr?.client && !usr.client.running_find_references)
+		return
+
+	if(!recursive_limit)
+		log_reftracker("Recursion limit reached. [container_name]")
+		return
+
+	//Check each time you go down a layer. This makes it a bit slow, but it won't effect the rest of the game at all
+	#ifndef FIND_REF_NO_CHECK_TICK
+	CHECK_TICK
+	#endif
+
+	if(istype(potential_container, /datum))
+		var/datum/datum_container = potential_container
+		if(datum_container.last_find_references == search_time)
+			return
+
+		datum_container.last_find_references = search_time
+		var/list/vars_list = datum_container.vars
+
+		for(var/varname in vars_list)
+			#ifndef FIND_REF_NO_CHECK_TICK
+			CHECK_TICK
+			#endif
+			if (varname == "vars" || varname == "vis_locs") //Fun fact, vis_locs don't count for references
+				continue
+			var/variable = vars_list[varname]
+
+			if(variable == src)
+				#ifdef REFERENCE_TRACKING_DEBUG
+				found_refs[varname] = TRUE
+				#endif
+				log_reftracker("Found [type] \ref[src] in [datum_container.type]'s \ref[datum_container] [varname] var. [container_name]")
+				continue
+
+			if(islist(variable))
+				DoSearchVar(variable, "[container_name] \ref[datum_container] -> [varname] (list)", recursive_limit - 1, search_time)
+
+	else if(islist(potential_container))
+		var/normal = IS_NORMAL_LIST(potential_container)
+		var/list/potential_cache = potential_container
+		for(var/element_in_list in potential_cache)
+			#ifndef FIND_REF_NO_CHECK_TICK
+			CHECK_TICK
+			#endif
+			//Check normal entrys
+			if(element_in_list == src)
+				#ifdef REFERENCE_TRACKING_DEBUG
+				found_refs[potential_cache] = TRUE
+				#endif
+				log_reftracker("Found [type] \ref[src] in list [container_name].")
+				continue
+
+			var/assoc_val = null
+			if(!isnum(element_in_list) && normal)
+				assoc_val = potential_cache[element_in_list]
+			//Check assoc entrys
+			if(assoc_val == src)
+				#ifdef REFERENCE_TRACKING_DEBUG
+				found_refs[potential_cache] = TRUE
+				#endif
+				log_reftracker("Found [type] \ref[src] in list [container_name]\[[element_in_list]\]")
+				continue
+			//We need to run both of these checks, since our object could be hiding in either of them
+			//Check normal sublists
+			if(islist(element_in_list))
+				DoSearchVar(element_in_list, "[container_name] -> [element_in_list] (list)", recursive_limit - 1, search_time)
+			//Check assoc sublists
+			if(islist(assoc_val))
+				DoSearchVar(potential_container[element_in_list], "[container_name]\[[element_in_list]\] -> [assoc_val] (list)", recursive_limit - 1, search_time)
+
+/proc/qdel_and_find_ref_if_fail(datum/thing_to_del, force = FALSE)
+	thing_to_del.qdel_and_find_ref_if_fail(force)
+
+/datum/proc/qdel_and_find_ref_if_fail(force = FALSE)
+	SSgarbage.reference_find_on_fail["\ref[src]"] = TRUE
+	qdel(src, force)
+
+#endif
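Illustrative usage note (not part of the diff, and only meaningful in a build compiled with REFERENCE_TRACKING): the new file gives two entry points, an immediate search and a search that only runs if collection fails, e.g.

/proc/debug_hunt_refs(datum/suspect) // hypothetical helper for illustration
	// run the expensive search only if the garbage collector fails to collect the object
	qdel_and_find_ref_if_fail(suspect)
	// or, to search right away without queueing a delete:
	// suspect.find_references()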
@@ -345,6 +345,7 @@
 #include "code\datums\organs.dm"
 #include "code\datums\position_point_vector.dm"
 #include "code\datums\progressbar.dm"
+#include "code\datums\reference_tracking.dm"
 #include "code\datums\riding.dm"
 #include "code\datums\soul_link.dm"
 #include "code\datums\sun.dm"