[MIRROR] JSON Logging Refactor (#11623)

Co-authored-by: Selis <12716288+ItsSelis@users.noreply.github.com>
Co-authored-by: Kashargul <144968721+Kashargul@users.noreply.github.com>
This commit is contained in:
CHOMPStation2StaffMirrorBot
2025-09-14 11:05:26 -07:00
committed by GitHub
parent 272afa33c8
commit 5a62077f2c
425 changed files with 4081 additions and 2568 deletions

View File

@@ -21,7 +21,7 @@ var/datum/controller/transfer_controller/transfer_controller
//VOREStation Edit START
if (round_duration_in_ds >= shift_last_vote - 2 MINUTES)
shift_last_vote = 1000000000000 //Setting to a stupidly high number since it'll be not used again. //CHOMPEdit
to_world(span_world(span_notice("Warning: This upcoming round-extend vote will be your last chance to vote for shift extension. Wrap up your scenes in the next 60 minutes if the round is extended."))) //CHOMPStation Edit
to_chat(world, span_world(span_notice("Warning: This upcoming round-extend vote will be your last chance to vote for shift extension. Wrap up your scenes in the next 60 minutes if the round is extended."))) //CHOMPStation Edit
if (round_duration_in_ds >= shift_hard_end - 1 MINUTE)
init_shift_change(null, 1)
shift_hard_end = timerbuffer + CONFIG_GET(number/vote_autotransfer_interval) //If shuttle somehow gets recalled, let's force it to call again next time a vote would occur.

View File

@@ -254,7 +254,7 @@
value = new_value
E = new_ver
else
warning("[new_ver.type] is deprecated but gave no proper return for DeprecationUpdate()")
WARNING("[new_ver.type] is deprecated but gave no proper return for DeprecationUpdate()")
var/validated = E.ValidateAndSet(value)
if(!validated)

View File

@@ -19,9 +19,18 @@
/// log messages sent in OOC
/datum/config_entry/flag/log_ooc
/// log messages sent in LOOC
/datum/config_entry/flag/log_looc
/// log login/logout
/datum/config_entry/flag/log_access
/// Config entry which enables special logging of failed logins under suspicious circumstances.
/datum/config_entry/flag/log_suspicious_login
/// log prayers
/datum/config_entry/flag/log_prayer
/// log client say
/datum/config_entry/flag/log_say
@@ -29,13 +38,12 @@
/datum/config_entry/flag/log_admin
protection = CONFIG_ENTRY_LOCKED
/// log debug output
/datum/config_entry/flag/log_debug
default = TRUE
/// log game events
/datum/config_entry/flag/log_game
/// log assets
/datum/config_entry/flag/log_asset
/// log voting
/datum/config_entry/flag/log_vote
@@ -51,8 +59,8 @@
/// log admin chat messages
/datum/config_entry/flag/log_adminchat
/// log warnings admins get about bomb construction and such
/datum/config_entry/flag/log_adminwarn
/// log EVENT chat messages
/datum/config_entry/flag/log_eventchat
/// log pda messages
/datum/config_entry/flag/log_pda
@@ -72,9 +80,16 @@
/// logs graffiti
/datum/config_entry/flag/log_graffiti
/// log all world.Topic() calls
/datum/config_entry/flag/log_world_topic
/// logs all timers in buckets on automatic bucket reset (Useful for timer debugging)
/datum/config_entry/flag/log_timers_on_bucket_reset
/// Log human readable versions of json log entries
/datum/config_entry/flag/log_as_human_readable
default = TRUE
// FIXME: Unused
///datum/config_entry/string/nudge_script_path // where the nudge.py script is located
// default = "nudge.py"
@@ -194,6 +209,22 @@
config_entry_value = 15
min_val = 0
/datum/config_entry/number/error_cooldown // The "cooldown" time for each occurrence of a unique error
default = 600
integer = FALSE
min_val = 0
/datum/config_entry/number/error_limit // How many occurrences before the next will silence them
default = 50
/datum/config_entry/number/error_silence_time // How long a unique error will be silenced for
default = 6000
integer = FALSE
/datum/config_entry/number/error_msg_delay // How long to wait between messaging admins about occurrences of a unique error
default = 50
integer = FALSE
/datum/config_entry/number/fps
default = 20
integer = FALSE
@@ -850,3 +881,6 @@
/// If admins with +DEBUG can queue byond-tracy to run the next round.
/datum/config_entry/flag/allow_tracy_queue
protection = CONFIG_ENTRY_LOCKED
/// log vore interactions
/datum/config_entry/flag/log_vore

View File

@@ -53,7 +53,7 @@ GLOBAL_REAL(GLOB, /datum/controller/global_vars)
var/list/global_procs = typesof(/datum/controller/global_vars/proc)
var/expected_len = vars.len - gvars_datum_in_built_vars.len
if(global_procs.len != expected_len)
warning("Unable to detect all global initialization procs! Expected [expected_len] got [global_procs.len]!")
WARNING("Unable to detect all global initialization procs! Expected [expected_len] got [global_procs.len]!")
if(global_procs.len)
var/list/expected_global_procs = vars - gvars_datum_in_built_vars
for(var/I in global_procs)
@@ -65,6 +65,6 @@ GLOBAL_REAL(GLOB, /datum/controller/global_vars)
call(src, I)()
var/end_tick = world.time
if(end_tick - start_tick)
warning("Global [replacetext("[I]", "InitGlobal", "")] slept during initialization!")
WARNING("Global [replacetext("[I]", "InitGlobal", "")] slept during initialization!")
//populate_legacy_globals()

View File

@@ -26,14 +26,14 @@
/proc/callHook(hook, list/arguments=null)
var/hook_path = text2path("/hook/[hook]")
if(!hook_path)
error("Invalid hook '/hook/[hook]' called.")
log_world("## ERROR Invalid hook '/hook/[hook]' called.")
return 0
var/requester = new hook_path
var/status = 1
for(var/P in typesof("[hook_path]/proc"))
if(!call(requester, P)(arglist(arguments)))
error("Hook '[P]' failed or runtimed.")
log_world("## ERROR Hook '[P]' failed or runtimed.")
status = 0
return status

View File

@@ -506,11 +506,11 @@ ADMIN_VERB(cmd_controller_view_ui, R_SERVER|R_DEBUG, "Controller Overview", "Vie
// Gave invalid return value.
if(result && !(result in valid_results))
warning("[subsystem.name] subsystem initialized, returning invalid result [result]. This is a bug.")
WARNING("[subsystem.name] subsystem initialized, returning invalid result [result]. This is a bug.")
// just returned ..() or didn't implement Initialize() at all
if(result == SS_INIT_NONE)
warning("[subsystem.name] subsystem does not implement Initialize() or it returns ..(). If the former is true, the SS_NO_INIT flag should be set for this subsystem.")
WARNING("[subsystem.name] subsystem does not implement Initialize() or it returns ..(). If the former is true, the SS_NO_INIT flag should be set for this subsystem.")
if(result != SS_INIT_FAILURE)
// Some form of success, implicit failure, or the SS in unused.

View File

@@ -16,7 +16,7 @@ var/global/last_tick_duration = 0
/datum/controller/game_controller/New()
//There can be only one master_controller. Out with the old and in with the new.
if(master_controller != src)
log_debug("Rebuilding Master Controller")
log_world("Rebuilding Master Controller")
if(istype(master_controller))
qdel(master_controller)
master_controller = src

View File

@@ -167,7 +167,7 @@ Total Unsimulated Turfs: [world.maxx*world.maxy*world.maxz - simulated_turf_coun
for(var/turf/T in E.connecting_turfs)
edge_log += "+--- Connecting Turf [T] ([T.type]) @ [T.x], [T.y], [T.z] ([T.loc])"
log_debug("Active Edges on ZAS Startup\n" + edge_log.Join("\n"))
log_mapping("Active Edges on ZAS Startup\n" + edge_log.Join("\n"))
startup_active_edge_log = edge_log.Copy()
return SS_INIT_SUCCESS

View File

@@ -15,7 +15,6 @@ SUBSYSTEM_DEF(chemistry)
var/list/chemical_reagents = list()
/datum/controller/subsystem/chemistry/Recover()
log_debug("[name] subsystem Recover().")
chemical_reactions = SSchemistry.chemical_reactions
chemical_reagents = SSchemistry.chemical_reagents

View File

@@ -84,7 +84,7 @@ SUBSYSTEM_DEF(dbcore)
var/datum/db_query/query = popleft(processing_queries)
if(world.time - query.last_activity_time > (5 MINUTES))
stack_trace("Found undeleted query, check the sql.log for the undeleted query and add a delete call to the query datum.")
log_debug("Undeleted query: \"[query.sql]\" LA: [query.last_activity] LAT: [query.last_activity_time]")
log_sql("Undeleted query: \"[query.sql]\" LA: [query.last_activity] LAT: [query.last_activity_time]")
qdel(query)
if(MC_TICK_CHECK)
return
@@ -141,7 +141,7 @@ SUBSYSTEM_DEF(dbcore)
/datum/controller/subsystem/dbcore/Shutdown()
shutting_down = TRUE
log_debug("Clearing DB queries standby:[length(queries_standby)] active: [length(queries_active)] all: [length(all_queries)]")
log_sql("Clearing DB queries standby:[length(queries_standby)] active: [length(queries_active)] all: [length(all_queries)]")
//This is as close as we can get to the true round end before Disconnect() without changing where it's called, defeating the reason this is a subsystem
if(SSdbcore.Connect())
//Execute all waiting queries
@@ -162,7 +162,7 @@ SUBSYSTEM_DEF(dbcore)
query_round_shutdown.Execute(FALSE)
qdel(query_round_shutdown)
log_debug("Done clearing DB queries standby:[length(queries_standby)] active: [length(queries_active)] all: [length(all_queries)]")
log_sql("Done clearing DB queries standby:[length(queries_standby)] active: [length(queries_active)] all: [length(all_queries)]")
if(IsConnected())
Disconnect()
//stop_db_daemon()
@@ -237,7 +237,7 @@ SUBSYSTEM_DEF(dbcore)
else
connection = null
last_error = result["data"]
log_debug("Connect() failed | [last_error]")
log_sql("Connect() failed | [last_error]")
++failed_connections
/datum/controller/subsystem/dbcore/proc/CheckSchemaVersion()
@@ -245,9 +245,9 @@ SUBSYSTEM_DEF(dbcore)
if(Connect())
log_world("Database connection established.")
else
log_debug("Your server failed to establish a connection with the database.")
log_sql("Your server failed to establish a connection with the database.")
else
log_debug("Database is not enabled in configuration.")
log_sql("Database is not enabled in configuration.")
/datum/controller/subsystem/dbcore/proc/InitializeRound()
if(!Connect())
@@ -539,12 +539,12 @@ Ignore_errors instructes mysql to continue inserting rows if some of them have e
. = (status != DB_QUERY_BROKEN)
var/timed_out = !. && findtext(last_error, "Operation timed out")
if(!. && log_error)
log_debug("[last_error] | Query used: [sql] | Arguments: [json_encode(arguments)]")
log_sql("[last_error] | Query used: [sql] | Arguments: [json_encode(arguments)]")
if(!async && timed_out)
log_debug("Query execution started at [start_time]")
log_debug("Query execution ended at [REALTIMEOFDAY]")
log_debug("Slow query timeout detected.")
log_debug("Query used: [sql]")
log_sql("Query execution started at [start_time]")
log_sql("Query execution ended at [REALTIMEOFDAY]")
log_sql("Slow query timeout detected.")
log_sql("Query used: [sql]")
slow_query_check()
/// Sleeps until execution of the query has finished.

View File

@@ -58,7 +58,7 @@ SUBSYSTEM_DEF(events)
active_events -= E
if(!E.event_meta || !E.severity) // datum/event is used here and there for random reasons, maintaining "backwards compatibility"
log_debug("Event of '[E.type]' with missing meta-data has completed.")
log_game("Event of '[E.type]' with missing meta-data has completed.")
return
finished_events += E
@@ -69,7 +69,7 @@ SUBSYSTEM_DEF(events)
if(EM.add_to_queue)
EC.available_events += EM
log_debug("Event '[EM.name]' has completed at [stationtime2text()].")
log_game("Event '[EM.name]' has completed at [stationtime2text()].")
/datum/controller/subsystem/events/proc/delay_events(var/severity, var/delay)
var/datum/event_container/EC = event_containers[severity]

View File

@@ -30,10 +30,10 @@ SUBSYSTEM_DEF(event_ticker)
event_started(E)
/datum/controller/subsystem/event_ticker/proc/event_started(datum/event2/event/E)
log_debug("Event [E.type] is now being ran.")
log_game("Event [E.type] is now being ran.")
active_events += E
/datum/controller/subsystem/event_ticker/proc/event_finished(datum/event2/event/E)
log_debug("Event [E.type] has finished.")
log_game("Event [E.type] has finished.")
active_events -= E
finished_events += E

View File

@@ -21,7 +21,7 @@ SUBSYSTEM_DEF(game_master)
var/next_event = 0 // Minimum amount of time of nothingness until the GM can pick something again.
var/debug_messages = FALSE // If true, debug information is written to `log_debug()`.
var/debug_messages = FALSE // If true, debug information is written to `log_world()`.
/datum/controller/subsystem/game_master/Initialize()
var/list/subtypes = subtypesof(/datum/event2/meta)
@@ -116,7 +116,7 @@ SUBSYSTEM_DEF(game_master)
// if(hours < 1 && mins <= 20) // Don't do anything for the first twenty minutes of the round.
// if(!quiet)
// log_debug("Game Master unable to start event: It is too early.")
// log_game_master("Game Master unable to start event: It is too early.")
// return FALSE
if(hours >= 2 && mins >= 40) // Don't do anything in the last twenty minutes of the round, as well.
if(!quiet)
@@ -133,7 +133,7 @@ SUBSYSTEM_DEF(game_master)
/datum/controller/subsystem/game_master/proc/log_game_master(message)
if(debug_messages)
log_debug("GAME MASTER: [message]")
log_world("GAME MASTER: [message]")
// This object makes the actual decisions.

View File

@@ -117,7 +117,7 @@ SUBSYSTEM_DEF(garbage)
if(LAZYLEN(I.extra_details))
entry["Deleted Metadata"] = I.extra_details
log_debug("", del_log)
log_qdel("", del_log)
/datum/controller/subsystem/garbage/fire()
//the fact that this resets its processing each fire (rather then resume where it left off) is intentional.

View File

@@ -149,7 +149,7 @@ SUBSYSTEM_DEF(job)
/datum/controller/subsystem/job/proc/job_debug_message(message)
if(debug_messages)
log_debug("JOB DEBUG: [message]")
log_world("JOB DEBUG: [message]")
//CHOMPadd start
/datum/controller/subsystem/job/proc/load_camp_lists()
@@ -163,11 +163,11 @@ SUBSYSTEM_DEF(job)
fdel(savepath)
var/json_to_file = json_encode(shift_keys)
if(!json_to_file)
log_debug("Saving: [savepath] failed jsonencode")
log_world("Saving: [savepath] failed jsonencode")
return
//Write it out
rustg_file_write(json_to_file, savepath)
if(!fexists(savepath))
log_debug("Saving: failed to save [savepath]")
log_world("Saving: failed to save [savepath]")
//CHOMPadd end

View File

@@ -192,19 +192,19 @@ SUBSYSTEM_DEF(machines)
/datum/controller/subsystem/machines/Recover()
for(var/datum/D as anything in SSmachines.networks)
if(!istype(D, /datum/pipe_network))
error("Found wrong type during SSmachinery recovery: list=SSmachines.networks, item=[D], type=[D?.type]")
log_world("## ERROR Found wrong type during SSmachinery recovery: list=SSmachines.networks, item=[D], type=[D?.type]")
SSmachines.networks -= D
for(var/datum/D as anything in SSmachines.processing_machines)
if(!istype(D, /obj/machinery))
error("Found wrong type during SSmachinery recovery: list=SSmachines.machines, item=[D], type=[D?.type]")
log_world("## ERROR Found wrong type during SSmachinery recovery: list=SSmachines.machines, item=[D], type=[D?.type]")
SSmachines.processing_machines -= D
for(var/datum/D as anything in SSmachines.powernets)
if(!istype(D, /datum/powernet))
error("Found wrong type during SSmachinery recovery: list=SSmachines.powernets, item=[D], type=[D?.type]")
log_world("## ERROR Found wrong type during SSmachinery recovery: list=SSmachines.powernets, item=[D], type=[D?.type]")
SSmachines.powernets -= D
for(var/datum/D as anything in SSmachines.powerobjs)
if(!istype(D, /obj/item))
error("Found wrong type during SSmachinery recovery: list=SSmachines.powerobjs, item=[D], type=[D?.type]")
log_world("## ERROR Found wrong type during SSmachinery recovery: list=SSmachines.powerobjs, item=[D], type=[D?.type]")
SSmachines.powerobjs -= D
all_machines = SSmachines.all_machines

View File

@@ -59,7 +59,7 @@ SUBSYSTEM_DEF(mapping)
var/turf/T = get_turf(engine_loader)
if(!isturf(T))
to_world_log("[log_info_line(engine_loader)] not on a turf! Cannot place engine template.")
log_mapping("[log_info_line(engine_loader)] not on a turf! Cannot place engine template.")
return
// Choose an engine type
@@ -68,7 +68,7 @@ SUBSYSTEM_DEF(mapping)
var/chosen_name = pick(CONFIG_GET(str_list/engine_map))
chosen_type = map_templates[chosen_name]
if(!istype(chosen_type))
error("Configured engine map [chosen_name] is not a valid engine map name!")
log_mapping("Configured engine map [chosen_name] is not a valid engine map name!")
if(!istype(chosen_type))
var/list/engine_types = list()
for(var/map in map_templates)
@@ -76,7 +76,7 @@ SUBSYSTEM_DEF(mapping)
if(istype(MT))
engine_types += MT
chosen_type = pick(engine_types)
to_world_log("Chose Engine Map: [chosen_type.name]")
log_mapping("Chose Engine Map: [chosen_type.name]")
admin_notice(span_danger("Chose Engine Map: [chosen_type.name]"), R_DEBUG)
// Annihilate movable atoms
@@ -94,12 +94,12 @@ SUBSYSTEM_DEF(mapping)
for(var/list/maplist in deffo_load)
if(!islist(maplist))
error("Lateload Z level [maplist] is not a list! Must be in a list!")
log_mapping("Lateload Z level [maplist] is not a list! Must be in a list!")
continue
for(var/mapname in maplist)
var/datum/map_template/MT = map_templates[mapname]
if(!istype(MT))
error("Lateload Z level \"[mapname]\" is not a valid map!")
log_mapping("Lateload Z level \"[mapname]\" is not a valid map!")
continue
admin_notice("Lateload: [MT]", R_DEBUG)
MT.load_new_z(centered = FALSE)
@@ -112,7 +112,7 @@ SUBSYSTEM_DEF(mapping)
return
if(!islist(picklist)) //So you can have a 'chain' of z-levels that make up one away mission
error("Randompick Z level [picklist] is not a list! Must be in a list!")
log_mapping("Randompick Z level [picklist] is not a list! Must be in a list!")
return
for(var/map in picklist)
@@ -122,7 +122,7 @@ SUBSYSTEM_DEF(mapping)
map = pick(map)
var/datum/map_template/MT = map_templates[map]
if(!istype(MT))
error("Randompick Z level \"[map]\" is not a valid map!")
log_mapping("Randompick Z level \"[map]\" is not a valid map!")
else
admin_notice("Gateway: [MT]", R_DEBUG)
MT.load_new_z(centered = FALSE)
@@ -134,7 +134,7 @@ SUBSYSTEM_DEF(mapping)
return
if(!islist(picklist)) //So you can have a 'chain' of z-levels that make up one away mission
error("Randompick Z level [picklist] is not a list! Must be in a list!")
log_mapping("Randompick Z level [picklist] is not a list! Must be in a list!")
return
for(var/map in picklist)
@@ -144,7 +144,7 @@ SUBSYSTEM_DEF(mapping)
map = pick(map)
var/datum/map_template/MT = map_templates[map]
if(!istype(MT))
error("Randompick Z level \"[map]\" is not a valid map!")
log_mapping("Randompick Z level \"[map]\" is not a valid map!")
else
admin_notice("OM Adventure: [MT]", R_DEBUG)
MT.load_new_z(centered = FALSE)
@@ -156,7 +156,7 @@ SUBSYSTEM_DEF(mapping)
return
if(!islist(picklist)) //So you can have a 'chain' of z-levels that make up one away mission
error("Randompick Z level [picklist] is not a list! Must be in a list!")
log_mapping("Randompick Z level [picklist] is not a list! Must be in a list!")
return
for(var/map in picklist)
@@ -166,7 +166,7 @@ SUBSYSTEM_DEF(mapping)
map = pick(map)
var/datum/map_template/MT = map_templates[map]
if(!istype(MT))
error("Randompick Z level \"[map]\" is not a valid map!")
log_mapping("Randompick Z level \"[map]\" is not a valid map!")
else
admin_notice("Redgate: [MT]", R_DEBUG)
MT.load_new_z(centered = FALSE)

View File

@@ -23,33 +23,33 @@ SUBSYSTEM_DEF(media_tracks)
report_progress("Loading jukebox track: [filename]")
if(!fexists(filename))
error("File not found: [filename]")
log_world("## ERROR File not found: [filename]")
continue
var/list/jsonData = json_decode(file2text(filename))
if(!istype(jsonData))
error("Failed to read tracks from [filename], json_decode failed.")
log_world("## ERROR Failed to read tracks from [filename], json_decode failed.")
continue
for(var/entry in jsonData)
// Critical problems that will prevent the track from working
if(!istext(entry["url"]))
error("Jukebox entry in [filename]: bad or missing 'url'. Tracks must have a URL.")
log_world("## ERROR Jukebox entry in [filename]: bad or missing 'url'. Tracks must have a URL.")
continue
if(!istext(entry["title"]))
error("Jukebox entry in [filename]: bad or missing 'title'. Tracks must have a title.")
log_world("## ERROR Jukebox entry in [filename]: bad or missing 'title'. Tracks must have a title.")
continue
if(!isnum(entry["duration"]))
error("Jukebox entry in [filename]: bad or missing 'duration'. Tracks must have a duration (in deciseconds).")
log_world("## ERROR Jukebox entry in [filename]: bad or missing 'duration'. Tracks must have a duration (in deciseconds).")
continue
// Noncritical problems, we can keep going anyway, but warn so it can be fixed
if(!istext(entry["artist"]))
warning("Jukebox entry in [filename], [entry["title"]]: bad or missing 'artist'. Please consider crediting the artist.")
WARNING("Jukebox entry in [filename], [entry["title"]]: bad or missing 'artist'. Please consider crediting the artist.")
if(!istext(entry["genre"]))
warning("Jukebox entry in [filename], [entry["title"]]: bad or missing 'genre'. Please consider adding a genre.")
WARNING("Jukebox entry in [filename], [entry["title"]]: bad or missing 'genre'. Please consider adding a genre.")
var/datum/track/T = new(entry["url"], entry["title"], entry["duration"], entry["artist"], entry["genre"])

View File

@@ -28,8 +28,8 @@ if we end up with multiple renamable lateload overmap objects.*/
V.modify_descriptors()
if(V.visitable_renamed) //could just if(D.modify_descriptors()), but having a var recording renaming is useful for debugging and stuff!
if(V.known)
to_world_log("##Overmap Renamer: Renamed Debris Field as: [V.name]")
log_mapping("##Overmap Renamer: Renamed Debris Field as: [V.name]")
admin_notice(span_danger("Debris Field name chosen as [V.name]"), R_DEBUG)
else
to_world_log("##Overmap Renamer: Renamed Debris Field as: [V.real_name]")
log_mapping("##Overmap Renamer: Renamed Debris Field as: [V.real_name]")
admin_notice(span_danger("Debris Field name chosen as [V.real_name]"), R_DEBUG)

View File

@@ -63,21 +63,21 @@ SUBSYSTEM_DEF(pathfinder)
stoplag(1)
if(world.time > started + PATHFINDER_TIMEOUT)
stack_trace("pathfinder timeout; check debug logs.")
log_debug("pathfinder timeout of instance with debug variables [instance.debug_log_string()]")
log_runtime("pathfinder timeout of instance with debug variables [instance.debug_log_string()]")
return
else
while(pathfinding_mutex)
stoplag(3)
if(world.time > started + PATHFINDER_TIMEOUT)
stack_trace("pathfinder timeout; check debug logs.")
log_debug("pathfinder timeout of instance with debug variables [instance.debug_log_string()]")
log_runtime("pathfinder timeout of instance with debug variables [instance.debug_log_string()]")
return
--pathfinding_blocked
pathfinding_mutex = TRUE
. = instance.search()
if(world.time > started + PATHFINDER_TIMEOUT)
stack_trace("pathfinder timeout; check debug logs.")
log_debug("pathfinder timeout of instance with debug variables [instance.debug_log_string()]")
log_runtime("pathfinder timeout of instance with debug variables [instance.debug_log_string()]")
pathfinding_mutex = FALSE
#undef PATHFINDER_TIMEOUT

View File

@@ -12,7 +12,7 @@ SUBSYSTEM_DEF(points_of_interest)
/datum/controller/subsystem/points_of_interest/Initialize()
while (poi_queue.len)
load_next_poi()
to_world_log("Initializing POIs")
log_mapping("Initializing POIs")
admin_notice(span_danger("Initializing POIs"), R_DEBUG)
return SS_INIT_SUCCESS
@@ -50,7 +50,7 @@ SUBSYSTEM_DEF(points_of_interest)
return
var/turf/T = get_turf(poi_to_load)
if(!isturf(T))
to_world_log("[log_info_line(poi_to_load)] not on a turf! Cannot place poi template.")
log_mapping("[log_info_line(poi_to_load)] not on a turf! Cannot place poi template.")
return
// Choose a poi

View File

@@ -9,12 +9,12 @@ PROCESSING_SUBSYSTEM_DEF(bellies)
runlevels = RUNLEVEL_GAME|RUNLEVEL_POSTGAME
/datum/controller/subsystem/processing/bellies/Recover()
log_debug("[name] subsystem Recover().")
log_runtime("[name] subsystem Recover().")
if(SSbellies.current_thing)
log_debug("current_thing was: (\ref[SSbellies.current_thing])[SSbellies.current_thing]([SSbellies.current_thing.type]) - currentrun: [SSbellies.currentrun.len] vs total: [SSbellies.processing.len]")
log_runtime("current_thing was: (\ref[SSbellies.current_thing])[SSbellies.current_thing]([SSbellies.current_thing.type]) - currentrun: [SSbellies.currentrun.len] vs total: [SSbellies.processing.len]")
var/list/old_processing = SSbellies.processing.Copy()
for(var/datum/D in old_processing)
if(!isbelly(D))
log_debug("[name] subsystem Recover() found inappropriate item in list: [D.type]")
log_runtime("[name] subsystem Recover() found inappropriate item in list: [D.type]")
if(CHECK_BITFIELD(D.datum_flags, DF_ISPROCESSING))
processing |= D

View File

@@ -7,9 +7,9 @@ PROCESSING_SUBSYSTEM_DEF(fastprocess)
flags = SS_NO_INIT
/datum/controller/subsystem/processing/fastprocess/Recover()
log_debug("[name] subsystem Recover().")
log_runtime("[name] subsystem Recover().")
if(SSfastprocess.current_thing)
log_debug("current_thing was: (\ref[SSfastprocess.current_thing])[SSfastprocess.current_thing]([SSfastprocess.current_thing.type]) - currentrun: [SSfastprocess.currentrun.len] vs total: [SSfastprocess.processing.len]")
log_runtime("current_thing was: (\ref[SSfastprocess.current_thing])[SSfastprocess.current_thing]([SSfastprocess.current_thing.type]) - currentrun: [SSfastprocess.currentrun.len] vs total: [SSfastprocess.processing.len]")
var/list/old_processing = SSfastprocess.processing.Copy()
for(var/datum/D in old_processing)
if(CHECK_BITFIELD(D.datum_flags, DF_ISPROCESSING))

View File

@@ -5,12 +5,12 @@ PROCESSING_SUBSYSTEM_DEF(obj)
wait = 20
/datum/controller/subsystem/processing/obj/Recover()
log_debug("[name] subsystem Recover().")
log_runtime("[name] subsystem Recover().")
if(SSobj.current_thing)
log_debug("current_thing was: (\ref[SSobj.current_thing])[SSobj.current_thing]([SSobj.current_thing.type]) - currentrun: [SSobj.currentrun.len] vs total: [SSobj.processing.len]")
log_runtime("current_thing was: (\ref[SSobj.current_thing])[SSobj.current_thing]([SSobj.current_thing.type]) - currentrun: [SSobj.currentrun.len] vs total: [SSobj.processing.len]")
var/list/old_processing = SSobj.processing.Copy()
for(var/datum/D in old_processing)
if(!isobj(D))
log_debug("[name] subsystem Recover() found inappropriate item in list: [D.type]")
log_runtime("[name] subsystem Recover() found inappropriate item in list: [D.type]")
if(CHECK_BITFIELD(D.datum_flags, DF_ISPROCESSING))
processing |= D

View File

@@ -16,9 +16,9 @@ SUBSYSTEM_DEF(processing)
var/datum/current_thing
/datum/controller/subsystem/processing/Recover()
log_debug("[name] subsystem Recover().")
log_runtime("[name] subsystem Recover().")
if(SSprocessing.current_thing)
log_debug("current_thing was: (\ref[SSprocessing.current_thing])[SSprocessing.current_thing]([SSprocessing.current_thing.type]) - currentrun: [SSprocessing.currentrun.len] vs total: [SSprocessing.processing.len]")
log_runtime("current_thing was: (\ref[SSprocessing.current_thing])[SSprocessing.current_thing]([SSprocessing.current_thing.type]) - currentrun: [SSprocessing.currentrun.len] vs total: [SSprocessing.processing.len]")
var/list/old_processing = SSprocessing.processing.Copy()
for(var/datum/D in old_processing)
if(CHECK_BITFIELD(D.datum_flags, DF_ISPROCESSING))
@@ -66,7 +66,6 @@ SUBSYSTEM_DEF(processing)
msg += "- Process subsystems are processed tail-first -\n"
if(!currentrun || !processing)
msg += "ERROR: A critical list [currentrun ? "processing" : "currentrun"] is gone!"
log_game(msg)
log_world(msg)
return
msg += "Lists: current_run: [currentrun.len], processing: [processing.len]\n"
@@ -93,7 +92,6 @@ SUBSYSTEM_DEF(processing)
for(var/i in start to end)
msg += "[describeThis(processing[i])][i == position ? " << TAIL" : ""]\n"
msg += "---\n"
log_game(msg)
log_world(msg)
/datum/proc/DebugSubsystemProcess(var/wait, var/times_fired, var/datum/controller/subsystem/processing/subsystem)

View File

@@ -9,9 +9,9 @@ PROCESSING_SUBSYSTEM_DEF(projectiles)
var/global_iterations_per_move = 16
/datum/controller/subsystem/processing/projectiles/Recover()
log_debug("[name] subsystem Recover().")
log_runtime("[name] subsystem Recover().")
if(SSprojectiles.current_thing)
log_debug("current_thing was: (\ref[SSprojectiles.current_thing])[SSprojectiles.current_thing]([SSprojectiles.current_thing.type]) - currentrun: [SSprojectiles.currentrun.len] vs total: [SSprojectiles.processing.len]")
log_runtime("current_thing was: (\ref[SSprojectiles.current_thing])[SSprojectiles.current_thing]([SSprojectiles.current_thing.type]) - currentrun: [SSprojectiles.currentrun.len] vs total: [SSprojectiles.processing.len]")
var/list/old_processing = SSprojectiles.processing.Copy()
for(var/datum/D in old_processing)
if(CHECK_BITFIELD(D.datum_flags, DF_ISPROCESSING))

View File

@@ -4,12 +4,12 @@ PROCESSING_SUBSYSTEM_DEF(turfs)
flags = SS_NO_INIT
/datum/controller/subsystem/processing/turfs/Recover()
log_debug("[name] subsystem Recover().")
log_runtime("[name] subsystem Recover().")
if(SSturfs.current_thing)
log_debug("current_thing was: (\ref[SSturfs.current_thing])[SSturfs.current_thing]([SSturfs.current_thing.type]) - currentrun: [SSturfs.currentrun.len] vs total: [SSturfs.processing.len]")
log_runtime("current_thing was: (\ref[SSturfs.current_thing])[SSturfs.current_thing]([SSturfs.current_thing.type]) - currentrun: [SSturfs.currentrun.len] vs total: [SSturfs.processing.len]")
var/list/old_processing = SSturfs.processing.Copy()
for(var/datum/D in old_processing)
if(!isturf(D))
log_debug("[name] subsystem Recover() found inappropriate item in list: [D.type]")
log_runtime("[name] subsystem Recover() found inappropriate item in list: [D.type]")
if(CHECK_BITFIELD(D.datum_flags, DF_ISPROCESSING))
processing |= D

View File

@@ -14,9 +14,9 @@ SUBSYSTEM_DEF(reflector)
var/obj/structure/reflector/current_thing
/datum/controller/subsystem/reflector/Recover()
log_debug("[name] subsystem Recover().")
log_runtime("[name] subsystem Recover().")
if(SSreflector.current_thing)
log_debug("current_thing was: (\ref[SSreflector.current_thing])[SSreflector.current_thing]([SSreflector.current_thing.type]) - currentrun: [SSreflector.currentrun.len] vs total: [SSreflector.processing.len]")
log_runtime("current_thing was: (\ref[SSreflector.current_thing])[SSreflector.current_thing]([SSreflector.current_thing.type]) - currentrun: [SSreflector.currentrun.len] vs total: [SSreflector.processing.len]")
var/list/old_processing = SSreflector.processing.Copy()
for(var/datum/D in old_processing)
if(CHECK_BITFIELD(D.datum_flags, DF_ISPROCESSING))

View File

@@ -58,7 +58,7 @@ SUBSYSTEM_DEF(shuttles)
var/datum/shuttle/S = working_shuttles[working_shuttles.len]
working_shuttles.len--
if(!istype(S) || QDELETED(S))
error("Bad entry in SSshuttles.process_shuttles - [log_info_line(S)] ")
log_world("## ERROR Bad entry in SSshuttles.process_shuttles - [log_info_line(S)] ")
process_shuttles -= S
continue
// NOTE - In old system, /datum/shuttle/ferry was processed only if (F.process_state || F.always_process)
@@ -147,7 +147,7 @@ SUBSYSTEM_DEF(shuttles)
if(initial(shuttle.category) != shuttle_type) // Skip if its an "abstract class" datum
shuttle = new shuttle()
shuttle_areas |= shuttle.shuttle_area
log_debug("Initialized shuttle [shuttle] ([shuttle.type])")
log_world("Initialized shuttle [shuttle] ([shuttle.type])")
return shuttle
// Historical note: No need to call shuttle.init_docking_controllers(), controllers register themselves
// and shuttles fetch refs in New(). Shuttles also dock() themselves in new if they want.
@@ -161,7 +161,7 @@ SUBSYSTEM_DEF(shuttles)
S.motherdock = S.current_location.landmark_tag
mothership.shuttle_area |= S.shuttle_area
else
error("Shuttle [S] was unable to find mothership [mothership]!")
log_world("## ERROR Shuttle [S] was unable to find mothership [mothership]!")
// Let shuttles scan their owned areas for objects they want to configure (Called after mothership hookup)
/datum/controller/subsystem/shuttles/proc/hook_up_shuttle_objects(shuttles_list)

View File

@@ -24,10 +24,10 @@ SUBSYSTEM_DEF(sqlite)
if(!sqlite_db)
to_world_log("Failed to load or create a SQLite database.")
log_debug("ERROR: SQLite database is active in config but failed to load.")
log_sql("Failed to load or create a SQLite database.")
log_sql("ERROR: SQLite database is active in config but failed to load.")
else
to_world_log("Sqlite database connected.")
log_sql("Sqlite database connected.")
// Makes the tables, if they do not already exist in the sqlite file.
/datum/controller/subsystem/sqlite/proc/init_schema(database/sqlite_object)
@@ -66,7 +66,7 @@ SUBSYSTEM_DEF(sqlite)
// The desc parameter should be unique for each call, to make it easier to track down where the error occured.
/datum/controller/subsystem/sqlite/proc/sqlite_check_for_errors(var/database/query/query_used, var/desc)
if(query_used && query_used.ErrorMsg())
log_debug("SQLite Error: [desc] : [query_used.ErrorMsg()]")
log_sql("SQLite Error: [desc] : [query_used.ErrorMsg()]")
return TRUE
return FALSE
@@ -171,7 +171,7 @@ SUBSYSTEM_DEF(sqlite)
// This stops mods/admins/etc from guessing the author by shoving names in an MD5 hasher until they pick the right one.
// Don't use this for things needing actual security.
/datum/controller/subsystem/sqlite/proc/get_feedback_pepper()
var/pepper_file = file2list("config/sqlite_feedback_pepper.txt")
var/pepper_file = world.file2list("config/sqlite_feedback_pepper.txt")
var/pepper = null
for(var/line in pepper_file)
if(!line)

View File

@@ -170,7 +170,7 @@ SUBSYSTEM_DEF(supply)
A.req_access = L.Copy()
LAZYCLEARLIST(A.req_one_access)
else
log_debug(span_danger("Supply pack with invalid access restriction [SP.access] encountered!"))
log_runtime(span_danger("Supply pack with invalid access restriction [SP.access] encountered!"))
//supply manifest generation begin
var/obj/item/paper/manifest/slip

View File

@@ -176,7 +176,7 @@ SUBSYSTEM_DEF(ticker)
end_game_state = END_GAME_MODE_FINISHED // Only do this cleanup once!
mode.cleanup()
//call a transfer shuttle vote
to_world(span_boldannounce("The round has ended!"))
to_chat(world, span_boldannounce("The round has ended!"))
SSvote.start_vote(new /datum/vote/crew_transfer)
// FIXME: IMPROVE THIS LATER!
@@ -184,7 +184,7 @@ SUBSYSTEM_DEF(ticker)
post_game_tick()
if (world.time - last_restart_notify >= 1 MINUTE && !delay_end)
to_world(span_boldannounce("Restarting in [round(restart_timeleft/600, 1)] minute\s."))
to_chat(world, span_boldannounce("Restarting in [round(restart_timeleft/600, 1)] minute\s."))
last_restart_notify = world.time
/datum/controller/subsystem/ticker/proc/setup()
@@ -276,7 +276,7 @@ SUBSYSTEM_DEF(ticker)
var/list/runnable_modes = config.get_runnable_modes()
if((GLOB.master_mode == "random") || (GLOB.master_mode == "secret"))
if(!runnable_modes.len)
to_world(span_filter_system(span_bold("Unable to choose playable game mode.") + " Reverting to pregame lobby."))
to_chat(world, span_filter_system(span_bold("Unable to choose playable game mode.") + " Reverting to pregame lobby."))
return 0
if(GLOB.secret_force_mode != "secret")
src.mode = config.pick_mode(GLOB.secret_force_mode)
@@ -289,7 +289,7 @@ SUBSYSTEM_DEF(ticker)
src.mode = config.pick_mode(GLOB.master_mode)
if(!src.mode)
to_world(span_boldannounce("Serious error in mode setup! Reverting to pregame lobby.")) //Uses setup instead of set up due to computational context.
to_chat(world, span_boldannounce("Serious error in mode setup! Reverting to pregame lobby.")) //Uses setup instead of set up due to computational context.
return 0
job_master.ResetOccupations()
@@ -298,21 +298,21 @@ SUBSYSTEM_DEF(ticker)
job_master.DivideOccupations() // Apparently important for new antagonist system to register specific job antags properly.
if(!src.mode.can_start())
to_world(span_filter_system(span_bold("Unable to start [mode.name].") + " Not enough players readied, [CONFIG_GET(keyed_list/player_requirements)[mode.config_tag]] players needed. Reverting to pregame lobby."))
to_chat(world, span_filter_system(span_bold("Unable to start [mode.name].") + " Not enough players readied, [CONFIG_GET(keyed_list/player_requirements)[mode.config_tag]] players needed. Reverting to pregame lobby."))
mode.fail_setup()
mode = null
job_master.ResetOccupations()
return 0
if(hide_mode)
to_world(span_world(span_notice("The current game mode is - Secret!")))
to_chat(world, span_world(span_notice("The current game mode is - Secret!")))
if(runnable_modes.len)
var/list/tmpmodes = list()
for (var/datum/game_mode/M in runnable_modes)
tmpmodes+=M.name
tmpmodes = sortList(tmpmodes)
if(tmpmodes.len)
to_world(span_filter_system(span_bold("Possibilities:") + " [english_list(tmpmodes, and_text= "; ", comma_text = "; ")]"))
to_chat(world, span_filter_system(span_bold("Possibilities:") + " [english_list(tmpmodes, and_text= "; ", comma_text = "; ")]"))
else
src.mode.announce()
return 1
@@ -327,7 +327,7 @@ SUBSYSTEM_DEF(ticker)
feedback_set_details("end_proper", "nuke")
restart_timeleft = 1 MINUTE // No point waiting five minutes if everyone's dead.
if(!delay_end)
to_world(span_boldannounce("Rebooting due to destruction of [station_name()] in [round(restart_timeleft/600)] minute\s."))
to_chat(world, span_boldannounce("Rebooting due to destruction of [station_name()] in [round(restart_timeleft/600)] minute\s."))
last_restart_notify = world.time
else
feedback_set_details("end_proper", "proper completion")

View File

@@ -1,7 +1,6 @@
SUBSYSTEM_DEF(time_track)
name = "Time Tracking"
wait = 600
flags = SS_NO_INIT|SS_NO_TICK_CHECK
wait = 100
runlevels = RUNLEVEL_LOBBY | RUNLEVELS_DEFAULT
var/time_dilation_current = 0
@@ -15,6 +14,70 @@ SUBSYSTEM_DEF(time_track)
var/last_tick_realtime = 0
var/last_tick_byond_time = 0
var/last_tick_tickcount = 0
var/list/sendmaps_names_map = list(
"SendMaps" = "send_maps",
"SendMaps: Initial housekeeping" = "initial_house",
"SendMaps: Cleanup" = "cleanup",
"SendMaps: Client loop" = "client_loop",
"SendMaps: Per client" = "per_client",
"SendMaps: Per client: Deleted images" = "deleted_images",
"SendMaps: Per client: HUD update" = "hud_update",
"SendMaps: Per client: Statpanel update" = "statpanel_update",
"SendMaps: Per client: Map data" = "map_data",
"SendMaps: Per client: Map data: Check eye position" = "check_eye_pos",
"SendMaps: Per client: Map data: Update chunks" = "update_chunks",
"SendMaps: Per client: Map data: Send turfmap updates" = "turfmap_updates",
"SendMaps: Per client: Map data: Send changed turfs" = "changed_turfs",
"SendMaps: Per client: Map data: Send turf chunk info" = "turf_chunk_info",
"SendMaps: Per client: Map data: Send obj changes" = "obj_changes",
"SendMaps: Per client: Map data: Send mob changes" = "mob_changes",
"SendMaps: Per client: Map data: Send notable turf visual contents" = "send_turf_vis_conts",
"SendMaps: Per client: Map data: Send pending animations" = "pending_animations",
"SendMaps: Per client: Map data: Look for movable changes" = "look_for_movable_changes",
"SendMaps: Per client: Map data: Look for movable changes: Check notable turf visual contents" = "check_turf_vis_conts",
"SendMaps: Per client: Map data: Look for movable changes: Check HUD/image visual contents" = "check_hud/image_vis_contents",
"SendMaps: Per client: Map data: Look for movable changes: Loop through turfs in range" = "turfs_in_range",
"SendMaps: Per client: Map data: Look for movable changes: Movables examined" = "movables_examined",
)
/datum/controller/subsystem/time_track/Initialize()
//GLOB.perf_log = "[GLOB.log_directory]/perf-[GLOB.round_id ? GLOB.round_id : "NULL"]-[SSmapping.current_map.map_name].csv"
GLOB.perf_log = "[GLOB.log_directory]/perf-[GLOB.round_id ? GLOB.round_id : "NULL"]-[using_map.name].csv"
world.Profile(PROFILE_RESTART, type = "sendmaps")
//Need to do the sendmaps stuff in its own file, since it works different then everything else
var/list/sendmaps_headers = list()
for(var/proper_name in sendmaps_names_map)
sendmaps_headers += sendmaps_names_map[proper_name]
sendmaps_headers += "[sendmaps_names_map[proper_name]]_count"
log_perf(
list(
"time",
"players",
"tidi",
"tidi_fastavg",
"tidi_avg",
"tidi_slowavg",
"maptick",
"num_timers",
"air_turf_cost",
"air_eg_cost",
"air_highpressure_cost",
"air_hotspots_cost",
"air_superconductivity_cost",
"air_pipenets_cost",
"air_rebuilds_cost",
"air_turf_count",
"air_eg_count",
"air_hotspot_count",
"air_network_count",
"air_delta_count",
"air_superconductive_count",
"all_queries",
"queries_active",
"queries_standby"
) + sendmaps_headers
)
return SS_INIT_SUCCESS
/datum/controller/subsystem/time_track/fire()
@@ -30,11 +93,64 @@ SUBSYSTEM_DEF(time_track)
time_dilation_avg_fast = MC_AVERAGE_FAST(time_dilation_avg_fast, time_dilation_current)
time_dilation_avg = MC_AVERAGE(time_dilation_avg, time_dilation_avg_fast)
time_dilation_avg_slow = MC_AVERAGE_SLOW(time_dilation_avg_slow, time_dilation_avg)
log_game("TIDI: [time_dilation_current];[time_dilation_avg_fast];[time_dilation_avg];[time_dilation_avg_slow]")
//GLOB.glide_size_multiplier = (current_byondtime - last_tick_byond_time) / (current_realtime - last_tick_realtime)
else
first_run = FALSE
log_debug("TiDi Starting Log")
last_tick_realtime = current_realtime
last_tick_byond_time = current_byondtime
last_tick_tickcount = current_tickcount
var/sendmaps_json = world.Profile(PROFILE_REFRESH, type = "sendmaps", format="json")
var/list/send_maps_data = null
try
send_maps_data = json_decode(sendmaps_json)
catch
text2file(sendmaps_json,"bad_sendmaps.json")
can_fire = FALSE
return
var/send_maps_sort = send_maps_data.Copy() //Doing it like this guarantees us a properly sorted list
for(var/list/packet in send_maps_data)
send_maps_sort[packet["name"]] = packet
var/list/send_maps_values = list()
for(var/entry_name in sendmaps_names_map)
var/list/packet = send_maps_sort[entry_name]
if(!packet) //If the entry does not have a value for us, just put in 0 for both
send_maps_values += 0
send_maps_values += 0
continue
send_maps_values += packet["value"]
send_maps_values += packet["calls"]
//SSblackbox.record_feedback("associative", "time_dilation_current", 1, list("[ISOtime()]" = list("current" = "[time_dilation_current]", "avg_fast" = "[time_dilation_avg_fast]", "avg" = "[time_dilation_avg]", "avg_slow" = "[time_dilation_avg_slow]")))
log_perf(
list(
world.time,
length(GLOB.clients),
time_dilation_current,
time_dilation_avg_fast,
time_dilation_avg,
time_dilation_avg_slow,
MAPTICK_LAST_INTERNAL_TICK_USAGE,
length(SStimer.timer_id_dict),
SSair.cost_turfs,
//SSair.cost_groups,
//SSair.cost_highpressure,
SSair.cost_hotspots,
//SSair.cost_superconductivity,
//SSair.cost_pipenets,
//SSair.cost_rebuilds,
//length(SSair.active_turfs),
//length(SSair.excited_groups),
//length(SSair.hotspots),
//length(SSair.networks),
//length(SSair.high_pressure_delta),
//length(SSair.active_super_conductivity),
SSdbcore.all_queries_num,
SSdbcore.queries_active_num,
SSdbcore.queries_standby_num
) + send_maps_values
)
SSdbcore.reset_tracking()

View File

@@ -35,7 +35,7 @@ SUBSYSTEM_DEF(transcore)
for(var/t in subtypesof(/datum/transcore_db))
var/datum/transcore_db/db = new t()
if(!db.key)
warning("Instantiated transcore DB without a key: [t]")
WARNING("Instantiated transcore DB without a key: [t]")
continue
databases[db.key] = db
return SS_INIT_SUCCESS
@@ -105,7 +105,7 @@ SUBSYSTEM_DEF(transcore)
//Invalid record
if(!curr_MR)
log_debug("Tried to process [name] in transcore w/o a record!")
log_runtime("Tried to process [name] in transcore w/o a record!")
db.backed_up -= curr_MR.mindname
continue
@@ -147,7 +147,7 @@ SUBSYSTEM_DEF(transcore)
/datum/controller/subsystem/transcore/Recover()
for(var/key in SStranscore.databases)
if(!SStranscore.databases[key])
warning("SStranscore recovery found missing database value for key: [key]")
WARNING("SStranscore recovery found missing database value for key: [key]")
continue
if(key == "default")
default_db = SStranscore.databases[key]
@@ -156,10 +156,10 @@ SUBSYSTEM_DEF(transcore)
/datum/controller/subsystem/transcore/proc/leave_round(var/mob/M)
if(!istype(M))
warning("Non-mob asked to be removed from transcore: [M] [M?.type]")
WARNING("Non-mob asked to be removed from transcore: [M] [M?.type]")
return
if(!M.mind)
warning("No mind mob asked to be removed from transcore: [M] [M?.type]")
WARNING("No mind mob asked to be removed from transcore: [M] [M?.type]")
return
for(var/key in databases)
@@ -175,7 +175,7 @@ SUBSYSTEM_DEF(transcore)
if(isnull(key))
return default_db
if(!databases[key])
warning("Tried to find invalid transcore database: [key]")
WARNING("Tried to find invalid transcore database: [key]")
return default_db
return databases[key]
@@ -271,7 +271,6 @@ SUBSYSTEM_DEF(transcore)
ASSERT(MR)
backed_up[MR.mindname] = MR
backed_up = sortAssoc(backed_up)
log_debug("Added [MR.mindname] to transcore DB.")
// Remove a mind_record from the backup-checking list. Keeps track of it in has_left // Why do we do that? ~Leshana
/datum/transcore_db/proc/stop_backup(var/datum/transhuman/mind_record/MR)
@@ -279,7 +278,6 @@ SUBSYSTEM_DEF(transcore)
has_left[MR.mindname] = MR
backed_up.Remove("[MR.mindname]")
MR.cryo_at = world.time
log_debug("Put [MR.mindname] in transcore suspended DB.")
// Called from body_record to add itself to the transcore.
/datum/transcore_db/proc/add_body(var/datum/transhuman/body_record/BR)
@@ -288,13 +286,11 @@ SUBSYSTEM_DEF(transcore)
qdel(body_scans[BR.mydna.name])
body_scans[BR.mydna.name] = BR
body_scans = sortAssoc(body_scans)
log_debug("Added [BR.mydna.name] to transcore body DB.")
// Remove a body record from the database (Usually done when someone cryos) // Why? ~Leshana
/datum/transcore_db/proc/remove_body(var/datum/transhuman/body_record/BR)
ASSERT(BR)
body_scans.Remove("[BR.mydna.name]")
log_debug("Removed [BR.mydna.name] from transcore body DB.")
// Moves all mind records from the databaes into the disk and shuts down all backup canary processing.
/datum/transcore_db/proc/core_dump(var/obj/item/disk/transcore/disk)

View File

@@ -46,16 +46,16 @@ SUBSYSTEM_DEF(vote)
break
if(!players_are_in_round)
log_debug("The crew transfer shuttle would have been called at vote time due to no players being present.") //YW Edit
// init_shift_change(null, 1) //YW Edit
log_game("The crew transfer shuttle was automatically called at vote time due to no players being present.")
init_shift_change(null, 1)
return
initiate_vote(VOTE_CREW_TRANSFER, "the server", 1)
log_debug("The server has called a crew transfer vote.")
log_game("The server has called a crew transfer vote.")
/datum/controller/subsystem/vote/proc/autogamemode()
initiate_vote(VOTE_GAMEMODE, "the server", 1)
log_debug("The server has called a gamemode vote.")
log_game("The server has called a gamemode vote.")
/datum/controller/subsystem/vote/proc/reset()
initiator = null

View File

@@ -13,7 +13,7 @@ SUBSYSTEM_DEF(webhooks)
/datum/controller/subsystem/webhooks/proc/load_webhooks()
if(!fexists(HTTP_POST_DLL_LOCATION))
to_world_log("Unable to locate HTTP POST lib at [HTTP_POST_DLL_LOCATION], webhooks will not function on this run.")
log_world("Unable to locate HTTP POST lib at [HTTP_POST_DLL_LOCATION], webhooks will not function on this run.")
return
var/list/all_webhooks_by_id = list()
@@ -32,7 +32,7 @@ SUBSYSTEM_DEF(webhooks)
var/list/wmention = webhook_data["mentions"]
if(wmention && !islist(wmention))
wmention = list(wmention)
to_world_log("Setting up webhook [wid].")
log_world("Setting up webhook [wid].")
if(wid && wurl && all_webhooks_by_id[wid])
var/decl/webhook/webhook = all_webhooks_by_id[wid]
webhook.urls = islist(wurl) ? wurl : list(wurl)
@@ -44,19 +44,19 @@ SUBSYSTEM_DEF(webhooks)
if(wmention)
webhook.mentions = wmention?.Copy()
webhook_decls[wid] = webhook
to_world_log("Webhook [wid] ready.")
log_world("Webhook [wid] ready.")
else
to_world_log("Failed to set up webhook [wid].")
log_world("Failed to set up webhook [wid].")
/datum/controller/subsystem/webhooks/proc/send(var/wid, var/wdata)
var/decl/webhook/webhook = webhook_decls[wid]
if(webhook)
if(webhook.send(wdata))
to_world_log("Sent webhook [webhook.id].")
log_debug("Webhook sent: [webhook.id].")
log_world("Sent webhook [webhook.id].")
// to_chat(world, "Webhook sent: [webhook.id].")
else
to_world_log("Failed to send webhook [webhook.id].")
log_debug("Webhook failed to send: [webhook.id].")
log_world("Failed to send webhook [webhook.id].")
// to_chat(world, "Webhook failed to send: [webhook.id].")
/client/proc/reload_webhooks()
set name = "Reload Webhooks"
@@ -69,7 +69,7 @@ SUBSYSTEM_DEF(webhooks)
to_chat(usr, span_warning("Let the webhook subsystem initialize before trying to reload it."))
return
to_world_log("[usr.key] has reloaded webhooks.")
log_world("[usr.key] has reloaded webhooks.")
log_and_message_admins("has reloaded webhooks.")
SSwebhooks.load_webhooks()
@@ -88,7 +88,7 @@ SUBSYSTEM_DEF(webhooks)
if(choice && SSwebhooks.webhook_decls[choice])
var/decl/webhook/webhook = SSwebhooks.webhook_decls[choice]
log_and_message_admins("has pinged webhook [choice].", usr)
to_world_log("[usr.key] has pinged webhook [choice].")
log_world("[usr.key] has pinged webhook [choice].")
webhook.send()
/hook/roundstart/proc/run_webhook()