Mirror of https://github.com/yogstation13/Yogstation.git (synced 2025-02-26 09:04:50 +00:00)
Ports Paradise error handler, with in game runtime viewer! (#24036)
* Ports paradise error handler, with in game runtime viewer!
* Changes to the old runtime error and removes inerror reference
* Oops
* Adds a wrapper for world.log so it displays both in the runtime diary and in DD window
@@ -346,7 +346,7 @@ var/global/list/ghost_others_options = list(GHOST_OTHERS_SIMPLE, GHOST_OTHERS_DE
|
||||
//debug printing macros
|
||||
#define debug_world(msg) if (Debug2) world << "DEBUG: [msg]"
|
||||
#define debug_admins(msg) if (Debug2) admins << "DEBUG: [msg]"
|
||||
#define debug_world_log(msg) if (Debug2) world.log << "DEBUG: [msg]"
|
||||
#define debug_world_log(msg) if (Debug2) log_world("DEBUG: [msg]")
|
||||
|
||||
#define COORD(A) "([A.x],[A.y],[A.z])"
|
||||
#define INCREMENT_TALLY(L, stat) if(L[stat]){L[stat]++}else{L[stat] = 1}
|
||||
@@ -409,3 +409,6 @@ var/global/list/ghost_others_options = list(GHOST_OTHERS_SIMPLE, GHOST_OTHERS_DE
|
||||
#define TURF_DECAL_PAINT "paint"
|
||||
#define TURF_DECAL_DAMAGE "damage"
|
||||
#define TURF_DECAL_DIRT "dirt"
|
||||
|
||||
//Error handler defines
|
||||
#define ERROR_USEFUL_LEN 2
|
||||
@@ -1,16 +1,18 @@
|
||||
//print a warning message to world.log
|
||||
#define WARNING(MSG) warning("[MSG] in [__FILE__] at line [__LINE__] src: [src] usr: [usr].")
|
||||
/proc/warning(msg)
|
||||
world.log << "## WARNING: [msg]"
|
||||
msg = "## WARNING: [msg]"
|
||||
log_world(msg)
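For context, the WARNING macro is intended to be called from inside a datum proc so that [src] and [usr] resolve; a minimal usage sketch (the proc below is hypothetical, not part of this diff):

/datum/example_loader/proc/load_entry(list/entries, index)
	if(index > entries.len)
		// roughly expands to: warning("index out of range in code/example.dm at line 4 src: [src] usr: [usr].")
		WARNING("index out of range")
		return
	return entries[index]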
|
||||
|
||||
//not an error or a warning, but worth mentioning in the world log, just in case.
|
||||
#define NOTICE(MSG) notice(MSG)
|
||||
/proc/notice(msg)
|
||||
world.log << "## NOTICE: [msg]"
|
||||
msg = "## NOTICE: [msg]"
|
||||
log_world(msg)
|
||||
|
||||
//print a testing-mode debug message to world.log and world
|
||||
#ifdef TESTING
|
||||
#define testing(msg) world.log << "## TESTING: [msg]"; world << "## TESTING: [msg]"
|
||||
#define testing(msg) log_world("## TESTING: [msg]"); world << "## TESTING: [msg]"
|
||||
#else
|
||||
#define testing(msg)
|
||||
#endif
|
||||
@@ -80,3 +82,30 @@
|
||||
/proc/log_chat(text)
|
||||
if (config.log_pda)
|
||||
diary << "\[[time_stamp()]]CHAT: [text]"
|
||||
|
||||
//This replaces world.log so it displays both in DD and the file
|
||||
/proc/log_world(text)
|
||||
if(config && config.log_runtimes)
|
||||
world.log = runtime_diary
|
||||
world.log << text
|
||||
world.log = null
|
||||
world.log << text
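As the comment above says, log_world() reaches both outputs by pointing world.log at the runtime diary for the first write and back at null (the DD window) for the second, provided LOG_RUNTIMES is enabled; a hedged sketch of the call-site change this enables (the proc is hypothetical):

/proc/example_announce_boot()
	// old pattern: only reaches whatever world.log currently points at
	// world.log << "Server booted"
	// new pattern: reaches the runtime log file (when LOG_RUNTIMES is set) and the DD window
	log_world("Server booted")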
|
||||
|
||||
// Helper procs for building detailed log lines
|
||||
|
||||
/proc/datum_info_line(datum/D)
|
||||
if(!istype(D))
|
||||
return
|
||||
if(!istype(D, /mob))
|
||||
return "[D] ([D.type])"
|
||||
var/mob/M = D
|
||||
return "[M] ([M.ckey]) ([M.type])"
|
||||
|
||||
/proc/atom_loc_line(atom/A)
|
||||
if(!istype(A))
|
||||
return
|
||||
var/turf/T = get_turf(A)
|
||||
if(istype(T))
|
||||
return "[A.loc] [COORD(T)] ([A.loc.type])"
|
||||
else if(A.loc)
|
||||
return "[A.loc] (0, 0, 0) ([A.loc.type])"
|
||||
@@ -5,7 +5,7 @@
|
||||
var/DBQuery/query = dbcon.NewQuery("SELECT DATEDIFF(Now(),'[y]-[m]-[d]')")
|
||||
|
||||
if(!query.Execute())
|
||||
world.log << "SQL ERROR doing datediff. Error : \[[query.ErrorMsg()]\]\n"
|
||||
log_world("SQL ERROR doing datediff. Error : \[[query.ErrorMsg()]\]\n")
|
||||
return FALSE
|
||||
|
||||
if(query.NextRow())
|
||||
@@ -24,7 +24,7 @@
|
||||
/client/proc/findJoinDate()
|
||||
var/http[] = world.Export("http://byond.com/members/[src.ckey]?format=text")
|
||||
if(!http)
|
||||
world.log << "Failed to connect to byond age check for [src.ckey]"
|
||||
log_world("Failed to connect to byond age check for [src.ckey]")
|
||||
return FALSE
|
||||
|
||||
var/F = file2text(http["CONTENT"])
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
var/diary = null
|
||||
var/runtime_diary = null
|
||||
var/diaryofmeanpeople = null
|
||||
var/href_logfile = null
|
||||
|
||||
|
||||
@@ -42,6 +42,7 @@
|
||||
var/log_hrefs = 0 // log all links clicked in-game. Could be used for debugging and tracking down exploits
|
||||
var/log_twitter = 0 // log certain exploitable parrots and other such fun things in a JSON file of twitter valid phrases.
|
||||
var/log_world_topic = 0 // log all world.Topic() calls
|
||||
var/log_runtimes = FALSE // log runtimes into a file
|
||||
var/sql_enabled = 0 // for sql switching
|
||||
var/allow_admin_ooccolor = 0 // Allows admins with relevant permissions to have their own ooc colour
|
||||
var/allow_vote_restart = 0 // allow votes to restart
|
||||
@@ -241,6 +242,11 @@
|
||||
var/minutetopiclimit
|
||||
var/secondtopiclimit
|
||||
|
||||
var/error_cooldown = 600 // The "cooldown" time for each occurrence of a unique error
|
||||
var/error_limit = 50 // How many occurrences before the next will silence them
|
||||
var/error_silence_time = 6000 // How long a unique error will be silenced for
|
||||
var/error_msg_delay = 50 // How long to wait between messaging admins about occurrences of a unique error
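These four values are in deciseconds (tenths of a second, the unit of world.time), so the defaults amount to a 60-second cooldown step, a 10-minute silence and a 5-second gap between admin messages; a quick unit check, assuming those defaults (the define is illustrative, not part of this diff):

#define DECISECONDS_TO_MINUTES(x) ((x) / 600)
// DECISECONDS_TO_MINUTES(6000) == 10, matching the "silenced for [configured_error_silence_time / 600] minutes" line in the new handler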
|
||||
|
||||
/datum/configuration/New()
|
||||
gamemode_cache = typecacheof(/datum/game_mode,TRUE)
|
||||
for(var/T in gamemode_cache)
|
||||
@@ -459,10 +465,11 @@
|
||||
if("aggressive_changelog")
|
||||
config.aggressive_changelog = 1
|
||||
if("log_runtimes")
|
||||
log_runtimes = TRUE
|
||||
var/newlog = file("data/logs/runtimes/runtime-[time2text(world.realtime, "YYYY-MM-DD")].log")
|
||||
if (world.log != newlog)
|
||||
if(runtime_diary != newlog)
|
||||
world.log << "Now logging runtimes to data/logs/runtimes/runtime-[time2text(world.realtime, "YYYY-MM-DD")].log"
|
||||
world.log = newlog
|
||||
runtime_diary = newlog
|
||||
if("autoconvert_notes")
|
||||
config.autoconvert_notes = 1
|
||||
if("allow_webclient")
|
||||
@@ -495,6 +502,14 @@
|
||||
config.minutetopiclimit = text2num(value)
|
||||
if("second_topic_limit")
|
||||
config.secondtopiclimit = text2num(value)
|
||||
if("error_cooldown")
|
||||
error_cooldown = text2num(value)
|
||||
if("error_limit")
|
||||
error_limit = text2num(value)
|
||||
if("error_silence_time")
|
||||
error_silence_time = text2num(value)
|
||||
if("error_msg_delay")
|
||||
error_msg_delay = text2num(value)
|
||||
else
|
||||
diary << "Unknown setting in configuration: '[name]'"
|
||||
|
||||
|
||||
@@ -104,7 +104,7 @@ var/CURRENT_TICKLIMIT = TICK_LIMIT_RUNNING
|
||||
msg += "\t [varname] = [D]([D.type])\n"
|
||||
else
|
||||
msg += "\t [varname] = [varval]\n"
|
||||
world.log << msg
|
||||
log_world(msg)
|
||||
if (istype(Master.subsystems))
|
||||
subsystems = Master.subsystems
|
||||
StartProcessing(10)
|
||||
@@ -139,7 +139,7 @@ var/CURRENT_TICKLIMIT = TICK_LIMIT_RUNNING
|
||||
CURRENT_TICKLIMIT = TICK_LIMIT_RUNNING
|
||||
|
||||
world << "<span class='boldannounce'>Initializations complete!</span>"
|
||||
world.log << "Initializations complete."
|
||||
log_world("Initializations complete.")
|
||||
|
||||
// Sort subsystems by display setting for easy access.
|
||||
sortTim(subsystems, /proc/cmp_subsystem_display)
|
||||
@@ -267,7 +267,7 @@ var/CURRENT_TICKLIMIT = TICK_LIMIT_RUNNING
|
||||
subsystems_to_check = tickersubsystems
|
||||
if (CheckQueue(subsystems_to_check) <= 0)
|
||||
if (!SoftReset(tickersubsystems, normalsubsystems, lobbysubsystems))
|
||||
world.log << "MC: SoftReset() failed, crashing"
|
||||
log_world("MC: SoftReset() failed, crashing")
|
||||
return
|
||||
if (!error_level)
|
||||
iteration++
|
||||
@@ -279,7 +279,7 @@ var/CURRENT_TICKLIMIT = TICK_LIMIT_RUNNING
|
||||
if (queue_head)
|
||||
if (RunQueue() <= 0)
|
||||
if (!SoftReset(tickersubsystems, normalsubsystems, lobbysubsystems))
|
||||
world.log << "MC: SoftReset() failed, crashing"
|
||||
log_world("MC: SoftReset() failed, crashing")
|
||||
return
|
||||
if (!error_level)
|
||||
iteration++
|
||||
@@ -451,9 +451,9 @@ var/CURRENT_TICKLIMIT = TICK_LIMIT_RUNNING
|
||||
// called if any mc's queue procs runtime or exit improperly.
|
||||
/datum/controller/master/proc/SoftReset(list/ticker_SS, list/normal_SS, list/lobby_SS)
|
||||
. = 0
|
||||
world.log << "MC: SoftReset called, resetting MC queue state."
|
||||
log_world("MC: SoftReset called, resetting MC queue state.")
|
||||
if (!istype(subsystems) || !istype(ticker_SS) || !istype(normal_SS) || !istype(lobby_SS))
|
||||
world.log << "MC: SoftReset: Bad list contents: '[subsystems]' '[ticker_SS]' '[normal_SS]' '[lobby_SS]' Crashing!"
|
||||
log_world("MC: SoftReset: Bad list contents: '[subsystems]' '[ticker_SS]' '[normal_SS]' '[lobby_SS]' Crashing!")
|
||||
return
|
||||
var/subsystemstocheck = subsystems + ticker_SS + normal_SS + lobby_SS
|
||||
|
||||
@@ -465,26 +465,26 @@ var/CURRENT_TICKLIMIT = TICK_LIMIT_RUNNING
|
||||
ticker_SS -= list(SS)
|
||||
normal_SS -= list(SS)
|
||||
lobby_SS -= list(SS)
|
||||
world.log << "MC: SoftReset: Found bad entry in subsystem list, '[SS]'"
|
||||
log_world("MC: SoftReset: Found bad entry in subsystem list, '[SS]'")
|
||||
continue
|
||||
if (SS.queue_next && !istype(SS.queue_next))
|
||||
world.log << "MC: SoftReset: Found bad data in subsystem queue, queue_next = '[SS.queue_next]'"
|
||||
log_world("MC: SoftReset: Found bad data in subsystem queue, queue_next = '[SS.queue_next]'")
|
||||
SS.queue_next = null
|
||||
if (SS.queue_prev && !istype(SS.queue_prev))
|
||||
world.log << "MC: SoftReset: Found bad data in subsystem queue, queue_prev = '[SS.queue_prev]'"
|
||||
log_world("MC: SoftReset: Found bad data in subsystem queue, queue_prev = '[SS.queue_prev]'")
|
||||
SS.queue_prev = null
|
||||
SS.queued_priority = 0
|
||||
SS.queued_time = 0
|
||||
SS.state = SS_IDLE
|
||||
if (queue_head && !istype(queue_head))
|
||||
world.log << "MC: SoftReset: Found bad data in subsystem queue, queue_head = '[queue_head]'"
|
||||
log_world("MC: SoftReset: Found bad data in subsystem queue, queue_head = '[queue_head]'")
|
||||
queue_head = null
|
||||
if (queue_tail && !istype(queue_tail))
|
||||
world.log << "MC: SoftReset: Found bad data in subsystem queue, queue_tail = '[queue_tail]'"
|
||||
log_world("MC: SoftReset: Found bad data in subsystem queue, queue_tail = '[queue_tail]'")
|
||||
queue_tail = null
|
||||
queue_priority_count = 0
|
||||
queue_priority_count_bg = 0
|
||||
world.log << "MC: SoftReset: Finished."
|
||||
log_world("MC: SoftReset: Finished.")
|
||||
. = 1
|
||||
|
||||
|
||||
|
||||
@@ -157,7 +157,7 @@
|
||||
var/time = (world.timeofday - start_timeofday) / 10
|
||||
var/msg = "Initialized [name] subsystem within [time] seconds!"
|
||||
world << "<span class='boldannounce'>[msg]</span>"
|
||||
world.log << msg
|
||||
log_world(msg)
|
||||
return time
|
||||
|
||||
//hook for printing stats to the "MC" statuspanel for admins to see performance and related stats etc.
|
||||
|
||||
@@ -347,7 +347,6 @@ var/datum/subsystem/garbage_collector/SSgarbage
|
||||
//update this list using tools/DMTreeToGlobalsList
|
||||
/datum/proc/find_references_in_globals()
|
||||
SearchVar(last_irc_status)
|
||||
SearchVar(inerror)
|
||||
SearchVar(failed_db_connections)
|
||||
SearchVar(nextmap)
|
||||
SearchVar(mapchanging)
|
||||
|
||||
@@ -374,10 +374,10 @@ var/datum/subsystem/job/SSjob
|
||||
S = sloc
|
||||
break
|
||||
if(!S) //if there isn't a spawnpoint send them to latejoin, if there's no latejoin go yell at your mapper
|
||||
world.log << "Couldn't find a round start spawn point for [rank]"
|
||||
log_world("Couldn't find a round start spawn point for [rank]")
|
||||
S = pick(latejoin)
|
||||
if(!S) //final attempt, let's find some area in the arrivals shuttle to spawn them into.
|
||||
world.log << "Couldn't find a round start latejoin spawn point."
|
||||
log_world("Couldn't find a round start latejoin spawn point.")
|
||||
for(var/turf/T in get_area_turfs(/area/shuttle/arrival))
|
||||
if(!T.density)
|
||||
var/clear = 1
|
||||
|
||||
@@ -116,4 +116,4 @@ var/datum/subsystem/lighting/SSlighting
|
||||
varval1 = "/list([length(varval1)])"
|
||||
varval2 = "/list([length(varval2)])"
|
||||
msg += "\t [varname] = [varval1] -> [varval2]\n"
|
||||
world.log << msg
|
||||
log_world(msg)
|
||||
|
||||
@@ -45,7 +45,7 @@ var/datum/subsystem/minimap/SSminimap
|
||||
for(var/z in z_levels)
|
||||
if(!fexists(file(map_path(z,backup)))) //Let's make sure we have a file for this map
|
||||
if(backup)
|
||||
world.log << "Failed to find backup file for map [MAP_NAME] on zlevel [z]."
|
||||
log_world("Failed to find backup file for map [MAP_NAME] on zlevel [z].")
|
||||
return FALSE
|
||||
return TRUE
|
||||
|
||||
|
||||
@@ -550,7 +550,7 @@ var/datum/subsystem/ticker/ticker
|
||||
for(var/path in SSgarbage.didntgc)
|
||||
dellog += "Path : [path] \n"
|
||||
dellog += "Failures : [SSgarbage.didntgc[path]] \n"
|
||||
world.log << dellog
|
||||
log_world(dellog)
|
||||
|
||||
CHECK_TICK
|
||||
|
||||
|
||||
@@ -627,7 +627,7 @@
|
||||
if (prompt != "Yes")
|
||||
return
|
||||
L.Cut(index, index+1)
|
||||
world.log << "### ListVarEdit by [src]: /list's contents: REMOVED=[html_encode("[variable]")]"
|
||||
log_world("### ListVarEdit by [src]: /list's contents: REMOVED=[html_encode("[variable]")]")
|
||||
log_admin("[key_name(src)] modified list's contents: REMOVED=[variable]")
|
||||
message_admins("[key_name_admin(src)] modified list's contents: REMOVED=[variable]")
|
||||
|
||||
@@ -646,7 +646,7 @@
|
||||
return
|
||||
|
||||
uniqueList_inplace(L)
|
||||
world.log << "### ListVarEdit by [src]: /list contents: CLEAR DUPES"
|
||||
log_world("### ListVarEdit by [src]: /list contents: CLEAR DUPES")
|
||||
log_admin("[key_name(src)] modified list's contents: CLEAR DUPES")
|
||||
message_admins("[key_name_admin(src)] modified list's contents: CLEAR DUPES")
|
||||
|
||||
@@ -657,7 +657,7 @@
|
||||
return
|
||||
|
||||
listclearnulls(L)
|
||||
world.log << "### ListVarEdit by [src]: /list contents: CLEAR NULLS"
|
||||
log_world("### ListVarEdit by [src]: /list contents: CLEAR NULLS")
|
||||
log_admin("[key_name(src)] modified list's contents: CLEAR NULLS")
|
||||
message_admins("[key_name_admin(src)] modified list's contents: CLEAR NULLS")
|
||||
|
||||
@@ -671,7 +671,7 @@
|
||||
return
|
||||
|
||||
L.len = value["value"]
|
||||
world.log << "### ListVarEdit by [src]: /list len: [L.len]"
|
||||
log_world("### ListVarEdit by [src]: /list len: [L.len]")
|
||||
log_admin("[key_name(src)] modified list's len: [L.len]")
|
||||
message_admins("[key_name_admin(src)] modified list's len: [L.len]")
|
||||
|
||||
@@ -682,7 +682,7 @@
|
||||
return
|
||||
|
||||
shuffle_inplace(L)
|
||||
world.log << "### ListVarEdit by [src]: /list contents: SHUFFLE"
|
||||
log_world("### ListVarEdit by [src]: /list contents: SHUFFLE")
|
||||
log_admin("[key_name(src)] modified list's contents: SHUFFLE")
|
||||
message_admins("[key_name_admin(src)] modified list's contents: SHUFFLE")
|
||||
|
||||
|
||||
@@ -19,17 +19,17 @@ var/global/datum/getrev/revdata = new()
|
||||
parentcommit = head_log.group[1]
|
||||
date = unix2date(text2num(head_log.group[2]))
|
||||
commit = head_log.group[4]
|
||||
world.log << "Running /tg/ revision:"
|
||||
world.log << "[date]"
|
||||
log_world("Running /tg/ revision:")
|
||||
log_world("[date]")
|
||||
if(testmerge.len)
|
||||
world.log << commit
|
||||
log_world(commit)
|
||||
for(var/line in testmerge)
|
||||
if(line)
|
||||
world.log << "Test merge active of PR #[line]"
|
||||
world.log << "Based off master commit [parentcommit]"
|
||||
log_world("Test merge active of PR #[line]")
|
||||
log_world("Based off master commit [parentcommit]")
|
||||
else
|
||||
world.log << parentcommit
|
||||
world.log << "Current map - [MAP_NAME]" //can't think of anywhere better to put it
|
||||
log_world(parentcommit)
|
||||
log_world("Current map - [MAP_NAME]") //can't think of anywhere better to put it
|
||||
|
||||
/client/verb/showrevinfo()
|
||||
set category = "OOC"
|
||||
|
||||
@@ -75,7 +75,7 @@
|
||||
. = file(mappath)
|
||||
|
||||
if(!.)
|
||||
world.log << "The file of [src] appears to be empty/non-existent."
|
||||
log_world("The file of [src] appears to be empty/non-existent.")
|
||||
|
||||
/datum/map_template/proc/get_affected_turfs(turf/T, centered = FALSE)
|
||||
var/turf/placement = T
|
||||
|
||||
@@ -179,7 +179,7 @@ var/explosionid = 1
|
||||
var/took = (world.timeofday-start)/10
|
||||
//You need to press the DebugGame verb to see these now....they were getting annoying and we've collected a fair bit of data. Just -test- changes to explosion code using this please so we can compare
|
||||
if(Debug2)
|
||||
world.log << "## DEBUG: Explosion([x0],[y0],[z0])(d[devastation_range],h[heavy_impact_range],l[light_impact_range]): Took [took] seconds."
|
||||
log_world("## DEBUG: Explosion([x0],[y0],[z0])(d[devastation_range],h[heavy_impact_range],l[light_impact_range]): Took [took] seconds.")
|
||||
|
||||
//Machines which report explosions.
|
||||
for(var/array in doppler_arrays)
|
||||
|
||||
@@ -62,7 +62,7 @@
|
||||
var/ckeytext = ckey(key)
|
||||
|
||||
if(!establish_db_connection())
|
||||
world.log << "Ban database connection failure. Key [ckeytext] not checked"
|
||||
log_world("Ban database connection failure. Key [ckeytext] not checked")
|
||||
diary << "Ban database connection failure. Key [ckeytext] not checked"
|
||||
return
|
||||
|
||||
|
||||
@@ -129,7 +129,7 @@ var/list/admin_ranks = list() //list of all admin_rank datums
|
||||
else
|
||||
establish_db_connection()
|
||||
if(!dbcon.IsConnected())
|
||||
world.log << "Failed to connect to database in load_admin_ranks(). Reverting to legacy system."
|
||||
log_world("Failed to connect to database in load_admin_ranks(). Reverting to legacy system.")
|
||||
diary << "Failed to connect to database in load_admin_ranks(). Reverting to legacy system."
|
||||
config.admin_legacy_system = 1
|
||||
load_admin_ranks()
|
||||
@@ -204,7 +204,7 @@ var/list/admin_ranks = list() //list of all admin_rank datums
|
||||
else
|
||||
establish_db_connection()
|
||||
if(!dbcon.IsConnected())
|
||||
world.log << "Failed to connect to database in load_admins(). Reverting to legacy system."
|
||||
log_world("Failed to connect to database in load_admins(). Reverting to legacy system.")
|
||||
diary << "Failed to connect to database in load_admins(). Reverting to legacy system."
|
||||
config.admin_legacy_system = 1
|
||||
load_admins()
|
||||
|
||||
@@ -156,7 +156,8 @@ var/list/admin_verbs_debug = list(
|
||||
/client/proc/map_template_upload,
|
||||
/client/proc/jump_to_ruin,
|
||||
/client/proc/clear_dynamic_transit,
|
||||
/client/proc/toggle_medal_disable
|
||||
/client/proc/toggle_medal_disable,
|
||||
/client/proc/view_runtimes
|
||||
)
|
||||
var/list/admin_verbs_possess = list(
|
||||
/proc/possess,
|
||||
|
||||
@@ -2246,3 +2246,14 @@
|
||||
message_admins("[key_name(usr)] created \"[G.name]\" station goal.")
|
||||
ticker.mode.station_goals += G
|
||||
modify_goals()
|
||||
|
||||
else if(href_list["viewruntime"])
|
||||
var/datum/error_viewer/error_viewer = locate(href_list["viewruntime"])
|
||||
if(!istype(error_viewer))
|
||||
usr << "<span class='warning'>That runtime viewer no longer exists.</span>"
|
||||
return
|
||||
|
||||
if(href_list["viewruntime_backto"])
|
||||
error_viewer.show_to(owner, locate(href_list["viewruntime_backto"]), href_list["viewruntime_linear"])
|
||||
else
|
||||
error_viewer.show_to(owner, null, href_list["viewruntime_linear"])
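For reference, the links that land in this handler are produced by make_link() in error_viewer.dm; a sketch of the parameters one such href carries (the \ref values are placeholders):

// <a href='?_src_=holder;viewruntime=\ref[error_entry];viewruntime_backto=\ref[error_source];viewruntime_linear=1'>...</a>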
|
||||
|
||||
@@ -759,3 +759,13 @@ var/list/TYPES_SHORTCUTS = list(
|
||||
message_admins("<span class='adminnotice'>[key_name_admin(src)] [global.medals_enabled ? "disabled" : "enabled"] the medal hub lockout.</span>")
|
||||
feedback_add_details("admin_verb","TMH") // If...
|
||||
log_admin("[key_name(src)] [global.medals_enabled ? "disabled" : "enabled"] the medal hub lockout.")
|
||||
|
||||
/client/proc/view_runtimes()
|
||||
set category = "Debug"
|
||||
set name = "View Runtimes"
|
||||
set desc = "Open the runtime Viewer"
|
||||
|
||||
if(!holder)
|
||||
return
|
||||
|
||||
error_cache.show_to(src)
|
||||
@@ -193,7 +193,7 @@
|
||||
if (rejected)
|
||||
src << "[rejected] out of [count] objects rejected your edit"
|
||||
|
||||
world.log << "### MassVarEdit by [src]: [O.type] (A/R [accepted]/[rejected]) [variable]=[html_encode("[O.vars[variable]]")]([list2params(value)])"
|
||||
log_world("### MassVarEdit by [src]: [O.type] (A/R [accepted]/[rejected]) [variable]=[html_encode("[O.vars[variable]]")]([list2params(value)])")
|
||||
log_admin("[key_name(src)] mass modified [original_name]'s [variable] to [O.vars[variable]] ([accepted] objects modified)")
|
||||
message_admins("[key_name_admin(src)] mass modified [original_name]'s [variable] to [O.vars[variable]] ([accepted] objects modified)")
|
||||
|
||||
|
||||
@@ -337,7 +337,7 @@ var/list/VVpixelmovement = list("step_x", "step_y", "bound_height", "bound_width
|
||||
if (O.vv_edit_var(objectvar, L) == FALSE)
|
||||
src << "Your edit was rejected by the object."
|
||||
return
|
||||
world.log << "### ListVarEdit by [src]: [(O ? O.type : "/list")] [objectvar]: ADDED=[var_value]"
|
||||
log_world("### ListVarEdit by [src]: [(O ? O.type : "/list")] [objectvar]: ADDED=[var_value]")
|
||||
log_admin("[key_name(src)] modified [original_name]'s [objectvar]: ADDED=[var_value]")
|
||||
message_admins("[key_name_admin(src)] modified [original_name]'s [objectvar]: ADDED=[var_value]")
|
||||
|
||||
@@ -380,7 +380,7 @@ var/list/VVpixelmovement = list("step_x", "step_y", "bound_height", "bound_width
|
||||
if (!O.vv_edit_var(objectvar, L))
|
||||
src << "Your edit was rejected by the object."
|
||||
return
|
||||
world.log << "### ListVarEdit by [src]: [O.type] [objectvar]: CLEAR NULLS"
|
||||
log_world("### ListVarEdit by [src]: [O.type] [objectvar]: CLEAR NULLS")
|
||||
log_admin("[key_name(src)] modified [original_name]'s [objectvar]: CLEAR NULLS")
|
||||
message_admins("[key_name_admin(src)] modified [original_name]'s list [objectvar]: CLEAR NULLS")
|
||||
return
|
||||
@@ -390,7 +390,7 @@ var/list/VVpixelmovement = list("step_x", "step_y", "bound_height", "bound_width
|
||||
if (!O.vv_edit_var(objectvar, L))
|
||||
src << "Your edit was rejected by the object."
|
||||
return
|
||||
world.log << "### ListVarEdit by [src]: [O.type] [objectvar]: CLEAR DUPES"
|
||||
log_world("### ListVarEdit by [src]: [O.type] [objectvar]: CLEAR DUPES")
|
||||
log_admin("[key_name(src)] modified [original_name]'s [objectvar]: CLEAR DUPES")
|
||||
message_admins("[key_name_admin(src)] modified [original_name]'s list [objectvar]: CLEAR DUPES")
|
||||
return
|
||||
@@ -400,7 +400,7 @@ var/list/VVpixelmovement = list("step_x", "step_y", "bound_height", "bound_width
|
||||
if (!O.vv_edit_var(objectvar, L))
|
||||
src << "Your edit was rejected by the object."
|
||||
return
|
||||
world.log << "### ListVarEdit by [src]: [O.type] [objectvar]: SHUFFLE"
|
||||
log_world("### ListVarEdit by [src]: [O.type] [objectvar]: SHUFFLE")
|
||||
log_admin("[key_name(src)] modified [original_name]'s [objectvar]: SHUFFLE")
|
||||
message_admins("[key_name_admin(src)] modified [original_name]'s list [objectvar]: SHUFFLE")
|
||||
return
|
||||
@@ -477,7 +477,7 @@ var/list/VVpixelmovement = list("step_x", "step_y", "bound_height", "bound_width
|
||||
if (O.vv_edit_var(objectvar, L))
|
||||
src << "Your edit was rejected by the object."
|
||||
return
|
||||
world.log << "### ListVarEdit by [src]: [O.type] [objectvar]: REMOVED=[html_encode("[original_var]")]"
|
||||
log_world("### ListVarEdit by [src]: [O.type] [objectvar]: REMOVED=[html_encode("[original_var]")]")
|
||||
log_admin("[key_name(src)] modified [original_name]'s [objectvar]: REMOVED=[original_var]")
|
||||
message_admins("[key_name_admin(src)] modified [original_name]'s [objectvar]: REMOVED=[original_var]")
|
||||
return
|
||||
@@ -496,7 +496,7 @@ var/list/VVpixelmovement = list("step_x", "step_y", "bound_height", "bound_width
|
||||
if (O.vv_edit_var(objectvar, L) == FALSE)
|
||||
src << "Your edit was rejected by the object."
|
||||
return
|
||||
world.log << "### ListVarEdit by [src]: [(O ? O.type : "/list")] [objectvar]: [original_var]=[new_var]"
|
||||
log_world("### ListVarEdit by [src]: [(O ? O.type : "/list")] [objectvar]: [original_var]=[new_var]")
|
||||
log_admin("[key_name(src)] modified [original_name]'s [objectvar]: [original_var]=[new_var]")
|
||||
message_admins("[key_name_admin(src)] modified [original_name]'s varlist [objectvar]: [original_var]=[new_var]")
|
||||
|
||||
@@ -605,6 +605,6 @@ var/list/VVpixelmovement = list("step_x", "step_y", "bound_height", "bound_width
|
||||
if (O.vv_edit_var(variable, var_new) == FALSE)
|
||||
src << "Your edit was rejected by the object."
|
||||
return
|
||||
world.log << "### VarEdit by [src]: [O.type] [variable]=[html_encode("[O.vars[variable]]")]"
|
||||
log_world("### VarEdit by [src]: [O.type] [variable]=[html_encode("[O.vars[variable]]")]")
|
||||
log_admin("[key_name(src)] modified [original_name]'s [variable] to [O.vars[variable]]")
|
||||
message_admins("[key_name_admin(src)] modified [original_name]'s [variable] to [O.vars[variable]]")
|
||||
|
||||
@@ -586,7 +586,7 @@ var/swapmaps_byname
|
||||
else if(swapmaps_mode!=SWAPMAPS_TEXT && fexists("map_[template_id].txt"))
|
||||
text=1
|
||||
else
|
||||
world.log << "SwapMaps error in SwapMaps_CreateFromTemplate(): map_[template_id] file not found."
|
||||
log_world("SwapMaps error in SwapMaps_CreateFromTemplate(): map_[template_id] file not found.")
|
||||
return
|
||||
if(text)
|
||||
S=new
|
||||
@@ -613,7 +613,7 @@ var/swapmaps_byname
|
||||
else if(swapmaps_mode!=SWAPMAPS_TEXT && fexists("map_[chunk_id].txt"))
|
||||
text=1
|
||||
else
|
||||
world.log << "SwapMaps error in SwapMaps_LoadChunk(): map_[chunk_id] file not found."
|
||||
log_world("SwapMaps error in SwapMaps_LoadChunk(): map_[chunk_id] file not found.")
|
||||
return
|
||||
if(text)
|
||||
S=new
|
||||
@@ -631,9 +631,11 @@ var/swapmaps_byname
|
||||
|
||||
/proc/SwapMaps_SaveChunk(chunk_id,turf/corner1,turf/corner2)
|
||||
if(!corner1 || !corner2)
|
||||
world.log << "SwapMaps error in SwapMaps_SaveChunk():"
|
||||
if(!corner1) world.log << " corner1 turf is null"
|
||||
if(!corner2) world.log << " corner2 turf is null"
|
||||
log_world("SwapMaps error in SwapMaps_SaveChunk():")
|
||||
if(!corner1)
|
||||
log_world(" corner1 turf is null")
|
||||
if(!corner2)
|
||||
log_world(" corner2 turf is null")
|
||||
return
|
||||
var/swapmap/M=new
|
||||
M.id=chunk_id
|
||||
@@ -660,7 +662,7 @@ var/swapmaps_byname
|
||||
else if(swapmaps_mode!=SWAPMAPS_TEXT && fexists("map_[id].txt"))
|
||||
text=1
|
||||
else
|
||||
world.log << "SwapMaps error in SwapMaps_GetSize(): map_[id] file not found."
|
||||
log_world("SwapMaps error in SwapMaps_GetSize(): map_[id] file not found.")
|
||||
return
|
||||
if(text)
|
||||
S=new
|
||||
|
||||
@@ -18,7 +18,7 @@ var/global/list/potentialRandomZlevels = generateMapList(filename = "config/away
|
||||
maploader.load_map(file)
|
||||
smooth_zlevel(world.maxz)
|
||||
SortAreas()
|
||||
world.log << "loaded [file] as z-level [world.maxz]"
|
||||
log_world("loaded [file] as z-level [world.maxz]")
|
||||
|
||||
/proc/reset_gateway_spawns(reset = FALSE)
|
||||
for(var/obj/machinery/gateway/G in world)
|
||||
@@ -92,7 +92,7 @@ var/global/list/potentialRandomZlevels = generateMapList(filename = "config/away
|
||||
if(ruins && ruins.len)
|
||||
ruin = ruins[pick(ruins)]
|
||||
else
|
||||
world.log << "Ruin loader had no ruins to pick from with [budget] left to spend."
|
||||
log_world("Ruin loader had no ruins to pick from with [budget] left to spend.")
|
||||
break
|
||||
// Can we afford it
|
||||
if(ruin.cost > budget)
|
||||
@@ -119,7 +119,7 @@ var/global/list/potentialRandomZlevels = generateMapList(filename = "config/away
|
||||
if(!valid)
|
||||
continue
|
||||
|
||||
world.log << "Ruin \"[ruin.name]\" placed at ([T.x], [T.y], [T.z])"
|
||||
log_world("Ruin \"[ruin.name]\" placed at ([T.x], [T.y], [T.z])")
|
||||
|
||||
var/obj/effect/ruin_loader/R = new /obj/effect/ruin_loader(T)
|
||||
R.Load(ruins,ruin)
|
||||
@@ -129,7 +129,7 @@ var/global/list/potentialRandomZlevels = generateMapList(filename = "config/away
|
||||
break
|
||||
|
||||
if(!overall_sanity)
|
||||
world.log << "Ruin loader gave up with [budget] left to spend."
|
||||
log_world("Ruin loader gave up with [budget] left to spend.")
|
||||
|
||||
|
||||
/obj/effect/ruin_loader
|
||||
|
||||
code/modules/error_handler/error_handler.dm (new file, 114 lines)
@@ -0,0 +1,114 @@
|
||||
var/global/list/error_last_seen = list()
|
||||
var/global/list/error_cooldown = list() /* Error_cooldown items will either be positive(cooldown time) or negative(silenced error)
|
||||
If negative, starts at -1, and goes down by 1 each time that error gets skipped*/
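For illustration, two hypothetical states of this list under those rules (the key is just "[file][line]"):

// error_cooldown["code/game/example.dm123"] == 1800	-> still cooling down, not yet silenced
// error_cooldown["code/game/example.dm123"] == -3	-> silenced; abs(-3) - 1 = 2 occurrences skipped so far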
|
||||
var/global/total_runtimes = 0
|
||||
var/global/total_runtimes_skipped = 0
|
||||
|
||||
#ifdef DEBUG
|
||||
/world/Error(exception/E, datum/e_src)
|
||||
if(!istype(E)) //Something threw an unusual exception
|
||||
log_world("\[[time_stamp()]] Uncaught exception: [E]")
|
||||
return ..()
|
||||
if(!error_last_seen) // A runtime is occurring too early in start-up initialization
|
||||
return ..()
|
||||
|
||||
total_runtimes++
|
||||
|
||||
var/erroruid = "[E.file][E.line]"
|
||||
var/last_seen = error_last_seen[erroruid]
|
||||
var/cooldown = error_cooldown[erroruid] || 0
|
||||
|
||||
if(last_seen == null)
|
||||
error_last_seen[erroruid] = world.time
|
||||
last_seen = world.time
|
||||
|
||||
if(cooldown < 0)
|
||||
error_cooldown[erroruid]-- //Used to keep track of skip count for this error
|
||||
total_runtimes_skipped++
|
||||
return //Error is currently silenced, skip handling it
|
||||
//Handle cooldowns and silencing spammy errors
|
||||
var/silencing = FALSE
|
||||
|
||||
// We can runtime before config is initialized because BYOND initializes objs/map before a bunch of other stuff happens.
|
||||
// This is a bunch of workaround code for that. Hooray!
|
||||
|
||||
var/configured_error_cooldown = initial(config.error_cooldown)
|
||||
var/configured_error_limit = initial(config.error_limit)
|
||||
var/configured_error_silence_time = initial(config.error_silence_time)
|
||||
if(config)
|
||||
configured_error_cooldown = config.error_cooldown
|
||||
configured_error_limit = config.error_limit
|
||||
configured_error_silence_time = config.error_silence_time
|
||||
|
||||
|
||||
//Each occurrence of a unique error adds to its cooldown time...
|
||||
cooldown = max(0, cooldown - (world.time - last_seen)) + configured_error_cooldown
|
||||
// ... which is used to silence an error if it occurs too often, too fast
|
||||
if(cooldown > configured_error_cooldown * configured_error_limit)
|
||||
cooldown = -1
|
||||
silencing = TRUE
|
||||
spawn(0)
|
||||
usr = null
|
||||
sleep(configured_error_silence_time)
|
||||
var/skipcount = abs(error_cooldown[erroruid]) - 1
|
||||
error_cooldown[erroruid] = 0
|
||||
if(skipcount > 0)
|
||||
world.log << "\[[time_stamp()]] Skipped [skipcount] runtimes in [E.file],[E.line]."
|
||||
error_cache.log_error(E, skip_count = skipcount)
|
||||
|
||||
error_last_seen[erroruid] = world.time
|
||||
error_cooldown[erroruid] = cooldown
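With the defaults above (error_cooldown = 600, error_limit = 50), the growth of this counter can be sketched as a standalone helper (hypothetical; the real handler does this inline):

/proc/example_next_cooldown(current_cooldown, last_seen)
	var/configured_cooldown = 600	// deciseconds, i.e. config.error_cooldown
	var/configured_limit = 50	// config.error_limit
	. = max(0, current_cooldown - (world.time - last_seen)) + configured_cooldown
	if(. > configured_cooldown * configured_limit)	// 600 * 50 = 30000
		. = -1	// silenced: back-to-back runtimes barely decay the counter, so roughly 50 rapid hits get here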
|
||||
|
||||
var/list/usrinfo = null
|
||||
var/locinfo
|
||||
if(istype(usr))
|
||||
usrinfo = list(" usr: [datum_info_line(usr)]")
|
||||
locinfo = atom_loc_line(usr)
|
||||
if(locinfo)
|
||||
usrinfo += " usr.loc: [locinfo]"
|
||||
// The following mess will almost definitely break if error messages are ever changed
|
||||
var/list/splitlines = splittext(E.desc, "\n")
|
||||
var/list/desclines = list()
|
||||
if(LAZYLEN(splitlines) > ERROR_USEFUL_LEN) // If there aren't at least three lines, there's no info
|
||||
for(var/line in splitlines)
|
||||
if(LAZYLEN(line) < 3 || findtext(line, "source file:") || findtext(line, "usr.loc:"))
|
||||
continue
|
||||
if(findtext(line, "usr:"))
|
||||
if(usrinfo)
|
||||
desclines.Add(usrinfo)
|
||||
usrinfo = null
|
||||
continue // Our usr info is better, replace it
|
||||
|
||||
if(copytext(line, 1, 3) != " ")
|
||||
desclines += (" " + line) // Pad any unpadded lines, so they look pretty
|
||||
else
|
||||
desclines += line
|
||||
if(usrinfo) //If this info isn't null, it hasn't been added yet
|
||||
desclines.Add(usrinfo)
|
||||
if(silencing)
|
||||
desclines += " (This error will now be silenced for [configured_error_silence_time / 600] minutes)"
|
||||
if(error_cache)
|
||||
error_cache.log_error(E, desclines)
|
||||
|
||||
world.log << "\[[time_stamp()]] Runtime in [E.file],[E.line]: [E]"
|
||||
for(var/line in desclines)
|
||||
world.log << line
|
||||
|
||||
/* This logs the runtime in the old format */
|
||||
|
||||
E.name = "\n\[[time2text(world.timeofday,"hh:mm:ss")]\][E.name]"
|
||||
|
||||
//Original
|
||||
//
|
||||
var/list/split = splittext(E.desc, "\n")
|
||||
for (var/i in 1 to split.len)
|
||||
if (split[i] != "")
|
||||
split[i] = "\[[time2text(world.timeofday,"hh:mm:ss")]\][split[i]]"
|
||||
E.desc = jointext(split, "\n")
|
||||
if(config && config.log_runtimes)
|
||||
world.log = runtime_diary
|
||||
..(E)
|
||||
|
||||
world.log = null
|
||||
|
||||
#endif
|
||||
code/modules/error_handler/error_viewer.dm (new file, 194 lines)
@@ -0,0 +1,194 @@
|
||||
// Error viewing datums, responsible for storing error info, notifying admins
|
||||
// when errors occur, and showing them to admins on demand.
|
||||
|
||||
// There are 3 different types used here:
|
||||
//
|
||||
// - error_cache keeps track of all error sources, as well as all individually
|
||||
// logged errors. Only one instance of this datum should ever exist, and it's
|
||||
// right here:
|
||||
|
||||
#ifdef DEBUG
|
||||
/var/datum/error_viewer/error_cache/error_cache = new()
|
||||
#else
|
||||
// If debugging is disabled, there's nothing useful to log, so don't bother.
|
||||
/var/datum/error_viewer/error_cache/error_cache = null
|
||||
#endif
|
||||
|
||||
// - error_source datums exist for each line (of code) that generates an error,
|
||||
// and keep track of all errors generated by that line.
|
||||
//
|
||||
// - error_entry datums exist for each logged error, and keep track of all
|
||||
// relevant info about that error.
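A sketch of how a single runtime flows through the three types (illustrative, not part of the file):

// world/Error(E)
//	-> error_cache.log_error(E, desclines)
//		-> error_sources["[E.file][E.line]"]	(an error_source, created on the first occurrence)
//		-> new error_entry	(one per occurrence, appended both to errors and to its source's list)
// The "organized" view lists error_source datums grouped by file/line; the "linear" view lists every error_entry in order.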
|
||||
|
||||
// Common vars and procs are kept at the error_viewer level
|
||||
/datum/error_viewer
|
||||
var/name = ""
|
||||
|
||||
/datum/error_viewer/proc/browse_to(client/user, html)
|
||||
var/datum/browser/browser = new(user.mob, "error_viewer", null, 600, 400)
|
||||
browser.set_content(html)
|
||||
browser.add_head_content({"
|
||||
<style>
|
||||
.runtime
|
||||
{
|
||||
background-color: #171717;
|
||||
border: solid 1px #202020;
|
||||
font-family: "Courier New";
|
||||
padding-left: 10px;
|
||||
color: #CCCCCC;
|
||||
}
|
||||
.runtime_line
|
||||
{
|
||||
margin-bottom: 10px;
|
||||
display: inline-block;
|
||||
}
|
||||
</style>
|
||||
"})
|
||||
browser.open()
|
||||
|
||||
/datum/error_viewer/proc/build_header(datum/error_viewer/back_to, linear)
|
||||
// Common starter HTML for show_to
|
||||
|
||||
. = ""
|
||||
|
||||
if (istype(back_to))
|
||||
. += back_to.make_link("<b><<<</b>", null, linear)
|
||||
|
||||
. += "[make_link("Refresh")]<br><br>"
|
||||
|
||||
/datum/error_viewer/proc/show_to(user, datum/error_viewer/back_to, linear)
|
||||
// Specific to each child type
|
||||
return
|
||||
|
||||
/datum/error_viewer/proc/make_link(linktext, datum/error_viewer/back_to, linear)
|
||||
var/back_to_param = ""
|
||||
if (!linktext)
|
||||
linktext = name
|
||||
|
||||
if (istype(back_to))
|
||||
back_to_param = ";viewruntime_backto=\ref[back_to]"
|
||||
|
||||
if (linear)
|
||||
back_to_param += ";viewruntime_linear=1"
|
||||
|
||||
return "<a href='?_src_=holder;viewruntime=\ref[src][back_to_param]'>[linktext]</a>"
|
||||
|
||||
/datum/error_viewer/error_cache
|
||||
var/list/errors = list()
|
||||
var/list/error_sources = list()
|
||||
var/list/errors_silenced = list()
|
||||
|
||||
/datum/error_viewer/error_cache/show_to(user, datum/error_viewer/back_to, linear)
|
||||
var/html = build_header()
|
||||
html += "<b>[global.total_runtimes]</b> runtimes, <b>[global.total_runtimes_skipped]</b> skipped<br><br>"
|
||||
if (!linear)
|
||||
html += "organized | [make_link("linear", null, 1)]<hr>"
|
||||
var/datum/error_viewer/error_source/error_source
|
||||
for (var/erroruid in error_sources)
|
||||
error_source = error_sources[erroruid]
|
||||
html += "[error_source.make_link(null, src)]<br>"
|
||||
|
||||
else
|
||||
html += "[make_link("organized", null)] | linear<hr>"
|
||||
for (var/datum/error_viewer/error_entry/error_entry in errors)
|
||||
html += "[error_entry.make_link(null, src, 1)]<br>"
|
||||
|
||||
browse_to(user, html)
|
||||
|
||||
/datum/error_viewer/error_cache/proc/log_error(exception/e, list/desclines, skip_count)
|
||||
if (!istype(e))
|
||||
return // Abnormal exception, don't even bother
|
||||
|
||||
var/erroruid = "[e.file][e.line]"
|
||||
var/datum/error_viewer/error_source/error_source = error_sources[erroruid]
|
||||
if (!error_source)
|
||||
error_source = new(e)
|
||||
error_sources[erroruid] = error_source
|
||||
|
||||
var/datum/error_viewer/error_entry/error_entry = new(e, desclines, skip_count)
|
||||
error_entry.error_source = error_source
|
||||
errors += error_entry
|
||||
error_source.errors += error_entry
|
||||
if (skip_count)
|
||||
return // Skip notifying admins about skipped errors.
|
||||
|
||||
// Show the error to admins with debug messages turned on, but only if one
|
||||
// from the same source hasn't been shown too recently
|
||||
if (error_source.next_message_at <= world.time)
|
||||
var/const/viewtext = "\[view]" // Nesting these in other brackets went poorly
|
||||
//log_debug("Runtime in <b>[e.file]</b>, line <b>[e.line]</b>: <b>[html_encode(e.name)]</b> [error_entry.make_link(viewtext)]")
|
||||
var/err_msg_delay
|
||||
if(config)
|
||||
err_msg_delay = config.error_msg_delay
|
||||
else
|
||||
err_msg_delay = initial(config.error_msg_delay)
|
||||
error_source.next_message_at = world.time + err_msg_delay
|
||||
|
||||
/datum/error_viewer/error_source
|
||||
var/list/errors = list()
|
||||
var/next_message_at = 0
|
||||
|
||||
/datum/error_viewer/error_source/New(exception/e)
|
||||
if (!istype(e))
|
||||
name = "\[[time_stamp()]] Uncaught exceptions"
|
||||
return
|
||||
|
||||
name = "<b>\[[time_stamp()]]</b> Runtime in <b>[e.file]</b>, line <b>[e.line]</b>: <b>[html_encode(e.name)]</b>"
|
||||
|
||||
/datum/error_viewer/error_source/show_to(user, datum/error_viewer/back_to, linear)
|
||||
if (!istype(back_to))
|
||||
back_to = error_cache
|
||||
|
||||
var/html = build_header(back_to)
|
||||
for (var/datum/error_viewer/error_entry/error_entry in errors)
|
||||
html += "[error_entry.make_link(null, src)]<br>"
|
||||
|
||||
browse_to(user, html)
|
||||
|
||||
/datum/error_viewer/error_entry
|
||||
var/datum/error_viewer/error_source/error_source
|
||||
var/exception/exc
|
||||
var/desc = ""
|
||||
var/usr_ref
|
||||
var/turf/usr_loc
|
||||
var/is_skip_count
|
||||
|
||||
/datum/error_viewer/error_entry/New(exception/e, list/desclines, skip_count)
|
||||
if (!istype(e))
|
||||
name = "<b>\[[time_stamp()]]</b> Uncaught exception: <b>[html_encode(e.name)]</b>"
|
||||
return
|
||||
|
||||
if(skip_count)
|
||||
name = "\[[time_stamp()]] Skipped [skip_count] runtimes in [e.file],[e.line]."
|
||||
is_skip_count = TRUE
|
||||
return
|
||||
|
||||
name = "<b>\[[time_stamp()]]</b> Runtime in <b>[e.file]</b>, line <b>[e.line]</b>: <b>[html_encode(e.name)]</b>"
|
||||
exc = e
|
||||
if (istype(desclines))
|
||||
for (var/line in desclines)
|
||||
// There's probably a better way to do this than non-breaking spaces...
|
||||
desc += "<span class='runtime_line'>[html_encode(line)]</span><br>"
|
||||
|
||||
if (usr)
|
||||
usr_ref = "\ref[usr]"
|
||||
usr_loc = get_turf(usr)
|
||||
|
||||
/datum/error_viewer/error_entry/show_to(user, datum/error_viewer/back_to, linear)
|
||||
if (!istype(back_to))
|
||||
back_to = error_source
|
||||
|
||||
var/html = build_header(back_to, linear)
|
||||
html += "[name]<div class='runtime'>[desc]</div>"
|
||||
if (usr_ref)
|
||||
html += "<br><b>usr</b>: <a href='?_src_=vars;Vars=[usr_ref]'>VV</a>"
|
||||
html += " <a href='?_src_=holder;adminplayeropts=[usr_ref]'>PP</a>"
|
||||
html += " <a href='?_src_=holder;adminplayerobservefollow=[usr_ref]'>Follow</a>"
|
||||
if (istype(usr_loc))
|
||||
html += "<br><b>usr.loc</b>: <a href='?_src_=vars;Vars=\ref[usr_loc]'>VV</a>"
|
||||
html += " <a href='?_src_=holder;adminplayerobservecoodjump=1;X=[usr_loc.x];Y=[usr_loc.y];Z=[usr_loc.z]'>JMP</a>"
|
||||
|
||||
browse_to(user, html)
|
||||
|
||||
/datum/error_viewer/error_entry/make_link(linktext, datum/error_viewer/back_to, linear)
|
||||
return is_skip_count ? name : ..()
|
||||
@@ -33,7 +33,7 @@
|
||||
if(areasToOpen && areasToOpen.len > 0)
|
||||
priority_announce("Gr3y.T1d3 virus detected in [station_name()] door subroutines. Severity level of [severity]. Recommend station AI involvement.", "Security Alert")
|
||||
else
|
||||
world.log << "ERROR: Could not initate grey-tide. No areas in the list!"
|
||||
log_world("ERROR: Could not initate grey-tide. No areas in the list!")
|
||||
kill()
|
||||
|
||||
|
||||
|
||||
@@ -54,9 +54,9 @@
|
||||
// the following is necessary for power reasons
|
||||
var/area/AS = get_area(src)
|
||||
if(istype(AS,/area/holodeck))
|
||||
world.log << "### MAPPING ERROR"
|
||||
world.log << "Holodeck computer cannot be in a holodeck."
|
||||
world.log << "This would cause circular power dependency."
|
||||
log_world("### MAPPING ERROR")
|
||||
log_world("Holodeck computer cannot be in a holodeck.")
|
||||
log_world("This would cause circular power dependency.")
|
||||
qdel(src) // todo handle constructed computers
|
||||
return //l-lewd...
|
||||
else
|
||||
|
||||
@@ -264,8 +264,7 @@
|
||||
found++
|
||||
if(found > 1)
|
||||
qdel(P, force=TRUE)
|
||||
world.log << "Map warning: Shuttle Template [S.mappath] \
|
||||
has multiple mobile docking ports."
|
||||
log_world("Map warning: Shuttle Template [S.mappath] has multiple mobile docking ports.")
|
||||
else if(!M.timid)
|
||||
// The shuttle template we loaded isn't "timid" which means
|
||||
// it's already registered with the shuttles subsystem.
|
||||
@@ -277,11 +276,9 @@
|
||||
else
|
||||
preview_shuttle = P
|
||||
if(istype(P, /obj/docking_port/stationary))
|
||||
world.log << "Map warning: Shuttle Template [S.mappath] has a \
|
||||
stationary docking port."
|
||||
log_world("Map warning: Shuttle Template [S.mappath] has a stationary docking port.")
|
||||
if(!found)
|
||||
var/msg = "load_template(): Shuttle Template [S.mappath] has no \
|
||||
mobile docking port. Aborting import."
|
||||
var/msg = "load_template(): Shuttle Template [S.mappath] has no mobile docking port. Aborting import."
|
||||
for(var/T in affected)
|
||||
var/turf/T0 = T
|
||||
T0.empty()
|
||||
|
||||
@@ -16,7 +16,7 @@
|
||||
var/list/map_transition_config = MAP_TRANSITION_CONFIG
|
||||
|
||||
/world/New()
|
||||
world.log << "World loaded at [world.timeofday]"
|
||||
log_world("World loaded at [world.timeofday]")
|
||||
map_ready = 1
|
||||
|
||||
#if (PRELOAD_RSC == 0)
|
||||
@@ -51,9 +51,9 @@ var/list/map_transition_config = MAP_TRANSITION_CONFIG
|
||||
|
||||
if(config.sql_enabled)
|
||||
if(!setup_database_connection())
|
||||
world.log << "Your server failed to establish a connection with the database."
|
||||
log_world("Your server failed to establish a connection with the database.")
|
||||
else
|
||||
world.log << "Database connection established."
|
||||
log_world("Database connection established.")
|
||||
|
||||
|
||||
data_core = new /datum/datacore()
|
||||
@@ -184,7 +184,7 @@ var/last_irc_status = 0
|
||||
else
|
||||
return ircadminwho()
|
||||
|
||||
#define WORLD_REBOOT(X) world.log << "World rebooted at [world.timeofday]"; ..(X)
|
||||
#define WORLD_REBOOT(X) log_world("World rebooted at [world.timeofday]"); ..(X)
|
||||
/world/Reboot(var/reason, var/feedback_c, var/feedback_r, var/time)
|
||||
if (reason == 1) //special reboot, do none of the normal stuff
|
||||
if (usr)
|
||||
@@ -270,26 +270,6 @@ var/last_irc_status = 0
|
||||
)
|
||||
world << sound(round_end_sound)
|
||||
|
||||
var/inerror = 0
|
||||
/world/Error(var/exception/e)
|
||||
//runtime while processing runtimes
|
||||
if (inerror)
|
||||
inerror = 0
|
||||
return ..(e)
|
||||
inerror = 1
|
||||
//newline at start is because of the "runtime error" byond prints that can't be timestamped.
|
||||
e.name = "\n\[[time2text(world.timeofday,"hh:mm:ss")]\][e.name]"
|
||||
|
||||
//this is done this way rather than with a simple text replacement, to pave the way for processing the runtime reports more thoroughly
|
||||
// (and because runtimes end with a newline, and we don't want to basically print an empty time stamp)
|
||||
var/list/split = splittext(e.desc, "\n")
|
||||
for (var/i in 1 to split.len)
|
||||
if (split[i] != "")
|
||||
split[i] = "\[[time2text(world.timeofday,"hh:mm:ss")]\][split[i]]"
|
||||
e.desc = jointext(split, "\n")
|
||||
inerror = 0
|
||||
return ..(e)
|
||||
|
||||
/world/proc/load_mode()
|
||||
var/list/Lines = file2list("data/mode.txt")
|
||||
if(Lines.len)
|
||||
@@ -397,7 +377,7 @@ var/failed_db_connections = 0
|
||||
else
|
||||
failed_db_connections++ //If it failed, increase the failed connections counter.
|
||||
if(config.sql_enabled)
|
||||
world.log << "SQL error: " + dbcon.ErrorMsg()
|
||||
log_world("SQL error: " + dbcon.ErrorMsg())
|
||||
|
||||
return .
|
||||
|
||||
|
||||
@@ -310,3 +310,12 @@ SECOND_TOPIC_LIMIT 10
|
||||
|
||||
MINUTE_TOPIC_LIMIT 100
|
||||
|
||||
## Error handling related options
## The "cooldown" time for each occurrence of a unique error
#ERROR_COOLDOWN 600
## How many occurrences before the next will silence them
#ERROR_LIMIT 90
## How long a unique error will be silenced for
#ERROR_SILENCE_TIME 6000
## How long to wait between messaging admins about occurrences of a unique error
#ERROR_MSG_DELAY 50
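To actually write runtimes to a file, LOG_RUNTIMES must be enabled as well (it is what the new config.log_runtimes flag reads); a hedged example of an uncommented block, keeping the values listed above:

LOG_RUNTIMES
ERROR_COOLDOWN 600
ERROR_LIMIT 90
ERROR_SILENCE_TIME 6000
ERROR_MSG_DELAY 50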
|
||||
@@ -1161,6 +1161,8 @@
|
||||
#include "code\modules\detectivework\footprints_and_rag.dm"
|
||||
#include "code\modules\detectivework\scanner.dm"
|
||||
#include "code\modules\emoji\emoji_parse.dm"
|
||||
#include "code\modules\error_handler\error_handler.dm"
|
||||
#include "code\modules\error_handler\error_viewer.dm"
|
||||
#include "code\modules\events\_event.dm"
|
||||
#include "code\modules\events\abductor.dm"
|
||||
#include "code\modules\events\alien_infestation.dm"
|
||||
|
||||