Merge remote-tracking branch 'citadel/master' into combat_v7

silicons
2021-03-15 14:39:16 -07:00
870 changed files with 32914 additions and 23755 deletions

View File

@@ -415,7 +415,7 @@ Example config:
while(recent_round)
adjustment += repeated_mode_adjust[recent_round]
recent_round = SSpersistence.saved_modes.Find(name,recent_round+1,0)
probability *= ((100-adjustment)/100)
probability *= max(0,((100-adjustment)/100))
runnable_storytellers[S] = probability
return runnable_storytellers
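The max(0, ...) clamp above matters once repeat penalties sum past 100%; a worked example with hypothetical repeated_mode_adjust values (not from this diff):

// Suppose repeated_mode_adjust = list(60, 50, 30) and this storyteller ran in
// each of the last two rounds, so adjustment = 60 + 50 = 110.
// Old code: probability *= (100 - 110) / 100          -> probability goes negative.
// New code: probability *= max(0, (100 - 110) / 100)  -> probability is clamped to 0.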
@@ -425,6 +425,7 @@ Example config:
var/list/min_pop = Get(/datum/config_entry/keyed_list/min_pop)
var/list/max_pop = Get(/datum/config_entry/keyed_list/max_pop)
var/list/repeated_mode_adjust = Get(/datum/config_entry/number_list/repeated_mode_adjust)
var/desired_chaos_level = 9 - SSpersistence.get_recent_chaos()
for(var/T in gamemode_cache)
var/datum/game_mode/M = new T()
if(!(M.config_tag in modes))
@@ -448,7 +449,18 @@ Example config:
while(recent_round)
adjustment += repeated_mode_adjust[recent_round]
recent_round = SSpersistence.saved_modes.Find(M.config_tag,recent_round+1,0)
final_weight *= ((100-adjustment)/100)
final_weight *= max(0,((100-adjustment)/100))
if(Get(/datum/config_entry/flag/weigh_by_recent_chaos))
var/chaos_level = M.get_chaos()
var/exponent = Get(/datum/config_entry/number/chaos_exponent)
var/delta = chaos_level - desired_chaos_level
if(desired_chaos_level > 5)
delta = abs(min(delta, 0))
else if(desired_chaos_level < 5)
delta = max(delta, 0)
else
delta = abs(delta)
final_weight /= (delta + 1) ** exponent
runnable_modes[M] = final_weight
return runnable_modes
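The new chaos weighting divides each mode's weight by (delta + 1) ** chaos_exponent, where delta measures how far the mode's chaos sits from the desired level on the penalised side; a rough numeric sketch with assumed values (not from this diff):

// Assume recent rounds averaged chaos 7, so desired_chaos_level = 9 - 7 = 2.
// Because desired_chaos_level < 5, only modes above the target are penalised
// (delta = max(delta, 0)); calmer modes keep their full weight.
// With chaos_exponent = 1:
//   a mode with get_chaos() = 6: delta = 6 - 2 = 4, final_weight /= (4 + 1) -> 20% of its weight
//   a mode with get_chaos() = 2: delta = 0,         final_weight /= 1       -> unchanged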

View File

@@ -22,11 +22,10 @@
/datum/config_entry/string/cross_comms_name
/datum/config_entry/string/medal_hub_address
/datum/config_entry/string/medal_hub_password
protection = CONFIG_ENTRY_HIDDEN
/datum/config_entry/string/cross_comms_network
protection = CONFIG_ENTRY_LOCKED
/// cit config
/datum/config_entry/keyed_list/cross_server_bunker_override
key_mode = KEY_MODE_TEXT
value_mode = VALUE_MODE_TEXT

View File

@@ -7,6 +7,13 @@
/datum/config_entry/keyed_list/probability/ValidateListEntry(key_name)
return key_name in config.modes
/datum/config_entry/keyed_list/chaos_level
key_mode = KEY_MODE_TEXT
value_mode = VALUE_MODE_NUM
/datum/config_entry/keyed_list/chaos_level/ValidateListEntry(key_name)
return key_name in config.modes
/datum/config_entry/keyed_list/max_pop
key_mode = KEY_MODE_TEXT
value_mode = VALUE_MODE_NUM
@@ -605,3 +612,8 @@
/// Dirtiness multiplier for making turfs dirty
/datum/config_entry/number/turf_dirty_multiplier
config_entry_value = 1
/datum/config_entry/flag/weigh_by_recent_chaos
/datum/config_entry/number/chaos_exponent
config_entry_value = 1
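A hedged sketch of how the new entries might look in the game config, following the usual flag and keyed-list syntax (mode tags and numbers are illustrative only):

WEIGH_BY_RECENT_CHAOS
CHAOS_EXPONENT 2
CHAOS_LEVEL TRAITOR 3
CHAOS_LEVEL WIZARD 7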

View File

@@ -275,6 +275,11 @@
/datum/config_entry/flag/panic_bunker // prevents people the server hasn't seen before from connecting
/datum/config_entry/number/panic_bunker_living // living time in minutes that a player needs to pass the panic bunker
/datum/config_entry/string/panic_bunker_message
config_entry_value = "Sorry but the server is currently not accepting connections from never before seen players."
/datum/config_entry/number/notify_new_player_age // how long do we notify admins of a new player
min_val = -1
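For reference, a hedged sketch of the matching config lines (values illustrative; the message default is the string shown above):

PANIC_BUNKER
PANIC_BUNKER_LIVING 60
PANIC_BUNKER_MESSAGE Sorry but the server is currently not accepting connections from never before seen players.
NOTIFY_NEW_PLAYER_AGE -1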

View File

@@ -0,0 +1,74 @@
SUBSYSTEM_DEF(achievements)
name = "Achievements"
flags = SS_NO_FIRE
init_order = INIT_ORDER_ACHIEVEMENTS
var/achievements_enabled = FALSE
///List of achievements
var/list/datum/award/achievement/achievements = list()
///List of scores
var/list/datum/award/score/scores = list()
///List of all awards
var/list/datum/award/awards = list()
/datum/controller/subsystem/achievements/Initialize(timeofday)
if(!SSdbcore.Connect())
return ..()
achievements_enabled = TRUE
for(var/T in subtypesof(/datum/award/achievement))
var/instance = new T
achievements[T] = instance
awards[T] = instance
for(var/T in subtypesof(/datum/award/score))
var/instance = new T
scores[T] = instance
awards[T] = instance
update_metadata()
for(var/i in GLOB.clients)
var/client/C = i
if(!C.player_details.achievements.initialized)
C.player_details.achievements.InitializeData()
return ..()
/datum/controller/subsystem/achievements/Shutdown()
save_achievements_to_db()
/datum/controller/subsystem/achievements/proc/save_achievements_to_db()
var/list/cheevos_to_save = list()
for(var/ckey in GLOB.player_details)
var/datum/player_details/PD = GLOB.player_details[ckey]
if(!PD || !PD.achievements)
continue
cheevos_to_save += PD.achievements.get_changed_data()
if(!length(cheevos_to_save))
return
SSdbcore.MassInsert(format_table_name("achievements"),cheevos_to_save,duplicate_key = TRUE)
//Update the metadata if any are behind
/datum/controller/subsystem/achievements/proc/update_metadata()
var/list/current_metadata = list()
//select metadata here
var/datum/db_query/Q = SSdbcore.NewQuery("SELECT achievement_key,achievement_version FROM [format_table_name("achievement_metadata")]")
if(!Q.Execute(async = TRUE))
qdel(Q)
return
else
while(Q.NextRow())
current_metadata[Q.item[1]] = text2num(Q.item[2])
qdel(Q)
var/list/to_update = list()
for(var/T in awards)
var/datum/award/A = awards[T]
if(!A.database_id)
continue
if(!current_metadata[A.database_id] || current_metadata[A.database_id] < A.achievement_version)
to_update += list(A.get_metadata_row())
if(to_update.len)
SSdbcore.MassInsert(format_table_name("achievement_metadata"),to_update,duplicate_key = TRUE)

View File

@@ -5,10 +5,10 @@ SUBSYSTEM_DEF(blackbox)
runlevels = RUNLEVEL_GAME | RUNLEVEL_POSTGAME
init_order = INIT_ORDER_BLACKBOX
var/list/feedback = list() //list of datum/feedback_variable
var/list/feedback = list() //list of datum/feedback_variable
var/list/first_death = list() //the first death of this round, assoc. vars keep track of different things
var/triggertime = 0
var/sealed = FALSE //time to stop tracking stats?
var/sealed = FALSE //time to stop tracking stats?
var/list/versions = list("antagonists" = 3,
"admin_secrets_fun_used" = 2,
"explosion" = 2,
@@ -28,12 +28,12 @@ SUBSYSTEM_DEF(blackbox)
//poll population
/datum/controller/subsystem/blackbox/fire()
set waitfor = FALSE //for population query
set waitfor = FALSE //for population query
CheckPlayerCount()
if(CONFIG_GET(flag/use_exp_tracking))
if((triggertime < 0) || (world.time > (triggertime +3000))) //subsystem fires once at roundstart then once every 10 minutes. a 5 min check skips the first fire. The <0 is midnight rollover check
if((triggertime < 0) || (world.time > (triggertime +3000))) //subsystem fires once at roundstart then once every 10 minutes. a 5 min check skips the first fire. The <0 is midnight rollover check
update_exp(10,FALSE)
/datum/controller/subsystem/blackbox/proc/CheckPlayerCount()
@@ -43,7 +43,17 @@ SUBSYSTEM_DEF(blackbox)
return
var/playercount = LAZYLEN(GLOB.player_list)
var/admincount = GLOB.admins.len
var/datum/DBQuery/query_record_playercount = SSdbcore.NewQuery("INSERT INTO [format_table_name("legacy_population")] (playercount, admincount, time, server_ip, server_port, round_id) VALUES ([playercount], [admincount], '[SQLtime()]', INET_ATON(IF('[world.internet_address]' LIKE '', '0', '[world.internet_address]')), '[world.port]', '[GLOB.round_id]')")
var/datum/db_query/query_record_playercount = SSdbcore.NewQuery({"
INSERT INTO [format_table_name("legacy_population")] (playercount, admincount, time, server_ip, server_port, round_id)
VALUES (:playercount, :admincount, :time, INET_ATON(:server_ip), :server_port, :round_id)
"}, list(
"playercount" = playercount,
"admincount" = admincount,
"time" = SQLtime(),
"server_ip" = world.internet_address || "0",
"server_port" = "[world.port]",
"round_id" = GLOB.round_id,
))
query_record_playercount.Execute()
qdel(query_record_playercount)
@@ -87,24 +97,23 @@ SUBSYSTEM_DEF(blackbox)
if (!SSdbcore.Connect())
return
// var/list/special_columns = list(
// "datetime" = "NOW()"
// )
var/list/special_columns = list(
"datetime" = "NOW()"
)
var/list/sqlrowlist = list()
for (var/datum/feedback_variable/FV in feedback)
sqlrowlist += list(list(
"datetime" = "Now()", //legacy
"round_id" = GLOB.round_id,
"key_name" = sanitizeSQL(FV.key),
"key_name" = FV.key,
"key_type" = FV.key_type,
"version" = versions[FV.key] || 1,
"json" = sanitizeSQL(json_encode(FV.json))
"json" = json_encode(FV.json)
))
if (!length(sqlrowlist))
return
SSdbcore.MassInsert(format_table_name("feedback"), sqlrowlist, ignore_errors = TRUE, delayed = TRUE)//, special_columns = special_columns)
SSdbcore.MassInsert(format_table_name("feedback"), sqlrowlist, ignore_errors = TRUE, delayed = TRUE, special_columns = special_columns)
/datum/controller/subsystem/blackbox/proc/Seal()
if(sealed)
@@ -162,13 +171,13 @@ feedback data can be recorded in 5 formats:
used for simple single-string records i.e. the current map
further calls to the same key will append saved data unless the overwrite argument is true or it already exists
when encoded calls made with overwrite will lack square brackets
calls: SSblackbox.record_feedback("text", "example", 1, "sample text")
calls: SSblackbox.record_feedback("text", "example", 1, "sample text")
SSblackbox.record_feedback("text", "example", 1, "other text")
json: {"data":["sample text","other text"]}
"amount"
used to record simple counts of data i.e. the number of ahelps received
further calls to the same key will add or subtract (if increment argument is a negative) from the saved amount
calls: SSblackbox.record_feedback("amount", "example", 8)
calls: SSblackbox.record_feedback("amount", "example", 8)
SSblackbox.record_feedback("amount", "example", 2)
json: {"data":10}
"tally"
@@ -176,7 +185,7 @@ feedback data can be recorded in 5 formats:
further calls to the same key will:
add or subtract from the saved value of the data key if it already exists
append the key and its value if it doesn't exist
calls: SSblackbox.record_feedback("tally", "example", 1, "sample data")
calls: SSblackbox.record_feedback("tally", "example", 1, "sample data")
SSblackbox.record_feedback("tally", "example", 4, "sample data")
SSblackbox.record_feedback("tally", "example", 2, "other data")
json: {"data":{"sample data":5,"other data":2}}
@@ -188,19 +197,19 @@ feedback data can be recorded in 5 formats:
further calls to the same key will:
add or subtract from the saved value of the data key if it already exists in the same multi-dimensional position
append the key and its value if it doesn't exist
calls: SSblackbox.record_feedback("nested tally", "example", 1, list("fruit", "orange", "apricot"))
calls: SSblackbox.record_feedback("nested tally", "example", 1, list("fruit", "orange", "apricot"))
SSblackbox.record_feedback("nested tally", "example", 2, list("fruit", "orange", "orange"))
SSblackbox.record_feedback("nested tally", "example", 3, list("fruit", "orange", "apricot"))
SSblackbox.record_feedback("nested tally", "example", 10, list("fruit", "red", "apple"))
SSblackbox.record_feedback("nested tally", "example", 1, list("vegetable", "orange", "carrot"))
json: {"data":{"fruit":{"orange":{"apricot":4,"orange":2},"red":{"apple":10}},"vegetable":{"orange":{"carrot":1}}}}
tracking values associated with a number can't merge with a nesting value, trying to do so will append the list
call: SSblackbox.record_feedback("nested tally", "example", 3, list("fruit", "orange"))
call: SSblackbox.record_feedback("nested tally", "example", 3, list("fruit", "orange"))
json: {"data":{"fruit":{"orange":{"apricot":4,"orange":2},"red":{"apple":10},"orange":3},"vegetable":{"orange":{"carrot":1}}}}
"associative"
used to record text that's associated with a value i.e. coordinates
further calls to the same key will append a new list to existing data
calls: SSblackbox.record_feedback("associative", "example", 1, list("text" = "example", "path" = /obj/item, "number" = 4))
calls: SSblackbox.record_feedback("associative", "example", 1, list("text" = "example", "path" = /obj/item, "number" = 4))
SSblackbox.record_feedback("associative", "example", 1, list("number" = 7, "text" = "example", "other text" = "sample"))
json: {"data":{"1":{"text":"example","path":"/obj/item","number":"4"},"2":{"number":"7","text":"example","other text":"sample"}}}
@@ -275,7 +284,7 @@ Versioning
/datum/feedback_variable/New(new_key, new_key_type)
key = new_key
key_type = new_key_type
/*
/datum/controller/subsystem/blackbox/proc/LogAhelp(ticket, action, message, recipient, sender)
if(!SSdbcore.Connect())
return
@@ -286,7 +295,7 @@ Versioning
"}, list("ticket" = ticket, "action" = action, "message" = message, "recipient" = recipient, "sender" = sender, "server_ip" = world.internet_address || "0", "server_port" = world.port, "round_id" = GLOB.round_id, "time" = SQLtime()))
query_log_ahelp.Execute()
qdel(query_log_ahelp)
*/
/datum/controller/subsystem/blackbox/proc/ReportDeath(mob/living/L)
set waitfor = FALSE
@@ -302,51 +311,39 @@ Versioning
first_death["area"] = "[AREACOORD(L)]"
first_death["damage"] = "<font color='#FF5555'>[L.getBruteLoss()]</font>/<font color='orange'>[L.getFireLoss()]</font>/<font color='lightgreen'>[L.getToxLoss()]</font>/<font color='lightblue'>[L.getOxyLoss()]</font>/<font color='pink'>[L.getCloneLoss()]</font>"
first_death["last_words"] = L.last_words
var/sqlname = L.real_name
var/sqlkey = L.ckey
var/sqljob = L.mind.assigned_role
var/sqlspecial = L.mind.special_role
var/sqlpod = get_area_name(L, TRUE)
var/laname = L.lastattacker
var/lakey = L.lastattackerckey
var/sqlbrute = L.getBruteLoss()
var/sqlfire = L.getFireLoss()
var/sqlbrain = L.getOrganLoss(ORGAN_SLOT_BRAIN)
var/sqloxy = L.getOxyLoss()
var/sqltox = L.getToxLoss()
var/sqlclone = L.getCloneLoss()
var/sqlstamina = L.getStaminaLoss()
var/x_coord = L.x
var/y_coord = L.y
var/z_coord = L.z
var/last_words = L.last_words
var/suicide = L.suiciding
var/map = SSmapping.config.map_name
if(!SSdbcore.Connect())
return
sqlname = sanitizeSQL(sqlname)
sqlkey = sanitizeSQL(sqlkey)
sqljob = sanitizeSQL(sqljob)
sqlspecial = sanitizeSQL(sqlspecial)
sqlpod = sanitizeSQL(sqlpod)
laname = sanitizeSQL(laname)
lakey = sanitizeSQL(lakey)
sqlbrute = sanitizeSQL(sqlbrute)
sqlfire = sanitizeSQL(sqlfire)
sqlbrain = sanitizeSQL(sqlbrain)
sqloxy = sanitizeSQL(sqloxy)
sqltox = sanitizeSQL(sqltox)
sqlclone = sanitizeSQL(sqlclone)
sqlstamina = sanitizeSQL(sqlstamina)
x_coord = sanitizeSQL(x_coord)
y_coord = sanitizeSQL(y_coord)
z_coord = sanitizeSQL(z_coord)
last_words = sanitizeSQL(last_words)
suicide = sanitizeSQL(suicide)
map = sanitizeSQL(map)
var/datum/DBQuery/query_report_death = SSdbcore.NewQuery("INSERT INTO [format_table_name("death")] (pod, x_coord, y_coord, z_coord, mapname, server_ip, server_port, round_id, tod, job, special, name, byondkey, laname, lakey, bruteloss, fireloss, brainloss, oxyloss, toxloss, cloneloss, staminaloss, last_words, suicide) VALUES ('[sqlpod]', '[x_coord]', '[y_coord]', '[z_coord]', '[map]', INET_ATON(IF('[world.internet_address]' LIKE '', '0', '[world.internet_address]')), '[world.port]', [GLOB.round_id], '[SQLtime()]', '[sqljob]', '[sqlspecial]', '[sqlname]', '[sqlkey]', '[laname]', '[lakey]', [sqlbrute], [sqlfire], [sqlbrain], [sqloxy], [sqltox], [sqlclone], [sqlstamina], '[last_words]', [suicide])")
var/datum/db_query/query_report_death = SSdbcore.NewQuery({"
INSERT INTO [format_table_name("death")] (pod, x_coord, y_coord, z_coord, mapname, server_ip, server_port, round_id, tod, job, special, name, byondkey, laname, lakey, bruteloss, fireloss, brainloss, oxyloss, toxloss, cloneloss, staminaloss, last_words, suicide)
VALUES (:pod, :x_coord, :y_coord, :z_coord, :map, INET_ATON(:internet_address), :port, :round_id, :time, :job, :special, :name, :key, :laname, :lakey, :brute, :fire, :brain, :oxy, :tox, :clone, :stamina, :last_words, :suicide)
"}, list(
"name" = L.real_name,
"key" = L.ckey,
"job" = L.mind.assigned_role,
"special" = L.mind.special_role,
"pod" = get_area_name(L, TRUE),
"laname" = L.lastattacker,
"lakey" = L.lastattackerckey,
"brute" = L.getBruteLoss(),
"fire" = L.getFireLoss(),
"brain" = L.getOrganLoss(ORGAN_SLOT_BRAIN) || BRAIN_DAMAGE_DEATH, //getOrganLoss returns null without a brain but a value is required for this column
"oxy" = L.getOxyLoss(),
"tox" = L.getToxLoss(),
"clone" = L.getCloneLoss(),
"stamina" = L.getStaminaLoss(),
"x_coord" = L.x,
"y_coord" = L.y,
"z_coord" = L.z,
"last_words" = L.last_words,
"suicide" = L.suiciding,
"map" = SSmapping.config.map_name,
"internet_address" = world.internet_address || "0",
"port" = "[world.port]",
"round_id" = GLOB.round_id,
"time" = SQLtime(),
))
if(query_report_death)
query_report_death.Execute(async = TRUE)
qdel(query_report_death)

View File

@@ -3,7 +3,7 @@ SUBSYSTEM_DEF(dbcore)
flags = SS_BACKGROUND
wait = 1 MINUTES
init_order = INIT_ORDER_DBCORE
var/const/FAILED_DB_CONNECTION_CUTOFF = 5
var/failed_connection_timeout = 0
var/schema_mismatch = 0
var/db_minor = 0
@@ -13,8 +13,7 @@ SUBSYSTEM_DEF(dbcore)
var/last_error
var/list/active_queries = list()
var/datum/BSQL_Connection/connection
var/datum/BSQL_Operation/connectOperation
var/connection // Arbitrary handle returned from rust_g.
/datum/controller/subsystem/dbcore/Initialize()
//We send warnings to the admins during subsystem init, as the clients will be New'd and messages
@@ -29,7 +28,7 @@ SUBSYSTEM_DEF(dbcore)
/datum/controller/subsystem/dbcore/fire()
for(var/I in active_queries)
var/datum/DBQuery/Q = I
var/datum/db_query/Q = I
if(world.time - Q.last_activity_time > (5 MINUTES))
message_admins("Found undeleted query, please check the server logs and notify coders.")
log_sql("Undeleted query: \"[Q.sql]\" LA: [Q.last_activity] LAT: [Q.last_activity_time]")
@@ -39,24 +38,25 @@ SUBSYSTEM_DEF(dbcore)
/datum/controller/subsystem/dbcore/Recover()
connection = SSdbcore.connection
connectOperation = SSdbcore.connectOperation
/datum/controller/subsystem/dbcore/Shutdown()
//This is as close as we can get to the true round end before Disconnect() without changing where it's called, defeating the reason this is a subsystem
if(SSdbcore.Connect())
var/datum/DBQuery/query_round_shutdown = SSdbcore.NewQuery("UPDATE [format_table_name("round")] SET shutdown_datetime = Now(), end_state = '[sanitizeSQL(SSticker.end_state)]' WHERE id = [GLOB.round_id]")
var/datum/db_query/query_round_shutdown = SSdbcore.NewQuery(
"UPDATE [format_table_name("round")] SET shutdown_datetime = Now(), end_state = :end_state WHERE id = :round_id",
list("end_state" = SSticker.end_state, "round_id" = GLOB.round_id)
)
query_round_shutdown.Execute()
qdel(query_round_shutdown)
if(IsConnected())
Disconnect()
world.BSQL_Shutdown()
//nu
/datum/controller/subsystem/dbcore/can_vv_get(var_name)
return var_name != NAMEOF(src, connection) && var_name != NAMEOF(src, active_queries) && var_name != NAMEOF(src, connectOperation) && ..()
return var_name != NAMEOF(src, connection) && var_name != NAMEOF(src, active_queries) && ..()
/datum/controller/subsystem/dbcore/vv_edit_var(var_name, var_value)
if(var_name == NAMEOF(src, connection) || var_name == NAMEOF(src, connectOperation))
if(var_name == NAMEOF(src, connection))
return FALSE
return ..()
@@ -64,7 +64,11 @@ SUBSYSTEM_DEF(dbcore)
if(IsConnected())
return TRUE
if(failed_connections > FAILED_DB_CONNECTION_CUTOFF) //If it failed to establish a connection more than 5 times in a row, don't bother attempting to connect anymore.
if(failed_connection_timeout <= world.time) //it's been more than 5 seconds since we failed to connect, reset the counter
failed_connections = 0
if(failed_connections > 5) //If it failed to establish a connection more than 5 times in a row, don't bother attempting to connect for 5 seconds.
failed_connection_timeout = world.time + 50
return FALSE
if(!CONFIG_GET(flag/sql_enabled))
@@ -75,32 +79,33 @@ SUBSYSTEM_DEF(dbcore)
var/db = CONFIG_GET(string/feedback_database)
var/address = CONFIG_GET(string/address)
var/port = CONFIG_GET(number/port)
var/timeout = max(CONFIG_GET(number/async_query_timeout), CONFIG_GET(number/blocking_query_timeout))
var/thread_limit = CONFIG_GET(number/bsql_thread_limit)
connection = new /datum/BSQL_Connection(BSQL_CONNECTION_TYPE_MARIADB, CONFIG_GET(number/async_query_timeout), CONFIG_GET(number/blocking_query_timeout), CONFIG_GET(number/bsql_thread_limit))
var/error
if(QDELETED(connection))
connection = null
error = last_error
var/result = json_decode(rustg_sql_connect_pool(json_encode(list(
"host" = address,
"port" = port,
"user" = user,
"pass" = pass,
"db_name" = db,
"read_timeout" = timeout,
"write_timeout" = timeout,
"max_threads" = thread_limit,
))))
. = (result["status"] == "ok")
if (.)
connection = result["handle"]
else
SSdbcore.last_error = null
connectOperation = connection.BeginConnect(address, port, user, pass, db)
if(SSdbcore.last_error)
CRASH(SSdbcore.last_error)
UNTIL(connectOperation.IsComplete())
error = connectOperation.GetError()
. = !error
if (!.)
last_error = error
log_sql("Connect() failed | [error]")
connection = null
last_error = result["data"]
log_sql("Connect() failed | [last_error]")
++failed_connections
QDEL_NULL(connection)
QDEL_NULL(connectOperation)
/datum/controller/subsystem/dbcore/proc/CheckSchemaVersion()
if(CONFIG_GET(flag/sql_enabled))
if(Connect())
log_world("Database connection established.")
var/datum/DBQuery/query_db_version = NewQuery("SELECT major, minor FROM [format_table_name("schema_revision")] ORDER BY date DESC LIMIT 1")
var/datum/db_query/query_db_version = NewQuery("SELECT major, minor FROM [format_table_name("schema_revision")] ORDER BY date DESC LIMIT 1")
query_db_version.Execute()
if(query_db_version.NextRow())
db_major = text2num(query_db_version.item[1])
@@ -120,47 +125,46 @@ SUBSYSTEM_DEF(dbcore)
/datum/controller/subsystem/dbcore/proc/SetRoundID()
if(!Connect())
return
var/datum/DBQuery/query_round_initialize = SSdbcore.NewQuery("INSERT INTO [format_table_name("round")] (initialize_datetime, server_ip, server_port) VALUES (Now(), INET_ATON(IF('[world.internet_address]' LIKE '', '0', '[world.internet_address]')), '[world.port]')")
query_round_initialize.Execute()
var/datum/db_query/query_round_initialize = SSdbcore.NewQuery(
"INSERT INTO [format_table_name("round")] (initialize_datetime, server_ip, server_port) VALUES (Now(), INET_ATON(:internet_address), :port)",
list("internet_address" = world.internet_address || "0", "port" = "[world.port]")
)
query_round_initialize.Execute(async = FALSE)
GLOB.round_id = "[query_round_initialize.last_insert_id]"
qdel(query_round_initialize)
var/datum/DBQuery/query_round_last_id = SSdbcore.NewQuery("SELECT LAST_INSERT_ID()")
query_round_last_id.Execute()
if(query_round_last_id.NextRow())
GLOB.round_id = query_round_last_id.item[1]
qdel(query_round_last_id)
/datum/controller/subsystem/dbcore/proc/SetRoundStart()
if(!Connect())
return
var/datum/DBQuery/query_round_start = SSdbcore.NewQuery("UPDATE [format_table_name("round")] SET start_datetime = Now() WHERE id = [GLOB.round_id]")
var/datum/db_query/query_round_start = SSdbcore.NewQuery(
"UPDATE [format_table_name("round")] SET start_datetime = Now() WHERE id = :round_id",
list("round_id" = GLOB.round_id)
)
query_round_start.Execute()
qdel(query_round_start)
/datum/controller/subsystem/dbcore/proc/SetRoundEnd()
if(!Connect())
return
var/sql_station_name = sanitizeSQL(station_name())
var/datum/DBQuery/query_round_end = SSdbcore.NewQuery("UPDATE [format_table_name("round")] SET end_datetime = Now(), game_mode_result = '[sanitizeSQL(SSticker.mode_result)]', station_name = '[sql_station_name]' WHERE id = [GLOB.round_id]")
var/datum/db_query/query_round_end = SSdbcore.NewQuery(
"UPDATE [format_table_name("round")] SET end_datetime = Now(), game_mode_result = :game_mode_result, station_name = :station_name WHERE id = :round_id",
list("game_mode_result" = SSticker.mode_result, "station_name" = station_name(), "round_id" = GLOB.round_id)
)
query_round_end.Execute()
qdel(query_round_end)
/datum/controller/subsystem/dbcore/proc/Disconnect()
failed_connections = 0
QDEL_NULL(connectOperation)
QDEL_NULL(connection)
if (connection)
rustg_sql_disconnect_pool(connection)
connection = null
/datum/controller/subsystem/dbcore/proc/IsConnected()
if(!CONFIG_GET(flag/sql_enabled))
if (!CONFIG_GET(flag/sql_enabled))
return FALSE
//block until any connect operations finish
var/datum/BSQL_Connection/_connection = connection
var/datum/BSQL_Operation/op = connectOperation
UNTIL(QDELETED(_connection) || op.IsComplete())
return !QDELETED(connection) && !op.GetError()
/datum/controller/subsystem/dbcore/proc/Quote(str)
if(connection)
return connection.Quote(str)
if (!connection)
return FALSE
return json_decode(rustg_sql_connected(connection))["status"] == "online"
/datum/controller/subsystem/dbcore/proc/ErrorMsg()
if(!CONFIG_GET(flag/sql_enabled))
@@ -170,32 +174,34 @@ SUBSYSTEM_DEF(dbcore)
/datum/controller/subsystem/dbcore/proc/ReportError(error)
last_error = error
/datum/controller/subsystem/dbcore/proc/NewQuery(sql_query)
/datum/controller/subsystem/dbcore/proc/NewQuery(sql_query, arguments)
if(IsAdminAdvancedProcCall())
log_admin_private("ERROR: Advanced admin proc call led to sql query: [sql_query]. Query has been blocked")
message_admins("ERROR: Advanced admin proc call led to sql query. Query has been blocked")
return FALSE
return new /datum/DBQuery(sql_query, connection)
return new /datum/db_query(connection, sql_query, arguments)
/datum/controller/subsystem/dbcore/proc/QuerySelect(list/querys, warn = FALSE, qdel = FALSE)
if (!islist(querys))
if (!istype(querys, /datum/DBQuery))
if (!istype(querys, /datum/db_query))
CRASH("Invalid query passed to QuerySelect: [querys]")
querys = list(querys)
for (var/thing in querys)
var/datum/DBQuery/query = thing
var/datum/db_query/query = thing
if (warn)
INVOKE_ASYNC(query, /datum/DBQuery.proc/warn_execute)
INVOKE_ASYNC(query, /datum/db_query.proc/warn_execute)
else
INVOKE_ASYNC(query, /datum/DBQuery.proc/Execute)
INVOKE_ASYNC(query, /datum/db_query.proc/Execute)
for (var/thing in querys)
var/datum/DBQuery/query = thing
var/datum/db_query/query = thing
UNTIL(!query.in_progress)
if (qdel)
qdel(query)
/*
Takes a list of rows (each row being an associated list of column => value) and inserts them via a single mass query.
Rows missing columns present in other rows will resolve to SQL NULL
@@ -203,137 +209,135 @@ You are expected to do your own escaping of the data, and expected to provide yo
The duplicate_key arg can be true to automatically generate this part of the query
or set to a string that is appended to the end of the query
Ignore_errors instructs mysql to continue inserting rows if some of them have errors.
the erroneous row(s) aren't inserted and there isn't really any way to know why or why errored
the erroneous row(s) aren't inserted and there isn't really any way to know why or why errored
Delayed insert mode was removed in mysql 7 and only works with MyISAM type tables,
It was included because it is still supported in mariadb.
It does not work with duplicate_key and the mysql server ignores it in those cases
*/
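A minimal usage sketch of the proc documented above (table and column names are purely illustrative); with duplicate_key = TRUE the proc generates the ON DUPLICATE KEY UPDATE clause itself:

SSdbcore.MassInsert(format_table_name("example_scores"), list(
	list("ckey" = "alice", "score" = 3),
	list("ckey" = "bob", "score" = 5),
), duplicate_key = TRUE, async = TRUE)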
/datum/controller/subsystem/dbcore/proc/MassInsert(table, list/rows, duplicate_key = FALSE, ignore_errors = FALSE, delayed = FALSE, warn = FALSE, async = TRUE)
/datum/controller/subsystem/dbcore/proc/MassInsert(table, list/rows, duplicate_key = FALSE, ignore_errors = FALSE, delayed = FALSE, warn = FALSE, async = TRUE, special_columns = null)
if (!table || !rows || !istype(rows))
return
// Prepare column list
var/list/columns = list()
var/list/sorted_rows = list()
var/list/has_question_mark = list()
for (var/list/row in rows)
var/list/sorted_row = list()
sorted_row.len = columns.len
for (var/column in row)
var/idx = columns[column]
if (!idx)
idx = columns.len + 1
columns[column] = idx
sorted_row.len = columns.len
columns[column] = "?"
has_question_mark[column] = TRUE
for (var/column in special_columns)
columns[column] = special_columns[column]
has_question_mark[column] = findtext(special_columns[column], "?")
sorted_row[idx] = row[column]
sorted_rows[++sorted_rows.len] = sorted_row
// Prepare SQL query full of placeholders
var/list/query_parts = list("INSERT")
if (delayed)
query_parts += " DELAYED"
if (ignore_errors)
query_parts += " IGNORE"
query_parts += " INTO "
query_parts += table
query_parts += "\n([columns.Join(", ")])\nVALUES"
var/list/arguments = list()
var/has_row = FALSE
for (var/list/row in rows)
if (has_row)
query_parts += ","
query_parts += "\n ("
var/has_col = FALSE
for (var/column in columns)
if (has_col)
query_parts += ", "
if (has_question_mark[column])
var/name = "p[arguments.len]"
query_parts += replacetext(columns[column], "?", ":[name]")
arguments[name] = row[column]
else
query_parts += columns[column]
has_col = TRUE
query_parts += ")"
has_row = TRUE
if (duplicate_key == TRUE)
var/list/column_list = list()
for (var/column in columns)
column_list += "[column] = VALUES([column])"
duplicate_key = "ON DUPLICATE KEY UPDATE [column_list.Join(", ")]\n"
else if (duplicate_key == FALSE)
duplicate_key = null
query_parts += "\nON DUPLICATE KEY UPDATE [column_list.Join(", ")]"
else if (duplicate_key != FALSE)
query_parts += duplicate_key
if (ignore_errors)
ignore_errors = " IGNORE"
else
ignore_errors = null
if (delayed)
delayed = " DELAYED"
else
delayed = null
var/list/sqlrowlist = list()
var/len = columns.len
for (var/list/row in sorted_rows)
if (length(row) != len)
row.len = len
for (var/value in row)
if (value == null)
value = "NULL"
sqlrowlist += "([row.Join(", ")])"
sqlrowlist = " [sqlrowlist.Join(",\n ")]"
var/datum/DBQuery/Query = NewQuery("INSERT[delayed][ignore_errors] INTO [table]\n([columns.Join(", ")])\nVALUES\n[sqlrowlist]\n[duplicate_key]")
var/datum/db_query/Query = NewQuery(query_parts.Join(), arguments)
if (warn)
. = Query.warn_execute(async)
else
. = Query.Execute(async)
qdel(Query)
/datum/DBQuery
var/sql // The sql query being executed.
var/list/item //list of data values populated by NextRow()
/datum/db_query
// Inputs
var/connection
var/sql
var/arguments
// Status information
var/in_progress
var/last_error
var/last_activity
var/last_activity_time
var/last_error
var/skip_next_is_complete
var/in_progress
var/datum/BSQL_Connection/connection
var/datum/BSQL_Operation/Query/query
// Output
var/list/list/rows
var/next_row_to_take = 1
var/affected
var/last_insert_id
/datum/DBQuery/New(sql_query, datum/BSQL_Connection/connection)
var/list/item //list of data values populated by NextRow()
/datum/db_query/New(connection, sql, arguments)
SSdbcore.active_queries[src] = TRUE
Activity("Created")
item = list()
src.connection = connection
sql = sql_query
/datum/DBQuery/Destroy()
src.connection = connection
src.sql = sql
src.arguments = arguments
/datum/db_query/Destroy()
Close()
SSdbcore.active_queries -= src
return ..()
/datum/DBQuery/CanProcCall(proc_name)
/datum/db_query/CanProcCall(proc_name)
//fuck off kevinz
return FALSE
/datum/DBQuery/proc/SetQuery(new_sql)
if(in_progress)
CRASH("Attempted to set new sql while waiting on active query")
Close()
sql = new_sql
/datum/DBQuery/proc/Activity(activity)
/datum/db_query/proc/Activity(activity)
last_activity = activity
last_activity_time = world.time
/datum/DBQuery/proc/warn_execute(async = FALSE)
/datum/db_query/proc/warn_execute(async = TRUE)
. = Execute(async)
if(!.)
to_chat(usr, "<span class='danger'>A SQL error occurred during this operation, check the server logs.</span>")
/datum/DBQuery/proc/Execute(async = FALSE, log_error = TRUE)
/datum/db_query/proc/Execute(async = TRUE, log_error = TRUE)
Activity("Execute")
if(in_progress)
CRASH("Attempted to start a new query while waiting on the old one")
if(QDELETED(connection))
if(!SSdbcore.IsConnected())
last_error = "No connection!"
return FALSE
var/start_time
var/timed_out
if(!async)
start_time = REALTIMEOFDAY
Close()
query = connection.BeginQuery(sql)
if(!async)
timed_out = !query.WaitForCompletion()
else
in_progress = TRUE
UNTIL(query.IsComplete())
in_progress = FALSE
skip_next_is_complete = TRUE
var/error = QDELETED(query) ? "Query object deleted!" : query.GetError()
last_error = error
. = !error
. = run_query(async)
var/timed_out = !. && findtext(last_error, "Operation timed out")
if(!. && log_error)
log_sql("[error] | Query used: [sql]")
log_sql("[last_error] | Query used: [sql] | Arguments: [json_encode(arguments)]")
if(!async && timed_out)
log_query_debug("Query execution started at [start_time]")
log_query_debug("Query execution ended at [REALTIMEOFDAY]")
@@ -341,44 +345,51 @@ Delayed insert mode was removed in mysql 7 and only works with MyISAM type table
log_query_debug("Query used: [sql]")
slow_query_check()
/datum/DBQuery/proc/slow_query_check()
/datum/db_query/proc/run_query(async)
var/job_result_str
if (async)
var/job_id = rustg_sql_query_async(connection, sql, json_encode(arguments))
in_progress = TRUE
UNTIL((job_result_str = rustg_sql_check_query(job_id)) != RUSTG_JOB_NO_RESULTS_YET)
in_progress = FALSE
if (job_result_str == RUSTG_JOB_ERROR)
last_error = job_result_str
return FALSE
else
job_result_str = rustg_sql_query_blocking(connection, sql, json_encode(arguments))
var/result = json_decode(job_result_str)
switch (result["status"])
if ("ok")
rows = result["rows"]
affected = result["affected"]
last_insert_id = result["last_insert_id"]
return TRUE
if ("err")
last_error = result["data"]
return FALSE
if ("offline")
last_error = "offline"
return FALSE
/datum/db_query/proc/slow_query_check()
message_admins("HEY! A database query timed out. Did the server just hang? <a href='?_src_=holder;[HrefToken()];slowquery=yes'>\[YES\]</a>|<a href='?_src_=holder;[HrefToken()];slowquery=no'>\[NO\]</a>")
/datum/DBQuery/proc/NextRow(async)
/datum/db_query/proc/NextRow(async = TRUE)
Activity("NextRow")
UNTIL(!in_progress)
if(!skip_next_is_complete)
if(!async)
query.WaitForCompletion()
else
in_progress = TRUE
UNTIL(query.IsComplete())
in_progress = FALSE
if (rows && next_row_to_take <= rows.len)
item = rows[next_row_to_take]
next_row_to_take++
return !!item
else
skip_next_is_complete = FALSE
return FALSE
last_error = query.GetError()
var/list/results = query.CurrentRow()
. = results != null
item.Cut()
//populate item array
for(var/I in results)
item += results[I]
/datum/DBQuery/proc/ErrorMsg()
/datum/db_query/proc/ErrorMsg()
return last_error
/datum/DBQuery/proc/Close()
item.Cut()
QDEL_NULL(query)
/world/BSQL_Debug(message)
if(!CONFIG_GET(flag/bsql_debug))
return
//strip sensitive stuff
if(findtext(message, ": CreateConnection("))
message = "CreateConnection CENSORED"
log_sql("BSQL_DEBUG: [message]")
/datum/db_query/proc/Close()
rows = null
item = null
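To illustrate the parameterized convention used throughout this commit, a hedged sketch of a /datum/db_query built through the new NewQuery(sql, arguments) signature (the lookup itself is an example, not part of the diff):

var/datum/db_query/query_example = SSdbcore.NewQuery(
	"SELECT reason FROM [format_table_name("stickyban")] WHERE ckey = :ckey",
	list("ckey" = "example_ckey")
)
if(query_example.Execute(async = TRUE))
	while(query_example.NextRow())
		log_sql("stickyban reason: [query_example.item[1]]")
qdel(query_example)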

View File

@@ -496,9 +496,15 @@ SUBSYSTEM_DEF(job)
H.equip_to_slot_if_possible(binder, SLOT_IN_BACKPACK, disable_warning = TRUE, bypass_equip_delay_self = TRUE)
for(var/card_type in H.client.prefs.tcg_cards)
if(card_type)
var/obj/item/tcg_card/card = new(get_turf(H), card_type, H.client.prefs.tcg_cards[card_type])
card.forceMove(binder)
binder.cards.Add(card)
if(islist(H.client.prefs.tcg_cards[card_type]))
for(var/duplicate in H.client.prefs.tcg_cards[card_type])
var/obj/item/tcg_card/card = new(get_turf(H), card_type, duplicate)
card.forceMove(binder)
binder.cards.Add(card)
else
var/obj/item/tcg_card/card = new(get_turf(H), card_type, H.client.prefs.tcg_cards[card_type])
card.forceMove(binder)
binder.cards.Add(card)
binder.check_for_exodia()
if(length(H.client.prefs.tcg_decks))
binder.decks = H.client.prefs.tcg_decks
@@ -508,9 +514,15 @@ SUBSYSTEM_DEF(job)
H.equip_to_slot_if_possible(binder, SLOT_IN_BACKPACK, disable_warning = TRUE, bypass_equip_delay_self = TRUE)
for(var/card_type in N.client.prefs.tcg_cards)
if(card_type)
var/obj/item/tcg_card/card = new(get_turf(H), card_type, N.client.prefs.tcg_cards[card_type])
card.forceMove(binder)
binder.cards.Add(card)
if(islist(H.client.prefs.tcg_cards[card_type]))
for(var/duplicate in N.client.prefs.tcg_cards[card_type])
var/obj/item/tcg_card/card = new(get_turf(H), card_type, duplicate)
card.forceMove(binder)
binder.cards.Add(card)
else
var/obj/item/tcg_card/card = new(get_turf(H), card_type, N.client.prefs.tcg_cards[card_type])
card.forceMove(binder)
binder.cards.Add(card)
binder.check_for_exodia()
if(length(N.client.prefs.tcg_decks))
binder.decks = N.client.prefs.tcg_decks

View File

@@ -286,7 +286,9 @@ SUBSYSTEM_DEF(mapping)
setup_station_z_index()
if(SSdbcore.Connect())
var/datum/DBQuery/query_round_map_name = SSdbcore.NewQuery("UPDATE [format_table_name("round")] SET map_name = '[config.map_name]' WHERE id = [GLOB.round_id]")
var/datum/db_query/query_round_map_name = SSdbcore.NewQuery({"
UPDATE [format_table_name("round")] SET map_name = :map_name WHERE id = :round_id
"}, list("map_name" = config.map_name, "round_id" = GLOB.round_id))
query_round_map_name.Execute()
qdel(query_round_map_name)

View File

@@ -1,6 +1,8 @@
/*! How material datums work
Materials are now instanced datums, with an associative list of them being kept in SSmaterials. We only instance the materials once and then re-use these instances for everything.
These materials call on_applied() on whatever item they are applied to; common effects are adding components, changing color and changing description. This allows us to differentiate items based on the material they are made out of.
*/
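A small sketch of the singleton lookup this comment describes, mirroring the cache populated on initialize below (/datum/material/iron is assumed here purely for illustration):

// The same cached instance is re-used everywhere instead of new()'ing another copy.
var/datum/material/mat = SSmaterials.materials[/datum/material/iron]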
SUBSYSTEM_DEF(materials)
@@ -14,12 +16,16 @@ SUBSYSTEM_DEF(materials)
var/list/materialtypes_by_category
///A cache of all material combinations that have been used
var/list/list/material_combos
///List of stackcrafting recipes for materials using rigid materials
///List of stackcrafting recipes for materials using base recipes
var/list/base_stack_recipes = list(
new /datum/stack_recipe("Chair", /obj/structure/chair/greyscale, one_per_turf = TRUE, on_floor = TRUE, applies_mats = TRUE),
new /datum/stack_recipe("Toilet", /obj/structure/toilet/greyscale, one_per_turf = TRUE, on_floor = TRUE, applies_mats = TRUE),
new /datum/stack_recipe("Sink Frame", /obj/structure/sink/greyscale, one_per_turf = TRUE, on_floor = TRUE, applies_mats = TRUE),
new /datum/stack_recipe("Floor tile", /obj/item/stack/tile/material, 1, 4, 20, applies_mats = TRUE),
)
///List of stackcrafting recipes for materials using rigid recipes
var/list/rigid_stack_recipes = list(
new /datum/stack_recipe("chair", /obj/structure/chair/greyscale, one_per_turf = TRUE, on_floor = TRUE, applies_mats = TRUE),
new /datum/stack_recipe("toilet", /obj/structure/toilet/greyscale, one_per_turf = TRUE, on_floor = TRUE, applies_mats = TRUE),
new /datum/stack_recipe("sink", /obj/structure/sink/greyscale, one_per_turf = TRUE, on_floor = TRUE, applies_mats = TRUE),
new /datum/stack_recipe("Floor tile", /obj/item/stack/tile/material, 1, 4, 20, applies_mats = TRUE)
// new /datum/stack_recipe("Carving block", /obj/structure/carving_block, 5, one_per_turf = TRUE, on_floor = TRUE, applies_mats = TRUE),
)
///Run on initialize; populates the materials and materials_by_category dictionaries with their appropriate vars (see these variables for more info)
@@ -29,7 +35,11 @@ SUBSYSTEM_DEF(materials)
materialtypes_by_category = list()
material_combos = list()
for(var/type in subtypesof(/datum/material))
var/datum/material/ref = new type
var/datum/material/ref = type
// if(!(initial(ref.init_flags) & MATERIAL_INIT_MAPLOAD))
// continue // Do not initialize
ref = new ref
materials[type] = ref
for(var/c in ref.categories)
materials_by_category[c] += list(ref)
@@ -40,7 +50,6 @@ SUBSYSTEM_DEF(materials)
InitializeMaterials()
return materials[fakemat] || fakemat
///Returns a list to be used as an object's custom_materials. Lists will be cached and re-used based on the parameters.
/datum/controller/subsystem/materials/proc/FindOrCreateMaterialCombo(list/materials_declaration, multiplier)
if(!material_combos)

View File

@@ -1,87 +0,0 @@
SUBSYSTEM_DEF(medals)
name = "Medals"
flags = SS_NO_FIRE
var/hub_enabled = FALSE
/datum/controller/subsystem/medals/Initialize(timeofday)
if(CONFIG_GET(string/medal_hub_address) && CONFIG_GET(string/medal_hub_password))
hub_enabled = TRUE
return ..()
/datum/controller/subsystem/medals/proc/UnlockMedal(medal, client/player)
set waitfor = FALSE
if(!medal || !hub_enabled)
return
if(isnull(world.SetMedal(medal, player, CONFIG_GET(string/medal_hub_address), CONFIG_GET(string/medal_hub_password))))
hub_enabled = FALSE
log_game("MEDAL ERROR: Could not contact hub to award medal:[medal] player:[player.key]")
message_admins("Error! Failed to contact hub to award [medal] medal to [player.key]!")
return
to_chat(player, "<span class='greenannounce'><B>Achievement unlocked: [medal]!</B></span>")
/datum/controller/subsystem/medals/proc/SetScore(score, client/player, increment, force)
set waitfor = FALSE
if(!score || !hub_enabled)
return
var/list/oldscore = GetScore(score, player, TRUE)
if(increment)
if(!oldscore[score])
oldscore[score] = 1
else
oldscore[score] = (text2num(oldscore[score]) + 1)
else
oldscore[score] = force
var/newscoreparam = list2params(oldscore)
if(isnull(world.SetScores(player.ckey, newscoreparam, CONFIG_GET(string/medal_hub_address), CONFIG_GET(string/medal_hub_password))))
hub_enabled = FALSE
log_game("SCORE ERROR: Could not contact hub to set score. Score:[score] player:[player.key]")
message_admins("Error! Failed to contact hub to set [score] score for [player.key]!")
/datum/controller/subsystem/medals/proc/GetScore(score, client/player, returnlist)
if(!score || !hub_enabled)
return
var/scoreget = world.GetScores(player.ckey, score, CONFIG_GET(string/medal_hub_address), CONFIG_GET(string/medal_hub_password))
if(isnull(scoreget))
hub_enabled = FALSE
log_game("SCORE ERROR: Could not contact hub to get score. Score:[score] player:[player.key]")
message_admins("Error! Failed to contact hub to get score: [score] for [player.key]!")
return
. = params2list(scoreget)
if(!returnlist)
return .[score]
/datum/controller/subsystem/medals/proc/CheckMedal(medal, client/player)
if(!medal || !hub_enabled)
return
if(isnull(world.GetMedal(medal, player, CONFIG_GET(string/medal_hub_address), CONFIG_GET(string/medal_hub_password))))
hub_enabled = FALSE
log_game("MEDAL ERROR: Could not contact hub to get medal:[medal] player: [player.key]")
message_admins("Error! Failed to contact hub to get [medal] medal for [player.key]!")
return
to_chat(player, "[medal] is unlocked")
/datum/controller/subsystem/medals/proc/LockMedal(medal, client/player)
if(!player || !medal || !hub_enabled)
return
var/result = world.ClearMedal(medal, player, CONFIG_GET(string/medal_hub_address), CONFIG_GET(string/medal_hub_password))
switch(result)
if(null)
hub_enabled = FALSE
log_game("MEDAL ERROR: Could not contact hub to clear medal:[medal] player:[player.key]")
message_admins("Error! Failed to contact hub to clear [medal] medal for [player.key]!")
if(TRUE)
message_admins("Medal: [medal] removed for [player.key]")
if(FALSE)
message_admins("Medal: [medal] was not found for [player.key]. Unable to clear.")
/datum/controller/subsystem/medals/proc/ClearScore(client/player)
if(isnull(world.SetScores(player.ckey, "", CONFIG_GET(string/medal_hub_address), CONFIG_GET(string/medal_hub_password))))
log_game("MEDAL ERROR: Could not contact hub to clear scores for [player.key]!")
message_admins("Error! Failed to contact hub to clear scores for [player.key]!")

View File

@@ -88,7 +88,6 @@ SUBSYSTEM_DEF(persistence)
SavePhotoPersistence() //THIS IS PERSISTENCE, NOT THE LOGGING PORTION.
SavePaintings()
SaveScars()
SaveTCGCards()
/**
* Loads persistent data relevant to the current map: Objects, etc.

View File

@@ -3,6 +3,7 @@
*/
/datum/controller/subsystem/persistence
var/list/saved_modes = list(1,2,3)
var/list/saved_chaos = list(5,5,5)
var/list/saved_dynamic_rules = list(list(),list(),list())
var/list/saved_storytellers = list("foo","bar","baz")
var/list/average_dynamic_threat = 50
@@ -20,6 +21,7 @@
/datum/controller/subsystem/persistence/LoadServerPersistence()
. = ..()
LoadRecentModes()
LoadRecentChaos()
LoadRecentStorytellers()
LoadRecentRulesets()
LoadRecentMaps()
@@ -33,6 +35,14 @@
file_data["data"] = saved_modes
fdel(json_file)
WRITE_FILE(json_file, json_encode(file_data))
saved_chaos[3] = saved_chaos[2]
saved_chaos[2] = saved_chaos[1]
saved_chaos[1] = SSticker.mode.get_chaos()
json_file = file("data/RecentChaos.json")
file_data = list()
file_data["data"] = saved_chaos
fdel(json_file)
WRITE_FILE(json_file, json_encode(file_data))
/datum/controller/subsystem/persistence/proc/CollectStoryteller(var/datum/game_mode/dynamic/mode)
saved_storytellers.len = 3
@@ -76,6 +86,15 @@
return
saved_modes = json["data"]
/datum/controller/subsystem/persistence/proc/LoadRecentChaos()
var/json_file = file("data/RecentChaos.json")
if(!fexists(json_file))
return
var/list/json = json_decode(file2text(json_file))
if(!json)
return
saved_chaos = json["data"]
/datum/controller/subsystem/persistence/proc/LoadRecentRulesets()
var/json_file = file("data/RecentRulesets.json")
if(!fexists(json_file))
@@ -105,3 +124,9 @@
if(!json)
return
saved_maps = json["maps"]
/datum/controller/subsystem/persistence/proc/get_recent_chaos()
var/sum = 0
for(var/n in saved_chaos)
sum += n
return sum/length(saved_chaos)
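A worked example of the rotation and average above (numbers hypothetical): starting from the default saved_chaos = list(5,5,5), a round whose mode reports get_chaos() = 8 shifts the list to (8,5,5), so get_recent_chaos() returns (8+5+5)/3 = 6 and the storyteller weighting earlier in this commit targets desired_chaos_level = 9 - 6 = 3.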

View File

@@ -47,5 +47,5 @@ SUBSYSTEM_DEF(processing)
* If you override this do not call parent, as it will return PROCESS_KILL. This is done to prevent objects that don't override process() from staying in the processing list
*/
/datum/proc/process(delta_time)
set waitfor = FALSE
// SHOULD_NOT_SLEEP(TRUE)
return PROCESS_KILL

View File

@@ -70,3 +70,8 @@ PROCESSING_SUBSYSTEM_DEF(weather)
A = W
break
return A
/datum/controller/subsystem/processing/weather/proc/get_weather_by_type(datum/weather/weather_datum_type)
for(var/V in processing)
if(istype(V,weather_datum_type))
return V

View File

@@ -12,6 +12,10 @@ SUBSYSTEM_DEF(shuttle)
var/list/beacons = list()
var/list/transit = list()
//Now it's only used for ID generation
var/list/assoc_mobile = list()
var/list/assoc_stationary = list()
var/list/transit_requesters = list()
var/list/transit_request_failures = list()
@@ -26,6 +30,7 @@ SUBSYSTEM_DEF(shuttle)
var/emergencyCallAmount = 0 //how many times the escape shuttle was called
var/emergencyNoEscape
var/emergencyNoRecall = FALSE
var/adminEmergencyNoRecall = FALSE
var/list/hostileEnvironments = list() //Things blocking escape shuttle from leaving
var/list/tradeBlockade = list() //Things blocking cargo from leaving.
var/supplyBlocked = FALSE
@@ -65,6 +70,8 @@ SUBSYSTEM_DEF(shuttle)
var/datum/turf_reservation/preview_reservation
var/shuttle_loading
/datum/controller/subsystem/shuttle/Initialize(timeofday)
ordernum = rand(1, 9000)
@@ -134,7 +141,7 @@ SUBSYSTEM_DEF(shuttle)
break
/datum/controller/subsystem/shuttle/proc/CheckAutoEvac()
if(emergencyNoEscape || emergencyNoRecall || !emergency || !SSticker.HasRoundStarted())
if(emergencyNoEscape || adminEmergencyNoRecall || emergencyNoRecall || !emergency || !SSticker.HasRoundStarted())
return
var/threshold = CONFIG_GET(number/emergency_shuttle_autocall_threshold)
@@ -179,31 +186,26 @@ SUBSYSTEM_DEF(shuttle)
return S
WARNING("couldn't find dock with id: [id]")
/// Check if we can call the evac shuttle.
/// Returns TRUE if we can. Otherwise, returns a string detailing the problem.
/datum/controller/subsystem/shuttle/proc/canEvac(mob/user)
var/srd = CONFIG_GET(number/shuttle_refuel_delay)
if(world.time - SSticker.round_start_time < srd)
to_chat(user, "<span class='alert'>The emergency shuttle is refueling. Please wait [DisplayTimeText(srd - (world.time - SSticker.round_start_time))] before trying again.</span>")
return FALSE
return "The emergency shuttle is refueling. Please wait [DisplayTimeText(srd - (world.time - SSticker.round_start_time))] before attempting to call."
switch(emergency.mode)
if(SHUTTLE_RECALL)
to_chat(user, "<span class='alert'>The emergency shuttle may not be called while returning to CentCom.</span>")
return FALSE
return "The emergency shuttle may not be called while returning to CentCom."
if(SHUTTLE_CALL)
to_chat(user, "<span class='alert'>The emergency shuttle is already on its way.</span>")
return FALSE
return "The emergency shuttle is already on its way."
if(SHUTTLE_DOCKED)
to_chat(user, "<span class='alert'>The emergency shuttle is already here.</span>")
return FALSE
return "The emergency shuttle is already here."
if(SHUTTLE_IGNITING)
to_chat(user, "<span class='alert'>The emergency shuttle is firing its engines to leave.</span>")
return FALSE
return "The emergency shuttle is firing its engines to leave."
if(SHUTTLE_ESCAPE)
to_chat(user, "<span class='alert'>The emergency shuttle is moving away to a safe distance.</span>")
return FALSE
return "The emergency shuttle is moving away to a safe distance."
if(SHUTTLE_STRANDED)
to_chat(user, "<span class='alert'>The emergency shuttle has been disabled by CentCom.</span>")
return FALSE
return "The emergency shuttle has been disabled by CentCom."
return TRUE
@@ -221,7 +223,9 @@ SUBSYSTEM_DEF(shuttle)
Good luck.")
emergency = backup_shuttle
if(!canEvac(user))
var/can_evac_or_fail_reason = SSshuttle.canEvac(user)
if(can_evac_or_fail_reason != TRUE)
to_chat(user, "<span class='alert'>[can_evac_or_fail_reason]</span>")
return
call_reason = trim(html_encode(call_reason))
@@ -250,10 +254,11 @@ SUBSYSTEM_DEF(shuttle)
var/area/A = get_area(user)
log_shuttle("[key_name(user)] has called the emergency shuttle.")
deadchat_broadcast(" has called the shuttle at <span class='name'>[A.name]</span>.", "<span class='name'>[user.real_name]</span>", user)
deadchat_broadcast(" has called the shuttle at <span class='name'>[A.name]</span>.", "<span class='name'>[user.real_name]</span>", user) //, message_type=DEADCHAT_ANNOUNCEMENT)
if(call_reason)
SSblackbox.record_feedback("text", "shuttle_reason", 1, "[call_reason]")
log_shuttle("Shuttle call reason: [call_reason]")
SSticker.emergency_reason = call_reason
message_admins("[ADMIN_LOOKUPFLW(user)] has called the shuttle. (<A HREF='?_src_=holder;[HrefToken()];trigger_centcom_recall=1'>TRIGGER CENTCOM RECALL</A>)")
/datum/controller/subsystem/shuttle/proc/centcom_recall(old_timer, admiral_message)
@@ -288,7 +293,7 @@ SUBSYSTEM_DEF(shuttle)
emergency.cancel(get_area(user))
log_shuttle("[key_name(user)] has recalled the shuttle.")
message_admins("[ADMIN_LOOKUPFLW(user)] has recalled the shuttle.")
deadchat_broadcast(" has recalled the shuttle from <span class='name'>[get_area_name(user, TRUE)]</span>.", "<span class='name'>[user.real_name]</span>", user)
deadchat_broadcast(" has recalled the shuttle from <span class='name'>[get_area_name(user, TRUE)]</span>.", "<span class='name'>[user.real_name]</span>", user) //, message_type=DEADCHAT_ANNOUNCEMENT)
return 1
/datum/controller/subsystem/shuttle/proc/canRecall()
@@ -314,7 +319,7 @@ SUBSYSTEM_DEF(shuttle)
if (!SSticker.IsRoundInProgress())
return
var/callShuttle = 1
var/callShuttle = TRUE
for(var/thing in GLOB.shuttle_caller_list)
if(isAI(thing))
@@ -330,7 +335,7 @@ SUBSYSTEM_DEF(shuttle)
var/turf/T = get_turf(thing)
if(T && is_station_level(T.z))
callShuttle = 0
callShuttle = FALSE
break
if(callShuttle)
@@ -406,7 +411,7 @@ SUBSYSTEM_DEF(shuttle)
else
if(M.initiate_docking(getDock(destination)) != DOCKING_SUCCESS)
return 2
return 0 //dock successful
return 0 //dock successful
/datum/controller/subsystem/shuttle/proc/moveShuttle(shuttleId, dockId, timed)
@@ -664,7 +669,7 @@ SUBSYSTEM_DEF(shuttle)
emergencyNoRecall = TRUE
endvote_passed = TRUE
/datum/controller/subsystem/shuttle/proc/action_load(datum/map_template/shuttle/loading_template, obj/docking_port/stationary/destination_port)
/datum/controller/subsystem/shuttle/proc/action_load(datum/map_template/shuttle/loading_template, obj/docking_port/stationary/destination_port, replace = FALSE)
// Check for an existing preview
if(preview_shuttle && (loading_template != preview_template))
preview_shuttle.jumpToNullSpace()
@@ -673,8 +678,8 @@ SUBSYSTEM_DEF(shuttle)
QDEL_NULL(preview_reservation)
if(!preview_shuttle)
if(load_template(loading_template))
preview_shuttle.linkup(loading_template, destination_port)
load_template(loading_template)
// preview_shuttle.linkup(loading_template, destination_port)
preview_template = loading_template
// get the existing shuttle information, if any
@@ -684,7 +689,7 @@ SUBSYSTEM_DEF(shuttle)
if(istype(destination_port))
D = destination_port
else if(existing_shuttle)
else if(existing_shuttle && replace)
timer = existing_shuttle.timer
mode = existing_shuttle.mode
D = existing_shuttle.get_docked()
@@ -703,11 +708,12 @@ SUBSYSTEM_DEF(shuttle)
WARNING("Template shuttle [preview_shuttle] cannot dock at [D] ([result]).")
return
if(existing_shuttle)
if(existing_shuttle && replace)
existing_shuttle.jumpToNullSpace()
var/list/force_memory = preview_shuttle.movement_force
preview_shuttle.movement_force = list("KNOCKDOWN" = 0, "THROW" = 0)
preview_shuttle.mode = SHUTTLE_PREARRIVAL//No idle shuttle moving. Transit dock get removed if shuttle moves too long.
preview_shuttle.initiate_docking(D)
preview_shuttle.movement_force = force_memory
@@ -718,7 +724,7 @@ SUBSYSTEM_DEF(shuttle)
preview_shuttle.timer = timer
preview_shuttle.mode = mode
preview_shuttle.register()
preview_shuttle.register(replace)
// TODO indicate to the user that success happened, rather than just
// blanking the modification tab
@@ -848,7 +854,8 @@ SUBSYSTEM_DEF(shuttle)
return data
/datum/controller/subsystem/shuttle/ui_act(action, params)
if(..())
. = ..()
if(.)
return
var/mob/user = usr
@@ -891,22 +898,10 @@ SUBSYSTEM_DEF(shuttle)
SSblackbox.record_feedback("text", "shuttle_manipulator", 1, "[M.name]")
break
if("preview")
if(S)
. = TRUE
unload_preview()
load_template(S)
if(preview_shuttle)
preview_template = S
user.forceMove(get_turf(preview_shuttle))
if("load")
if(existing_shuttle == backup_shuttle)
// TODO make the load button disabled
WARNING("The shuttle that the selected shuttle will replace \
is the backup shuttle. Backup shuttle is required to be \
intact for round sanity.")
else if(S)
if(S && !shuttle_loading)
. = TRUE
shuttle_loading = TRUE
// If successful, returns the mobile docking port
var/obj/docking_port/mobile/mdp = action_load(S)
if(mdp)
@@ -914,3 +909,38 @@ SUBSYSTEM_DEF(shuttle)
message_admins("[key_name_admin(usr)] loaded [mdp] with the shuttle manipulator.")
log_admin("[key_name(usr)] loaded [mdp] with the shuttle manipulator.</span>")
SSblackbox.record_feedback("text", "shuttle_manipulator", 1, "[mdp.name]")
shuttle_loading = FALSE
if("preview")
//if(preview_shuttle && (loading_template != preview_template))
if(S && !shuttle_loading)
. = TRUE
shuttle_loading = TRUE
unload_preview()
load_template(S)
if(preview_shuttle)
preview_template = S
user.forceMove(get_turf(preview_shuttle))
shuttle_loading = FALSE
if("replace")
if(existing_shuttle == backup_shuttle)
// TODO make the load button disabled
WARNING("The shuttle that the selected shuttle will replace \
is the backup shuttle. Backup shuttle is required to be \
intact for round sanity.")
else if(S && !shuttle_loading)
. = TRUE
shuttle_loading = TRUE
// If successful, returns the mobile docking port
var/obj/docking_port/mobile/mdp = action_load(S, replace = TRUE)
if(mdp)
user.forceMove(get_turf(mdp))
message_admins("[key_name_admin(usr)] load/replaced [mdp] with the shuttle manipulator.")
log_admin("[key_name(usr)] load/replaced [mdp] with the shuttle manipulator.</span>")
SSblackbox.record_feedback("text", "shuttle_manipulator", 1, "[mdp.name]")
shuttle_loading = FALSE
if(emergency == mdp) //you just changed the emergency shuttle, there are events in game + captains that can change your snowflake choice.
var/set_purchase = alert(usr, "Do you want to also disable shuttle purchases/random events that would change the shuttle?", "Butthurt Admin Prevention", "Yes, disable purchases/events", "No, I want to possibly get owned")
if(set_purchase == "Yes, disable purchases/events")
SSshuttle.shuttle_purchased = SHUTTLEPURCHASE_FORCED

View File

@@ -60,15 +60,10 @@ SUBSYSTEM_DEF(stickyban)
/datum/controller/subsystem/stickyban/proc/Populatedbcache()
var/newdbcache = list() //so if we runtime or the db connection dies we don't kill the existing cache
// var/datum/db_query/query_stickybans = SSdbcore.NewQuery("SELECT ckey, reason, banning_admin, datetime FROM [format_table_name("stickyban")] ORDER BY ckey")
// var/datum/db_query/query_ckey_matches = SSdbcore.NewQuery("SELECT stickyban, matched_ckey, first_matched, last_matched, exempt FROM [format_table_name("stickyban_matched_ckey")] ORDER BY first_matched")
// var/datum/db_query/query_cid_matches = SSdbcore.NewQuery("SELECT stickyban, matched_cid, first_matched, last_matched FROM [format_table_name("stickyban_matched_cid")] ORDER BY first_matched")
// var/datum/db_query/query_ip_matches = SSdbcore.NewQuery("SELECT stickyban, INET_NTOA(matched_ip), first_matched, last_matched FROM [format_table_name("stickyban_matched_ip")] ORDER BY first_matched")
var/datum/DBQuery/query_stickybans = SSdbcore.NewQuery("SELECT ckey, reason, banning_admin, datetime FROM [format_table_name("stickyban")] ORDER BY ckey")
var/datum/DBQuery/query_ckey_matches = SSdbcore.NewQuery("SELECT stickyban, matched_ckey, first_matched, last_matched, exempt FROM [format_table_name("stickyban_matched_ckey")] ORDER BY first_matched")
var/datum/DBQuery/query_cid_matches = SSdbcore.NewQuery("SELECT stickyban, matched_cid, first_matched, last_matched FROM [format_table_name("stickyban_matched_cid")] ORDER BY first_matched")
var/datum/DBQuery/query_ip_matches = SSdbcore.NewQuery("SELECT stickyban, INET_NTOA(matched_ip), first_matched, last_matched FROM [format_table_name("stickyban_matched_ip")] ORDER BY first_matched")
var/datum/db_query/query_stickybans = SSdbcore.NewQuery("SELECT ckey, reason, banning_admin, datetime FROM [format_table_name("stickyban")] ORDER BY ckey")
var/datum/db_query/query_ckey_matches = SSdbcore.NewQuery("SELECT stickyban, matched_ckey, first_matched, last_matched, exempt FROM [format_table_name("stickyban_matched_ckey")] ORDER BY first_matched")
var/datum/db_query/query_cid_matches = SSdbcore.NewQuery("SELECT stickyban, matched_cid, first_matched, last_matched FROM [format_table_name("stickyban_matched_cid")] ORDER BY first_matched")
var/datum/db_query/query_ip_matches = SSdbcore.NewQuery("SELECT stickyban, INET_NTOA(matched_ip), first_matched, last_matched FROM [format_table_name("stickyban_matched_ip")] ORDER BY first_matched")
SSdbcore.QuerySelect(list(query_stickybans, query_ckey_matches, query_cid_matches, query_ip_matches))
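// The four selects above are run as a single QuerySelect() batch, so the stickyban, ckey, CID and
// IP match rows all come back together before newdbcache is populated; the change in this hunk is
// swapping the legacy /datum/DBQuery wrappers for the newer /datum/db_query type.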
@@ -161,25 +156,15 @@ SUBSYSTEM_DEF(stickyban)
if (!ban["message"])
ban["message"] = "Evasion"
// TODO: USE NEW DB IMPLEMENTATION
var/datum/DBQuery/query_create_stickyban = SSdbcore.NewQuery(
"INSERT IGNORE INTO [format_table_name("stickyban")] (ckey, reason, banning_admin) VALUES ([ckey], [ban["message"]], ban["admin"]))"
var/datum/db_query/query_create_stickyban = SSdbcore.NewQuery(
"INSERT IGNORE INTO [format_table_name("stickyban")] (ckey, reason, banning_admin) VALUES (:ckey, :message, :admin)",
list("ckey" = ckey, "message" = ban["message"], "admin" = ban["admin"])
)
if (query_create_stickyban.warn_execute())
if (!query_create_stickyban.warn_execute())
qdel(query_create_stickyban)
return
qdel(query_create_stickyban)
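// With the new check, a failed warn_execute() (which is expected to surface the DB error itself)
// qdel()s the query and bails out before the matched ckey/CID/IP lists below are built; on success
// the query datum is still qdel()'d so the handle isn't leaked.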
// var/datum/db_query/query_create_stickyban = SSdbcore.NewQuery(
// "INSERT IGNORE INTO [format_table_name("stickyban")] (ckey, reason, banning_admin) VALUES (:ckey, :message, :admin)",
// list("ckey" = ckey, "message" = ban["message"], "admin" = ban["admin"])
// )
// if (!query_create_stickyban.warn_execute())
// qdel(query_create_stickyban)
// return
// qdel(query_create_stickyban)
var/list/sqlckeys = list()
var/list/sqlcids = list()
var/list/sqlips = list()


@@ -1,32 +1,63 @@
#define OCCLUSION_DISTANCE 20
/datum/sun
var/azimuth = 0 // clockwise, top-down rotation from 0 (north) to 359
var/power_mod = 1 // how much power this sun is outputting relative to standard
/datum/sun/vv_edit_var(var_name, var_value)
. = ..()
if(var_name == NAMEOF(src, azimuth))
SSsun.complete_movement()
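///Ray-marches from this atom towards the given sun, one tile at a time for up to distance tiles;
///returns TRUE as soon as an opaque turf blocks the line, FALSE if it reaches the map edge or runs
///out of range with a clear view.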
/atom/proc/check_obscured(datum/sun/sun, distance = OCCLUSION_DISTANCE)
var/target_x = round(sin(sun.azimuth), 0.01)
var/target_y = round(cos(sun.azimuth), 0.01)
var/x_hit = x
var/y_hit = y
var/turf/hit
for(var/run in 1 to distance)
x_hit += target_x
y_hit += target_y
hit = locate(round(x_hit, 1), round(y_hit, 1), z)
if(hit.opacity)
return TRUE
if(hit.x == 1 || hit.x == world.maxx || hit.y == 1 || hit.y == world.maxy) //edge of the map
break
return FALSE
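// Minimal usage sketch (hypothetical consumer, not part of this diff): a handler registered for
// COMSIG_SUN_MOVED could walk the suns list and only count the ones with a clear line of sight:
//	/obj/machinery/power/solar/proc/on_sun_moved(datum/source, datum/sun/primary_sun, list/suns)
//		var/exposure = 0
//		for(var/datum/sun/S in suns)
//			if(!check_obscured(S))
//				exposure += S.power_mod
// on_sun_moved() and exposure are made-up names for illustration; the real solar handling lives
// elsewhere in the tree.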
SUBSYSTEM_DEF(sun)
name = "Sun"
wait = 1 MINUTES
flags = SS_NO_TICK_CHECK
var/azimuth = 0 ///clockwise, top-down rotation from 0 (north) to 359
var/list/datum/sun/suns = list()
var/datum/sun/primary_sun
var/azimuth_mod = 1 ///multiplier against base_rotation
var/base_rotation = 6 ///base rotation in degrees per fire
/datum/controller/subsystem/sun/Initialize(start_timeofday)
azimuth = rand(0, 359)
primary_sun = new
suns += primary_sun
primary_sun.azimuth = rand(0, 359)
azimuth_mod = round(rand(50, 200)/100, 0.01) // 50% - 200% of standard rotation
if(prob(50))
azimuth_mod *= -1
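// (assumed intent) a negative modifier simply runs the orbit counter-clockwise at the same speed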
return ..()
/datum/controller/subsystem/sun/fire(resumed = FALSE)
azimuth += azimuth_mod * base_rotation
azimuth = round(azimuth, 0.01)
if(azimuth >= 360)
azimuth -= 360
if(azimuth < 0)
azimuth += 360
for(var/S in suns)
var/datum/sun/sun = S
sun.azimuth += azimuth_mod * base_rotation
sun.azimuth = round(sun.azimuth, 0.01)
if(sun.azimuth >= 360)
sun.azimuth -= 360
if(sun.azimuth < 0)
sun.azimuth += 360
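// A single add/subtract keeps each azimuth inside [0, 360): with base_rotation = 6 and
// |azimuth_mod| capped at 2 by Initialize(), one fire() moves a sun at most 12 degrees,
// so it can only ever overshoot the range by one wrap.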
complete_movement()
/datum/controller/subsystem/sun/proc/complete_movement()
SEND_SIGNAL(src, COMSIG_SUN_MOVED, azimuth)
SEND_SIGNAL(src, COMSIG_SUN_MOVED, primary_sun, suns)
/datum/controller/subsystem/sun/vv_edit_var(var_name, var_value)
. = ..()
if(var_name == NAMEOF(src, azimuth))
complete_movement()
#undef OCCLUSION_DISTANCE


@@ -69,6 +69,7 @@ SUBSYSTEM_DEF(ticker)
var/modevoted = FALSE //Have we sent a vote for the gamemode?
var/station_integrity = 100 // stored at roundend for use in some antag goals
var/emergency_reason
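///Assumed use: holds the reason text from a "call shuttle with reason" evacuation so the roundend
///news report can quote it (see STATION_EVACUATED below); stays null for ordinary evacuations.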
/datum/controller/subsystem/ticker/Initialize(timeofday)
load_mode()
@@ -268,7 +269,7 @@ SUBSYSTEM_DEF(ticker)
if(!GLOB.Debug2)
if(!can_continue)
log_game("[mode.name] failed pre_setup, cause: [mode.setup_error]")
send2irc("SSticker", "[mode.name] failed pre_setup, cause: [mode.setup_error]")
send2adminchat("SSticker", "[mode.name] failed pre_setup, cause: [mode.setup_error]")
message_admins("<span class='notice'>[mode.name] failed pre_setup, cause: [mode.setup_error]</span>")
QDEL_NULL(mode)
to_chat(world, "<B>Error setting up [GLOB.master_mode].</B> Reverting to pre-game lobby.")
@@ -334,7 +335,7 @@ SUBSYSTEM_DEF(ticker)
var/list/adm = get_admin_counts()
var/list/allmins = adm["present"]
send2irc("Server", "Round [GLOB.round_id ? "#[GLOB.round_id]:" : "of"] [hide_mode ? "secret":"[mode.name]"] has started[allmins.len ? ".":" with no active admins online!"]")
send2adminchat("Server", "Round [GLOB.round_id ? "#[GLOB.round_id]:" : "of"] [hide_mode ? "secret":"[mode.name]"] has started[allmins.len ? ".":" with no active admins online!"]")
setup_done = TRUE
for(var/i in GLOB.start_landmarks_list)
@@ -563,7 +564,10 @@ SUBSYSTEM_DEF(ticker)
if(STATION_DESTROYED_NUKE)
news_message = "We would like to reassure all employees that the reports of a Syndicate backed nuclear attack on [station_name()] are, in fact, a hoax. Have a secure day!"
if(STATION_EVACUATED)
news_message = "The crew of [station_name()] has been evacuated amid unconfirmed reports of enemy activity."
if(emergency_reason)
news_message = "[station_name()] has been evacuated after transmitting the following distress beacon:\n\n[emergency_reason]"
else
news_message = "The crew of [station_name()] has been evacuated amid unconfirmed reports of enemy activity."
if(BLOB_WIN)
news_message = "[station_name()] was overcome by an unknown biological outbreak, killing all crew on board. Don't let it happen to you! Remember, a clean work station is a safe work station."
if(BLOB_NUKE)
@@ -589,7 +593,7 @@ SUBSYSTEM_DEF(ticker)
if(WIZARD_KILLED)
news_message = "Tensions have flared with the Space Wizard Federation following the death of one of their members aboard [station_name()]."
if(STATION_NUKED)
news_message = "[station_name()] activated its self destruct device for unknown reasons. Attempts to clone the Captain so he can be arrested and executed are underway."
news_message = "[station_name()] activated its self-destruct device for unknown reasons. Attempts to clone the Captain so he can be arrested and executed are underway."
if(CLOCK_SUMMON)
news_message = "The garbled messages about hailing a mouse and strange energy readings from [station_name()] have been discovered to be an ill-advised, if thorough, prank by a clown."
if(CLOCK_SILICONS)
@@ -604,7 +608,8 @@ SUBSYSTEM_DEF(ticker)
if(SSblackbox.first_death)
var/list/ded = SSblackbox.first_death
if(ded.len)
news_message += " NT Sanctioned Psykers picked up faint traces of someone near the station, allegedly having had died. Their name was: [ded["name"]], [ded["role"]], at [ded["area"]].[ded["last_words"] ? " Their last words were: \"[ded["last_words"]]\"" : ""]"
var/last_words = ded["last_words"] ? " Their last words were: \"[ded["last_words"]]\"" : ""
news_message += " NT Sanctioned Psykers picked up faint traces of someone near the station, allegedly having had died. Their name was: [ded["name"]], [ded["role"]], at [ded["area"]].[last_words]"
else
news_message += " NT Sanctioned Psykers proudly confirm reports that nobody died this shift!"