// AI EYE
//
// An invisible (no icon) mob that the AI controls to look around the station with.
// It streams camera chunks as it moves around, which determine what the AI can and cannot see.

/mob/camera/aiEye
	name = "Inactive AI Eye"
	anchored = TRUE

	var/list/visibleCameraChunks = list()
	var/mob/living/silicon/ai/ai = null
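
// The camera network splits the map into /datum/camerachunk blocks; visibleCameraChunks holds
// the chunks this eye is currently subscribed to, so view_core() can release them all again.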

// Use this when setting the aiEye's location.
// It will also stream the chunk that the new loc is in.

/mob/camera/aiEye/forceMove(var/atom/destination)
	if(ai)
		if(!isturf(ai.loc))
			return
	if(!isturf(destination))
		// Climb the loc chain (item -> container -> ...) until we hit the turf it all sits on.
		for(destination = destination.loc; !isturf(destination); destination = destination.loc);
	forceEnter(destination)

	cameranet.visibility(src)
	if(ai && ai.client && ai.client.eye != src) // Set the eye to us and give the AI the sight & visibility flags it needs.
		ai.client.eye = src
		ai.sight |= SEE_TURFS
		ai.sight |= SEE_MOBS
		ai.sight |= SEE_OBJS
		ai.see_in_dark = 8
		ai.see_invisible = SEE_INVISIBLE_LEVEL_TWO

	//Holopad
	if(ai && istype(ai.current, /obj/machinery/hologram/holopad))
		var/obj/machinery/hologram/holopad/H = ai.current
		H.move_hologram()
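
// Typical call site, as in AIMove() below: step the eye one tile in a direction.
//     user.eyeobj.forceMove(get_turf(get_step(user.eyeobj, direct)))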

/mob/camera/aiEye/Move()
	return 0 // All repositioning goes through forceMove(); normal movement is a no-op.

// An AI eyeobj mob can't have a virtualhearer to hear with unless it gets one from a malf module.
/mob/camera/aiEye/Hear(message, atom/movable/speaker, var/datum/language/speaking, raw_message, radio_freq)
	if(radio_freq) //HOW CAN IT POSSIBLY READ LIPS THROUGH RADIOS
		return

	var/mob/M = speaker
	if(istype(M))
		if(ishuman(M))
			var/mob/living/carbon/human/H = speaker
			if(H.check_body_part_coverage(MOUTH)) //OR MASKS
				return
		ai.Hear(arglist(args)) //He can only read the lips of mobs, I can't think of objects using lips
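
// Note: arglist() unpacks the built-in args list back into separate arguments for the
// forwarded call; passing args directly would hand Hear() the whole list as its message.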

// AI MOVEMENT

// The AI's "eye". Described at the top of the file.

/mob/living/silicon/ai
	var/mob/camera/aiEye/eyeobj = new()
	var/sprint = 10 // Current eye movement speed.
	var/cooldown = 0 // Time (world.timeofday) after which the sprint speed resets.
	var/acceleration = 1 // Whether the eye speeds up while the AI keeps moving.

// Initialize the eye by assigning its "ai" variable to us, then set its loc to ours.
/mob/living/silicon/ai/New()
	..()
	eyeobj.ai = src
	eyeobj.name = "[src.name] (AI Eye)" // Give it a name
	spawn(5)
		eyeobj.loc = src.loc
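
// The spawn(5) above defers placing the eye by half a second, presumably so the AI itself
// has finished being placed on the map before its loc is copied.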

/mob/living/silicon/ai/Destroy()
	eyeobj.ai = null
	del(eyeobj) // No AI, no Eye
	..()

/atom/proc/move_camera_by_click()
	//writepanic("[__FILE__].[__LINE__] ([src.type])([usr ? usr.ckey : ""]) \\/atom/proc/move_camera_by_click() called tick#: [world.time]")
	if(istype(usr, /mob/living/silicon/ai))
		var/mob/living/silicon/ai/AI = usr
		if(AI.eyeobj && AI.client.eye == AI.eyeobj)
			AI.cameraFollow = null
			//AI.eyeobj.forceMove(src)
			if(isturf(src.loc) || isturf(src))
				AI.eyeobj.forceMove(src)
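
// Illustrative only: a click handler is expected to route AI clicks here, along the lines of
//     /turf/Click()
//         if(isAI(usr))
//             move_camera_by_click()
// (the actual call site lives in the click-handling code, not in this file).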

/mob/living/Click()
	if(isAI(usr))
		return
	..()

/mob/living/DblClick()
	if(isAI(usr) && usr != src) // Double-clicking a mob makes the AI start camera-tracking it.
		var/mob/living/silicon/ai/A = usr
		A.ai_actual_track(src)
		return
	..()

// This will move the AIEye. If the AI's camera light is toggled on, it will also cause
// lights near the eye to light up; that part is handled by light_cameras() at the end.
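
// Worked example of the sprint curve: one step is taken per 20 points of sprint, so at the
// initial value of 10 the loop below moves the eye one tile per input. Each move adds 0.5
// sprint, so after 21 consecutive moves it crosses 20 and the eye covers two tiles per input,
// capping at three tiles once sprint reaches max_sprint (50).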

/client/proc/AIMove(n, direct, var/mob/living/silicon/ai/user)
	//writepanic("[__FILE__].[__LINE__] ([src.type])([usr ? usr.ckey : ""]) \\/client/proc/AIMove() called tick#: [world.time]")

	var/initial = initial(user.sprint)
	var/max_sprint = 50

	if(user.cooldown && user.cooldown < world.timeofday) // Sprint resets once the cooldown lapses.
		user.sprint = initial

	for(var/i = 0; i < max(user.sprint, initial); i += 20)
		var/turf/step = get_turf(get_step(user.eyeobj, direct))
		if(step)
			user.eyeobj.forceMove(step)

	user.cooldown = world.timeofday + 5 // Half a second; world.timeofday counts deciseconds.
	if(user.acceleration)
		user.sprint = min(user.sprint + 0.5, max_sprint)
	else
		user.sprint = initial

	user.cameraFollow = null

	//user.unset_machine() //Uncomment this if it causes problems.
	//user.lightNearbyCamera()
	if(user.camera_light_on)
		user.light_cameras()
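
// AIMove() is expected to be invoked from the client's movement handling when the eye is the
// active client.eye; the exact call site lives in the movement code, not in this file.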

/mob/living/silicon/ai/proc/view_core()
	//writepanic("[__FILE__].[__LINE__] ([src.type])([usr ? usr.ckey : ""]) \\/mob/living/silicon/ai/proc/view_core() called tick#: [world.time]")

	current = null
	cameraFollow = null
	unset_machine()

	if(src.eyeobj && src.loc)
		//src.eyeobj.loc = src.loc
		src.eyeobj.forceMove(src.loc)
	else
		src << "ERROR: Eyeobj not found. Creating new eye..."
		src.eyeobj = new(src.loc)
		src.eyeobj.ai = src
		src.eyeobj.name = "[src.name] (AI Eye)" // Give it a name
		src.eyeobj.forceMove(src.loc)

	if(client && client.eye) // Reset these things so the AI can't view through walls and stuff.
		client.eye = src
		sight &= ~(SEE_TURFS | SEE_MOBS | SEE_OBJS)
		see_in_dark = 0
		see_invisible = SEE_INVISIBLE_LIVING

	// Unsubscribe from every camera chunk the eye was watching.
	for(var/datum/camerachunk/c in eyeobj.visibleCameraChunks)
		c.remove(eyeobj)
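
// view_core() is the inverse of aiEye/forceMove(): forceMove() grants the camera sight flags
// and streams chunks in; this returns the client's eye to the core and releases them again.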

/mob/living/silicon/ai/verb/toggle_acceleration()
	set category = "AI Commands"
	set name = "Toggle Camera Acceleration"
	//writepanic("[__FILE__].[__LINE__] ([src.type])([usr ? usr.ckey : ""]) \\/mob/living/silicon/ai/verb/toggle_acceleration() called tick#: [world.time]")

	acceleration = !acceleration
	usr << "Camera acceleration has been toggled [acceleration ? "on" : "off"]."