87 lines
2.5 KiB
TypeScript
87 lines
2.5 KiB
TypeScript
import type { SimulationState } from './types';
|
|
import { hasLineOfSight } from './sensors';
|
|
|
|
/**
 * Fitness calculation for NEAT Arena.
 *
 * Fitness rewards:
 * - +10 per kill scored against the opponent
 * - -10 per hit taken
 * - -0.002 per tick (time penalty to encourage aggression)
 * - -0.2 per shot fired (ammo management)
 * - +0.01 per tick when aiming well at a visible opponent
 */
|
|
|
|
/**
 * Per-agent fitness accumulator, updated once per simulation tick.
 */
export interface FitnessTracker {
  // Id of the agent this tracker scores.
  agentId: number;
  // Accumulated fitness so far; may be negative.
  fitness: number;

  // For incremental calculation
  // Kill count observed at the previous update (used to detect deltas).
  lastKills: number;
  // Hits-taken count observed at the previous update (used to detect deltas).
  lastHits: number;
  // Total shots this agent has fired; each shot costs fitness.
  shotsFired: number;
}
|
|
|
|
/**
|
|
* Create a new fitness tracker
|
|
*/
|
|
export function createFitnessTracker(agentId: number): FitnessTracker {
|
|
return {
|
|
agentId,
|
|
fitness: 0,
|
|
lastKills: 0,
|
|
lastHits: 0,
|
|
shotsFired: 0,
|
|
};
|
|
}
|
|
|
|
/**
|
|
* Update fitness based on current simulation state
|
|
*/
|
|
export function updateFitness(tracker: FitnessTracker, state: SimulationState): FitnessTracker {
|
|
const agent = state.agents.find(a => a.id === tracker.agentId)!;
|
|
const opponent = state.agents.find(a => a.id !== tracker.agentId)!;
|
|
|
|
const newTracker = { ...tracker };
|
|
|
|
// Reward for new kills
|
|
const newKills = agent.kills - tracker.lastKills;
|
|
newTracker.fitness += newKills * 10;
|
|
newTracker.lastKills = agent.kills;
|
|
|
|
// Penalty for being hit
|
|
const newHits = agent.hits - tracker.lastHits;
|
|
newTracker.fitness -= newHits * 10;
|
|
newTracker.lastHits = agent.hits;
|
|
|
|
// Time penalty (encourages finishing quickly)
|
|
newTracker.fitness -= 0.002;
|
|
|
|
// Check if agent fired this tick (cooldown just set)
|
|
if (agent.fireCooldown === 10) {
|
|
newTracker.shotsFired++;
|
|
newTracker.fitness -= 0.2;
|
|
}
|
|
|
|
// Reward for aiming at visible opponent
|
|
if (hasLineOfSight(agent, opponent, state.map.walls)) {
|
|
const dx = opponent.position.x - agent.position.x;
|
|
const dy = opponent.position.y - agent.position.y;
|
|
const angleToOpponent = Math.atan2(dy, dx);
|
|
|
|
// Normalize angle difference
|
|
let angleDiff = angleToOpponent - agent.aimAngle;
|
|
while (angleDiff > Math.PI) angleDiff -= 2 * Math.PI;
|
|
while (angleDiff < -Math.PI) angleDiff += 2 * Math.PI;
|
|
|
|
const cosAngleDiff = Math.cos(angleDiff);
|
|
|
|
// Reward if aiming close (cos > 0.95 ≈ within ~18°)
|
|
if (cosAngleDiff > 0.95) {
|
|
newTracker.fitness += 0.01;
|
|
}
|
|
}
|
|
|
|
return newTracker;
|
|
}
|