# Source: smoke_baseline1_20260429_085822
# (RawBack)
# your_baseline1.py
# Risk-aware reflex team for CS470 Assignment 3.

from captureAgents import CaptureAgent
import random
import util
from game import Directions
from util import nearestPoint


def createTeam(firstIndex, secondIndex, isRed,
               first='RiskAwareOffensiveAgent',
               second='PatrolDefensiveAgent'):
  """Instantiate the two agents for this team.

  Called by the contest framework.  `first` and `second` name agent
  classes defined in this module; they are resolved with a globals()
  lookup instead of eval() so team arguments cannot execute arbitrary
  expressions.  `isRed` is accepted for interface compatibility.
  """
  return [globals()[first](firstIndex), globals()[second](secondIndex)]


class ReflexCaptureAgent(CaptureAgent):
  """Shared base for both team agents: one-ply greedy feature evaluation."""

  def registerInitialState(self, gameState):
    # Cache spawn point and board dimensions, let the framework set up
    # maze distances, then precompute the home-boundary entry squares.
    self.start = gameState.getAgentPosition(self.index)
    self.width = gameState.data.layout.width
    self.height = gameState.data.layout.height
    CaptureAgent.registerInitialState(self, gameState)
    self.homeEntries = self.getHomeEntries(gameState)

  def chooseAction(self, gameState):
    # Score every legal action and pick uniformly among the best,
    # dropping STOP whenever an equally good alternative exists.
    legal = gameState.getLegalActions(self.index)
    scored = [(self.evaluate(gameState, a), a) for a in legal]
    top = max(score for score, _ in scored)
    best = [a for score, a in scored if score == top]
    if len(best) > 1 and Directions.STOP in best:
      best.remove(Directions.STOP)
    return random.choice(best)

  def getSuccessor(self, gameState, action):
    # If the move left the agent between grid points, advance once more
    # so evaluated positions always sit on integer coordinates.
    nextState = gameState.generateSuccessor(self.index, action)
    nextPos = nextState.getAgentState(self.index).getPosition()
    if nextPos == nearestPoint(nextPos):
      return nextState
    return nextState.generateSuccessor(self.index, action)

  def evaluate(self, gameState, action):
    # Counter * dict acts as a dot product of features and weights.
    return self.getFeatures(gameState, action) * self.getWeights(gameState, action)

  def getFeatures(self, gameState, action):
    # Default feature set: just the successor score. Subclasses override.
    feats = util.Counter()
    feats['successorScore'] = self.getScore(self.getSuccessor(gameState, action))
    return feats

  def getWeights(self, gameState, action):
    # Default weight table matching the default feature set.
    return {'successorScore': 1.0}

  def getHomeEntries(self, gameState):
    # Open squares in the boundary column of our own half; reaching any
    # of them banks carried food.  Falls back to the spawn point if the
    # column is fully walled (shouldn't happen on legal layouts).
    walls = gameState.getWalls()
    col = self.width // 2 - 1 if self.red else self.width // 2
    cells = [(col, row)
             for row in range(1, self.height - 1)
             if not walls[col][row]]
    return cells if cells else [self.start]

  def minDistance(self, pos, targets):
    # Shortest maze distance from pos to any target; 0 when either side
    # is empty/unknown so the feature simply contributes nothing.
    if not pos or not targets:
      return 0
    return min(self.getMazeDistance(pos, t) for t in targets)

  def activeEnemyGhostDistances(self, gameState, myPos):
    # Maze distances to each visible opponent that is a ghost and not
    # safely scared (a timer of 1 is treated as dangerous: it is about
    # to expire).
    out = []
    for idx in self.getOpponents(gameState):
      enemyState = gameState.getAgentState(idx)
      where = enemyState.getPosition()
      if where is None:
        continue
      if not enemyState.isPacman and enemyState.scaredTimer <= 1:
        out.append(self.getMazeDistance(myPos, where))
    return out


class RiskAwareOffensiveAgent(ReflexCaptureAgent):
  """Offensive agent that trades food collection against ghost risk.

  Fix over the original: the base-class evaluate() calls getFeatures()
  and then getWeights(), and getWeights() here re-ran getFeatures() —
  generating a second (expensive) successor state for every candidate
  action.  evaluate() is overridden so features are extracted exactly
  once per (state, action), and the feature-dependent weight table
  lives in _weightsFor().  Scores are unchanged.
  """

  def evaluate(self, gameState, action):
    # Compute features once and reuse them for the weight selection.
    features = self.getFeatures(gameState, action)
    return features * self._weightsFor(features)

  def getFeatures(self, gameState, action):
    features = util.Counter()
    successor = self.getSuccessor(gameState, action)
    myState = successor.getAgentState(self.index)
    myPos = myState.getPosition()

    foodList = self.getFood(successor).asList()
    capsules = self.getCapsules(successor)
    carrying = myState.numCarrying
    ghostDistances = self.activeEnemyGhostDistances(successor, myPos)
    closestGhost = min(ghostDistances) if ghostDistances else None

    features['successorScore'] = self.getScore(successor)
    features['foodRemaining'] = len(foodList)
    features['distanceToFood'] = self.minDistance(myPos, foodList)
    features['distanceHome'] = self.minDistance(myPos, self.homeEntries)
    features['distanceToCapsule'] = self.minDistance(myPos, capsules)

    # Discourage idling and dithering.
    if action == Directions.STOP:
      features['stop'] = 1
    reverse = Directions.REVERSE[gameState.getAgentState(self.index).configuration.direction]
    if action == reverse:
      features['reverse'] = 1

    # Graduated danger signals from the nearest active (non-scared) ghost.
    if closestGhost is not None:
      features['ghostDistance'] = closestGhost
      if closestGhost <= 2:
        features['immediateDanger'] = 1
      elif closestGhost <= 5:
        features['nearGhost'] = 1

    # Head home with a decent haul, when the board is nearly cleared,
    # or when carrying anything with a ghost closing in.
    if carrying >= 3 or len(foodList) <= 2:
      features['shouldReturn'] = 1
    if carrying > 0 and closestGhost is not None and closestGhost <= 5:
      features['shouldReturn'] = 1

    # NOTE(review): timeleft ticks once per agent move (4 per full turn),
    # so this margin may be tighter than intended — confirm the units.
    if gameState.data.timeleft < features['distanceHome'] + 20:
      features['shouldReturn'] = 1

    return features

  def getWeights(self, gameState, action):
    # Kept for interface compatibility with ReflexCaptureAgent; the
    # evaluate() override above avoids this extra feature extraction.
    return self._weightsFor(self.getFeatures(gameState, action))

  def _weightsFor(self, features):
    # Base weights with situational overrides driven by the features.
    weights = {
      'successorScore': 200,
      'foodRemaining': -100,
      'distanceToFood': -3,
      'distanceToCapsule': -2,
      'ghostDistance': 2,
      'immediateDanger': -1000,
      'nearGhost': -180,
      'stop': -100,
      'reverse': -3,
      'shouldReturn': 0,
      'distanceHome': 0,
    }
    if features['shouldReturn']:
      # Returning: home distance dominates; food and capsules matter less.
      weights['distanceHome'] = -15
      weights['distanceToFood'] = -1
      weights['distanceToCapsule'] = -1
    if features['nearGhost'] or features['immediateDanger']:
      # Threatened: value capsules more highly as an escape hatch
      # (overrides the shouldReturn capsule weight, as before).
      weights['distanceToCapsule'] = -8
    return weights


class PatrolDefensiveAgent(ReflexCaptureAgent):
  """Defensive agent that patrols near its food and chases invaders.

  Fix over the original: lastEatenFood was never cleared, so after any
  food was eaten the defender would camp on that square for the rest of
  the game (it overrides normal patrolling whenever no invader is
  visible).  The target is now dropped — and the patrol point
  recomputed — once the agent has inspected the spot.
  """

  def registerInitialState(self, gameState):
    ReflexCaptureAgent.registerInitialState(self, gameState)
    self.patrolTarget = self.choosePatrolTarget(gameState)
    # Most recent square where defended food disappeared, if any.
    self.lastEatenFood = None

  def chooseAction(self, gameState):
    self.updateLastEatenFood(gameState)
    myPos = gameState.getAgentPosition(self.index)
    # Once we have reached the spot where food vanished, forget it and
    # resume a normal patrol instead of camping there forever.
    if self.lastEatenFood is not None and myPos == self.lastEatenFood:
      self.lastEatenFood = None
      self.patrolTarget = self.choosePatrolTarget(gameState)
    invaders = self.visibleInvaders(gameState)
    if invaders:
      # Chase the closest visible invader.
      self.patrolTarget = min(
        [a.getPosition() for a in invaders],
        key=lambda p: self.getMazeDistance(myPos, p)
      )
    elif self.lastEatenFood is not None:
      # Investigate where food was last eaten by an unseen invader.
      self.patrolTarget = self.lastEatenFood
    elif self.patrolTarget is None:
      self.patrolTarget = self.choosePatrolTarget(gameState)
    return ReflexCaptureAgent.chooseAction(self, gameState)

  def getFeatures(self, gameState, action):
    features = util.Counter()
    successor = self.getSuccessor(gameState, action)
    myState = successor.getAgentState(self.index)
    myPos = myState.getPosition()

    invaders = self.visibleInvaders(successor)
    # Reward staying on our own side (being a ghost, not a Pacman).
    features['onDefense'] = 1
    if myState.isPacman:
      features['onDefense'] = 0
    features['numInvaders'] = len(invaders)
    if invaders:
      features['invaderDistance'] = min(
        self.getMazeDistance(myPos, invader.getPosition()) for invader in invaders
      )
    else:
      # Nothing in sight: drift toward the current patrol target.
      features['distanceToPatrol'] = self.getMazeDistance(myPos, self.patrolTarget)

    # Discourage idling and dithering.
    if action == Directions.STOP:
      features['stop'] = 1
    reverse = Directions.REVERSE[gameState.getAgentState(self.index).configuration.direction]
    if action == reverse:
      features['reverse'] = 1
    return features

  def getWeights(self, gameState, action):
    return {
      'numInvaders': -1000,
      'onDefense': 120,
      'invaderDistance': -20,
      'distanceToPatrol': -4,
      'stop': -100,
      'reverse': -2,
    }

  def visibleInvaders(self, gameState):
    # Opponents that are currently Pacman (on our side) and observable.
    enemies = [gameState.getAgentState(i) for i in self.getOpponents(gameState)]
    return [enemy for enemy in enemies if enemy.isPacman and enemy.getPosition() is not None]

  def updateLastEatenFood(self, gameState):
    # Diff the defended food between the previous and current
    # observations; a disappearance marks an (unseen) invader's location.
    previous = self.getPreviousObservation()
    if previous is None:
      return
    previousFood = set(self.getFoodYouAreDefending(previous).asList())
    currentFood = set(self.getFoodYouAreDefending(gameState).asList())
    eaten = list(previousFood - currentFood)
    if eaten:
      myPos = gameState.getAgentPosition(self.index)
      self.lastEatenFood = min(eaten, key=lambda p: self.getMazeDistance(myPos, p))

  def choosePatrolTarget(self, gameState):
    # Pick the home-boundary entry closest (Manhattan distance) to the
    # centroid of the food we are defending.
    defendingFood = self.getFoodYouAreDefending(gameState).asList()
    if not defendingFood:
      return random.choice(self.homeEntries)
    foodCenter = (
      sum(x for x, y in defendingFood) / float(len(defendingFood)),
      sum(y for x, y in defendingFood) / float(len(defendingFood))
    )
    return min(
      self.homeEntries,
      key=lambda p: abs(p[0] - foodCenter[0]) + abs(p[1] - foodCenter[1])
    )