# Source: food_roi_targeting_20260430_094618
# your_best.py
# Goal-directed Food ROI targeting candidate for CS470 Assignment 3.

from captureAgents import CaptureAgent
import random
from game import Directions
from util import nearestPoint


def createTeam(firstIndex, secondIndex, isRed,
               first='FoodROIPlannerAgent',
               second='HomeSentinelAgent'):
  """Contest entry point: build this team's two agents.

  `first` / `second` name agent classes defined in this module.  `isRed`
  is accepted for interface compatibility with the framework but is not
  needed here (the agents learn their side in registerInitialState).
  """
  # Resolve the class names in this module's namespace instead of eval():
  # identical result for valid identifiers, without executing arbitrary code.
  namespace = globals()
  return [namespace[first](firstIndex), namespace[second](secondIndex)]


class DirectCaptureAgent(CaptureAgent):
  """Shared plumbing for both team agents: layout caching, distance
  helpers, enemy queries, and score-based action selection."""

  def registerInitialState(self, gameState):
    """Cache the start square, board dimensions, and home-entry cells."""
    self.start = gameState.getAgentPosition(self.index)
    self.width = gameState.data.layout.width
    self.height = gameState.data.layout.height
    CaptureAgent.registerInitialState(self, gameState)
    self.homeEntries = self.getHomeEntries(gameState)

  def getSuccessor(self, gameState, action):
    """Generate the successor state, stepping a second time when the
    agent lands between grid points (standard contest idiom)."""
    nextState = gameState.generateSuccessor(self.index, action)
    landing = nextState.getAgentState(self.index).getPosition()
    if landing == nearestPoint(landing):
      return nextState
    return nextState.generateSuccessor(self.index, action)

  def legalActions(self, gameState):
    """Legal moves for this agent, dropping STOP unless it is the only one."""
    moves = gameState.getLegalActions(self.index)
    if len(moves) > 1 and Directions.STOP in moves:
      moves.remove(Directions.STOP)
    return moves

  def chooseByScore(self, gameState, scorer):
    """Score every candidate action with `scorer` and pick uniformly at
    random among the top-scoring ones."""
    candidates = self.legalActions(gameState)
    values = {move: scorer(move) for move in candidates}
    top = max(values.values())
    return random.choice([move for move in candidates if values[move] == top])

  def getHomeEntries(self, gameState):
    """Open cells in our column adjacent to the center line; falls back
    to the start square if that column is fully walled."""
    walls = gameState.getWalls()
    borderX = self.width // 2 - 1 if self.red else self.width // 2
    openCells = [(borderX, y) for y in range(1, self.height - 1)
                 if not walls[borderX][y]]
    return openCells if openCells else [self.start]

  def minDistance(self, pos, targets):
    """Shortest maze distance from `pos` to any target; 0 when undefined."""
    if not targets or pos is None:
      return 0
    return min(self.getMazeDistance(pos, target) for target in targets)

  def nearestTarget(self, pos, targets):
    """Closest target to `pos` by maze distance, or None when undefined."""
    if not targets or pos is None:
      return None
    return min(targets, key=lambda target: self.getMazeDistance(pos, target))

  def activeEnemyGhosts(self, gameState):
    """Positions of visible opposing ghosts that are not (about to stop
    being) scared, i.e. the ones that can actually eat us."""
    positions = []
    for opponentIndex in self.getOpponents(gameState):
      enemyState = gameState.getAgentState(opponentIndex)
      enemyPos = enemyState.getPosition()
      if enemyPos is None:
        continue
      if not enemyState.isPacman and enemyState.scaredTimer <= 1:
        positions.append(enemyPos)
    return positions

  def activeGhostDistances(self, gameState, pos):
    """Maze distances from `pos` to every active enemy ghost."""
    return [self.getMazeDistance(pos, ghostPos)
            for ghostPos in self.activeEnemyGhosts(gameState)]

  def closestActiveGhostDistance(self, gameState, pos):
    """Distance to the nearest active ghost, or None when none are visible."""
    dists = self.activeGhostDistances(gameState, pos)
    return min(dists) if dists else None

  def visibleInvaders(self, gameState):
    """Enemy agent states that are Pacman (on our side) and visible."""
    enemyStates = [gameState.getAgentState(i)
                   for i in self.getOpponents(gameState)]
    return [enemy for enemy in enemyStates
            if enemy.isPacman and enemy.getPosition() is not None]

  def reversePenalty(self, gameState, action):
    """1 when `action` reverses the agent's current heading, else 0."""
    heading = gameState.getAgentState(self.index).configuration.direction
    return 1 if action == Directions.REVERSE[heading] else 0


class FoodROIPlannerAgent(DirectCaptureAgent):
  """Offensive agent.  Each turn it commits to a plan — grab a capsule
  when pressured, bank carried food, or chase the best-value pellet —
  and then scores legal moves against that plan."""

  def chooseAction(self, gameState):
    """Select the plan for this turn, then the best-scoring legal move."""
    me = gameState.getAgentState(self.index)
    myPos = me.getPosition()
    pellets = self.getFood(gameState).asList()
    capsules = self.getCapsules(gameState)
    homeEntry = self.nearestTarget(myPos, self.homeEntries)
    homeDist = self.minDistance(myPos, self.homeEntries)
    ghostDist = self.closestActiveGhostDistance(gameState, myPos)

    foodGoal = self.bestFoodTarget(gameState, myPos, pellets)
    capsuleGoal = self.nearestTarget(myPos, capsules)

    if (capsuleGoal is not None and ghostDist is not None
        and ghostDist <= 5 and me.isPacman):
      plan = ('capsule', capsuleGoal)
    elif self.shouldReturn(gameState, me, pellets, homeDist, ghostDist):
      plan = ('home', homeEntry)
    else:
      plan = ('food', foodGoal)

    return self.chooseByScore(
      gameState,
      lambda move: self.scoreOffensiveAction(gameState, move, plan))

  def shouldReturn(self, gameState, myState, foodList, homeDistance, closestGhost):
    """Decide whether to bank carried food instead of hunting for more."""
    carrying = myState.numCarrying
    ghostSeen = closestGhost is not None
    if len(foodList) <= 2:        # endgame: nothing meaningful left to eat
      return True
    if carrying >= 4:             # full enough to cash in regardless
      return True
    if ghostSeen and carrying >= 2 and closestGhost <= 5:
      return True
    if ghostSeen and carrying > 0 and closestGhost <= 3:
      return True
    # Head home if the clock may run out before we can deliver.
    return carrying > 0 and gameState.data.timeleft < homeDistance + 30

  def scoreOffensiveAction(self, gameState, action, plan):
    """Heuristic value of `action` while pursuing `plan`, a
    (kind, target) pair with kind in {'food', 'capsule', 'home'}."""
    successor = self.getSuccessor(gameState, action)
    me = successor.getAgentState(self.index)
    pos = me.getPosition()
    remaining = self.getFood(successor).asList()
    ghostDist = self.closestActiveGhostDistance(successor, pos)

    value = (300.0 * self.getScore(successor)
             - 110.0 * len(remaining)
             - 4.0 * self.reversePenalty(gameState, action))

    kind, goal = plan
    if goal is not None:
      toGoal = self.getMazeDistance(pos, goal)
      if kind == 'home':
        # Urgency to deliver grows with the amount carried.
        value -= (22.0 + 4.0 * me.numCarrying) * toGoal
      elif kind == 'capsule':
        value -= 16.0 * toGoal
      else:
        value -= 8.0 * toGoal

    if me.isPacman and ghostDist is not None:
      if ghostDist <= 1:
        value -= 2500.0           # about to be eaten
      elif ghostDist == 2:
        value -= 900.0 + 80.0 * me.numCarrying
      elif ghostDist <= 4:
        value -= 160.0 + 35.0 * me.numCarrying
      else:
        value += 4.0 * min(ghostDist, 8)   # mild reward for breathing room

    if me.numCarrying > 0:
      value -= 1.5 * self.minDistance(pos, self.homeEntries)

    return value

  def bestFoodTarget(self, gameState, myPos, foodList):
    """Pellet minimizing travel + retreat + ghost risk - cluster bonus
    + teammate-overlap penalty; ties broken by shorter travel."""
    if not foodList or myPos is None:
      return None

    bonuses = self.foodClusterBonuses(foodList)
    ghosts = self.activeEnemyGhosts(gameState)
    chosen, chosenCost = None, None

    for pellet in foodList:
      travel = self.getMazeDistance(myPos, pellet)
      retreat = self.minDistance(pellet, self.homeEntries)
      cost = (travel
              + 0.6 * retreat
              + self.ghostRiskPenalty(pellet, travel, ghosts)
              - bonuses.get(pellet, 0.0)
              + self.teammateOverlapPenalty(gameState, pellet, travel))

      if chosenCost is None or cost < chosenCost:
        chosen, chosenCost = pellet, cost
      elif cost == chosenCost and chosen is not None:
        if travel < self.getMazeDistance(myPos, chosen):
          chosen = pellet
    return chosen

  def foodClusterBonuses(self, foodList):
    """Bonus (capped at 7.0) per pellet for nearby neighbors, in
    Manhattan-distance bands of <=2 / <=4 / <=6."""
    bonuses = {}
    for pellet in foodList:
      total = 0.0
      for neighbor in foodList:
        if neighbor == pellet:
          continue
        gap = abs(pellet[0] - neighbor[0]) + abs(pellet[1] - neighbor[1])
        if gap <= 2:
          total += 2.0
        elif gap <= 4:
          total += 1.0
        elif gap <= 6:
          total += 0.4
      bonuses[pellet] = min(7.0, total)
    return bonuses

  def ghostRiskPenalty(self, food, myDistance, ghosts):
    """Penalty (capped at 35.0) when an active ghost can reach `food`
    roughly as fast as we can."""
    risk = 0.0
    for ghostPos in ghosts:
      margin = self.getMazeDistance(ghostPos, food) - myDistance
      if margin <= 0:
        risk += 16.0 + min(10.0, abs(margin) * 2.0)
      elif margin <= 2:
        risk += 9.0 - 2.0 * margin
      elif margin <= 4:
        risk += 2.5
    return min(35.0, risk)

  def teammateOverlapPenalty(self, gameState, food, myDistance):
    """Penalty for pellets a teammate covers better, plus a small bias
    that splits the map top/bottom between the two team agents."""
    penalty = 0.0
    for mateIndex in self.getTeam(gameState):
      if mateIndex == self.index:
        continue
      matePos = gameState.getAgentPosition(mateIndex)
      if matePos is None:
        continue
      mateDist = self.getMazeDistance(matePos, food)
      if mateDist + 1 < myDistance:
        penalty += 8.0
      elif mateDist <= myDistance + 1:
        penalty += 3.0

    roster = sorted(self.getTeam(gameState))
    if len(roster) >= 2:
      midline = self.height / 2.0
      lowLaneAgent = roster[0]
      # Lower-indexed agent prefers the bottom half, the other the top.
      if self.index == lowLaneAgent:
        if food[1] > midline:
          penalty += 1.5
      elif food[1] < midline:
        penalty += 1.5
    return penalty


class HomeSentinelAgent(DirectCaptureAgent):
  """Defensive agent: chases visible invaders, investigates the most
  recently eaten defended pellet, and otherwise patrols the home entry
  nearest the defended food's center of mass.

  Uses `visibleInvaders` inherited from DirectCaptureAgent (a duplicate
  re-definition here was removed — it was byte-identical to the base).
  """

  def registerInitialState(self, gameState):
    DirectCaptureAgent.registerInitialState(self, gameState)
    # Patrol point near the defended food's centroid; recomputed lazily
    # in chooseAction if it was unavailable here.
    self.patrolTarget = self.choosePatrolTarget(gameState)
    # Last defended pellet observed disappearing (invader breadcrumb).
    self.lastEatenFood = None

  def chooseAction(self, gameState):
    """Pick a defensive move: chase > search last-eaten food > patrol."""
    self.updateLastEatenFood(gameState)
    invaders = self.visibleInvaders(gameState)
    myPos = gameState.getAgentPosition(self.index)

    if invaders:
      # NOTE(review): chasing even while scared — presumably acceptable
      # in practice; confirm scaredTimer handling is intended.
      target = min(
        [invader.getPosition() for invader in invaders],
        key=lambda pos: self.getMazeDistance(myPos, pos)
      )
      mode = 'chase'
    elif self.lastEatenFood is not None:
      target = self.lastEatenFood
      mode = 'search'
    else:
      if self.patrolTarget is None:
        self.patrolTarget = self.choosePatrolTarget(gameState)
      target = self.patrolTarget
      mode = 'patrol'

    return self.chooseByScore(
      gameState,
      lambda action: self.scoreDefensiveAction(gameState, action, target, mode)
    )

  def scoreDefensiveAction(self, gameState, action, target, mode):
    """Heuristic value of `action` given the current `target` and `mode`
    ('chase', 'search', or 'patrol')."""
    successor = self.getSuccessor(gameState, action)
    myState = successor.getAgentState(self.index)
    myPos = myState.getPosition()
    invaders = self.visibleInvaders(successor)

    score = 120.0 * self.getScore(successor)
    score -= 3.0 * self.reversePenalty(gameState, action)
    # Strongly prefer staying a ghost on our own side.
    if myState.isPacman:
      score -= 350.0
    else:
      score += 60.0

    if invaders:
      nearestInvader = min(
        [invader.getPosition() for invader in invaders],
        key=lambda pos: self.getMazeDistance(myPos, pos)
      )
      score -= 35.0 * self.getMazeDistance(myPos, nearestInvader)
      score -= 500.0 * len(invaders)
    elif target is not None:
      # Searching a breadcrumb is more urgent than idle patrolling.
      weight = 9.0 if mode == 'search' else 5.0
      score -= weight * self.getMazeDistance(myPos, target)

    if action == Directions.STOP:
      score -= 100.0
    return score

  def updateLastEatenFood(self, gameState):
    """Track the defended pellet most recently eaten; clear the
    breadcrumb once we have reached (come within distance 1 of) it."""
    previous = self.getPreviousObservation()
    if previous is None:
      return
    previousFood = set(self.getFoodYouAreDefending(previous).asList())
    currentFood = set(self.getFoodYouAreDefending(gameState).asList())
    eaten = list(previousFood - currentFood)
    if eaten:
      myPos = gameState.getAgentPosition(self.index)
      self.lastEatenFood = min(eaten, key=lambda pos: self.getMazeDistance(myPos, pos))
    elif self.lastEatenFood is not None:
      myPos = gameState.getAgentPosition(self.index)
      if myPos is not None and self.getMazeDistance(myPos, self.lastEatenFood) > 1:
        return  # still en route to the breadcrumb
      self.lastEatenFood = None

  def choosePatrolTarget(self, gameState):
    """Home entry closest (Manhattan) to the defended food's centroid;
    a random entry when there is no food left to defend."""
    defendingFood = self.getFoodYouAreDefending(gameState).asList()
    if not defendingFood:
      return random.choice(self.homeEntries)
    foodCenter = (
      sum(x for x, y in defendingFood) / float(len(defendingFood)),
      sum(y for x, y in defendingFood) / float(len(defendingFood))
    )
    return min(
      self.homeEntries,
      key=lambda pos: abs(pos[0] - foodCenter[0]) + abs(pos[1] - foodCenter[1])
    )