Tags: javascript, arrays, minimax, negamax

Translating minimax function from Java to JavaScript


This is gonna be a big wall of code, but I hope somebody out there has the time and patience to help me.

I'm currently trying to create an AI player for my HTML Tic-Tac-Toe game. I'm using this resource which contains working AI player code programmed in Java using the minimax algorithm: https://www3.ntu.edu.sg/home/ehchua/programming/java/JavaGame_TicTacToe_AI.html (Section 1.5)

I want to translate this given Java code to JavaScript. One major difference between the resource's code and my HTML/JS code is that the resource uses a 2-dimensional array for the game board and I use a 1-dimensional array.

That means the resource's array looks like this: Cell[][] cells; where the first index represents the row and the second index represents the column. My array looks like this: let board_array = []; it has a length of 9 and represents the board read from top left to bottom right, so for example the top left cell is at index 0 and the center right cell is at index 5.
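
For reference, the mapping I'm assuming between the two layouts is index = row * 3 + col (and back via division and modulo). The helper names below are just for illustration, they're not part of my game code:

// Hypothetical helpers showing how the resource's cells[row][col]
// corresponds to my board_array[row * 3 + col].
function toIndex(row, col) { return row * 3 + col; }
function toRowCol(index) { return [Math.floor(index / 3), index % 3]; }

console.log(toIndex(1, 2)); // 5 -> the center right cell
console.log(toRowCol(5));   // [1, 2]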

Another minor difference is that the resource stores player seeds in the cells array, while I just use the strings 'X' for the human and 'O' for the AI.

I've spent many hours trying to translate the given Java functions to my JS script, but no matter what I try, minimax always returns -1 as the final best cell, which, from my understanding, should only happen if there are no more available cells, indicating that the game has ended. I've done some console.log debugging and I can see that at some point during the recursion there are actually legitimate best cells being returned (e.g. 0 or 4), but the final return is always -1.

I bet my mistake lies either in a flawed translation from the 2-dimensional array to the 1-dimensional one, or in JS doing something entirely different from Java in a specific line. I can imagine that the binary values in the WINNING_PATTERNS array or the operations on them in the hasWon function may cause trouble. I don't know if they work like that in JS, but the code doesn't throw any errors, so I can't figure it out on my own.

Anyway, here's my code:

let board_array = ['X', '', '', '', '', '', '', '', '']; // X already placed (index 0 is just an example) because the human player always makes the first turn
const MY_SEED = 'O';
const OPP_SEED = 'X';

function moveAI() {
  let result = minimax(2, MY_SEED);
  let best_cell = result[1];
  console.log(best_cell); // always -1
  // once best_cell is returned properly I could do "board_array[best_cell] = 'O'" here
}

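// Depth-limited minimax translated from the resource: returns [best_score, best_cell]
// from the AI's point of view ('O' maximizes, 'X' minimizes).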
function minimax(depth, player) {
  let next_moves = generateMoves();
  let best_score = (player === MY_SEED) ? Number.MIN_VALUE : Number.MAX_VALUE;
  let current_score;
  let best_cell = -1;

  if (next_moves.length === 0 || depth === 0) {
    best_score = evaluate();
  }
  else {
    for (let move in next_moves) {
      move = parseInt(move);
      board_array[move] = player;
      if (player === MY_SEED) {
        current_score = minimax(depth - 1, OPP_SEED)[0];
        if (current_score > best_score) {
          best_score = current_score;
          best_cell = move;
        }
      }
      else {
        current_score = minimax(depth - 1, MY_SEED)[0];
        if (current_score < best_score) {
          best_score = current_score;
          best_cell = move;
        }
      }
      board_array[move] = '';
    }
  }
  return [best_score, best_cell];
}

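// Returns the indices of all empty cells, or an empty list if the game is
// already over because one side has won.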
function generateMoves() {
  let next_moves = [];
  if (hasWon(MY_SEED) || hasWon(OPP_SEED)) {
    return next_moves;
  }
  for (let i = 0; i < board_array.length; i++) {
    if (board_array[i] === '') { next_moves.push(i); }
  }
  return next_moves;
}

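// Heuristic evaluation of the whole board: the sum of the scores of all
// 8 lines (3 rows, 3 columns, 2 diagonals).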
function evaluate() {
  let score = 0;
  score += evaluateLine(0, 1, 2);
  score += evaluateLine(3, 4, 5);
  score += evaluateLine(6, 7, 8);
  score += evaluateLine(0, 3, 6);
  score += evaluateLine(1, 4, 7);
  score += evaluateLine(2, 5, 8);
  score += evaluateLine(0, 4, 8);
  score += evaluateLine(2, 4, 6);
  return score;
}

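// Heuristic score for one line: +100, +10, +1 for 3, 2, 1 of my seeds in the
// line, the negative equivalents for the opponent's seeds, and 0 otherwise.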
function evaluateLine(c1, c2, c3) {
  let score = 0;

  if (board_array[c1] === MY_SEED) { score = 1; }
  else if (board_array[c1] === OPP_SEED) { score = -1; }

  if (board_array[c2] === MY_SEED) {
    if (score === 1) { score = 10; }
    else if (score === -1) { return 0; }
    else { score = 1; }
  }
  else if (board_array[c2] === OPP_SEED) {
    if (score === -1) { score = -10; }
    else if (score === 1) { return 0; }
    else { score = -1; }
  }

  if (board_array[c3] === MY_SEED) {
    if (score > 0) { score *= 10; }
    else if (score < 0) { return 0; }
    else { score = 1; }
  }
  else if (board_array[c3] === OPP_SEED) {
    if (score < 0) { score *= 10; }
    else if (score > 1) { return 0; }
    else { score = -1; }
  }

  return score;
}

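// The 8 winning lines (3 rows, 3 columns, 2 diagonals) as 9-bit masks,
// intended to be read with the top left cell as the leftmost bit.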
const WINNING_PATTERNS = [
  0b111000000, 0b000111000, 0b000000111,
  0b100100100, 0b010010010, 0b001001001,
  0b100010001, 0b001010100
];

function hasWon(player) {
  let pattern = 0b000000000; // does JS recognize this as a binary number like Java does?
  for (let i = 0; i < board_array.length; i++) {
    if (board_array[i] === player) {
      pattern |= (1 << (9 - i)); // does this actually do what I think it does? (compare to Java code)
    }
  }
  for (let winning_pattern in WINNING_PATTERNS) {
    if ((pattern & winning_pattern) === winning_pattern) { return true; }
  }
  return false;
}

moveAI(); // usually called after the human player sets their X

Solution

  • Binary number literals are valid in JavaScript since ES6 - ecma-262

    and bitwise operators in JavaScript (e.g. <<, |, &) work on 32-bit integers just like in Java - bitwise link - so neither of those is your problem. A quick sanity check in the console, for instance:
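
    console.log(0b111000000);               // 448 -> binary literals parse fine
    console.log(1 << 8);                    // 256 -> left shift behaves like Java's
    console.log((0b110 & 0b010) === 0b010); // true -> masking a pattern works as expected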

    With that settled, the rest of the code should be much simpler to compare line by line against the Java original.

  • The numeric difference that does matter: Number.MAX_VALUE (1.79E+308) is not the same as Java's Integer.MAX_VALUE (2147483647), and Number.MIN_VALUE is not the counterpart of Integer.MIN_VALUE at all: it is the smallest positive double (about 5E-324), not the most negative number. Your minimax initialises best_score with it on the maximizing side, so any branch whose moves all score 0 or less can never beat the initial best_score, and best_cell stays at -1. Use Number.NEGATIVE_INFINITY and Number.POSITIVE_INFINITY instead:
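
    console.log(Number.MIN_VALUE);      // 5e-324 -> smallest positive double, NOT the most negative number
    console.log(-1 > Number.MIN_VALUE); // false -> a losing line could never become the best move

    // corrected initialisation inside minimax:
    let best_score = (player === MY_SEED) ? Number.NEGATIVE_INFINITY : Number.POSITIVE_INFINITY;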
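
  • One more Java-vs-JS trap worth checking while you compare: for (x in array) iterates an array's keys ('0', '1', ...) in JavaScript, not its values, unlike Java's enhanced for loop. Both the move loop in minimax and the pattern loop in hasWon use for...in, so move and winning_pattern hold positions in the list rather than the cell indices and bit patterns you expect; for...of iterates the values. Below is a sketch of those two spots with for...of, plus the shift adjusted so that cell 0 lands on the leftmost of the nine pattern bits (bit 8, not bit 9). It is untested against the rest of your game code:

    // In minimax: iterate the stored cell indices themselves.
    for (const move of next_moves) {
      board_array[move] = player;
      // ... recurse and compare scores exactly as before ...
      board_array[move] = '';
    }

    // In hasWon: iterate the pattern values, and map cell i to bit (8 - i)
    // so that board_array[0] matches the leftmost bit of 0b111000000.
    function hasWon(player) {
      let pattern = 0b000000000;
      for (let i = 0; i < board_array.length; i++) {
        if (board_array[i] === player) {
          pattern |= (1 << (8 - i));
        }
      }
      for (const winning_pattern of WINNING_PATTERNS) {
        if ((pattern & winning_pattern) === winning_pattern) { return true; }
      }
      return false;
    }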