remove all of the old cli impl from fish/, fumen-utils/
parent fdae258965
commit 66bb54a48c
@@ -113,15 +113,10 @@ name = "fish"
version = "0.1.0"
dependencies = [
 "ahash",
 "anyhow",
 "bumpalo",
 "clap",
 "hashbrown 0.13.1",
 "mino",
 "serde",
 "serde_json",
 "tracing",
 "tracing-subscriber",
]

[[package]]

@@ -1,34 +1,13 @@
[package]
name = "fish"
description = "Bot?"
description = "Blockfish engine implementation"
version = "0.1.0"
edition = "2021"

[features]
default = ["io"]
io = ["serde"]
fish-cli = [
    "dep:anyhow",
    "dep:clap",
    "dep:serde_json",
    "dep:tracing-subscriber",
    "io",
]

[dependencies]
mino = { path = "../mino" }

ahash = "0.8"
anyhow = { version = "1.0", optional = true }
bumpalo = "3.12"
clap = { version = "4.0", features = ["derive"], optional = true }
hashbrown = "0.13"
serde = { version = "1.0", features = ["derive"], optional = true }
serde_json = { version = "1.0", optional = true }
tracing = { version = "0.1", default_features = false }
tracing-subscriber = { version = "0.3", features = ["fmt", "env-filter"], optional = true }

[[bin]]
name = "fish-cli"
path = "src/bin/cli.rs"
required-features = ["fish-cli"]

249  fish/src/ai.rs
@@ -1,249 +0,0 @@
//! AI engine.

use core::cell::Cell;
use core::ops::Deref;
use core::pin::Pin;

use alloc::boxed::Box;
use mino::srs::{Piece, PieceType};
use mino::{Mat, MatBuf};

use alloc::vec::Vec;
use bumpalo::Bump;

use crate::eval::evaluate;
use crate::find::find_locations;

use self::search::ModifiedAStar;

mod search;

pub struct Ai {
    search: search::ModifiedAStar<Graph>,
    best: Option<Node>,
    _arena: Pin<Box<Bump>>,
}

pub struct Exhausted;

impl Ai {
    pub fn new(init_mat: &Mat, init_previews: &[PieceType], init_hold: Option<PieceType>) -> Self {
        let arena = Box::pin(Bump::new());
        let init_queue = Queue::alloc(&*arena, init_previews, init_hold);
        let graph = Graph::new(&*arena, init_mat, init_queue);
        let search = ModifiedAStar::new(graph);
        Self {
            best: None,
            search,
            _arena: arena,
        }
    }

    pub fn think(&mut self) -> Result<(), Exhausted> {
        let node = self.search.step().ok_or(Exhausted)?;
        if self.best.map_or(true, |best| node > best) {
            tracing::debug!("new best: {node:?} ({})", node.rating);
            self.best = Some(node);
        }
        Ok(())
    }

    pub fn suggestion(&self) -> impl Iterator<Item = Piece> + '_ {
        self.best.iter().flat_map(|n| n.trace())
    }
}

struct Graph {
    arena: *const Bump,
    root: Node,
    children_buf: Vec<Node>,
}

impl Graph {
    fn new(arena: *const Bump, root_mat: &Mat, root_queue: Queue) -> Self {
        let root = Node::new_root(arena, root_mat, root_queue);
        Self {
            arena,
            root,
            children_buf: Vec::new(),
        }
    }
}

impl search::Graph for Graph {
    type Node = Node;

    fn root(&mut self) -> Self::Node {
        self.root.clone()
    }

    fn expand(&mut self, node: Self::Node) -> &[Self::Node] {
        self.children_buf.clear();

        for ty in node.queue.current() {
            for loc in find_locations(&node.mat, ty) {
                let piece = Piece { ty, loc };
                let node = node.succ(self.arena, piece);
                self.children_buf.push(node);
            }
        }

        tracing::trace!("expanded to create {} children", self.children_buf.len());
        &self.children_buf
    }
}

#[derive(Copy, Clone, Debug)]
#[repr(transparent)]
struct Node(*const NodeData);

struct NodeData {
    mat: MatBuf,
    queue: Queue,
    pcnt: usize,
    rating: i32,
    back_edge: Cell<Option<Edge>>,
}

#[derive(Copy, Clone)]
struct Edge {
    piece: Piece,
    pred: Node,
}

impl search::Node for Node {
    fn is_terminal(&self) -> bool {
        self.queue.is_empty()
    }
}

impl Node {
    fn new_root(arena: *const Bump, mat: &Mat, queue: Queue) -> Self {
        Self::new(arena, mat, queue, 0)
    }

    fn succ(self, arena: *const Bump, piece: Piece) -> Self {
        let mut mat: MatBuf = MatBuf::new();
        mat.copy_from(&self.mat);
        piece.cells().fill(&mut mat);
        mat.clear_lines();
        let queue = self.queue.succ(piece.ty);
        let pcnt = self.pcnt + 1;
        let succ = Self::new(arena, &mat, queue, pcnt);
        succ.back_edge.set(Some(Edge {
            piece,
            pred: self.clone(),
        }));
        succ
    }

    fn new(arena: *const Bump, mat: &Mat, queue: Queue, pcnt: usize) -> Self {
        let arena = unsafe { &*arena };
        let rating = evaluate(mat, pcnt);
        let node_data = NodeData::alloc(arena, mat, queue, pcnt, rating);
        Self(node_data)
    }

    fn trace(self) -> Vec<Piece> {
        let mut pieces = Vec::with_capacity(self.pcnt);
        let mut parent = Some(self);
        while let Some(node) = parent.take() {
            if let Some(edge) = node.back_edge.get() {
                pieces.push(edge.piece);
                parent = Some(edge.pred);
            }
        }
        pieces.reverse();
        pieces
    }
}

impl core::cmp::Ord for Node {
    fn cmp(&self, other: &Self) -> core::cmp::Ordering {
        other.rating.cmp(&self.rating)
    }
}

impl core::cmp::Eq for Node {}

impl core::cmp::PartialOrd for Node {
    fn partial_cmp(&self, other: &Self) -> Option<core::cmp::Ordering> {
        Some(self.cmp(other))
    }
}

impl core::cmp::PartialEq for Node {
    fn eq(&self, other: &Self) -> bool {
        self.cmp(other).is_eq()
    }
}

impl Deref for Node {
    type Target = NodeData;
    fn deref(&self) -> &Self::Target {
        unsafe { &*self.0 }
    }
}

impl NodeData {
    fn alloc<'a>(arena: &'a Bump, mat: &Mat, queue: Queue, pcnt: usize, rating: i32) -> &'a Self {
        let node = arena.alloc_with(|| NodeData {
            mat: MatBuf::new(),
            rating,
            pcnt,
            queue,
            back_edge: Cell::new(None),
        });
        node.mat.copy_from(mat);
        node
    }
}

struct Queue {
    next: *const [PieceType],
    held: Option<PieceType>,
}

impl Queue {
    fn alloc(arena: &Bump, mut previews: &[PieceType], mut hold: Option<PieceType>) -> Self {
        if hold.is_none() && !previews.is_empty() {
            hold = Some(previews[0]);
            previews = &previews[1..];
        }

        Queue {
            next: arena.alloc_slice_copy(previews),
            held: hold,
        }
    }

    fn next(&self) -> &[PieceType] {
        unsafe { &*self.next }
    }

    fn current(&self) -> impl Iterator<Item = PieceType> {
        [self.next().first().copied(), self.held]
            .into_iter()
            .flatten()
    }

    fn is_empty(&self) -> bool {
        self.next().is_empty() && self.held.is_none()
    }

    fn succ(&self, ty: PieceType) -> Self {
        let (hd, tl) = match self.next() {
            [hd, tl @ ..] => (Some(*hd), tl),
            [] => (None, &[][..]),
        };
        if self.held == Some(ty) {
            Self { next: tl, held: hd }
        } else {
            debug_assert_eq!(hd, Some(ty));
            Self {
                next: tl,
                held: self.held,
            }
        }
    }
}

@@ -1,83 +0,0 @@
use alloc::{collections::BinaryHeap, vec::Vec};

pub trait Graph {
    type Node: Node;

    fn root(&mut self) -> Self::Node;
    fn expand(&mut self, node: Self::Node) -> &[Self::Node];
}

pub trait Node: Clone + core::fmt::Debug + Ord {
    fn is_terminal(&self) -> bool;
}

pub struct ModifiedAStar<G: Graph> {
    graph: G,
    fringe: Vec<BinaryHeap<G::Node>>,
    depth: usize,
}

struct NoneAvailable;

impl<G: Graph> ModifiedAStar<G> {
    pub fn new(mut graph: G) -> Self {
        Self {
            fringe: Vec::from_iter([BinaryHeap::from_iter([graph.root()])]),
            depth: 0,
            graph,
        }
    }

    pub fn step(&mut self) -> Option<G::Node> {
        loop {
            match self.expand() {
                Ok(Some(term_node)) => break Some(term_node),
                Ok(None) => continue,
                Err(NoneAvailable) => match self.select() {
                    Ok(()) => continue,
                    Err(NoneAvailable) => break None,
                },
            }
        }
    }

    fn expand(&mut self) -> Result<Option<G::Node>, NoneAvailable> {
        tracing::trace!("expand depth = {}", self.depth);

        let set = self.fringe.get_mut(self.depth);
        self.depth += 1;

        let node = set.and_then(|s| s.pop()).ok_or(NoneAvailable)?;
        if node.is_terminal() {
            tracing::trace!("found terminal node {node:?}");
            return Ok(Some(node));
        }

        let children = self.graph.expand(node).iter().cloned();
        self.fringe.resize_with(self.depth + 1, BinaryHeap::new);
        self.fringe[self.depth].extend(children);

        Ok(None)
    }

    fn select(&mut self) -> Result<(), NoneAvailable> {
        let mut best = None;

        for (depth, set) in self.fringe.iter().enumerate() {
            if let Some(node) = set.peek() {
                if best.as_ref().map_or(true, |best| node > best) {
                    best = Some(node.clone());
                    self.depth = depth;
                }
            }
        }

        if let Some(best) = best {
            tracing::trace!("selected depth = {}, best = {:?}", self.depth, best);
            Ok(())
        } else {
            tracing::trace!("fringe exhausted; no nodes remaining");
            Err(NoneAvailable)
        }
    }
}

@@ -1,145 +0,0 @@
use anyhow::{anyhow, Context as _};

/// Headless fish simulator.
#[derive(Debug, clap::Parser)]
struct Args {
    #[command(subcommand)]
    cmd: Cmd,
}

#[derive(Debug, clap::Subcommand)]
enum Cmd {
    /// List all reachable moves from the input game state.
    List(ListCmd),
    /// Prints the best suggested play for the input game state.
    Best(BestCmd),
    /// Simulate solo play until some end condition is met.
    Play(PlayCmd),
}

#[derive(Debug, clap::Args)]
struct ListCmd {}

#[derive(Debug, clap::Args)]
struct BestCmd {
    #[command(flatten)]
    settings: Settings,
}

#[derive(Debug, clap::Args)]
struct PlayCmd {
    /// Game seed for generating pieces and garbage.
    #[arg(short = 's', long)]
    seed: u64,
    /// Number of moves to play before exiting. If omitted, will play until top out or
    /// until a dig race is finished.
    #[arg(short = 'n', long)]
    count: Option<u64>,
    /// Plays dig race (AKA "cheese race"), inserting rows of garbage onto the playfield
    /// until the specified number of them have been cleared.
    #[arg(short = 'd', long, name = "ROWS")]
    dig: Option<u64>,
    /// Minimum number of garbage rows present at once during a dig race.
    #[arg(long, name = "MIN-ROWS", default_value = "3")]
    dig_min: u64,
    /// Maximum number of garbage rows present at once during a dig race.
    #[arg(long, name = "MAX-ROWS", default_value = "9")]
    dig_max: u64,
    /// Number of previews available to the bot.
    #[arg(short = 'p', long = "previews", name = "PIECES", default_value = "5")]
    previews: u64,
    #[command(flatten)]
    settings: Settings,
}

#[derive(Debug, clap::Args)]
struct Settings {
    /// Limit analysis by given number of expanded nodes.
    #[arg(short = 'N', long = "limit-nodes")]
    nodes: Option<u64>,
    /// Limit analysis by given number of search steps.
    #[arg(short = 'S', long = "limit-steps")]
    steps: Option<u64>,
    /// Limit analysis to given time in milliseconds.
    #[arg(short = 'T', long = "limit-time")]
    ms: Option<u64>,
    // TODO: personality / heuristics configuration
    // TODO: capabilities
    // TODO: output progress format
}

fn main() -> anyhow::Result<()> {
    tracing_subscriber::fmt::fmt()
        .with_writer(std::io::stderr)
        .with_env_filter(tracing_subscriber::filter::EnvFilter::from_default_env())
        .init();

    let args = <Args as clap::Parser>::parse();
    match args.cmd {
        Cmd::List(_) => list_moves(),
        Cmd::Best(cmd) => print_best_move(cmd.settings),
        Cmd::Play(_) => Err(anyhow!("'play' command is not implemented")),
    }
}

fn list_moves() -> anyhow::Result<()> {
    let input: fish::io::InputState =
        serde_json::from_reader(std::io::stdin()).context("error parsing input state")?;

    let mut output = fish::io::OutputMoves::default();

    let mut mat = input.matrix.to_mat();
    mat.clear_lines();

    let front = input.queue.previews.get(0).copied();
    let hold = input.queue.hold;
    for ty in [front, hold].into_iter().flatten() {
        for loc in fish::find_locations(&mat, ty) {
            output.moves.push(fish::io::OutputMove {
                location: mino::Piece { ty, loc }.into(),
                spin: fish::io::Spin::None,
            });
        }
    }

    serde_json::to_writer(std::io::stdout(), &output).context("error serializing output")?;
    println!();

    Ok(())
}

fn print_best_move(_settings: Settings) -> anyhow::Result<()> {
    let input: fish::io::InputState =
        serde_json::from_reader(std::io::stdin()).context("error parsing input state")?;

    let mat = input.matrix.to_mat();
    // mat.clear_lines();

    // TODO: ai init config, e.g. personality
    // TODO: attack state (combo/b2b)
    let mut ai = fish::Ai::new(&mat, &input.queue.previews, input.queue.hold);

    // TODO: resource limits (cycles,nodes,time)
    let mut cycles = 0;
    loop {
        tracing::trace!("thinking... ({cycles})");
        if matches!(ai.think(), Err(fish::ai::Exhausted)) || cycles > 100_000 {
            break;
        }
        cycles += 1;
    }

    // print suggestions trace
    let mut output = fish::io::OutputMoves::default();
    for pc in ai.suggestion() {
        output.moves.push(fish::io::OutputMove {
            location: pc.into(),
            // TODO: spin suggestions
            spin: fish::io::Spin::None,
        });
    }

    serde_json::to_writer(std::io::stdout(), &output).context("error writing output state")?;
    println!();
    Ok(())
}

194  fish/src/io.rs
@@ -1,194 +0,0 @@
use alloc::boxed::Box;
use alloc::string::String;
use alloc::vec::Vec;
use mino::srs::PieceType;
use mino::MatBuf;
use mino::{srs::Piece, Loc};
use serde::{Deserialize, Serialize};

/// Deserializable description of an initial game state. This is intentionally similar to
/// the TBP "start" message.
#[derive(Clone, Debug, Deserialize)]
pub struct InputState {
    #[serde(flatten)]
    pub queue: InputQueue,
    #[allow(dead_code)]
    #[serde(flatten)]
    _attack: AttackState,
    // XXX(iitalics): TBP uses the terminology "board" but we use "matrix".
    #[serde(rename = "board")]
    pub matrix: InputMatrix,
}

/// Deserializable description of the queue.
#[derive(Clone, Debug, Deserialize)]
pub struct InputQueue {
    #[serde(deserialize_with = "deserialize_hold")]
    pub hold: Option<PieceType>,
    #[serde(deserialize_with = "deserialize_previews")]
    #[serde(rename = "queue")]
    pub previews: Vec<PieceType>,
}

#[derive(Clone, Debug, Deserialize)]
#[allow(dead_code)]
struct AttackState {
    combo: u32,
    back_to_back: u32,
    // TODO: use this?
}

// use FromStr to deserialize a PieceType

fn deserialize_hold<'de, D>(de: D) -> Result<Option<PieceType>, D::Error>
where
    D: serde::Deserializer<'de>,
{
    Option::<String>::deserialize(de)?
        .map(|s| s.parse())
        .transpose()
        .map_err(serde::de::Error::custom)
}

fn deserialize_previews<'de, D>(de: D) -> Result<Vec<PieceType>, D::Error>
where
    D: serde::Deserializer<'de>,
{
    Vec::<String>::deserialize(de)?
        .iter()
        .map(|s| s.parse())
        .collect::<Result<Vec<_>, _>>()
        .map_err(serde::de::Error::custom)
}

/// Deserializable description of the game matrix.
#[derive(Clone, Debug)]
pub struct InputMatrix {
    pub cells: Box<[[Option<Color>; 10]; 40]>,
}

impl InputMatrix {
    /// Converts this matrix to a [`MatBuf`], which discards color information but is
    /// stored more efficiently.
    pub fn to_mat(&self) -> MatBuf {
        let mut mat = MatBuf::new();
        for (y, row) in self.cells.iter().enumerate() {
            for (x, cell) in row.iter().enumerate() {
                if cell.is_some() {
                    mat.set(x as i16, y as i16);
                }
            }
        }
        mat
    }
}

#[derive(Copy, Clone, Eq, PartialEq, Debug, Deserialize)]
#[repr(u8)]
pub enum Color {
    I = 1,
    J = 2,
    L = 3,
    O = 4,
    S = 5,
    T = 6,
    Z = 7,
    G = 8,
}

impl<'de> serde::de::Deserialize<'de> for InputMatrix {
    fn deserialize<D>(de: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        // XXX(iitalics): serde doesn't let you deserialize a [T; 40] so we have to go
        // through Vec first and then check the length :|
        let cells: Vec<_> = serde::de::Deserialize::deserialize(de)?;
        let cells: [_; 40] = cells
            .try_into()
            .map_err(|_| serde::de::Error::custom("board must contain 40 rows"))?;
        Ok(Self {
            cells: Box::new(cells),
        })
    }
}

#[derive(Clone, Debug, Serialize, Default)]
pub struct OutputMoves {
    pub moves: Vec<OutputMove>,
}

#[derive(Clone, Debug, Serialize)]
pub struct OutputMove {
    pub location: OutputLocation,
    pub spin: Spin,
}

#[derive(Clone, Debug, Serialize)]
pub struct OutputLocation {
    #[serde(serialize_with = "serialize_piece_type")]
    #[serde(rename = "type")]
    pub ty: PieceType,
    #[serde(serialize_with = "serialize_loc")]
    #[serde(flatten)]
    pub location: Loc,
}

impl From<Piece> for OutputLocation {
    fn from(pc: Piece) -> Self {
        Self {
            ty: pc.ty,
            location: pc.loc,
        }
    }
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Default, Serialize)]
#[serde(rename_all = "lowercase")]
pub enum Spin {
    #[default]
    None,
    Mini,
    Full,
}

fn serialize_piece_type<S>(ty: &PieceType, ser: S) -> Result<S::Ok, S::Error>
where
    S: serde::Serializer,
{
    ty.as_char().serialize(ser)
}

fn serialize_loc<S>(loc: &Loc, ser: S) -> Result<S::Ok, S::Error>
where
    S: serde::Serializer,
{
    use serde::ser::SerializeStruct;
    let mut ser = ser.serialize_struct("Location", 3)?;
    ser.serialize_field("x", &loc.x)?;
    ser.serialize_field("y", &loc.y)?;
    let r = ["north", "east", "south", "west"][loc.r as usize];
    ser.serialize_field("orientation", r)?;
    ser.end()
}

#[cfg(test)]
mod test {
    use super::*;

    #[test]
    fn test_color_repr() {
        assert_eq!(
            core::mem::size_of::<Option<Color>>(),
            core::mem::size_of::<u8>()
        );
        assert_eq!(
            core::mem::size_of::<[Option<Color>; 10]>(),
            core::mem::size_of::<[u8; 10]>()
        );
        assert_eq!(
            unsafe { core::mem::transmute::<Option<Color>, u8>(None) },
            0u8
        );
    }
}

@@ -2,15 +2,6 @@

extern crate alloc;

pub mod ai;
pub mod bot;
pub mod eval;
pub mod find;

pub mod bot;

#[cfg(feature = "io")]
pub mod io;

pub use ai::Ai;
pub use bot::Bot;
pub use find::find_locations;

@@ -1,2 +0,0 @@
node_modules
package-lock.json

@@ -1,33 +0,0 @@
const fumen = require('tetris-fumen');
const { argv } = require('node:process');
const { fumenPageToInputState, fumenPagePlayMove, runFishCli } = require('./src/utils');

const CONFIG = [
    // '--limit-nodes', '1000',
    // '--limit-steps', '5',
    // '--limit-time', '1000',
];

async function main() {
    if (argv.length != 3) {
        throw new Error('usage: node list <url>');
    }

    let url = argv[2];
    let pages = fumen.decoder.decode(url);
    if (pages.length !== 1) {
        throw new Error('expected exactly one page');
    }

    let inputState = fumenPageToInputState(pages[0]);
    let output = await runFishCli(['best', ...CONFIG], inputState);
    for (let move of output.moves) {
        pages.push(fumenPagePlayMove(pages[0], move, { lock: true }));
    }

    pages.push({ comment: '' });

    console.log(fumen.encoder.encode(pages));
}

main().catch(e => console.error(e));

@@ -1 +0,0 @@
v115@lgA8CeA8OeA8CeC8BeC8BeC8AeE8AeC8AeN8AeA8Je?AgWZAFLDmClcJSAVzbSAVG88AYP88A5sjxCvAAAA

@@ -1,25 +0,0 @@
const fumen = require('tetris-fumen');
const { argv } = require('node:process');
const { fumenPageToInputState, fumenPagePlayMove, runFishCli } = require('./src/utils');

async function main() {
    if (argv.length != 3) {
        throw new Error('usage: node list <url>');
    }

    let url = argv[2];
    let pages = fumen.decoder.decode(url);
    if (pages.length !== 1) {
        throw new Error('expected exactly one page');
    }

    let inputState = fumenPageToInputState(pages[0]);
    let output = await runFishCli(['list'], inputState);
    for (let move of output.moves) {
        pages.push(fumenPagePlayMove(pages[0], move, { lock: false }));
    }

    console.log(fumen.encoder.encode(pages));
}

main().catch(e => console.error(e));

@@ -1,10 +0,0 @@
{
  "name": "fumen-utils",
  "version": "1.0.0",
  "description": "",
  "author": "iitalics",
  "license": "LGPL-2.1-or-later",
  "dependencies": {
    "tetris-fumen": "^1.1.3"
  }
}

@@ -1,2 +0,0 @@
#!/usr/bin/env sh
exec cargo build --manifest-path ../fish/Cargo.toml --release --features=fish-cli

@@ -1,98 +0,0 @@
const fumen = require('tetris-fumen');
const { Quiz } = require('tetris-fumen/lib/quiz');
const { Page } = require('tetris-fumen/lib/decoder');
const { env } = require('node:process');
const { spawn } = require('node:child_process');
const path = require('node:path');

function fumenPageToInputState(page) {
    let board = new Array(40);
    for (let y = 0; y < 23; y++) {
        let row = board[y] = new Array(10);
        for (let x = 0; x < 10; x++) {
            row[x] = page.field.at(x, y);
            if (row[x] === '_') {
                row[x] = null;
            } else if (row[x] === 'X') {
                row[x] = 'G';
            }
        }
    }
    board[23] = new Array(10);
    for (let x = 0; x < 10; x++) {
        board[23][x] = null
    }
    for (let y = 24; y < 40; y++) {
        board[y] = board[23];
    }

    if (!page.flags.quiz) {
        throw new Error('page doesnot specify queue');
    }
    let quiz = new Quiz(page.comment);
    let hold = quiz.hold;
    let queue = [quiz.current, ...quiz.least];

    return {
        hold,
        queue,
        combo: 0,
        back_to_back: 0,
        board,
    };
}

function fumenPagePlayMove(page, move, flags) {
    let quiz = new Quiz(page.comment);
    // console.log(`${move.location.x},${move.location.y},${move.location.orientation[0]}`);
    return {
        operation: {
            type: move.location.type,
            x: move.location.x,
            y: move.location.y,
            rotation: {
                'north': 'spawn',
                'east': 'right',
                'south': 'reverse',
                'west': 'left',
            }[move.location.orientation],
        },
        flags,
    };
}

function runFishCli(args, input, cmd = getFishCliPath()) {
    let fish = spawn(cmd, args, { stdio: ['pipe', 'pipe', 'inherit'] });

    let data = [];
    let output = null;
    fish.stdout.on('data', (buf) => data.push(buf));
    fish.stdout.on('end', () => {
        output = JSON.parse(Buffer.concat(data).toString());
    });

    let exitPromise = new Promise((res, rej) => {
        fish.on('exit', (code) => {
            if (code === 0) {
                res(output);
            } else {
                rej(new Error(`exit code: ${code}`));
            }
        });
    });

    if (input !== undefined) {
        fish.stdin.end(JSON.stringify(input));
    }
    return exitPromise;
}

function getFishCliPath() {
    return process.env['FISH_CLI'] || path.join(__dirname, '../../target/release/fish-cli');
}

module.exports = {
    fumenPageToInputState,
    fumenPagePlayMove,
    runFishCli,
};