KiteFish-Math
Collection
Math-specific model series • 5 items • Updated
# Load model directly
from transformers import AutoModelForCausalLM, AutoTokenizer

# Fetch the tokenizer for the model from the Hugging Face Hub.
tokenizer = AutoTokenizer.from_pretrained("KiteFishAI/Minnow-Math-2B")
model = AutoModelForCausalLM.from_pretrained("KiteFishAI/Minnow-Math-2B")

Minnow-Math-2B is a 2B-parameter language model by Kitefish, focused on mathematical reasoning, symbolic understanding, and structured problem solving.
This is an early release and part of our ongoing effort to build strong, efficient models for reasoning-heavy tasks.
# Full generation example: load the model, run one math prompt, print the answer.
from transformers import AutoTokenizer, AutoModelForCausalLM

# NOTE: the repo id must match the published org name used elsewhere on this
# card ("KiteFishAI/..."); the lowercase "kitefish/..." id does not resolve.
tokenizer = AutoTokenizer.from_pretrained("KiteFishAI/Minnow-Math-2B")
model = AutoModelForCausalLM.from_pretrained(
    "KiteFishAI/Minnow-Math-2B",
    torch_dtype="auto",  # use the checkpoint's native precision
    device_map="auto",   # place weights on available GPU(s)/CPU automatically
)

prompt = "Solve: 2x + 5 = 13"
# Move the tokenized inputs to the same device the model weights landed on.
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=100)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
# Use a pipeline as a high-level helper
from transformers import pipeline

# The pipeline downloads the model and tokenizer and wires them together;
# call pipe("...") to generate text.
pipe = pipeline("text-generation", model="KiteFishAI/Minnow-Math-2B")