# Quick Inference Example
#
# Loads the fine-tuned math model and generates a step-by-step solution
# for a sample quadratic-equation prompt, printing the decoded text.
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

# Load the model and tokenizer
model = AutoModelForCausalLM.from_pretrained("kalkiai3000/mathAI-Gemma")
tokenizer = AutoTokenizer.from_pretrained("kalkiai3000/mathAI-Gemma")

# Place the model on GPU when available (torch was previously imported but
# unused); eval() disables dropout for deterministic inference behavior.
device = "cuda" if torch.cuda.is_available() else "cpu"
model = model.to(device)
model.eval()

# Example problem
prompt = """Question: Solve for x: 2x² + 5x - 3 = 0 Let me solve this step by step: """

# Inputs must live on the same device as the model or generate() will crash.
inputs = tokenizer(prompt, return_tensors="pt").to(device)

# BUG FIX: temperature only takes effect with do_sample=True; without it,
# generate() uses greedy decoding and silently ignores temperature
# (transformers emits a warning). inference_mode() skips autograd tracking.
with torch.inference_mode():
    outputs = model.generate(
        **inputs,
        max_new_tokens=256,
        do_sample=True,
        temperature=0.7,
    )

result = tokenizer.decode(outputs[0], skip_special_tokens=True)
print(result)