Using chain-of-thought (CoT) prompting for problem solving
Let’s implement a function that uses CoT prompting to solve mathematical word problems:
from transformers import AutoModelForCausalLM, AutoTokenizer


def solve_math_problem(model, tokenizer, problem):
    """Solve a math word problem using a chain-of-thought (CoT) prompt.

    Args:
        model: A causal language model exposing a ``generate`` method
            (e.g. one loaded via ``AutoModelForCausalLM``).
        tokenizer: The tokenizer matching ``model``; used both to encode
            the prompt and to decode the generated token ids.
        problem: The word problem to solve, as plain text.

    Returns:
        The decoded model output as a string — the prompt followed by the
        model's step-by-step solution.
    """
    # Build the CoT prompt (cot_prompt is defined earlier in this file).
    prompt = cot_prompt(problem)
    inputs = tokenizer(prompt, return_tensors="pt")
    # BUG FIX: the original called model.generate(inputs, ...), passing the
    # BatchEncoding dict positionally. generate() expects the encoded
    # tensors as keyword arguments (input_ids, attention_mask), so the
    # dict must be unpacked with **.
    outputs = model.generate(
        **inputs,
        max_length=500,
        num_return_sequences=1,
    )
    solution = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return solution


# Example usage
model_name = "gpt2-large"  # Replace with your preferred model
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)

# NOTE(review): the example problem was truncated in the source text;
# the literal is closed at the visible fragment — restore the full
# problem statement from the original document.
problem = "If a recipe calls for 2 cups..."