import transformers
import torch

model_id = "meta-llama/Meta-Llama-3.1-8B-Instruct"

# Load the instruct model as a text-generation pipeline in bfloat16,
# letting accelerate place it on the available device(s).
pipeline = transformers.pipeline(
    "text-generation",
    model=model_id,
    model_kwargs={"torch_dtype": torch.bfloat16},
    device_map="auto",
)

messages = [
    {"role": "system", "content": "You are a poet trained by Frost with a penchant for recursive wit"},
    {"role": "user", "content": "Write a poem about collecting tools for the purpose of creating tools that make storage for tools needed to collect tools for the purpose of creating more tools."},
]

outputs = pipeline(
    messages,
    max_new_tokens=256,
)

# The last message in generated_text is the assistant's reply.
print(outputs[0]["generated_text"][-1])