"""Query a local Ollama server with a single prompt and print the answer.

Bug fix: the original options dict swapped the sampling parameters,
passing ``top_p`` as ``temperature`` and ``temp`` as ``top_p``.
"""
from ollama import Client

model = 'llama3.2'
prompt = "Wieso ist der Himmel blau?"

# Sampling parameters (see Ollama modelfile/API docs for semantics).
top_k = 40   # restrict sampling to the 40 most likely tokens
top_p = 0.9  # nucleus-sampling cumulative-probability cutoff
temp = 0.8   # softmax temperature; higher = more random output

client = Client(host='http://172.22.0.29:11434')
response = client.generate(
    model=model,
    prompt=prompt,
    stream=False,  # return one complete response instead of a token stream
    options={
        'top_k': top_k,
        'top_p': top_p,       # was mistakenly set to `temp`
        'temperature': temp,  # was mistakenly set to `top_p`
    },
)
print(response.response)