Wednesday, December 11, 2024

Run Ollama locally

 

Python


pip install ollama


import ollama

# Send one user turn to the locally running Ollama server and print the reply.
reply = ollama.chat(
    model='gemma2',
    messages=[
        {
            'role': 'user',
            'content': 'Why is the sky blue?',
        },
    ],
)

# The chat API returns a dict; the model's text lives under message.content.
print(reply['message']['content'])


Java 

Gradle dependency (add to the dependencies block of build.gradle):

// https://mvnrepository.com/artifact/dev.langchain4j/langchain4j-ollama

implementation 'dev.langchain4j:langchain4j-ollama:0.36.2'




package ollama.test;


import dev.langchain4j.model.chat.ChatLanguageModel;

import dev.langchain4j.model.ollama.OllamaChatModel;


/** Minimal langchain4j demo: ask a locally served Ollama model a question and print the answer. */
public class Test {

    public static void main(String[] args) {
        // Point the chat model at the default Ollama HTTP endpoint on this machine.
        ChatLanguageModel model =
                OllamaChatModel.builder()
                        .baseUrl("http://localhost:11434")
                        .modelName("gemma2")
                        .build();

        // Send a single prompt and echo the model's reply to stdout.
        String answer =
                model.generate("Provide 3 short bullet points explaining why Java is awesome");
        System.out.println(answer);
    }
}

No comments: