feat: Implement Ollama as a high-level service (#510)

* Initial implementation of Ollama as a service

* Fix model selector in tool window

* Enable image attachment

* Rewrite OllamaSettingsForm in Kt

* Create OllamaInlineCompletionModel and use it for building completion template

* Add support for blocking code completion on models that are not known to support it

* Allow disabling code completion settings

* Disable code completion settings when an unsupported model is entered

* Track FIM template in settings as a derived state

* Update llm-client

* Initial implementation of model combo box

* Add Ollama icon and display models as list

* Make OllamaSettingsState immutable & convert OllamaSettings to Kotlin

* Add refresh models button

* Distinguish between empty/needs refresh/loading

* Avoid storing any model if the combo box is empty

* Fix icon size

* Back to mutable settings
There were some bugs with immutable settings

* Store available models in settings state

* Expose available models in model dropdown

* Add dark icon

* Cleanups for CompletionRequestProvider

* Fix checkstyle issues

* refactor: migrate to SimplePersistentStateComponent

* fix: add code completion stop tokens

* fix: display only one item in the model popup action group

* fix: add back multi model selection

---------

Co-authored-by: Carl-Robert Linnupuu <carlrobertoh@gmail.com>
This commit is contained in:
Jack Boswell 2024-05-08 10:11:13 +12:00 committed by GitHub
parent 7f7b35d3be
commit e40630d796
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
23 changed files with 505 additions and 39 deletions

View file

@ -16,6 +16,7 @@ public final class Icons {
public static final Icon Sparkle = IconLoader.getIcon("/icons/sparkle.svg", Icons.class);
public static final Icon You = IconLoader.getIcon("/icons/you.svg", Icons.class);
public static final Icon YouSmall = IconLoader.getIcon("/icons/you_small.png", Icons.class);
public static final Icon Ollama = IconLoader.getIcon("/icons/ollama.svg", Icons.class);
public static final Icon User = IconLoader.getIcon("/icons/user.svg", Icons.class);
public static final Icon Upload = IconLoader.getIcon("/icons/upload.svg", Icons.class);
}

View file

@ -1,5 +1,6 @@
package ee.carlrobert.codegpt.completions;
import com.intellij.openapi.application.ApplicationManager;
import ee.carlrobert.codegpt.CodeGPTPlugin;
import ee.carlrobert.codegpt.completions.you.YouUserManager;
import ee.carlrobert.codegpt.credentials.CredentialsStore;
@ -8,11 +9,13 @@ import ee.carlrobert.codegpt.settings.advanced.AdvancedSettings;
import ee.carlrobert.codegpt.settings.service.anthropic.AnthropicSettings;
import ee.carlrobert.codegpt.settings.service.azure.AzureSettings;
import ee.carlrobert.codegpt.settings.service.llama.LlamaSettings;
import ee.carlrobert.codegpt.settings.service.ollama.OllamaSettings;
import ee.carlrobert.codegpt.settings.service.openai.OpenAISettings;
import ee.carlrobert.llm.client.anthropic.ClaudeClient;
import ee.carlrobert.llm.client.azure.AzureClient;
import ee.carlrobert.llm.client.azure.AzureCompletionRequestParams;
import ee.carlrobert.llm.client.llama.LlamaClient;
import ee.carlrobert.llm.client.ollama.OllamaClient;
import ee.carlrobert.llm.client.openai.OpenAIClient;
import ee.carlrobert.llm.client.you.UTMParameters;
import ee.carlrobert.llm.client.you.YouClient;
@ -92,6 +95,16 @@ public class CompletionClientProvider {
return builder.build(getDefaultClientBuilder());
}
/**
 * Builds an {@link OllamaClient} pointed at the host configured in {@link OllamaSettings},
 * reusing the shared default HTTP client configuration.
 */
public static OllamaClient getOllamaClient() {
  var settings = ApplicationManager.getApplication().getService(OllamaSettings.class);
  return new OllamaClient.Builder()
      .setHost(settings.getState().getHost())
      .build(getDefaultClientBuilder());
}
public static OkHttpClient.Builder getDefaultClientBuilder() {
OkHttpClient.Builder builder = new OkHttpClient.Builder();
var advancedSettings = AdvancedSettings.getCurrentState();

View file

@ -26,8 +26,8 @@ import ee.carlrobert.codegpt.settings.service.ServiceType;
import ee.carlrobert.codegpt.settings.service.anthropic.AnthropicSettings;
import ee.carlrobert.codegpt.settings.service.custom.CustomServiceChatCompletionSettingsState;
import ee.carlrobert.codegpt.settings.service.custom.CustomServiceSettings;
import ee.carlrobert.codegpt.settings.service.custom.CustomServiceState;
import ee.carlrobert.codegpt.settings.service.llama.LlamaSettings;
import ee.carlrobert.codegpt.settings.service.ollama.OllamaSettings;
import ee.carlrobert.codegpt.settings.service.openai.OpenAISettings;
import ee.carlrobert.codegpt.settings.service.you.YouSettings;
import ee.carlrobert.codegpt.telemetry.core.configuration.TelemetryConfiguration;
@ -41,6 +41,8 @@ import ee.carlrobert.llm.client.anthropic.completion.ClaudeCompletionStandardMes
import ee.carlrobert.llm.client.anthropic.completion.ClaudeMessageImageContent;
import ee.carlrobert.llm.client.anthropic.completion.ClaudeMessageTextContent;
import ee.carlrobert.llm.client.llama.completion.LlamaCompletionRequest;
import ee.carlrobert.llm.client.ollama.completion.request.OllamaChatCompletionMessage;
import ee.carlrobert.llm.client.ollama.completion.request.OllamaChatCompletionRequest;
import ee.carlrobert.llm.client.openai.completion.OpenAIChatCompletionModel;
import ee.carlrobert.llm.client.openai.completion.request.OpenAIChatCompletionDetailedMessage;
import ee.carlrobert.llm.client.openai.completion.request.OpenAIChatCompletionMessage;
@ -56,6 +58,7 @@ import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Base64;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
@ -140,7 +143,8 @@ public class CompletionRequestProvider {
public static Request buildCustomOpenAILookupCompletionRequest(String context) {
return buildCustomOpenAIChatCompletionRequest(
ApplicationManager.getApplication().getService(CustomServiceState.class)
ApplicationManager.getApplication().getService(CustomServiceSettings.class)
.getState()
.getChatCompletionSettings(),
List.of(
new OpenAIChatCompletionStandardMessage(
@ -210,7 +214,7 @@ public class CompletionRequestProvider {
@Nullable String model,
CallParameters callParameters) {
var configuration = ConfigurationSettings.getCurrentState();
return new OpenAIChatCompletionRequest.Builder(buildMessages(model, callParameters))
return new OpenAIChatCompletionRequest.Builder(buildOpenAIMessages(model, callParameters))
.setModel(model)
.setMaxTokens(configuration.getMaxTokens())
.setStream(true)
@ -222,7 +226,7 @@ public class CompletionRequestProvider {
CallParameters callParameters) {
return buildCustomOpenAIChatCompletionRequest(
settings,
buildMessages(callParameters),
buildOpenAIMessages(callParameters),
true);
}
@ -307,7 +311,68 @@ public class CompletionRequestProvider {
return request;
}
private List<OpenAIChatCompletionMessage> buildMessages(CallParameters callParameters) {
/**
 * Creates an Ollama chat completion request for the currently configured model,
 * with the full message history derived from the given call parameters.
 */
public OllamaChatCompletionRequest buildOllamaChatCompletionRequest(
    CallParameters callParameters
) {
  var state = ApplicationManager.getApplication().getService(OllamaSettings.class).getState();
  var builder = new OllamaChatCompletionRequest.Builder(
      state.getModel(),
      buildOllamaMessages(callParameters));
  return builder.build();
}
/**
 * Builds the ordered Ollama chat message list for a request: an optional system prompt
 * (depending on the conversation type), the prior conversation exchanges (with any attached
 * images Base64-encoded), and finally the current user message.
 *
 * @param callParameters the current call context (message, conversation type, image data)
 * @return the messages in the order they should be sent to Ollama
 * @throws RuntimeException if a previously attached image file cannot be read
 */
private List<OllamaChatCompletionMessage> buildOllamaMessages(CallParameters callParameters) {
  var message = callParameters.getMessage();
  var messages = new ArrayList<OllamaChatCompletionMessage>();
  // The system prompt differs by conversation type; only one of these can apply.
  if (callParameters.getConversationType() == ConversationType.DEFAULT) {
    String systemPrompt = ConfigurationSettings.getCurrentState().getSystemPrompt();
    messages.add(new OllamaChatCompletionMessage("system", systemPrompt, null));
  }
  if (callParameters.getConversationType() == ConversationType.FIX_COMPILE_ERRORS) {
    messages.add(
        new OllamaChatCompletionMessage("system", FIX_COMPILE_ERRORS_SYSTEM_PROMPT, null)
    );
  }
  for (var prevMessage : conversation.getMessages()) {
    // On retry, drop the exchange being retried and everything after it.
    if (callParameters.isRetry() && prevMessage.getId().equals(message.getId())) {
      break;
    }
    var prevMessageImageFilePath = prevMessage.getImageFilePath();
    if (prevMessageImageFilePath != null && !prevMessageImageFilePath.isEmpty()) {
      messages.add(
          new OllamaChatCompletionMessage(
              "user",
              prevMessage.getPrompt(),
              List.of(encodeImageFileAsBase64(prevMessageImageFilePath))
          )
      );
    } else {
      messages.add(
          new OllamaChatCompletionMessage("user", prevMessage.getPrompt(), null)
      );
    }
    messages.add(
        new OllamaChatCompletionMessage("assistant", prevMessage.getResponse(), null)
    );
  }
  // The current message may carry freshly attached image bytes (not yet persisted to a file).
  if (callParameters.getImageMediaType() != null && callParameters.getImageData().length > 0) {
    var imageBase64 = Base64.getEncoder().encodeToString(callParameters.getImageData());
    messages.add(
        new OllamaChatCompletionMessage("user", message.getPrompt(), List.of(imageBase64))
    );
  } else {
    messages.add(new OllamaChatCompletionMessage("user", message.getPrompt(), null));
  }
  return messages;
}

/** Reads the image at the given path and returns its contents Base64-encoded. */
private static String encodeImageFileAsBase64(String imageFilePath) {
  try {
    var imageBytes = Files.readAllBytes(Path.of(imageFilePath));
    return Base64.getEncoder().encodeToString(imageBytes);
  } catch (IOException e) {
    // Include the offending path so the failure is diagnosable from the stack trace alone.
    throw new RuntimeException("Unable to read image file: " + imageFilePath, e);
  }
}
private List<OpenAIChatCompletionMessage> buildOpenAIMessages(CallParameters callParameters) {
var message = callParameters.getMessage();
var messages = new ArrayList<OpenAIChatCompletionMessage>();
if (callParameters.getConversationType() == ConversationType.DEFAULT) {
@ -339,7 +404,9 @@ public class CompletionRequestProvider {
} else {
messages.add(new OpenAIChatCompletionStandardMessage("user", prevMessage.getPrompt()));
}
messages.add(new OpenAIChatCompletionStandardMessage("assistant", prevMessage.getResponse()));
messages.add(
new OpenAIChatCompletionStandardMessage("assistant", prevMessage.getResponse())
);
}
if (callParameters.getImageMediaType() != null && callParameters.getImageData().length > 0) {
@ -355,10 +422,10 @@ public class CompletionRequestProvider {
return messages;
}
private List<OpenAIChatCompletionMessage> buildMessages(
private List<OpenAIChatCompletionMessage> buildOpenAIMessages(
@Nullable String model,
CallParameters callParameters) {
var messages = buildMessages(callParameters);
var messages = buildOpenAIMessages(callParameters);
if (model == null
|| GeneralSettings.getCurrentState().getSelectedService() == ServiceType.YOU) {

View file

@ -21,11 +21,14 @@ import ee.carlrobert.codegpt.settings.service.anthropic.AnthropicSettings;
import ee.carlrobert.codegpt.settings.service.azure.AzureSettings;
import ee.carlrobert.codegpt.settings.service.custom.CustomServiceSettings;
import ee.carlrobert.codegpt.settings.service.llama.LlamaSettings;
import ee.carlrobert.codegpt.settings.service.ollama.OllamaSettings;
import ee.carlrobert.codegpt.settings.service.openai.OpenAISettings;
import ee.carlrobert.llm.client.DeserializationUtil;
import ee.carlrobert.llm.client.anthropic.completion.ClaudeCompletionRequest;
import ee.carlrobert.llm.client.anthropic.completion.ClaudeCompletionStandardMessage;
import ee.carlrobert.llm.client.llama.completion.LlamaCompletionRequest;
import ee.carlrobert.llm.client.ollama.completion.request.OllamaChatCompletionMessage;
import ee.carlrobert.llm.client.ollama.completion.request.OllamaChatCompletionRequest;
import ee.carlrobert.llm.client.openai.completion.OpenAIChatCompletionEventSourceListener;
import ee.carlrobert.llm.client.openai.completion.OpenAITextCompletionEventSourceListener;
import ee.carlrobert.llm.client.openai.completion.request.OpenAIChatCompletionRequest;
@ -104,6 +107,9 @@ public final class CompletionRequestService {
callParameters.getMessage(),
callParameters.getConversationType()),
eventListener);
case OLLAMA -> CompletionClientProvider.getOllamaClient().getChatCompletionAsync(
requestProvider.buildOllamaChatCompletionRequest(callParameters),
eventListener);
};
}
@ -123,6 +129,9 @@ public final class CompletionRequestService {
.getInfillAsync(
CodeCompletionRequestFactory.buildLlamaRequest(requestDetails),
eventListener);
case OLLAMA -> CompletionClientProvider.getOllamaClient().getCompletionAsync(
CodeCompletionRequestFactory.INSTANCE.buildOllamaRequest(requestDetails),
eventListener);
default ->
throw new IllegalArgumentException("Code completion not supported for selected service");
};
@ -189,6 +198,20 @@ public final class CompletionRequestService {
.setRepeat_penalty(settings.getRepeatPenalty())
.build(), eventListener);
break;
case OLLAMA:
var model = ApplicationManager.getApplication()
.getService(OllamaSettings.class)
.getState()
.getModel();
var request = new OllamaChatCompletionRequest.Builder(
model,
List.of(
new OllamaChatCompletionMessage("system", systemPrompt, null),
new OllamaChatCompletionMessage("user", gitDiff, null)
)
).build();
CompletionClientProvider.getOllamaClient().getChatCompletionAsync(request, eventListener);
break;
default:
LOG.debug("Unknown service: {}", selectedService);
break;
@ -228,9 +251,9 @@ public final class CompletionRequestService {
case OPENAI -> CredentialsStore.INSTANCE.isCredentialSet(CredentialKey.OPENAI_API_KEY);
case AZURE -> CredentialsStore.INSTANCE.isCredentialSet(
AzureSettings.getCurrentState().isUseAzureApiKeyAuthentication()
? CredentialKey.AZURE_OPENAI_API_KEY
: CredentialKey.AZURE_ACTIVE_DIRECTORY_TOKEN);
case CUSTOM_OPENAI, ANTHROPIC, LLAMA_CPP -> true;
? CredentialKey.AZURE_OPENAI_API_KEY
: CredentialKey.AZURE_ACTIVE_DIRECTORY_TOKEN);
case CUSTOM_OPENAI, ANTHROPIC, LLAMA_CPP, OLLAMA -> true;
case YOU -> false;
};
}

View file

@ -9,6 +9,7 @@ import ee.carlrobert.codegpt.settings.service.ServiceType;
import ee.carlrobert.codegpt.settings.service.anthropic.AnthropicSettings;
import ee.carlrobert.codegpt.settings.service.azure.AzureSettings;
import ee.carlrobert.codegpt.settings.service.llama.LlamaSettings;
import ee.carlrobert.codegpt.settings.service.ollama.OllamaSettings;
import ee.carlrobert.codegpt.settings.service.openai.OpenAISettings;
import java.time.LocalDateTime;
import java.util.ArrayList;
@ -195,9 +196,13 @@ public final class ConversationService {
case LLAMA_CPP -> {
var llamaSettings = LlamaSettings.getCurrentState();
yield llamaSettings.isUseCustomModel()
? llamaSettings.getCustomLlamaModelPath()
: llamaSettings.getHuggingFaceModel().getCode();
? llamaSettings.getCustomLlamaModelPath()
: llamaSettings.getHuggingFaceModel().getCode();
}
case OLLAMA -> ApplicationManager.getApplication()
.getService(OllamaSettings.class)
.getState()
.getModel();
};
}
}

View file

@ -11,6 +11,7 @@ import ee.carlrobert.codegpt.settings.service.ServiceType;
import ee.carlrobert.codegpt.settings.service.anthropic.AnthropicSettings;
import ee.carlrobert.codegpt.settings.service.azure.AzureSettings;
import ee.carlrobert.codegpt.settings.service.llama.LlamaSettings;
import ee.carlrobert.codegpt.settings.service.ollama.OllamaSettings;
import ee.carlrobert.codegpt.settings.service.openai.OpenAISettings;
import org.jetbrains.annotations.NotNull;
@ -69,6 +70,9 @@ public class GeneralSettings implements PersistentStateComponent<GeneralSettings
if ("you.chat.completion".equals(clientCode)) {
state.setSelectedService(ServiceType.YOU);
}
if ("ollama.chat.completion".equals(clientCode)) {
state.setSelectedService(ServiceType.OLLAMA);
}
}
public String getModel() {
@ -98,6 +102,11 @@ public class GeneralSettings implements PersistentStateComponent<GeneralSettings
llamaModel.getLabel(),
huggingFaceModel.getParameterSize(),
huggingFaceModel.getQuantization());
case OLLAMA:
return ApplicationManager.getApplication()
.getService(OllamaSettings.class)
.getState()
.getModel();
default:
return "Unknown";
}

View file

@ -4,6 +4,7 @@ import static ee.carlrobert.codegpt.settings.service.ServiceType.ANTHROPIC;
import static ee.carlrobert.codegpt.settings.service.ServiceType.AZURE;
import static ee.carlrobert.codegpt.settings.service.ServiceType.CUSTOM_OPENAI;
import static ee.carlrobert.codegpt.settings.service.ServiceType.LLAMA_CPP;
import static ee.carlrobert.codegpt.settings.service.ServiceType.OLLAMA;
import static ee.carlrobert.codegpt.settings.service.ServiceType.OPENAI;
import static ee.carlrobert.codegpt.settings.service.ServiceType.YOU;
@ -20,6 +21,8 @@ import ee.carlrobert.codegpt.settings.service.azure.AzureSettingsForm;
import ee.carlrobert.codegpt.settings.service.custom.CustomServiceForm;
import ee.carlrobert.codegpt.settings.service.llama.LlamaSettings;
import ee.carlrobert.codegpt.settings.service.llama.form.LlamaSettingsForm;
import ee.carlrobert.codegpt.settings.service.ollama.OllamaSettings;
import ee.carlrobert.codegpt.settings.service.ollama.OllamaSettingsForm;
import ee.carlrobert.codegpt.settings.service.openai.OpenAISettings;
import ee.carlrobert.codegpt.settings.service.openai.OpenAISettingsForm;
import ee.carlrobert.codegpt.settings.service.you.YouSettings;
@ -45,6 +48,7 @@ public class GeneralSettingsComponent {
private final AzureSettingsForm azureSettingsForm;
private final YouSettingsForm youSettingsForm;
private final LlamaSettingsForm llamaSettingsForm;
private final OllamaSettingsForm ollamaSettingsForm;
public GeneralSettingsComponent(Disposable parentDisposable, GeneralSettings settings) {
displayNameField = new JBTextField(settings.getState().getDisplayName(), 20);
@ -54,6 +58,7 @@ public class GeneralSettingsComponent {
azureSettingsForm = new AzureSettingsForm(AzureSettings.getCurrentState());
youSettingsForm = new YouSettingsForm(YouSettings.getCurrentState(), parentDisposable);
llamaSettingsForm = new LlamaSettingsForm(LlamaSettings.getCurrentState());
ollamaSettingsForm = new OllamaSettingsForm();
var cardLayout = new DynamicCardLayout();
var cards = new JPanel(cardLayout);
@ -63,6 +68,7 @@ public class GeneralSettingsComponent {
cards.add(azureSettingsForm.getForm(), AZURE.getCode());
cards.add(youSettingsForm, YOU.getCode());
cards.add(llamaSettingsForm, LLAMA_CPP.getCode());
cards.add(ollamaSettingsForm.getForm(), OLLAMA.getCode());
var serviceComboBoxModel = new DefaultComboBoxModel<ServiceType>();
serviceComboBoxModel.addAll(Arrays.stream(ServiceType.values()).toList());
serviceComboBox = new ComboBox<>(serviceComboBoxModel);
@ -106,6 +112,10 @@ public class GeneralSettingsComponent {
return youSettingsForm;
}
/** Returns the settings form backing the Ollama service card. */
public OllamaSettingsForm getOllamaSettingsForm() {
return ollamaSettingsForm;
}
public ServiceType getSelectedService() {
return serviceComboBox.getItem();
}
@ -137,6 +147,7 @@ public class GeneralSettingsComponent {
azureSettingsForm.resetForm();
youSettingsForm.resetForm();
llamaSettingsForm.resetForm();
ollamaSettingsForm.resetForm();
}
static class DynamicCardLayout extends CardLayout {

View file

@ -20,6 +20,8 @@ import ee.carlrobert.codegpt.settings.service.azure.AzureSettingsForm;
import ee.carlrobert.codegpt.settings.service.custom.CustomServiceForm;
import ee.carlrobert.codegpt.settings.service.llama.LlamaSettings;
import ee.carlrobert.codegpt.settings.service.llama.form.LlamaSettingsForm;
import ee.carlrobert.codegpt.settings.service.ollama.OllamaSettings;
import ee.carlrobert.codegpt.settings.service.ollama.OllamaSettingsForm;
import ee.carlrobert.codegpt.settings.service.openai.OpenAISettings;
import ee.carlrobert.codegpt.settings.service.openai.OpenAISettingsForm;
import ee.carlrobert.codegpt.settings.service.you.YouSettings;
@ -68,7 +70,8 @@ public class GeneralSettingsConfigurable implements Configurable {
|| AnthropicSettings.getInstance().isModified(component.getAnthropicSettingsForm())
|| AzureSettings.getInstance().isModified(component.getAzureSettingsForm())
|| YouSettings.getInstance().isModified(component.getYouSettingsForm())
|| LlamaSettings.getInstance().isModified(component.getLlamaSettingsForm());
|| LlamaSettings.getInstance().isModified(component.getLlamaSettingsForm())
|| component.getOllamaSettingsForm().isModified();
}
@Override
@ -84,6 +87,7 @@ public class GeneralSettingsConfigurable implements Configurable {
applyAzureSettings(component.getAzureSettingsForm());
applyYouSettings(component.getYouSettingsForm());
applyLlamaSettings(component.getLlamaSettingsForm());
component.getOllamaSettingsForm().applyChanges();
var serviceChanged = component.getSelectedService() != settings.getSelectedService();
var modelChanged = !OpenAISettings.getCurrentState().getModel()
@ -133,6 +137,10 @@ public class GeneralSettingsConfigurable implements Configurable {
form.getActiveDirectoryToken());
}
/** Persists the Ollama form's pending changes into its settings state. */
private void applyOllamaSettings(OllamaSettingsForm form) {
form.applyChanges();
}
@Override
public void reset() {
var settings = GeneralSettings.getCurrentState();

View file

@ -8,7 +8,8 @@ public enum ServiceType {
ANTHROPIC("ANTHROPIC", "service.anthropic.title", "anthropic.chat.completion"),
AZURE("AZURE", "service.azure.title", "azure.chat.completion"),
YOU("YOU", "service.you.title", "you.chat.completion"),
LLAMA_CPP("LLAMA_CPP", "service.llama.title", "llama.chat.completion");
LLAMA_CPP("LLAMA_CPP", "service.llama.title", "llama.chat.completion"),
OLLAMA("OLLAMA", "service.ollama.title", "ollama.chat.completion");
private final String code;
private final String label;

View file

@ -22,7 +22,8 @@ public class LlamaSettingsForm extends JPanel {
llamaRequestPreferencesForm = new LlamaRequestPreferencesForm(settings);
codeCompletionConfigurationForm = new CodeCompletionConfigurationForm(
settings.isCodeCompletionsEnabled(),
settings.getCodeCompletionMaxTokens());
settings.getCodeCompletionMaxTokens(),
null);
init();
}

View file

@ -36,7 +36,8 @@ public class OpenAISettingsForm {
OpenAIChatCompletionModel.findByCode(settings.getModel()));
codeCompletionConfigurationForm = new CodeCompletionConfigurationForm(
settings.isCodeCompletionsEnabled(),
settings.getCodeCompletionMaxTokens());
settings.getCodeCompletionMaxTokens(),
null);
}
public JPanel getForm() {

View file

@ -1,6 +1,7 @@
package ee.carlrobert.codegpt.toolwindow.chat.ui.textarea;
import static ee.carlrobert.codegpt.settings.service.ServiceType.CUSTOM_OPENAI;
import static ee.carlrobert.codegpt.settings.service.ServiceType.OLLAMA;
import static ee.carlrobert.codegpt.settings.service.ServiceType.OPENAI;
import static ee.carlrobert.codegpt.settings.service.ServiceType.YOU;
import static java.lang.String.format;
@ -23,6 +24,8 @@ import ee.carlrobert.codegpt.settings.GeneralSettingsState;
import ee.carlrobert.codegpt.settings.service.ServiceType;
import ee.carlrobert.codegpt.settings.service.custom.CustomServiceSettings;
import ee.carlrobert.codegpt.settings.service.llama.LlamaSettings;
import ee.carlrobert.codegpt.settings.service.ollama.OllamaSettings;
import ee.carlrobert.codegpt.settings.service.ollama.OllamaSettingsState;
import ee.carlrobert.codegpt.settings.service.openai.OpenAISettings;
import ee.carlrobert.codegpt.settings.service.openai.OpenAISettingsState;
import ee.carlrobert.codegpt.settings.service.you.YouSettings;
@ -41,12 +44,16 @@ public class ModelComboBoxAction extends ComboBoxAction {
private final GeneralSettingsState settings;
private final OpenAISettingsState openAISettings;
private final YouSettingsState youSettings;
private final OllamaSettingsState ollamaSettings;
public ModelComboBoxAction(Runnable onModelChange, ServiceType selectedService) {
this.onModelChange = onModelChange;
settings = GeneralSettings.getCurrentState();
openAISettings = OpenAISettings.getCurrentState();
youSettings = YouSettings.getCurrentState();
ollamaSettings = ApplicationManager.getApplication()
.getService(OllamaSettings.class)
.getState();
updateTemplatePresentation(selectedService);
subscribeToYouSignedOutTopic(ApplicationManager.getApplication().getMessageBus().connect());
@ -103,6 +110,9 @@ public class ModelComboBoxAction extends ComboBoxAction {
getLlamaCppPresentationText(),
Icons.Llama,
presentation));
actionGroup.addSeparator("Ollama");
ollamaSettings.getAvailableModels().forEach(model ->
actionGroup.add(createOllamaModelAction(model, presentation)));
if (YouUserManager.getInstance().isSubscribed()) {
actionGroup.addSeparator("You.com");
@ -179,7 +189,12 @@ public class ModelComboBoxAction extends ComboBoxAction {
templatePresentation.setText(getLlamaCppPresentationText());
templatePresentation.setIcon(Icons.Llama);
break;
case OLLAMA:
templatePresentation.setIcon(Icons.Ollama);
templatePresentation.setText(ollamaSettings.getModel());
break;
default:
break;
}
}
@ -235,6 +250,34 @@ public class ModelComboBoxAction extends ComboBoxAction {
onModelChange.run();
}
/**
 * Creates a popup action for selecting the given Ollama model.
 * The action is disabled while its model is already the one shown in the combo box.
 */
private AnAction createOllamaModelAction(
String model,
Presentation comboBoxPresentation
) {
return new DumbAwareAction(model, "", Icons.Ollama) {
@Override
public void update(@NotNull AnActionEvent event) {
// Disable the entry for the currently selected model.
var presentation = event.getPresentation();
presentation.setEnabled(!presentation.getText().equals(comboBoxPresentation.getText()));
}
@Override
public void actionPerformed(@NotNull AnActionEvent e) {
// Persist the choice, then update the combo box presentation and notify listeners.
ollamaSettings.setModel(model);
handleModelChange(
OLLAMA,
model,
Icons.Ollama,
comboBoxPresentation);
}
@Override
public @NotNull ActionUpdateThread getActionUpdateThread() {
return ActionUpdateThread.BGT;
}
};
}
private AnAction createOpenAIModelAction(
OpenAIChatCompletionModel model,
Presentation comboBoxPresentation) {

View file

@ -1,6 +1,7 @@
package ee.carlrobert.codegpt.toolwindow.chat.ui.textarea;
import static ee.carlrobert.codegpt.settings.service.ServiceType.ANTHROPIC;
import static ee.carlrobert.codegpt.settings.service.ServiceType.OLLAMA;
import static ee.carlrobert.codegpt.settings.service.ServiceType.OPENAI;
import static ee.carlrobert.llm.client.openai.completion.OpenAIChatCompletionModel.GPT_4_VISION_PREVIEW;
@ -192,6 +193,7 @@ public class UserPromptTextArea extends JPanel {
}));
var selectedService = GeneralSettings.getCurrentState().getSelectedService();
if (selectedService == ANTHROPIC
|| selectedService == OLLAMA
|| (selectedService == OPENAI
&& GPT_4_VISION_PREVIEW.getCode().equals(OpenAISettings.getCurrentState().getModel()))) {
iconsPanel.add(new IconActionButton(new AttachImageAction()));

View file

@ -9,34 +9,44 @@ import ee.carlrobert.codegpt.settings.service.ServiceType
import ee.carlrobert.codegpt.settings.service.ServiceType.*
import ee.carlrobert.codegpt.settings.service.custom.CustomServiceSettings
import ee.carlrobert.codegpt.settings.service.llama.LlamaSettings
import ee.carlrobert.codegpt.settings.service.ollama.OllamaSettings
import ee.carlrobert.codegpt.settings.service.openai.OpenAISettings
abstract class CodeCompletionFeatureToggleActions(
private val enableFeatureAction: Boolean
) : DumbAwareAction() {
/**
 * Toggles the code-completion flag on the settings object of whichever service
 * is currently selected. Services without code completion are a no-op.
 */
override fun actionPerformed(e: AnActionEvent) {
when (GeneralSettings.getCurrentState().selectedService) {
OPENAI ->
OpenAISettings.getCurrentState().isCodeCompletionsEnabled = enableFeatureAction
LLAMA_CPP ->
LlamaSettings.getCurrentState().isCodeCompletionsEnabled = enableFeatureAction
OLLAMA -> service<OllamaSettings>().state.codeCompletionsEnabled = enableFeatureAction
CUSTOM_OPENAI -> service<CustomServiceSettings>().state
.codeCompletionSettings
.codeCompletionsEnabled = enableFeatureAction
ANTHROPIC,
AZURE,
YOU,
null -> { /* no-op for these services */
}
}
}
override fun update(e: AnActionEvent) {
val selectedService = GeneralSettings.getCurrentState().selectedService
val codeCompletionEnabled = isCodeCompletionsEnabled(selectedService)
e.presentation.isEnabled = codeCompletionEnabled != enableFeatureAction
e.presentation.isVisible = when (selectedService) {
e.presentation.isVisible = codeCompletionEnabled != enableFeatureAction
e.presentation.isEnabled = when (selectedService) {
OPENAI,
CUSTOM_OPENAI,
LLAMA_CPP -> true
LLAMA_CPP,
OLLAMA -> true
ANTHROPIC,
AZURE,
YOU,
@ -53,6 +63,7 @@ abstract class CodeCompletionFeatureToggleActions(
OPENAI -> OpenAISettings.getCurrentState().isCodeCompletionsEnabled
CUSTOM_OPENAI -> service<CustomServiceSettings>().state.codeCompletionSettings.codeCompletionsEnabled
LLAMA_CPP -> LlamaSettings.isCodeCompletionsPossible()
OLLAMA -> service<OllamaSettings>().state.codeCompletionsEnabled
ANTHROPIC,
AZURE,
YOU -> false

View file

@ -10,9 +10,12 @@ import ee.carlrobert.codegpt.settings.configuration.Placeholder
import ee.carlrobert.codegpt.settings.service.custom.CustomServiceSettings
import ee.carlrobert.codegpt.settings.service.llama.LlamaSettings
import ee.carlrobert.codegpt.settings.service.llama.LlamaSettingsState
import ee.carlrobert.codegpt.settings.service.ollama.OllamaSettings
import ee.carlrobert.codegpt.settings.service.openai.OpenAISettings
import ee.carlrobert.llm.client.llama.completion.LlamaCompletionRequest
import ee.carlrobert.llm.client.llama.completion.LlamaInfillRequest
import ee.carlrobert.llm.client.ollama.completion.request.OllamaCompletionRequest
import ee.carlrobert.llm.client.ollama.completion.request.OllamaParameters
import ee.carlrobert.llm.client.openai.completion.request.OpenAITextCompletionRequest
import okhttp3.MediaType.Companion.toMediaType
import okhttp3.Request
@ -82,11 +85,29 @@ object CodeCompletionRequestFactory {
fun buildLlamaRequest(details: InfillRequestDetails): LlamaInfillRequest {
val settings = LlamaSettings.getCurrentState()
val promptTemplate = getLlamaInfillPromptTemplate(settings)
return LlamaInfillRequest(LlamaCompletionRequest.Builder(null)
.setN_predict(settings.codeCompletionMaxTokens)
.setStream(true)
.setTemperature(0.4)
.setStop(promptTemplate.stopTokens), details.prefix, details.suffix)
return LlamaInfillRequest(
LlamaCompletionRequest.Builder(null)
.setN_predict(settings.codeCompletionMaxTokens)
.setStream(true)
.setTemperature(0.4)
.setStop(promptTemplate.stopTokens), details.prefix, details.suffix
)
}
/**
 * Builds a raw Ollama completion request for code completion, rendering the
 * configured FIM template around the given prefix/suffix and applying the
 * configured stop tokens and token limit.
 */
fun buildOllamaRequest(details: InfillRequestDetails): OllamaCompletionRequest {
    val state = service<OllamaSettings>().state
    val prompt = state.fimTemplate.buildPrompt(details.prefix, details.suffix)
    val options = OllamaParameters.Builder()
        .stop(state.fimTemplate.stopTokens)
        .numPredict(state.codeCompletionMaxTokens)
        .build()
    return OllamaCompletionRequest.Builder(state.model, prompt)
        .setOptions(options)
        .setRaw(true)
        .build()
}
private fun getLlamaInfillPromptTemplate(settings: LlamaSettingsState): InfillPromptTemplate {
@ -112,4 +133,4 @@ object CodeCompletionRequestFactory {
else -> value
}
}
}
}

View file

@ -12,6 +12,7 @@ import ee.carlrobert.codegpt.settings.GeneralSettings
import ee.carlrobert.codegpt.settings.service.ServiceType
import ee.carlrobert.codegpt.settings.service.custom.CustomServiceSettings
import ee.carlrobert.codegpt.settings.service.llama.LlamaSettings
import ee.carlrobert.codegpt.settings.service.ollama.OllamaSettings
import ee.carlrobert.codegpt.settings.service.openai.OpenAISettings
import ee.carlrobert.codegpt.ui.OverlayUtil.showNotification
import ee.carlrobert.llm.client.openai.completion.ErrorDetails
@ -70,6 +71,7 @@ class CodeGPTInlineCompletionProvider : InlineCompletionProvider {
ServiceType.OPENAI -> OpenAISettings.getCurrentState().isCodeCompletionsEnabled
ServiceType.CUSTOM_OPENAI -> service<CustomServiceSettings>().state.codeCompletionSettings.codeCompletionsEnabled
ServiceType.LLAMA_CPP -> LlamaSettings.getCurrentState().isCodeCompletionsEnabled
ServiceType.OLLAMA -> service<OllamaSettings>().state.codeCompletionsEnabled
ServiceType.ANTHROPIC,
ServiceType.AZURE,
ServiceType.YOU,

View file

@ -1,13 +1,26 @@
package ee.carlrobert.codegpt.settings.service
import com.intellij.icons.AllIcons.General
import com.intellij.ide.HelpTooltip
import com.intellij.openapi.ui.ComboBox
import com.intellij.openapi.ui.panel.ComponentPanelBuilder
import com.intellij.ui.EnumComboBoxModel
import com.intellij.ui.components.JBCheckBox
import com.intellij.ui.components.JBLabel
import com.intellij.ui.components.fields.IntegerField
import com.intellij.util.ui.FormBuilder
import ee.carlrobert.codegpt.CodeGPTBundle
import ee.carlrobert.codegpt.codecompletions.InfillPromptTemplate
import org.apache.commons.text.StringEscapeUtils
import java.awt.FlowLayout
import javax.swing.Box
import javax.swing.JPanel
class CodeCompletionConfigurationForm(codeCompletionsEnabled: Boolean, maxTokens: Int) {
class CodeCompletionConfigurationForm(
codeCompletionsEnabled: Boolean,
maxTokens: Int,
fimTemplate: InfillPromptTemplate?
) {
private val codeCompletionsEnabledCheckBox = JBCheckBox(
CodeGPTBundle.get("codeCompletionsForm.enableFeatureText"),
@ -18,15 +31,33 @@ class CodeCompletionConfigurationForm(codeCompletionsEnabled: Boolean, maxTokens
columns = 12
value = maxTokens
}
// Selector for the fill-in-the-middle (FIM) prompt template; refreshes the
// context-help tooltip whenever the selection changes.
private val promptTemplateComboBox =
    ComboBox(EnumComboBoxModel(InfillPromptTemplate::class.java)).apply {
        item = fimTemplate
        addItemListener {
            updatePromptTemplateHelpTooltip(it.item as InfillPromptTemplate)
        }
    }

// Context-help icon whose tooltip previews the currently selected FIM template.
private val promptTemplateHelpText = JBLabel(General.ContextHelp)
fun getForm(): JPanel {
return FormBuilder.createFormBuilder()
val formBuilder = FormBuilder.createFormBuilder()
.addComponent(codeCompletionsEnabledCheckBox)
.addVerticalGap(4)
.addLabeledComponent(
CodeGPTBundle.get("codeCompletionsForm.maxTokensLabel"),
codeCompletionMaxTokensField
)
.addVerticalGap(4);
if (fimTemplate != null) {
formBuilder.addVerticalGap(4)
.addLabeledComponent(
"FIM template:",
JPanel(FlowLayout(FlowLayout.LEADING, 0, 0)).apply {
add(promptTemplateComboBox)
add(Box.createHorizontalStrut(4))
add(promptTemplateHelpText)
})
}
return formBuilder.addLabeledComponent(
CodeGPTBundle.get("codeCompletionsForm.maxTokensLabel"),
codeCompletionMaxTokensField
)
.addComponentToRightColumn(
ComponentPanelBuilder.createCommentComponent(
CodeGPTBundle.get("codeCompletionsForm.maxTokensComment"), true, 48, true
@ -46,4 +77,20 @@ class CodeCompletionConfigurationForm(codeCompletionsEnabled: Boolean, maxTokens
set(maxTokens) {
codeCompletionMaxTokensField.value = maxTokens
}
// Currently selected FIM template, mirrored directly from the combo box;
// null when the combo box holds no selection.
var fimTemplate: InfillPromptTemplate?
    get() = promptTemplateComboBox.item
    set(template) {
        promptTemplateComboBox.item = template
    }
// Rebuilds the context-help tooltip so it previews how the given template
// renders a prompt around example PREFIX/SUFFIX markers.
private fun updatePromptTemplateHelpTooltip(template: InfillPromptTemplate) {
    promptTemplateHelpText.setToolTipText(null)
    val preview = StringEscapeUtils.escapeHtml4(template.buildPrompt("PREFIX", "SUFFIX"))
    HelpTooltip().apply {
        setTitle(template.toString())
        setDescription("<html><p>$preview</p></html>")
        installOn(promptTemplateHelpText)
    }
}
}

View file

@ -0,0 +1,20 @@
package ee.carlrobert.codegpt.settings.service.ollama
import com.intellij.openapi.components.BaseState
import com.intellij.openapi.components.SimplePersistentStateComponent
import com.intellij.openapi.components.State
import com.intellij.openapi.components.Storage
import ee.carlrobert.codegpt.codecompletions.InfillPromptTemplate
// Application-level persistent settings for the Ollama service.
// The state bean below is serialized to CodeGPT_OllamaSettings_210.xml.
@State(name = "CodeGPT_OllamaSettings_210", storages = [Storage("CodeGPT_OllamaSettings_210.xml")])
class OllamaSettings :
    SimplePersistentStateComponent<OllamaSettingsState>(OllamaSettingsState())

// Mutable state bean persisted by OllamaSettings. Property delegates
// (string/property/enum/list) come from BaseState and drive serialization,
// so defaults are declared here rather than in an initializer.
class OllamaSettingsState : BaseState() {
    var host by string("http://localhost:11434") // default local Ollama endpoint
    var model by string()
    var codeCompletionsEnabled by property(true)
    var codeCompletionMaxTokens by property(128)
    var fimTemplate by enum<InfillPromptTemplate>(InfillPromptTemplate.CODE_LLAMA)
    var availableModels by list<String>() // cached from the last successful model refresh
}

View file

@ -0,0 +1,163 @@
package ee.carlrobert.codegpt.settings.service.ollama
import com.intellij.notification.NotificationType
import com.intellij.openapi.application.invokeLater
import com.intellij.openapi.components.service
import com.intellij.openapi.diagnostic.thisLogger
import com.intellij.openapi.observable.util.whenTextChangedFromUi
import com.intellij.openapi.ui.ComboBox
import com.intellij.ui.TitledSeparator
import com.intellij.ui.components.JBTextField
import com.intellij.util.ui.FormBuilder
import ee.carlrobert.codegpt.CodeGPTBundle
import ee.carlrobert.codegpt.settings.service.CodeCompletionConfigurationForm
import ee.carlrobert.codegpt.ui.OverlayUtil
import ee.carlrobert.codegpt.ui.UIUtil
import ee.carlrobert.llm.client.ollama.OllamaClient
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.runBlocking
import java.awt.BorderLayout
import java.net.ConnectException
import javax.swing.ComboBoxModel
import javax.swing.DefaultComboBoxModel
import javax.swing.JButton
import javax.swing.JPanel
/**
 * Settings UI for the Ollama service: host URL, model selection (fetched
 * from the server via the refresh button) and code-completion options.
 */
class OllamaSettingsForm {

    private val refreshModelsButton =
        JButton(CodeGPTBundle.get("settingsConfigurable.service.ollama.models.refresh"))
    private val hostField: JBTextField
    private val modelComboBox: ComboBox<String>
    private val codeCompletionConfigurationForm: CodeCompletionConfigurationForm

    companion object {
        private val logger = thisLogger()
    }

    init {
        val settings = service<OllamaSettings>().state
        codeCompletionConfigurationForm = CodeCompletionConfigurationForm(
            settings.codeCompletionsEnabled,
            settings.codeCompletionMaxTokens,
            settings.fimTemplate
        )
        val emptyModelsComboBoxModel =
            DefaultComboBoxModel(arrayOf("Hit refresh to see models for this host"))
        modelComboBox = ComboBox(emptyModelsComboBoxModel).apply {
            isEnabled = false
        }
        hostField = JBTextField().apply {
            text = settings.host
            // Editing the host invalidates any previously fetched model list.
            whenTextChangedFromUi {
                modelComboBox.model = emptyModelsComboBoxModel
                modelComboBox.isEnabled = false
            }
        }
        refreshModelsButton.addActionListener { refreshModels() }
        // NOTE(review): this blocks the calling (UI) thread via runBlocking
        // until the server responds or fails; consider moving the initial
        // fetch to a background thread.
        refreshModels()
    }

    fun getForm(): JPanel = FormBuilder.createFormBuilder()
        .addComponent(TitledSeparator(CodeGPTBundle.get("shared.configuration")))
        .addComponent(
            FormBuilder.createFormBuilder()
                .setFormLeftIndent(16)
                .addLabeledComponent(
                    CodeGPTBundle.get("settingsConfigurable.shared.baseHost.label"),
                    hostField
                )
                .addLabeledComponent(
                    CodeGPTBundle.get("settingsConfigurable.shared.model.label"),
                    JPanel(BorderLayout(8, 0)).apply {
                        add(modelComboBox, BorderLayout.CENTER)
                        add(refreshModelsButton, BorderLayout.EAST)
                    }
                )
                .panel
        )
        .addComponent(TitledSeparator(CodeGPTBundle.get("shared.codeCompletions")))
        .addComponent(UIUtil.withEmptyLeftBorder(codeCompletionConfigurationForm.getForm()))
        .panel

    /**
     * Returns the selected model, or an empty string while the combo box is
     * disabled (i.e. it only shows a placeholder, not a real model name).
     */
    fun getModel(): String {
        return if (modelComboBox.isEnabled) {
            modelComboBox.item
        } else {
            ""
        }
    }

    /** Restores every form field from the persisted settings state. */
    fun resetForm() {
        service<OllamaSettings>().state.run {
            hostField.text = host
            modelComboBox.item = model
            codeCompletionConfigurationForm.isCodeCompletionsEnabled = codeCompletionsEnabled
            codeCompletionConfigurationForm.maxTokens = codeCompletionMaxTokens
            codeCompletionConfigurationForm.fimTemplate = fimTemplate
        }
    }

    /** Writes the current form values into the persisted settings state. */
    fun applyChanges() {
        service<OllamaSettings>().state.run {
            host = hostField.text
            model = modelComboBox.item
            codeCompletionsEnabled = codeCompletionConfigurationForm.isCodeCompletionsEnabled
            codeCompletionMaxTokens = codeCompletionConfigurationForm.maxTokens
            // For Ollama the form is constructed with a non-null template
            // (the state default is CODE_LLAMA), so the selector always
            // holds a value here.
            fimTemplate = codeCompletionConfigurationForm.fimTemplate!!
        }
    }

    fun isModified() = service<OllamaSettings>().state.run {
        hostField.text != host
            || modelComboBox.item != model
            || codeCompletionConfigurationForm.isCodeCompletionsEnabled != codeCompletionsEnabled
            || codeCompletionConfigurationForm.maxTokens != codeCompletionMaxTokens
            || codeCompletionConfigurationForm.fimTemplate != fimTemplate
    }

    private fun disableModelComboBoxWithPlaceholder(placeholderModel: ComboBoxModel<String>) {
        invokeLater {
            modelComboBox.apply {
                model = placeholderModel
                isEnabled = false
            }
        }
    }

    /**
     * Fetches the model list from the Ollama server at the current host and
     * populates the model combo box. On failure the combo box is disabled
     * with an explanatory placeholder and an error notification is shown.
     */
    private fun refreshModels() {
        disableModelComboBoxWithPlaceholder(DefaultComboBoxModel(arrayOf("Loading")))
        try {
            val models = runBlocking(Dispatchers.IO) {
                OllamaClient.Builder()
                    .setHost(hostField.text)
                    .build()
                    .modelTags
                    .models
                    .map { it.name }
            }
            // Cache the result so other components can offer model selection
            // without hitting the server again.
            service<OllamaSettings>().state.availableModels = models.toMutableList()
            invokeLater {
                modelComboBox.apply {
                    if (models.isNotEmpty()) {
                        model = DefaultComboBoxModel(models.toTypedArray())
                        isEnabled = true
                    } else {
                        model = DefaultComboBoxModel(arrayOf("No models"))
                    }
                }
            }
        } catch (ex: RuntimeException) {
            if (ex.cause is ConnectException) {
                // An unreachable server is an expected user-environment
                // condition; log a warning instead of reporting it as a
                // fatal IDE error.
                logger.warn("Unable to connect to Ollama server", ex)
                OverlayUtil.showNotification(
                    "Unable to connect to Ollama server",
                    NotificationType.ERROR
                )
            } else {
                logger.error(ex)
                // ex.message may be null; fall back to a generic message.
                OverlayUtil.showNotification(
                    ex.message ?: "Unable to load Ollama models",
                    NotificationType.ERROR
                )
            }
            disableModelComboBoxWithPlaceholder(DefaultComboBoxModel(arrayOf("Unable to load models")))
        }
    }
}

View file

@ -36,6 +36,7 @@
<applicationService serviceImplementation="ee.carlrobert.codegpt.settings.service.openai.OpenAISettings"/>
<applicationService serviceImplementation="ee.carlrobert.codegpt.settings.service.you.YouSettings"/>
<applicationService serviceImplementation="ee.carlrobert.codegpt.settings.service.llama.LlamaSettings"/>
<applicationService serviceImplementation="ee.carlrobert.codegpt.settings.service.ollama.OllamaSettings"/>
<applicationService serviceImplementation="ee.carlrobert.codegpt.settings.IncludedFilesSettings"/>
<applicationService serviceImplementation="ee.carlrobert.codegpt.settings.configuration.ConfigurationSettings"/>
<applicationService serviceImplementation="ee.carlrobert.codegpt.settings.advanced.AdvancedSettings"/>

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 8.3 KiB

File diff suppressed because one or more lines are too long

After

Width:  |  Height:  |  Size: 8.3 KiB

View file

@ -116,6 +116,7 @@ settingsConfigurable.service.custom.openai.url.label=URL:
settingsConfigurable.service.custom.openai.linkToDocs=Link to API docs
settingsConfigurable.service.custom.openai.connectionSuccess=Connection successful.
settingsConfigurable.service.custom.openai.connectionFailed=Connection failed.
settingsConfigurable.service.ollama.models.refresh=Refresh Models
configurationConfigurable.section.commitMessage.title=Commit Message Template
configurationConfigurable.section.commitMessage.systemPromptField.label=Prompt template:
configurationConfigurable.section.inlineCompletion.title=Inline Completion
@ -173,6 +174,7 @@ service.anthropic.title=Anthropic Service
service.azure.title=Azure Service
service.you.title=You.com Service (Free, Cloud)
service.llama.title=LLaMA C/C++ Port (Free, Local)
service.ollama.title=Ollama (Free, Local)
validation.error.fieldRequired=This field is required.
validation.error.invalidEmail=The email you entered is invalid.
validation.error.mustBeNumber=Value must be number.