fix: NPE when using unsupported model for code completions (#499)

This commit is contained in:
Rene Leonhardt 2024-04-24 09:24:44 +02:00 committed by GitHub
parent 9823010526
commit a9e147ffc7
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
2 changed files with 12 additions and 2 deletions

View file

@ -6,6 +6,7 @@ import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.PersistentStateComponent;
import com.intellij.openapi.components.State;
import com.intellij.openapi.components.Storage;
import ee.carlrobert.codegpt.completions.llama.LlamaModel;
import ee.carlrobert.codegpt.credentials.CredentialsStore;
import ee.carlrobert.codegpt.settings.service.llama.form.LlamaSettingsForm;
import org.apache.commons.lang3.StringUtils;
@ -31,6 +32,15 @@ public class LlamaSettings implements PersistentStateComponent<LlamaSettingsStat
return getInstance().getState();
}
/**
 * Returns {@code true} when code completions are enabled in the settings AND the
 * currently selected Hugging Face model maps to a {@link LlamaModel} that provides
 * an infill prompt template (i.e. the model supports fill-in-the-middle completion).
 */
public static boolean isCodeCompletionsPossible() {
  LlamaSettingsState state = getInstance().getState();
  // Guard first so the model lookup only happens when completions are enabled,
  // preserving the original short-circuit order.
  if (!state.isCodeCompletionsEnabled()) {
    return false;
  }
  LlamaModel model = LlamaModel.findByHuggingFaceModel(state.getHuggingFaceModel());
  return model.getInfillPromptTemplate() != null;
}
/**
 * Returns the application-level {@link LlamaSettings} service instance.
 */
public static LlamaSettings getInstance() {
  Application application = ApplicationManager.getApplication();
  return application.getService(LlamaSettings.class);
}