Mirror of https://github.com/carlrobertoh/ProxyAI.git (synced 2026-05-13 07:02:34 +00:00)
fix: NPE when using unsupported model for code completions (#499)

parent 9823010526
commit a9e147ffc7

2 changed files with 12 additions and 2 deletions
@@ -6,6 +6,7 @@ import com.intellij.openapi.application.ApplicationManager;
 import com.intellij.openapi.components.PersistentStateComponent;
 import com.intellij.openapi.components.State;
 import com.intellij.openapi.components.Storage;
+import ee.carlrobert.codegpt.completions.llama.LlamaModel;
 import ee.carlrobert.codegpt.credentials.CredentialsStore;
 import ee.carlrobert.codegpt.settings.service.llama.form.LlamaSettingsForm;
 import org.apache.commons.lang3.StringUtils;
@@ -31,6 +32,15 @@ public class LlamaSettings implements PersistentStateComponent<LlamaSettingsState>
     return getInstance().getState();
   }
+
+  /**
+   * Code Completions enabled in settings and a model with InfillPromptTemplate selected.
+   */
+  public static boolean isCodeCompletionsPossible() {
+    return getInstance().getState().isCodeCompletionsEnabled()
+        && LlamaModel.findByHuggingFaceModel(getInstance().getState().getHuggingFaceModel())
+            .getInfillPromptTemplate() != null;
+  }
 
   public static LlamaSettings getInstance() {
     return ApplicationManager.getApplication().getService(LlamaSettings.class);
   }
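
The commit's second changed file (the code-completion caller) is not shown on this page. As a rough sketch of how the new isCodeCompletionsPossible() guard could be consumed to avoid the NPE, assuming LlamaSettings lives in ee.carlrobert.codegpt.settings.service.llama and using a hypothetical caller class and package (these names are illustrative, not the repository's actual code):

// Minimal, hypothetical caller sketch; package, class, and method names are
// assumptions for illustration and are not taken from this diff.
package ee.carlrobert.codegpt.codecompletions;

import ee.carlrobert.codegpt.settings.service.llama.LlamaSettings; // assumed package of LlamaSettings

public final class LlamaInfillGuardSketch {

  private LlamaInfillGuardSketch() {
  }

  /**
   * Guard used before requesting a llama.cpp infill (code) completion. Before this commit a
   * caller could look up the selected model's InfillPromptTemplate and dereference it even when
   * the model defined none, which is the NPE fixed in #499.
   *
   * @param requestCompletion hypothetical action that actually builds and sends the infill request
   */
  public static void requestCompletionIfPossible(Runnable requestCompletion) {
    if (!LlamaSettings.isCodeCompletionsPossible()) {
      // Feature disabled, or the selected Hugging Face model has no InfillPromptTemplate.
      return;
    }
    requestCompletion.run();
  }
}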