enable flash attention for image generation (#1633)

Wagner Bruna 2025-07-05 00:20:51 -03:00 committed by GitHub
parent bc3e4c1197
commit d74c16e6e0
3 changed files with 8 additions and 0 deletions


@@ -161,6 +161,7 @@ struct sd_load_model_inputs
     const char * vulkan_info = nullptr;
     const int threads = 0;
     const int quant = 0;
+    const bool flash_attention = false;
     const bool taesd = false;
     const int tiled_vae_threshold = 0;
     const char * t5xxl_filename = nullptr;
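
For reference, a minimal usage sketch of the new field. The struct fields mirror the hunk above (with the const qualifiers dropped so the caller can assign them after construction), and load_image_model is a hypothetical stand-in for whatever loader actually consumes these inputs in the other changed files, which are not shown here.

// Hedged sketch, not the project's actual loader code.
#include <cstdio>

struct sd_load_model_inputs {
    const char * vulkan_info = nullptr;
    int threads = 0;
    int quant = 0;
    bool flash_attention = false;   // new field added by this commit
    bool taesd = false;
    int tiled_vae_threshold = 0;
    const char * t5xxl_filename = nullptr;
};

// Hypothetical consumer: would forward flash_attention to the image backend.
static void load_image_model(const sd_load_model_inputs & in) {
    std::printf("threads=%d flash_attention=%s\n",
                in.threads, in.flash_attention ? "on" : "off");
}

int main() {
    sd_load_model_inputs inputs;
    inputs.threads = 4;
    inputs.flash_attention = true;   // enable flash attention for image generation
    load_image_model(inputs);
    return 0;
}

Because the field defaults to false, existing callers that do not set it keep the previous behavior.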