Upload 3 files
- genimage.py +1 -1
- llmenv.py +5 -0
- requirements.txt +1 -1
genimage.py
CHANGED
@@ -19,7 +19,7 @@ def load_pipeline():
     #vae = AutoencoderKL.from_pretrained("madebyollin/sdxl-vae-fp16-fix", torch_dtype=dtype)
     pipe = DiffusionPipeline.from_pretrained(
         #"John6666/rae-diffusion-xl-v2-sdxl-spo-pcm",
-        "Raelina/Raehoshi-illust-XL-
+        "Raelina/Raehoshi-illust-XL-8",
         #custom_pipeline="lpw_stable_diffusion_xl",
         #custom_pipeline="nyanko7/sdxl_smoothed_energy_guidance",
         torch_dtype=dtype,
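The only change here is the checkpoint id passed to DiffusionPipeline.from_pretrained. A minimal sketch of how the updated loader could be exercised, assuming the stock diffusers API, a CUDA device, and fp16 weights; the prompt and output file name are made up for illustration:

import torch
from diffusers import DiffusionPipeline

dtype = torch.float16  # mirrors the torch_dtype=dtype argument in load_pipeline()
pipe = DiffusionPipeline.from_pretrained(
    "Raelina/Raehoshi-illust-XL-8",  # checkpoint introduced by this commit
    torch_dtype=dtype,
)
pipe.to("cuda")

# hypothetical usage: render one image and save it
image = pipe("1girl, starry night, detailed illustration", num_inference_steps=28).images[0]
image.save("sample.png")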
llmenv.py
CHANGED
@@ -166,6 +166,8 @@ llm_models = {
     "Lunar-Abyss-12B.Q4_K_S.gguf": ["mradermacher/Lunar-Abyss-12B-GGUF", MessagesFormatterType.CHATML],
     "prototype-x-12b-q6_k.gguf": ["Vortex5/Prototype-X-12b-Q6_K-GGUF", MessagesFormatterType.MISTRAL],
     "Violet-Mist-12B.i1-Q4_K_M.gguf": ["mradermacher/Violet-Mist-12B-i1-GGUF", MessagesFormatterType.MISTRAL],
+    "Dreamstar-12B.Q4_K_M.gguf": ["mradermacher/Dreamstar-12B-GGUF", MessagesFormatterType.MISTRAL],
+    "AuroSlayerEtherealKrixUnslopMellRPMaxDARKNESS-12B.i1-Q4_K_M.gguf": ["mradermacher/AuroSlayerEtherealKrixUnslopMellRPMaxDARKNESS-12B-i1-GGUF", MessagesFormatterType.MISTRAL],
     "Irixxed_Homunculus-12B-Q3T-v.0.3-Reasoner.Q4_K_M.gguf": ["mradermacher/Irixxed_Homunculus-12B-Q3T-v.0.3-Reasoner-GGUF", MessagesFormatterType.MISTRAL],
     "Gemma-2-Llama-Swallow-9b-pt-v0.1.Q4_K_M.gguf": ["mradermacher/Gemma-2-Llama-Swallow-9b-pt-v0.1-GGUF", MessagesFormatterType.ALPACA],
     "Tlacuilo-12B.Q4_K_M.gguf": ["mradermacher/Tlacuilo-12B-GGUF", MessagesFormatterType.MISTRAL],
@@ -184,6 +186,9 @@ llm_models = {
     #"": ["", MessagesFormatterType.OPEN_CHAT],
     #"": ["", MessagesFormatterType.CHATML],
     #"": ["", MessagesFormatterType.PHI_3],
+    "Mangione-12B-Model_Stock.Q4_K_M.gguf": ["mradermacher/Mangione-12B-Model_Stock-GGUF", MessagesFormatterType.MISTRAL],
+    "gemma-3-12b-it-biprojected-abliterated.Q4_K_M.gguf": ["mradermacher/gemma-3-12b-it-biprojected-abliterated-GGUF", MessagesFormatterType.ALPACA],
+    "mistralai-Mistral-Nemo-Instruct-2407-extensive-BP-abliteration-12B.Q4_K_M.gguf": ["mradermacher/mistralai-Mistral-Nemo-Instruct-2407-extensive-BP-abliteration-12B-GGUF", MessagesFormatterType.MISTRAL],
     "PokeeAI_pokee_research_7b-Q5_K_M.gguf": ["bartowski/PokeeAI_pokee_research_7b-GGUF", MessagesFormatterType.OPEN_CHAT],
     "FARE-8B.Q5_K_M.gguf": ["mradermacher/FARE-8B-GGUF", MessagesFormatterType.OPEN_CHAT],
     "AdaVaR-3B.Q5_K_M.gguf": ["mradermacher/AdaVaR-3B-GGUF", MessagesFormatterType.OPEN_CHAT],
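Each llm_models entry maps a GGUF filename to its Hugging Face repo id and the llama-cpp-agent MessagesFormatterType matching that model's chat template. A hedged sketch of how one of the newly added entries could be downloaded and served, assuming the huggingface_hub, llama-cpp-python, and llama-cpp-agent APIs pinned by this Space's requirements; the context size, GPU-layer count, and prompts are illustrative:

from huggingface_hub import hf_hub_download
from llama_cpp import Llama
from llama_cpp_agent import LlamaCppAgent, MessagesFormatterType
from llama_cpp_agent.providers import LlamaCppPythonProvider

# one of the entries added in this commit: filename -> [repo_id, formatter]
filename = "Dreamstar-12B.Q4_K_M.gguf"
repo_id, formatter = "mradermacher/Dreamstar-12B-GGUF", MessagesFormatterType.MISTRAL

model_path = hf_hub_download(repo_id=repo_id, filename=filename)
llm = Llama(model_path=model_path, n_gpu_layers=-1, n_ctx=4096)

agent = LlamaCppAgent(
    LlamaCppPythonProvider(llm),
    system_prompt="You are a helpful assistant.",
    predefined_messages_formatter_type=formatter,
)
print(agent.get_chat_response("Hello!"))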
requirements.txt
CHANGED
@@ -4,7 +4,7 @@ hf_transfer
 scikit-build-core
 #https://github.com/abetlen/llama-cpp-python/releases/download/v0.3.4-cu124/llama_cpp_python-0.3.4-cp310-cp310-linux_x86_64.whl
 #git+https://github.com/Maximilian-Winter/llama-cpp-agent
-https://github.com/John6666cat/llama-cpp-python/releases/download/v0.3.
+https://github.com/John6666cat/llama-cpp-python/releases/download/v0.3.17-cu124-AVX-linux-20251205/llama_cpp_python-0.3.17-cp310-cp310-linux_x86_64.whl
 #https://github.com/abetlen/llama-cpp-python/releases/download/v0.3.16-cu124/llama_cpp_python-0.3.16-cp310-cp310-linux_x86_64.whl
 git+https://github.com/John6666cat/llama-cpp-agent
 pybind11>=2.12
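The requirements change swaps the pinned llama-cpp-python wheel for the v0.3.17 cu124 AVX build from the same fork. A quick sanity check that the pin resolved, assuming the package exposes the usual version attribute; the expected value simply restates what the new wheel URL advertises:

import llama_cpp
print(llama_cpp.__version__)  # expected "0.3.17" if the pinned wheel installed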