
Commit

downloadable models changed
guinmoon committed Oct 7, 2024
1 parent de1897f commit e1ff52c
Showing 2 changed files with 28 additions and 28 deletions.
54 changes: 27 additions & 27 deletions LLMFarm/Settings/downloadable_models.json
@@ -15,10 +15,10 @@
 "Q": "Q8_0"
 },
 {
-"url": "https://huggingface.co/bartowski/Llama-3.2-1B-Instruct-GGUF/resolve/main/Llama-3.2-1B-Instruct-IQ4_XS.gguf?download=true",
-"file_name": "Llama-3.2-1B-Instruct-IQ4_XS.gguf",
+"url": "https://huggingface.co/bartowski/Llama-3.2-1B-Instruct-GGUF/resolve/main/Llama-3.2-1B-Instruct-Q4_K_S.gguf?download=true",
+"file_name": "Llama-3.2-1B-Instruct-Q4_K_S.gguf",
 "size": "",
-"Q": "IQ4_XS"
+"Q": "Q4_K_S"
 },
 {
 "url": "https://huggingface.co/bartowski/Llama-3.2-1B-Instruct-GGUF/resolve/main/Llama-3.2-1B-Instruct-IQ3_M.gguf?download=true",
@@ -31,18 +31,18 @@
 {
 "name": "Gemma v2 2B",
 "models": [
-{
-"url": "https://huggingface.co/guinmoon/LLMFarm_Models/resolve/main/gemma%202b_it_v2_IQ4_NL.gguf",
-"file_name": "gemma_2b_it_v2_IQ4_NL.gguf",
-"size": "",
-"Q": "IQ4_NL"
-},
 {
 "url": "https://huggingface.co/guinmoon/LLMFarm_Models/resolve/main/gemma%202b_it_v2_Q5_K_S.gguf",
 "file_name": "gemma_2b_it_v2_Q5_K_S.gguf",
 "size": "",
 "Q": "Q5_K_S"
 },
+{
+"url": "https://huggingface.co/guinmoon/LLMFarm_Models/resolve/main/gemma%202b_it_v2_IQ4_NL.gguf",
+"file_name": "gemma_2b_it_v2_IQ4_NL.gguf",
+"size": "",
+"Q": "IQ4_NL"
+},
 {
 "url": "https://huggingface.co/guinmoon/LLMFarm_Models/resolve/main/gemma%202b_it_v2_Q6_K.gguf",
 "file_name": "gemma_2b_it_v2_Q6_K.gguf",
@@ -96,10 +96,10 @@
 "Q": "Q4_K_S"
 },
 {
-"url": "https://huggingface.co/bartowski/Llama-3.2-3B-Instruct-GGUF/resolve/main/Llama-3.2-3B-Instruct-Q4_0_8_8.gguf?download=true",
-"file_name": "Llama-3.2-3B-Instruct-Q4_0_8_8.gguf",
+"url": "https://huggingface.co/bartowski/Llama-3.2-3B-Instruct-GGUF/resolve/main/Llama-3.2-3B-Instruct-Q4_K_M.gguf?download=true",
+"file_name": "Llama-3.2-3B-Instruct-Q4_K_M.gguf",
 "size": "",
-"Q": "Q4088"
+"Q": "Q4_K_M"
 },
 {
 "url": "https://huggingface.co/bartowski/Llama-3.2-3B-Instruct-GGUF/resolve/main/Llama-3.2-3B-Instruct-Q5_K_M.gguf?download=true",
@@ -142,22 +142,22 @@
 "name": "Phi 3 mini 128k instruct",
 "models": [
 {
-"file_name": "Phi-3-mini-128k-instruct.IQ4_NL.gguf",
-"url": "https://huggingface.co/PrunaAI/Phi-3-mini-128k-instruct-GGUF-Imatrix-smashed/resolve/main/Phi-3-mini-128k-instruct.IQ4_NL.gguf?download=true",
+"file_name": "Phi-3-mini-128k-instruct.Q4_K_S.gguf",
+"url": "https://huggingface.co/PrunaAI/Phi-3-mini-128k-instruct-GGUF-Imatrix-smashed/resolve/main/Phi-3-mini-128k-instruct.Q4_K_S.gguf?download=true",
 "size": "1.48",
-"Q": "IQ4_NL"
+"Q": "Q4_K_S"
 },
 {
-"url": "https://huggingface.co/PrunaAI/Phi-3-mini-128k-instruct-GGUF-Imatrix-smashed/resolve/main/Phi-3-mini-128k-instruct.Q5_K_M.gguf?download=true",
-"file_name": "Phi-3-mini-128k-instruct.Q5_K_M.gguf",
-"size": "",
-"Q": "Q5_K_M"
-},
-{
 "url": "https://huggingface.co/PrunaAI/Phi-3-mini-128k-instruct-GGUF-Imatrix-smashed/resolve/main/Phi-3-mini-128k-instruct.IQ3_S.gguf?download=true",
 "file_name": "Phi-3-mini-128k-instruct.IQ3_S.gguf",
 "size": "",
 "Q": "IQ3_S"
+},
+{
+"url": "https://huggingface.co/PrunaAI/Phi-3-mini-128k-instruct-GGUF-Imatrix-smashed/resolve/main/Phi-3-mini-128k-instruct.Q5_K_M.gguf?download=true",
+"file_name": "Phi-3-mini-128k-instruct.Q5_K_M.gguf",
+"size": "",
+"Q": "Q5_K_M"
 }
 ]
 },
@@ -182,16 +182,16 @@
 "name": "Bunny 1.0 4B",
 "models": [
 {
-"file_name": "Bunny-v1_0-4B-IQ3_M.gguf",
-"url": "https://huggingface.co/guinmoon/Bunny-v1_0-4B-GGUF/resolve/main/Bunny-v1_0-4Bl-IQ3_M.gguf?download=true",
+"file_name": "Bunny-v1_0-4B-Q3_K_M.gguf",
+"url": "https://huggingface.co/guinmoon/Bunny-v1_0-4B-GGUF/resolve/main/Bunny-v1_0-4B-Q3_K_M.gguf?download=true",
 "size": "",
-"Q": "IQ3_M"
+"Q": "Q3_K_M"
 },
 {
-"file_name": "Bunny-v1_0-4B-IQ4_XS.gguf",
-"url": "https://huggingface.co/guinmoon/Bunny-v1_0-4B-GGUF/resolve/main/Bunny-v1_0-4B-IQ4_XS.gguf?download=true",
+"file_name": "Bunny-v1_0-4B-Q4_K_S.gguf",
+"url": "https://huggingface.co/guinmoon/Bunny-v1_0-4B-GGUF/resolve/main/Bunny-v1_0-4B-Q4_K_S.gguf?download=true",
 "size": "",
-"Q": "IQ4_XS"
+"Q": "Q4_K_S"
 },
 {
 "file_name": "Bunny-v1_0-4B-IQ3_XXS.gguf",
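The edited file is pure data, so a decoding sketch may help readers who want to consume it outside the app. LLMFarm's actual settings loader is not part of this commit; the type names below (ModelGroup, DownloadableModel) and the assumption that the file's top level is an array of groups are illustrative only, with field names copied from the JSON keys shown above.

import Foundation

// Hypothetical Codable types for entries like those edited in this commit.
struct DownloadableModel: Codable {
    let url: String        // download URL on Hugging Face
    let file_name: String  // local file name to save as
    let size: String       // size in GB, often left empty
    let Q: String          // quantization label, e.g. "Q4_K_S"
}

struct ModelGroup: Codable {
    let name: String
    let models: [DownloadableModel]
}

// Usage sketch: decode the bundled list and print each downloadable file.
do {
    let data = try Data(contentsOf: URL(fileURLWithPath: "LLMFarm/Settings/downloadable_models.json"))
    let groups = try JSONDecoder().decode([ModelGroup].self, from: data)
    for group in groups {
        for model in group.models {
            print("\(group.name): \(model.file_name) [\(model.Q)]")
        }
    }
} catch {
    print("Failed to decode downloadable_models.json: \(error)")
}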
2 changes: 1 addition & 1 deletion ModelTest/main.swift
@@ -69,7 +69,7 @@ func main(){
 
 // ai!.modelPath = "/Users/guinmoon/dev/alpaca_llama_etc/tinydolphin-2.8-1.1b.Q8_0.imx.gguf"
 // ai.modelPath = "/Users/guinmoon/dev/alpaca_llama_etc/gemma-2b-it.Q8_0.gguf"
-ai.modelPath = "/Users/guinmoon/dev/alpaca_llama_etc/LaMini-Flan-T5-248M.Q8_0.gguf"
+ai!.modelPath = "/Users/guinmoon/dev/alpaca_llama_etc/LaMini-Flan-T5-248M.Q8_0.gguf"
 
 
 // ai.modelPath = "/Users/guinmoon/Library/Containers/com.guinmoon.LLMFarm/Data/Documents/models/llama-2-7b-chat-q4_K_M.gguf"
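The one-line main.swift change switches the assignment to a force-unwrapped access, matching the commented-out `ai!.modelPath` line above it. A minimal sketch of why this matters, assuming `ai` is declared as an Optional elsewhere in the file (its declaration is not part of this diff); the FakeAI type below is a hypothetical stand-in, not LLMFarm's real AI class.

// Hypothetical stand-in for the project's AI class.
final class FakeAI {
    var modelPath: String = ""
}

// If `ai` is an Optional, plain `ai.modelPath = ...` does not compile.
var ai: FakeAI? = FakeAI()

// Force-unwrap, as the committed line does (traps at runtime if `ai` is nil):
ai!.modelPath = "/path/to/model.gguf"

// Safer alternative: optional chaining silently skips the assignment when nil.
ai?.modelPath = "/path/to/model.gguf"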
