Commit

automatic garak/resources/plugin_cache.json update
github-actions[bot] authored Nov 19, 2024
1 parent c7a9fa6 commit e28aeca
Showing 1 changed file with 11 additions and 11 deletions.
garak/resources/plugin_cache.json: 22 changes (11 additions, 11 deletions)
@@ -6000,7 +6000,7 @@
},
"parallel_capable": true,
"supports_multiple_generations": true,
"mod_time": "2024-08-29 13:35:37 +0000"
"mod_time": "2024-11-12 21:52:33 +0000"
},
"generators.function.Single": {
"description": "pass a module#function to be called as generator, with format function(prompt:str, **kwargs)->List[Union(str, None)] the parameter `name` is reserved",
@@ -6019,7 +6019,7 @@
},
"parallel_capable": true,
"supports_multiple_generations": false,
"mod_time": "2024-08-29 13:35:37 +0000"
"mod_time": "2024-11-12 21:52:33 +0000"
},
"generators.ggml.GgmlGenerator": {
"description": "Generator interface for ggml models in gguf format.",
@@ -6048,7 +6048,7 @@
},
"parallel_capable": true,
"supports_multiple_generations": false,
"mod_time": "2024-08-29 13:35:37 +0000"
"mod_time": "2024-11-12 21:52:33 +0000"
},
"generators.groq.GroqChat": {
"description": "Wrapper for Groq-hosted LLM models.",
@@ -6139,7 +6139,7 @@
},
"parallel_capable": false,
"supports_multiple_generations": true,
"mod_time": "2024-11-11 14:12:52 +0000"
"mod_time": "2024-11-18 18:23:48 +0000"
},
"generators.huggingface.InferenceAPI": {
"description": "Get text generations from Hugging Face Inference API",
@@ -6164,7 +6164,7 @@
},
"parallel_capable": true,
"supports_multiple_generations": true,
"mod_time": "2024-11-11 14:12:52 +0000"
"mod_time": "2024-11-18 18:23:48 +0000"
},
"generators.huggingface.InferenceEndpoint": {
"description": "Interface for Hugging Face private endpoints",
@@ -6189,7 +6189,7 @@
},
"parallel_capable": true,
"supports_multiple_generations": false,
"mod_time": "2024-11-11 14:12:52 +0000"
"mod_time": "2024-11-18 18:23:48 +0000"
},
"generators.huggingface.LLaVA": {
"description": "Get LLaVA ([ text + image ] -> text) generations",
@@ -6217,7 +6217,7 @@
},
"parallel_capable": false,
"supports_multiple_generations": false,
"mod_time": "2024-11-11 14:12:52 +0000"
"mod_time": "2024-11-18 18:23:48 +0000"
},
"generators.huggingface.Model": {
"description": "Get text generations from a locally-run Hugging Face model",
@@ -6244,7 +6244,7 @@
},
"parallel_capable": false,
"supports_multiple_generations": true,
"mod_time": "2024-11-11 14:12:52 +0000"
"mod_time": "2024-11-18 18:23:48 +0000"
},
"generators.huggingface.OptimumPipeline": {
"description": "Get text generations from a locally-run Hugging Face pipeline using NVIDIA Optimum",
@@ -6271,7 +6271,7 @@
},
"parallel_capable": false,
"supports_multiple_generations": true,
"mod_time": "2024-11-11 14:12:52 +0000"
"mod_time": "2024-11-18 18:23:48 +0000"
},
"generators.huggingface.Pipeline": {
"description": "Get text generations from a locally-run Hugging Face pipeline",
@@ -6298,7 +6298,7 @@
},
"parallel_capable": false,
"supports_multiple_generations": true,
"mod_time": "2024-11-11 14:12:52 +0000"
"mod_time": "2024-11-18 18:23:48 +0000"
},
"generators.langchain.LangChainLLMGenerator": {
"description": "Class supporting LangChain LLM interfaces",
@@ -7024,7 +7024,7 @@
"active": true,
"bcp47": null,
"doc_uri": "",
"mod_time": "2024-10-25 09:35:40 +0000"
"mod_time": "2024-11-12 21:52:33 +0000"
},
"buffs.encoding.Base64": {
"description": "Base64 buff",
