1 parent c2d69ad, commit cefb68b
pgml-extension/src/bindings/transformers/transformers.py
@@ -294,13 +294,16 @@ def create_pipeline(task):
     ensure_device(task)
     convert_dtype(task)
     model_name = task.get("model", None)
+    model_type = None
+    if "model_type" in task:
+        model_type = task["model_type"]
     if model_name:
         lower = model_name.lower()
     else:
         lower = None
     if lower and ("-ggml" in lower or "-gguf" in lower):
         pipe = GGMLPipeline(model_name, **task)
-    elif lower and "-gptq" in lower:
+    elif lower and "-gptq" in lower and not (model_type == "mistral" or model_type == "llama"):
         pipe = GPTQPipeline(model_name, **task)

     try:
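
A minimal standalone sketch of the revised selection logic, for illustration only: the helper name choose_pipeline, the "StandardPipeline" fallback label, and the example model names are hypothetical stand-ins, not part of this commit; only the GGML/GGUF/GPTQ branching mirrors the diff above.

# Sketch of the pipeline routing introduced by this change (assumptions noted above).
def choose_pipeline(task):
    model_name = task.get("model", None)
    model_type = task.get("model_type", None)
    lower = model_name.lower() if model_name else None
    if lower and ("-ggml" in lower or "-gguf" in lower):
        return "GGMLPipeline"
    # GPTQ-named checkpoints are no longer sent to the GPTQ loader when the
    # caller marks them as mistral or llama; they fall through to the default path.
    elif lower and "-gptq" in lower and not (model_type == "mistral" or model_type == "llama"):
        return "GPTQPipeline"
    return "StandardPipeline"  # placeholder for whatever the default loader is

# A GPTQ-named Mistral checkpoint now skips the GPTQ-specific loader,
# while an unmarked GPTQ model still uses it.
assert choose_pipeline({"model": "example/Mistral-7B-GPTQ", "model_type": "mistral"}) == "StandardPipeline"
assert choose_pipeline({"model": "example/some-model-GPTQ"}) == "GPTQPipeline"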