32 changes: 29 additions & 3 deletions tests/models/test_outlines.py
@@ -7,6 +7,7 @@
 
 import json
 import os
+import platform
 from collections.abc import Callable
 from pathlib import Path
 from typing import Any
@@ -81,11 +82,30 @@
     not transformer_imports_successful(), reason='transformers not available'
 )
 
-# We only run this on the latest Python as the llama_cpp tests have been regularly failing in CI with `Fatal Python error: Illegal instruction`:
+
+def _has_avx2_support() -> bool:
+    """Check if the CPU supports AVX2 instructions required by llama_cpp.
+
+    The llama_cpp library crashes with 'Fatal Python error: Illegal instruction' on CPUs without AVX2.
+    This check allows us to skip the tests gracefully on such machines (e.g., some GitHub Actions runners).
+    """
+    if platform.system() == 'Linux':
+        try:
+            with open('/proc/cpuinfo', encoding='utf-8') as f:
+                return 'avx2' in f.read().lower()
+        except Exception:
+            return False
+    return True
+
+
+# The llama_cpp tests have been regularly failing in CI with `Fatal Python error: Illegal instruction`
+# due to AVX2 instructions not being supported on some GitHub Actions runners:
+# https://github.com/pydantic/pydantic-ai/actions/runs/19547773220/job/55970947389
 skip_if_llama_cpp_imports_unsuccessful = pytest.mark.skipif(
-    not llama_cpp_imports_successful() or os.getenv('RUN_LLAMA_CPP_TESTS', 'true').lower() == 'false',
-    reason='llama_cpp not available',
+    not llama_cpp_imports_successful()
+    or os.getenv('RUN_LLAMA_CPP_TESTS', 'true').lower() == 'false'
+    or not _has_avx2_support(),
+    reason='llama_cpp not available or AVX2 not supported',
 )
 
 skip_if_vllm_imports_unsuccessful = pytest.mark.skipif(not vllm_imports_successful(), reason='vllm not available')
@@ -156,6 +176,12 @@ def transformers_multimodal_model() -> OutlinesModel:
 
 @pytest.fixture
 def llamacpp_model() -> OutlinesModel:
+    if (
+        not llama_cpp_imports_successful()
+        or os.getenv('RUN_LLAMA_CPP_TESTS', 'true').lower() == 'false'
+        or not _has_avx2_support()
+    ):
+        pytest.skip('llama_cpp not available or AVX2 not supported')
     outlines_model_llamacpp = outlines.models.llamacpp.from_llamacpp(
         llama_cpp.Llama.from_pretrained(
             repo_id='M4-ai/TinyMistral-248M-v2-Instruct-GGUF',
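For readers skimming the diff: the `skip_if_llama_cpp_imports_unsuccessful` marker defined above is applied as a decorator on individual tests, so the whole test is skipped when the condition is true. A minimal, self-contained sketch of that pattern, not part of the patch; the test name and the stand-in predicate below are hypothetical:

import pytest


def _llama_cpp_usable() -> bool:
    # Hypothetical stand-in for the patch's combined condition:
    # imports succeed, RUN_LLAMA_CPP_TESTS is enabled, and AVX2 is present.
    return False


skip_if_llama_cpp_unusable = pytest.mark.skipif(
    not _llama_cpp_usable(),
    reason='llama_cpp not available or AVX2 not supported',
)


@skip_if_llama_cpp_unusable
def test_llamacpp_generation() -> None:
    ...  # the real suite exercises the llamacpp_model fixture here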
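Note that the patch appears to enforce the same condition twice by design: the module-level skipif marker covers tests decorated with it, while the pytest.skip(...) call inside the llamacpp_model fixture covers any test that requests the fixture without carrying the marker. The fixture-level guard is the safety net that keeps llama_cpp.Llama.from_pretrained from ever executing (and killing the interpreter with an illegal instruction) on a CPU without AVX2.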