|
---
name: CI

# Continuous Integration workflow for the Crawl4AI LLM-optimized web crawler
# and scraper Python engine. It ensures code quality, dependency integrity,
# and basic functionality testing.

# Run on pushes to the main branch and on pull requests targeting main.
on:
  push:
    branches: [main]
  pull_request:
    branches: [main]

# Jobs executed as part of the CI workflow.
jobs:
  build:
    # Operating system environment for the job runner.
    runs-on: ubuntu-latest

    steps:
      # Step 1: Check out the repository code.
      - name: Check out code
        uses: actions/checkout@v4

      # Step 2: Set up the Python environment.
      # NOTE: actions/setup-python only supports 'pip', 'pipenv', and 'poetry'
      # as cache values — 'uv' is invalid and would fail the job, so pip's
      # cache is used here.
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          python-version: '3.10'
          cache: 'pip'

      # Step 3: Install dependencies with uv.
      # uv is not preinstalled on the runner image, so it is installed via pip
      # first. '--system' installs into the runner's Python directly (no
      # virtual environment exists in CI).
      - name: Install dependencies with uv
        run: |
          python -m pip install --upgrade pip uv
          uv pip install --system -r requirements.txt
          # Install dev dependencies too, if a dev requirements file exists.
          if [ -f requirements-dev.txt ]; then uv pip install --system -r requirements-dev.txt; fi

      # Step 4: Run linting and format checking with Ruff.
      # Ruff performs static analysis and verifies formatting to maintain
      # code quality.
      - name: Lint and format with Ruff
        run: |
          uv pip install --system ruff
          ruff check .
          ruff format --check .

      # Step 5: Run tests with Pytest.
      # '--cov=crawl4ai' measures code coverage for the 'crawl4ai' package;
      # '--cov-report=xml' writes coverage.xml for the upload step below.
      - name: Test with Pytest
        run: |
          uv pip install --system pytest pytest-cov
          pytest --cov=crawl4ai --cov-report=xml

      # Step 6: Upload the coverage report to Codecov.
      # Skipped on pull_request events: forked PRs cannot read repository
      # secrets, so the token would be unavailable.
      # codecov-action v4 takes 'files' (the singular 'file' is deprecated).
      - name: Upload coverage to Codecov
        if: github.event_name != 'pull_request'
        uses: codecov/codecov-action@v4
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          files: ./coverage.xml
          fail_ci_if_error: true
0 commit comments