Add it on merge queue (#2112)

* Github action to run integration tests

* Improve

* Fix build

* Add pull

* Fix readiness script

* Add IT runner

* Add IT runner

* Add logs

* update

* Fix

* Fix path

* file path

* test

* fix

* fix

* fix

* test

* network

* fix

* cleanup

* fix

* test

* Fix downgrade

* Add OpenAI API key

* Add VESPA_HOST

* test pulling first

* Add API server host

* Cache tweak

* Fix pull/push settings

* Stop pushing to latest tag

* test cache change

* test

* test

* test

* remove cache temporarily

* Fix

* Enable EE

* test

* Remove duplicate funcs

* add back build

* Update all

* Fix stop cmd

* Add to merge queue

* Cleanup image tag
This commit is contained in:
Chris Weaver
2024-08-26 00:20:28 -07:00
committed by GitHub
parent 205c3c3fc8
commit c0e1a02e8e
11 changed files with 300 additions and 58 deletions

172
.github/workflows/run-it.yml vendored Normal file
View File

@@ -0,0 +1,172 @@
name: Run Integration Tests

# NOTE: github.head_ref is empty for merge_group events, so fall back to
# run_id — otherwise every merge-queue run shares one concurrency group and
# cancel-in-progress cancels the others.
concurrency:
  group: Run-Integration-Tests-${{ github.head_ref || github.run_id }}
  cancel-in-progress: true

on:
  merge_group:
  pull_request:
    branches: [main]

env:
  OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}

jobs:
  integration-tests:
    runs-on:
      group: 'arm64-image-builders'
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Login to Docker Hub
        uses: docker/login-action@v3
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_TOKEN }}

      # All images are built with the dedicated ":it" tag so CI never
      # clobbers ":latest", and use the registry as a shared build cache.
      - name: Build Web Docker image
        uses: docker/build-push-action@v5
        with:
          context: ./web
          file: ./web/Dockerfile
          platforms: linux/arm64
          pull: true
          push: true
          load: true
          tags: danswer/danswer-web-server:it
          cache-from: type=registry,ref=danswer/danswer-web-server:it
          cache-to: |
            type=registry,ref=danswer/danswer-web-server:it,mode=max
            type=inline

      - name: Build Backend Docker image
        uses: docker/build-push-action@v5
        with:
          context: ./backend
          file: ./backend/Dockerfile
          platforms: linux/arm64
          pull: true
          push: true
          load: true
          tags: danswer/danswer-backend:it
          cache-from: type=registry,ref=danswer/danswer-backend:it
          cache-to: |
            type=registry,ref=danswer/danswer-backend:it,mode=max
            type=inline

      - name: Build Model Server Docker image
        uses: docker/build-push-action@v5
        with:
          context: ./backend
          file: ./backend/Dockerfile.model_server
          platforms: linux/arm64
          pull: true
          push: true
          load: true
          tags: danswer/danswer-model-server:it
          cache-from: type=registry,ref=danswer/danswer-model-server:it
          cache-to: |
            type=registry,ref=danswer/danswer-model-server:it,mode=max
            type=inline

      - name: Build integration test Docker image
        uses: docker/build-push-action@v5
        with:
          context: ./backend
          file: ./backend/tests/integration/Dockerfile
          platforms: linux/arm64
          pull: true
          push: true
          load: true
          tags: danswer/integration-test-runner:it
          cache-from: type=registry,ref=danswer/integration-test-runner:it
          cache-to: |
            type=registry,ref=danswer/integration-test-runner:it,mode=max
            type=inline

      - name: Start Docker containers
        run: |
          cd deployment/docker_compose
          ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=true \
          IMAGE_TAG=it \
          docker compose -f docker-compose.dev.yml -p danswer-stack up -d --build
        id: start_docker

      - name: Wait for service to be ready
        run: |
          echo "Starting wait-for-service script..."
          start_time=$(date +%s)
          timeout=300  # 5 minutes in seconds
          while true; do
            current_time=$(date +%s)
            elapsed_time=$((current_time - start_time))
            if [ $elapsed_time -ge $timeout ]; then
              echo "Timeout reached. Service did not become ready in 5 minutes."
              exit 1
            fi
            # curl exits non-zero (e.g. 56) while the server is still coming
            # up; treat that as "not ready yet" rather than failing the step.
            response=$(curl -s -o /dev/null -w "%{http_code}" http://localhost:8080/health || echo "curl_error")
            if [ "$response" = "200" ]; then
              echo "Service is ready!"
              break
            elif [ "$response" = "curl_error" ]; then
              echo "Curl encountered an error, possibly exit code 56. Continuing to retry..."
            else
              echo "Service not ready yet (HTTP status $response). Retrying in 5 seconds..."
            fi
            sleep 5
          done
          echo "Finished waiting for service."

      # continue-on-error lets the log-collection steps below run before the
      # job is failed by "Check test results".
      - name: Run integration tests
        run: |
          echo "Running integration tests..."
          docker run --rm --network danswer-stack_default \
            -e POSTGRES_HOST=relational_db \
            -e POSTGRES_USER=postgres \
            -e POSTGRES_PASSWORD=password \
            -e POSTGRES_DB=postgres \
            -e VESPA_HOST=index \
            -e API_SERVER_HOST=api_server \
            -e OPENAI_API_KEY=${OPENAI_API_KEY} \
            danswer/integration-test-runner:it
        continue-on-error: true
        id: run_tests

      - name: Check test results
        run: |
          # Quote the expansion: the outcome is substituted as raw text before
          # the shell runs, and an unquoted empty value breaks the [ ] test.
          if [ "${{ steps.run_tests.outcome }}" == "failure" ]; then
            echo "Integration tests failed. Exiting with error."
            exit 1
          else
            echo "All integration tests passed successfully."
          fi

      - name: Save Docker logs
        if: success() || failure()
        run: |
          cd deployment/docker_compose
          docker compose -f docker-compose.dev.yml -p danswer-stack logs > docker-compose.log
          mv docker-compose.log ${{ github.workspace }}/docker-compose.log

      - name: Upload logs
        if: success() || failure()
        uses: actions/upload-artifact@v3
        with:
          name: docker-logs
          path: ${{ github.workspace }}/docker-compose.log

      # always() so the self-hosted runner is cleaned up even when the
      # "Check test results" step has already failed the job.
      - name: Stop Docker containers
        if: always()
        run: |
          cd deployment/docker_compose
          docker compose -f docker-compose.dev.yml -p danswer-stack down -v

View File

@@ -161,3 +161,12 @@ def downgrade() -> None:
["cloud_provider_id"],
["id"],
)
# Recreate the foreign key constraint in embedding_model table
op.create_foreign_key(
"fk_embedding_provider_default_model",
"embedding_provider",
"embedding_model",
["default_model_id"],
["id"],
)

View File

@@ -0,0 +1,83 @@
FROM python:3.11.7-slim-bookworm

# Dockerfile for integration tests
# Currently needs all dependencies, since the ITs use some of the Danswer
# backend code.

# Install system dependencies
# cmake needed for psycopg (postgres)
# libpq-dev needed for psycopg (postgres)
# curl included just for users' convenience
# zip for Vespa step further down
# ca-certificates for HTTPS
# NOTE(review): the pinned libgnutls30/libblkid1/libmount1/... versions are
# CVE-backport pins for Debian bookworm; they will need bumping when new
# point releases ship or apt-get install will fail.
RUN apt-get update && \
    apt-get install -y \
        cmake \
        curl \
        zip \
        ca-certificates \
        libgnutls30=3.7.9-2+deb12u3 \
        libblkid1=2.38.1-5+deb12u1 \
        libmount1=2.38.1-5+deb12u1 \
        libsmartcols1=2.38.1-5+deb12u1 \
        libuuid1=2.38.1-5+deb12u1 \
        libxmlsec1-dev \
        pkg-config \
        gcc && \
    rm -rf /var/lib/apt/lists/* && \
    apt-get clean

# Install Python dependencies
# Remove py which is pulled in by retry, py is not needed and is a CVE
COPY ./requirements/default.txt /tmp/requirements.txt
COPY ./requirements/ee.txt /tmp/ee-requirements.txt
RUN pip install --no-cache-dir --upgrade \
        -r /tmp/requirements.txt \
        -r /tmp/ee-requirements.txt && \
    pip uninstall -y py && \
    playwright install chromium && \
    playwright install-deps chromium && \
    ln -s /usr/local/bin/supervisord /usr/bin/supervisord

# Cleanup for CVEs and size reduction
# https://github.com/tornadoweb/tornado/issues/3107
# xserver-common and xvfb included by playwright installation but not needed after
# perl-base is part of the base Python Debian image but not needed for Danswer functionality
# perl-base could only be removed with --allow-remove-essential
RUN apt-get update && \
    apt-get remove -y --allow-remove-essential \
        perl-base \
        xserver-common \
        xvfb \
        cmake \
        libldap-2.5-0 \
        libxmlsec1-dev \
        pkg-config \
        gcc && \
    apt-get install -y libxmlsec1-openssl && \
    apt-get autoremove -y && \
    rm -rf /var/lib/apt/lists/* && \
    rm -f /usr/local/lib/python3.11/site-packages/tornado/test/test.key

# Set up application files
WORKDIR /app

# Enterprise Version Files
COPY ./ee /app/ee
COPY supervisord.conf /etc/supervisor/conf.d/supervisord.conf

# Set up application files
COPY ./danswer /app/danswer
COPY ./shared_configs /app/shared_configs
COPY ./alembic /app/alembic
COPY ./alembic.ini /app/alembic.ini
# NOTE(review): supervisord.conf is copied twice (here and to
# /etc/supervisor/conf.d above) — presumably both lookup paths are probed at
# runtime; confirm whether one copy can be dropped.
COPY supervisord.conf /usr/etc/supervisord.conf

# Integration test stuff
COPY ./requirements/dev.txt /tmp/dev-requirements.txt
RUN pip install --no-cache-dir --upgrade \
        -r /tmp/dev-requirements.txt
COPY ./tests/integration /app/tests/integration

ENV PYTHONPATH /app

CMD ["pytest", "-s", "/app/tests/integration"]

View File

@@ -1,2 +1,7 @@
API_SERVER_URL = "http://localhost:8080"
import os
API_SERVER_PROTOCOL = os.getenv("API_SERVER_PROTOCOL") or "http"
API_SERVER_HOST = os.getenv("API_SERVER_HOST") or "localhost"
API_SERVER_PORT = os.getenv("API_SERVER_PORT") or "8080"
API_SERVER_URL = f"{API_SERVER_PROTOCOL}://{API_SERVER_HOST}:{API_SERVER_PORT}"
MAX_DELAY = 30

View File

@@ -1,10 +1,9 @@
import time
from danswer.server.features.document_set.models import DocumentSetCreationRequest
from tests.integration.common_utils.document_sets import DocumentSetClient
from tests.integration.common_utils.seed_documents import TestDocumentClient
from tests.integration.common_utils.vespa import TestVespaClient
from tests.integration.tests.document_set.utils import create_document_set
from tests.integration.tests.document_set.utils import fetch_document_sets
def test_multiple_document_sets_syncing_same_connnector(
@@ -15,7 +14,7 @@ def test_multiple_document_sets_syncing_same_connnector(
cc_pair_id = seed_result.cc_pair_id
# Create first document set
doc_set_1_id = create_document_set(
doc_set_1_id = DocumentSetClient.create_document_set(
DocumentSetCreationRequest(
name="Test Document Set 1",
description="First test document set",
@@ -26,7 +25,7 @@ def test_multiple_document_sets_syncing_same_connnector(
)
)
doc_set_2_id = create_document_set(
doc_set_2_id = DocumentSetClient.create_document_set(
DocumentSetCreationRequest(
name="Test Document Set 2",
description="Second test document set",
@@ -41,7 +40,7 @@ def test_multiple_document_sets_syncing_same_connnector(
max_delay = 45
start = time.time()
while True:
doc_sets = fetch_document_sets()
doc_sets = DocumentSetClient.fetch_document_sets()
doc_set_1 = next(
(doc_set for doc_set in doc_sets if doc_set.id == doc_set_1_id), None
)
@@ -64,7 +63,7 @@ def test_multiple_document_sets_syncing_same_connnector(
time.sleep(2)
# get names so we can compare to what is in vespa
doc_sets = fetch_document_sets()
doc_sets = DocumentSetClient.fetch_document_sets()
doc_set_names = {doc_set.name for doc_set in doc_sets}
# make sure documents are as expected

View File

@@ -1,26 +0,0 @@
from typing import cast
import requests
from danswer.server.features.document_set.models import DocumentSet
from danswer.server.features.document_set.models import DocumentSetCreationRequest
from tests.integration.common_utils.constants import API_SERVER_URL
def create_document_set(doc_set_creation_request: DocumentSetCreationRequest) -> int:
    """Create a document set via the admin API and return the new set's ID.

    Raises:
        requests.HTTPError: if the API responds with a non-2xx status.
    """
    response = requests.post(
        f"{API_SERVER_URL}/manage/admin/document-set",
        json=doc_set_creation_request.dict(),
    )
    response.raise_for_status()
    # The endpoint returns the bare integer ID as its JSON body.
    return cast(int, response.json())
def fetch_document_sets() -> list[DocumentSet]:
    """Fetch all document sets from the admin API.

    Raises:
        requests.HTTPError: if the API responds with a non-2xx status.
    """
    response = requests.get(f"{API_SERVER_URL}/manage/admin/document-set")
    response.raise_for_status()
    # Response body is a JSON list of serialized DocumentSet objects.
    document_sets = [
        DocumentSet.parse_obj(doc_set_data) for doc_set_data in response.json()
    ]
    return document_sets

View File

@@ -1,7 +1,7 @@
version: '3'
services:
api_server:
image: danswer/danswer-backend:latest
image: danswer/danswer-backend:${IMAGE_TAG:-latest}
build:
context: ../../backend
dockerfile: Dockerfile
@@ -106,7 +106,7 @@ services:
max-file: "6"
background:
image: danswer/danswer-backend:latest
image: danswer/danswer-backend:${IMAGE_TAG:-latest}
build:
context: ../../backend
dockerfile: Dockerfile
@@ -207,7 +207,7 @@ services:
max-file: "6"
web_server:
image: danswer/danswer-web-server:latest
image: danswer/danswer-web-server:${IMAGE_TAG:-latest}
build:
context: ../../web
dockerfile: Dockerfile
@@ -236,7 +236,7 @@ services:
- ENABLE_PAID_ENTERPRISE_EDITION_FEATURES=${ENABLE_PAID_ENTERPRISE_EDITION_FEATURES:-false}
inference_model_server:
image: danswer/danswer-model-server:latest
image: danswer/danswer-model-server:${IMAGE_TAG:-latest}
build:
context: ../../backend
dockerfile: Dockerfile.model_server
@@ -262,7 +262,7 @@ services:
max-file: "6"
indexing_model_server:
image: danswer/danswer-model-server:latest
image: danswer/danswer-model-server:${IMAGE_TAG:-latest}
build:
context: ../../backend
dockerfile: Dockerfile.model_server

View File

@@ -1,7 +1,7 @@
version: '3'
services:
api_server:
image: danswer/danswer-backend:latest
image: danswer/danswer-backend:${IMAGE_TAG:-latest}
build:
context: ../../backend
dockerfile: Dockerfile
@@ -98,7 +98,7 @@ services:
background:
image: danswer/danswer-backend:latest
image: danswer/danswer-backend:${IMAGE_TAG:-latest}
build:
context: ../../backend
dockerfile: Dockerfile
@@ -201,7 +201,7 @@ services:
web_server:
image: danswer/danswer-web-server:latest
image: danswer/danswer-web-server:${IMAGE_TAG:-latest}
build:
context: ../../web
dockerfile: Dockerfile
@@ -226,7 +226,7 @@ services:
inference_model_server:
image: danswer/danswer-model-server:latest
image: danswer/danswer-model-server:${IMAGE_TAG:-latest}
# for GPU support, please read installation guidelines in the README.md
# bare minimum to get this working is to install nvidia-container-toolkit
deploy:
@@ -262,7 +262,7 @@ services:
indexing_model_server:
image: danswer/danswer-model-server:latest
image: danswer/danswer-model-server:${IMAGE_TAG:-latest}
build:
context: ../../backend
dockerfile: Dockerfile.model_server

View File

@@ -1,7 +1,7 @@
version: '3'
services:
api_server:
image: danswer/danswer-backend:latest
image: danswer/danswer-backend:${IMAGE_TAG:-latest}
build:
context: ../../backend
dockerfile: Dockerfile
@@ -31,7 +31,7 @@ services:
background:
image: danswer/danswer-backend:latest
image: danswer/danswer-backend:${IMAGE_TAG:-latest}
build:
context: ../../backend
dockerfile: Dockerfile
@@ -60,7 +60,7 @@ services:
web_server:
image: danswer/danswer-web-server:latest
image: danswer/danswer-web-server:${IMAGE_TAG:-latest}
build:
context: ../../web
dockerfile: Dockerfile
@@ -87,7 +87,7 @@ services:
inference_model_server:
image: danswer/danswer-model-server:latest
image: danswer/danswer-model-server:${IMAGE_TAG:-latest}
build:
context: ../../backend
dockerfile: Dockerfile.model_server
@@ -114,7 +114,7 @@ services:
indexing_model_server:
image: danswer/danswer-model-server:latest
image: danswer/danswer-model-server:${IMAGE_TAG:-latest}
build:
context: ../../backend
dockerfile: Dockerfile.model_server

View File

@@ -1,7 +1,7 @@
version: '3'
services:
api_server:
image: danswer/danswer-backend:latest
image: danswer/danswer-backend:${IMAGE_TAG:-latest}
build:
context: ../../backend
dockerfile: Dockerfile
@@ -31,7 +31,7 @@ services:
background:
image: danswer/danswer-backend:latest
image: danswer/danswer-backend:${IMAGE_TAG:-latest}
build:
context: ../../backend
dockerfile: Dockerfile
@@ -59,7 +59,7 @@ services:
max-file: "6"
web_server:
image: danswer/danswer-web-server:latest
image: danswer/danswer-web-server:${IMAGE_TAG:-latest}
build:
context: ../../web
dockerfile: Dockerfile
@@ -101,7 +101,7 @@ services:
inference_model_server:
image: danswer/danswer-model-server:latest
image: danswer/danswer-model-server:${IMAGE_TAG:-latest}
build:
context: ../../backend
dockerfile: Dockerfile.model_server
@@ -128,7 +128,7 @@ services:
indexing_model_server:
image: danswer/danswer-model-server:latest
image: danswer/danswer-model-server:${IMAGE_TAG:-latest}
build:
context: ../../backend
dockerfile: Dockerfile.model_server

View File

@@ -1,7 +1,7 @@
version: '3'
services:
api_server:
image: danswer/danswer-backend:latest
image: danswer/danswer-backend:${IMAGE_TAG:-latest}
build:
context: ../../backend
dockerfile: Dockerfile
@@ -35,7 +35,7 @@ services:
background:
image: danswer/danswer-backend:latest
image: danswer/danswer-backend:${IMAGE_TAG:-latest}
build:
context: ../../backend
dockerfile: Dockerfile
@@ -65,7 +65,7 @@ services:
web_server:
image: danswer/danswer-web-server:latest
image: danswer/danswer-web-server:${IMAGE_TAG:-latest}
build:
context: ../../web
dockerfile: Dockerfile
@@ -94,7 +94,7 @@ services:
inference_model_server:
image: danswer/danswer-model-server:latest
image: danswer/danswer-model-server:${IMAGE_TAG:-latest}
build:
context: ../../backend
dockerfile: Dockerfile.model_server
@@ -118,7 +118,7 @@ services:
indexing_model_server:
image: danswer/danswer-model-server:latest
image: danswer/danswer-model-server:${IMAGE_TAG:-latest}
build:
context: ../../backend
dockerfile: Dockerfile.model_server